Example #1
def _mri_binarize(input, output, tissue):
    if tissue == "wm":
        freesurfer.Binarize(in_file=input, wm=True,
                            binary_file=output).run()
    elif tissue == "gm":
        freesurfer.Binarize(in_file=input, args="--gm",
                            binary_file=output).run()
    elif tissue == "csf":
        freesurfer.Binarize(in_file=input,
                            match=[4, 5, 24, 31, 43, 44, 63, 122, 221],
                            binary_file=output).run()
    else:
        raise NotImplementedError("Invalid tissue to binarize.")
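A minimal call sketch for the helper above (not from the original snippet): it assumes the nipype freesurfer module is imported as freesurfer, and the file paths are placeholders.
# assumes: from nipype.interfaces import freesurfer
# placeholder paths, for illustration only
_mri_binarize("aseg.nii.gz", "wm_mask.nii.gz", "wm")
_mri_binarize("aseg.nii.gz", "csf_mask.nii.gz", "csf")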
Example #2
def create_mgzconvert_pipeline(name='mgzconvert'):
    # workflow
    mgzconvert = Workflow(name=name)
    # inputnode
    inputnode = Node(
        util.IdentityInterface(fields=['fs_subjects_dir', 'fs_subject_id']),
        name='inputnode')
    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'anat_head', 'anat_brain', 'anat_brain_mask', 'wmseg', 'wmedge'
    ]),
                      name='outputnode')
    # import files from freesurfer
    fs_import = Node(interface=nio.FreeSurferSource(), name='fs_import')
    # convert Freesurfer T1 file to nifti
    head_convert = Node(fs.MRIConvert(out_type='niigz', out_file='T1.nii.gz'),
                        name='head_convert')

    # create brainmask from aparc+aseg with single dilation
    def get_aparc_aseg(files):
        for name in files:
            if 'aparc+aseg' in name:
                return name

    # create brain by converting only freesurfer output
    brain_convert = Node(fs.MRIConvert(out_type='niigz',
                                       out_file='brain.nii.gz'),
                         name='brain_convert')
    brain_binarize = Node(fsl.ImageMaths(op_string='-bin -fillh',
                                         out_file='T1_brain_mask.nii.gz'),
                          name='brain_binarize')

    # cortical and cerebellar white matter volumes to construct wm edge
    # [lh cerebral wm, lh cerebellar wm, rh cerebral wm, rh cerebellar wm, brain stem]
    wmseg = Node(fs.Binarize(out_type='nii.gz',
                             match=[2, 7, 41, 46, 16],
                             binary_file='T1_brain_wmseg.nii.gz'),
                 name='wmseg')
    # make edge from wmseg to visualize coregistration quality
    edge = Node(fsl.ApplyMask(args='-edge -bin',
                              out_file='T1_brain_wmedge.nii.gz'),
                name='edge')
    # connections
    mgzconvert.connect([
        (inputnode, fs_import, [('fs_subjects_dir', 'subjects_dir'),
                                ('fs_subject_id', 'subject_id')]),
        (fs_import, head_convert, [('T1', 'in_file')]),
        (fs_import, wmseg, [(('aparc_aseg', get_aparc_aseg), 'in_file')]),
        (fs_import, brain_convert, [('brainmask', 'in_file')]),
        (wmseg, edge, [('binary_file', 'in_file'),
                       ('binary_file', 'mask_file')]),
        (head_convert, outputnode, [('out_file', 'anat_head')]),
        (brain_convert, outputnode, [('out_file', 'anat_brain')]),
        (brain_convert, brain_binarize, [('out_file', 'in_file')]),
        (brain_binarize, outputnode, [('out_file', 'anat_brain_mask')]),
        (wmseg, outputnode, [('binary_file', 'wmseg')]),
        (edge, outputnode, [('out_file', 'wmedge')])
    ])

    return mgzconvert
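A usage sketch, not part of the original source: the subjects directory and subject id are placeholders, and the workflow assumes FreeSurfer recon-all output plus FSL on the path.
# placeholder paths, for illustration only
mgz_wf = create_mgzconvert_pipeline(name='mgzconvert')
mgz_wf.base_dir = '/tmp/work'
mgz_wf.inputs.inputnode.fs_subjects_dir = '/data/freesurfer'
mgz_wf.inputs.inputnode.fs_subject_id = 'sub-01'
mgz_wf.run()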
Example #3
def mask2surf(name='MaskToSurface', use_ras_coord=True):
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'norm', 'in_filled', 'out_name']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_surf']),
                         name='outputnode')
    binarize = pe.Node(fs.Binarize(min=0.1), name='binarize')
    fill = pe.Node(FillMask(), name='FillMask')
    pretess = pe.Node(fs.MRIPretess(label=1), name='PreTess')
    tess = pe.Node(fs.MRITessellate(label_value=1,
                                    use_real_RAS_coordinates=use_ras_coord),
                   name='tess')
    smooth = pe.Node(fs.SmoothTessellation(disable_estimates=True),
                     name='mris_smooth')
    rename = pe.Node(niu.Rename(keep_ext=False), name='rename')
    togii = pe.Node(fs.MRIsConvert(out_datatype='gii'), name='toGIFTI')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, binarize, [('in_file', 'in_file')]),
        (inputnode, pretess, [('norm', 'in_norm')]),
        (inputnode, fill, [('in_filled', 'in_filled')]),
        (inputnode, rename, [('out_name', 'format_string')]),
        (binarize, fill, [('binary_file', 'in_file')]),
        (fill, pretess, [('out_file', 'in_filled')]),
        (pretess, tess, [('out_file', 'in_file')]),
        (tess, smooth, [('surface', 'in_file')]),
        (smooth, rename, [('surface', 'in_file')]),
        (rename, togii, [('out_file', 'in_file')]),
        (togii, outputnode, [('converted', 'out_surf')]),
    ])
    return wf
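A hedged driving sketch for mask2surf: the input volumes are placeholders, and FillMask is an interface imported elsewhere in the original module.
# placeholder inputs, for illustration only
surf_wf = mask2surf(name='MaskToSurface', use_ras_coord=True)
surf_wf.inputs.inputnode.in_file = 'mask.nii.gz'
surf_wf.inputs.inputnode.norm = 'norm.mgz'
surf_wf.inputs.inputnode.in_filled = 'filled.mgz'
surf_wf.inputs.inputnode.out_name = 'mask_surf'
surf_wf.run()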
Example #4
def fs_segment(name="segment"):
    from nipype.interfaces.io import FreeSurferSource
    from nipype.interfaces.utility import IdentityInterface
    import nipype.interfaces.freesurfer as fs
    import nipype.pipeline.engine as pe
    import os
    wf = pe.Workflow(name=name)
    inputspec = pe.Node(
        IdentityInterface(fields=['subject_id', 'subjects_dir']),
        name="inputspec")
    fssource = pe.Node(FreeSurferSource(), name="fssource")
    wf.connect(inputspec, "subject_id", fssource, "subject_id")
    wf.connect(inputspec, "subjects_dir", fssource, "subjects_dir")
    bin_wm = pe.Node(fs.Binarize(), name="get_wm")
    bin_wm.inputs.out_type = 'nii.gz'
    bin_wm.inputs.match = [2, 41]
    bin_gm = bin_wm.clone("get_gm")
    bin_gm.inputs.out_type = 'nii.gz'
    bin_gm.inputs.match = [3, 42]
    bin_csf = bin_wm.clone("get_csf")
    bin_csf.inputs.out_type = 'nii.gz'
    bin_csf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63]
    wf.connect(fssource, ("ribbon", pick_file, 'ribbon.mgz'), bin_wm,
               "in_file")
    wf.connect(fssource, ("ribbon", pick_file, 'ribbon.mgz'), bin_gm,
               "in_file")
    wf.connect(fssource, ("aparc_aseg", pick_file, 'aparc+aseg.mgz'), bin_csf,
               "in_file")
    outputspec = pe.Node(IdentityInterface(fields=["gm", "wm", "csf"]),
                         name='outputspec')
    wf.connect(bin_wm, "binary_file", outputspec, "wm")
    wf.connect(bin_gm, "binary_file", outputspec, "gm")
    wf.connect(bin_csf, "binary_file", outputspec, "csf")
    return wf
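A usage sketch assuming a standard recon-all layout; pick_file is a helper defined elsewhere in the source module, and the paths are placeholders.
# placeholder FreeSurfer layout, for illustration only
seg_wf = fs_segment(name='segment')
seg_wf.inputs.inputspec.subjects_dir = '/data/freesurfer'
seg_wf.inputs.inputspec.subject_id = 'sub-01'
seg_wf.run()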
Example #5
def binarize_erode_mask(infile, min=0.05, erode=1):
    """ use freesurfer to binarize and erode a mask"""
    outfile = pp.prefix_filename(infile,
                                 prefix='ero%d_%2.2fbin_' % (erode, min))

    bin = freesurfer.Binarize()
    bin.inputs.in_file = infile
    bin.inputs.min = min
    bin.inputs.binary_file = outfile
    bin.inputs.erode = erode
    binout = bin.run()
    return binout
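A call sketch with a placeholder path; pp.prefix_filename comes from the surrounding project and only builds the output filename.
# placeholder probability map, for illustration only
res = binarize_erode_mask('/data/sub-01/wm_prob.nii.gz', min=0.05, erode=1)
print(res.outputs.binary_file)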
Example #6
def extract_csf_mask():
    """Create a workflow to extract a mask of csf voxels
    
    Inputs
    ------
    inputspec.mean_file :
    inputspec.reg_file :
    inputspec.fsaseg_file :
    
    Outputs
    -------
    outputspec.csf_mask :
    
    Returns
    -------
    workflow : workflow that extracts mask of csf voxels
    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.utility as util

    extract_csf = pe.Workflow(name='extract_csf_mask')
    inputspec = pe.Node(util.IdentityInterface(
        fields=['mean_file', 'reg_file', 'fsaseg_file']),
                        name='inputspec')

    bin = pe.Node(fs.Binarize(), name='binarize')
    bin.inputs.wm_ven_csf = True
    bin.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63]
    bin.inputs.erode = 2

    extract_csf.connect(inputspec, 'fsaseg_file', bin, "in_file")
    voltransform = pe.Node(fs.ApplyVolTransform(inverse=True),
                           name='inverse_transform')
    extract_csf.connect(bin, 'binary_file', voltransform, 'target_file')
    extract_csf.connect(inputspec, 'reg_file', voltransform, 'reg_file')
    extract_csf.connect(inputspec, 'mean_file', voltransform, 'source_file')
    outputspec = pe.Node(util.IdentityInterface(fields=['csf_mask']),
                         name='outputspec')
    extract_csf.connect(voltransform, 'transformed_file', outputspec,
                        'csf_mask')
    return extract_csf
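A usage sketch matching the docstring's inputs; the file names are placeholders.
# placeholder inputs, for illustration only
csf_wf = extract_csf_mask()
csf_wf.inputs.inputspec.fsaseg_file = 'aseg.mgz'
csf_wf.inputs.inputspec.reg_file = 'register.dat'
csf_wf.inputs.inputspec.mean_file = 'mean_func.nii.gz'
csf_wf.run()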
Example #7
def create_confound_extraction_workflow(name="confounds", wm_components=6):
    """Extract nuisance variables from anatomical sources."""
    inputnode = Node(
        IdentityInterface(
            ["timeseries", "brain_mask", "reg_file", "subject_id"]), "inputs")

    # Find the subject's Freesurfer segmentation
    # Grab the Freesurfer aparc+aseg file as an anatomical brain mask
    getaseg = Node(
        io.SelectFiles({"aseg": "{subject_id}/mri/aseg.mgz"},
                       base_directory=os.environ["SUBJECTS_DIR"]), "getaseg")

    # Select and erode the white matter to get deep voxels
    selectwm = Node(fs.Binarize(erode=3, wm=True), "selectwm")

    # Transform the mask into functional space
    transform = MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                        ["reg_file", "source_file"], "transform")

    # Extract eigenvariates of the timeseries from WM and whole brain
    extract = MapNode(ExtractConfounds(n_components=wm_components),
                      ["timeseries", "brain_mask", "wm_mask"], "extract")

    outputnode = Node(IdentityInterface(["confound_file"]), "outputs")

    confounds = Workflow(name)
    confounds.connect([
        (inputnode, getaseg, [("subject_id", "subject_id")]),
        (getaseg, selectwm, [("aseg", "in_file")]),
        (selectwm, transform, [("binary_file", "target_file")]),
        (inputnode, transform, [("reg_file", "reg_file"),
                                ("timeseries", "source_file")]),
        (transform, extract, [("transformed_file", "wm_mask")]),
        (inputnode, extract, [("timeseries", "timeseries"),
                              ("brain_mask", "brain_mask")]),
        (extract, outputnode, [("out_file", "confound_file")]),
    ])

    return confounds
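A driving sketch, not from the original source: it assumes SUBJECTS_DIR is set in the environment (the SelectFiles node reads it at construction time) and that ExtractConfounds comes from the surrounding package; subject id and files are placeholders, one list entry per run.
# placeholder inputs, for illustration only
conf_wf = create_confound_extraction_workflow(name='confounds', wm_components=6)
conf_wf.inputs.inputs.subject_id = 'sub-01'
conf_wf.inputs.inputs.timeseries = ['run1_bold.nii.gz', 'run2_bold.nii.gz']
conf_wf.inputs.inputs.reg_file = ['run1_register.dat', 'run2_register.dat']
conf_wf.inputs.inputs.brain_mask = ['run1_mask.nii.gz', 'run2_mask.nii.gz']
conf_wf.run()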
Example #8
def create_no_FS_compcor(name='CompCor'):

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs

    compproc = create_compcorr(name=name)
    inputspec = pe.Node(util.IdentityInterface(fields=[
        'num_components', 'realigned_file', 'segment_files',
        'realignment_parameters', 'outlier_files', 'selector',
        'regress_before_PCA'
    ]),
                        name='inputspec')
    acomp_old = compproc.get_node('extract_csf_mask')
    inputspec_old = compproc.get_node('inputspec')
    tsnr = compproc.get_node('tsnr')
    compcor = compproc.get_node('compcor_components')
    outputspec = compproc.get_node('outputspec')

    compproc.remove_nodes([acomp_old, inputspec_old])

    bin = pe.Node(fs.Binarize(match=[1, 3], erode=1), name='binarize')
    compproc.connect(inputspec, 'segment_files', bin, 'in_file')

    compproc.connect(inputspec, 'realigned_file', tsnr, 'in_file')
    compproc.connect(inputspec, 'num_components', compcor, 'num_components')
    compproc.connect(inputspec, 'realignment_parameters', compcor,
                     'realignment_parameters')
    compproc.connect(inputspec, 'outlier_files', compcor, 'outlier_file')
    compproc.connect(inputspec, 'regress_before_PCA', compcor,
                     'regress_before_PCA')
    compproc.connect(inputspec, 'selector', compcor, 'selector')
    compproc.connect(bin, 'binary_file', compcor, 'csf_mask_file')
    compproc.connect(bin, 'binary_file', outputspec, 'csf_mask')
    #compcor needs the csf mask file input

    return compproc
Example #9
def test_binarize():
    input_map = dict(
        abs=dict(argstr='--abs', ),
        args=dict(argstr='%s', ),
        bin_col_num=dict(argstr='--bincol', ),
        bin_val=dict(argstr='--binval %d', ),
        bin_val_not=dict(argstr='--binvalnot %d', ),
        binary_file=dict(argstr='--o %s', ),
        count_file=dict(argstr='--count %s', ),
        dilate=dict(argstr='--dilate %d', ),
        environ=dict(),
        erode=dict(argstr='--erode  %d', ),
        erode2d=dict(argstr='--erode2d %d', ),
        frame_no=dict(argstr='--frame %s', ),
        in_file=dict(copyfile=False, mandatory=True, argstr='--i %s'),
        invert=dict(argstr='--inv', ),
        mask_file=dict(argstr='--mask maskvol', ),
        mask_thresh=dict(argstr='--mask-thresh %f', ),
        match=dict(argstr='--match %d...', ),
        max=dict(argstr='--max %f', ),
        merge_file=dict(argstr='--merge %s', ),
        min=dict(argstr='--min %f', ),
        out_type=dict(argstr='', ),
        rmax=dict(argstr='--rmax %f', ),
        rmin=dict(argstr='--rmin %f', ),
        subjects_dir=dict(),
        ventricles=dict(argstr='--ventricles', ),
        wm=dict(argstr='--wm', ),
        wm_ven_csf=dict(argstr='--wm+vcsf', ),
        zero_edges=dict(argstr='--zero-edges', ),
        zero_slice_edge=dict(argstr='--zero-slice-edges', ),
    )
    instance = freesurfer.Binarize()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(instance.inputs.traits()[key],
                                        metakey), value
Example #10
def create_custom_template(c):
    import nipype.pipeline.engine as pe
    #from nipype.interfaces.ants import BuildTemplate
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as niu
    import nipype.interfaces.freesurfer as fs

    wf = pe.Workflow(name='create_fs_masked_brains')
    #temp = pe.Node(BuildTemplate(parallelization=1), name='create_template')
    fssource = pe.Node(nio.FreeSurferSource(subjects_dir=c.surf_dir),
                       name='fssource')
    infosource = pe.Node(niu.IdentityInterface(fields=["subject_id"]),
                         name="subject_names")
    infosource.iterables = ("subject_id", c.subjects)
    wf.connect(infosource, "subject_id", fssource, "subject_id")
    sink = pe.Node(nio.DataSink(base_directory=c.sink_dir), name='sinker')
    applymask = pe.Node(fs.ApplyMask(mask_thresh=0.5), name='applymask')
    binarize = pe.Node(fs.Binarize(dilate=1, min=0.5, subjects_dir=c.surf_dir),
                       name='binarize')
    convert = pe.Node(fs.MRIConvert(out_type='niigz'), name='convert')
    wf.connect(fssource, 'orig', applymask, 'in_file')
    wf.connect(fssource, ('aparc_aseg', pickaparc), binarize, 'in_file')
    wf.connect(binarize, 'binary_file', applymask, 'mask_file')
    wf.connect(applymask, 'out_file', convert, 'in_file')
    wf.connect(convert, "out_file", sink, "masked_images")

    def getsubs(subject_id):
        subs = []
        subs.append(('_subject_id_%s/' % subject_id, '%s_' % subject_id))
        return subs

    wf.connect(infosource, ("subject_id", getsubs), sink, "substitutions")
    #wf.connect(convert,'out_file',temp,'in_files')
    #wf.connect(temp,'final_template_file',sink,'custom_template.final_template_file')
    #wf.connect(temp,'subject_outfiles',sink,'custom_template.subject_outfiles')
    #wf.connect(temp,'template_files',sink,'template_files')
    return wf
Example #11
                         iterfield=['in_file'],
                         name='temp_smooth')
temp_smooth.inputs.adaptive = 5
temp_smooth.inputs.lin = True
temp_smooth.inputs.med = True
temp_smooth.inputs.outputtype = 'NIFTI_GZ'
psb6351_wf.connect(Blur, 'out_file', temp_smooth, 'in_file')

# Register a source file to fs space and create a brain mask in source space
# The node below creates the Freesurfer source
fssource = pe.Node(nio.FreeSurferSource(), name='fssource')
fssource.inputs.subject_id = f'sub-{sids[0]}'
fssource.inputs.subjects_dir = fs_dir

# Extract aparc+aseg brain mask, binarize, and dilate by 1 voxel
fs_threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'),
                       name='fs_threshold')
fs_threshold.inputs.dilate = 1
psb6351_wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), fs_threshold,
                   'in_file')

# Transform the binarized aparc+aseg file to the EPI space
# use a nearest neighbor interpolation
fs_voltransform = pe.Node(fs.ApplyVolTransform(inverse=True),
                          name='fs_transform')
fs_voltransform.inputs.subjects_dir = fs_dir
fs_voltransform.inputs.interp = 'nearest'
psb6351_wf.connect(extractref, 'roi_file', fs_voltransform, 'source_file')
psb6351_wf.connect(fs_register, 'out_reg_file', fs_voltransform, 'reg_file')
psb6351_wf.connect(fs_threshold, 'binary_file', fs_voltransform, 'target_file')
Example #12
def create_fs_reg_workflow(name='registration'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT transform
        outputspec.anat2target_transform : FLIRT+FNIRT transform
        outputspec.transformed_files : transformed files in target space
        outputspec.transformed_mean : mean image in target space
    """

    register = Workflow(name=name)

    inputnode = Node(interface=IdentityInterface(fields=['source_files',
                                                         'mean_image',
                                                         'subject_id',
                                                         'subjects_dir',
                                                         'target_image']),
                     name='inputspec')

    outputnode = Node(interface=IdentityInterface(fields=['func2anat_transform',
                                                          'out_reg_file',
                                                          'anat2target_transform',
                                                          'transforms',
                                                          'transformed_mean',
                                                          'transformed_files',
                                                          'min_cost_file',
                                                          'anat2target',
                                                          'aparc',
                                                          'mean2anat_mask'
                                                          ]),
                      name='outputspec')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(),
                    name='fssource')
    fssource.run_without_submitting = True
    register.connect(inputnode, 'subject_id', fssource, 'subject_id')
    register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir')

    convert = Node(freesurfer.MRIConvert(out_type='nii'),
                   name="convert")
    register.connect(fssource, 'T1', convert, 'in_file')

    # Coregister the median to the surface
    bbregister = Node(freesurfer.BBRegister(registered_file=True),
                    name='bbregister')
    bbregister.inputs.init = 'fsl'
    bbregister.inputs.contrast_type = 't2'
    bbregister.inputs.out_fsl_file = True
    bbregister.inputs.epi_mask = True
    register.connect(inputnode, 'subject_id', bbregister, 'subject_id')
    register.connect(inputnode, 'mean_image', bbregister, 'source_file')
    register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir')

    # Create a mask of the median coregistered to the anatomical image
    mean2anat_mask = Node(fsl.BET(mask=True), name='mean2anat_mask')
    register.connect(bbregister, 'registered_file', mean2anat_mask, 'in_file')

    """
    use aparc+aseg's brain mask
    """

    binarize = Node(fs.Binarize(min=0.5, out_type="nii.gz", dilate=1), name="binarize_aparc")
    register.connect(fssource, ("aparc_aseg", get_aparc_aseg), binarize, "in_file")

    stripper = Node(fsl.ApplyMask(), name ='stripper')
    register.connect(binarize, "binary_file", stripper, "mask_file")
    register.connect(convert, 'out_file', stripper, 'in_file')

    """
    Apply inverse transform to aparc file
    """

    aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True,
                                                 interp='nearest'),
                    name='aparc_inverse_transform')
    register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir')
    register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file')
    register.connect(fssource, ('aparc_aseg', get_aparc_aseg),
                     aparcxfm, 'target_file')
    register.connect(inputnode, 'mean_image', aparcxfm, 'source_file')

    """
    Convert the BBRegister transformation to ANTS ITK format
    """

    convert2itk = Node(C3dAffineTool(), name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file')
    register.connect(inputnode, 'mean_image',convert2itk, 'source_file')
    register.connect(stripper, 'out_file', convert2itk, 'reference_file')

    """
    Compute registration between the subject's structural and MNI template
    This is currently set to perform a very quick registration. However, the
    registration can be made significantly more accurate for cortical
    structures by increasing the number of iterations
    All parameters are set using the example from:
    #https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    """

    reg = Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.2, 3.0, 0.0)]
    reg.inputs.number_of_iterations = [[10000, 11110, 11110]] * 2 + [[100, 30, 20]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = True
    reg.inputs.initial_moving_transform_com = True
    reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 2 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 3
    reg.inputs.shrink_factors = [[3, 2, 1]]*2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 3
    reg.inputs.use_histogram_matching = [False] * 2 + [True]
    reg.inputs.winsorize_lower_quantile = 0.005
    reg.inputs.winsorize_upper_quantile = 0.995
    reg.inputs.float = True
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.num_threads = 4
    reg.plugin_args = {'qsub_args': '-pe orte 4',
                       'sbatch_args': '--mem=6G -c 4'}
    register.connect(stripper, 'out_file', reg, 'moving_image')
    register.connect(inputnode,'target_image', reg,'fixed_image')

    """
    Concatenate the affine and ants transforms into a list
    """

    pickfirst = lambda x: x[0]

    merge = Node(Merge(2), iterfield=['in2'], name='mergexfm')
    register.connect(convert2itk, 'itk_transform', merge, 'in2')
    register.connect(reg, 'composite_transform', merge, 'in1')

    """
    Transform the mean image. First to anatomical and then to target
    """

    warpmean = Node(ants.ApplyTransforms(), name='warpmean')
    warpmean.inputs.input_image_type = 0
    warpmean.inputs.interpolation = 'Linear'
    warpmean.inputs.invert_transform_flags = [False, False]
    warpmean.inputs.terminal_output = 'file'
    warpmean.inputs.args = '--float'
    #warpmean.inputs.num_threads = 4
    #warpmean.plugin_args = {'sbatch_args': '--mem=4G -c 4'}

    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = pe.MapNode(ants.ApplyTransforms(),
                         iterfield=['input_image'],
                         name='warpall')
    warpall.inputs.input_image_type = 0
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 2
    warpall.plugin_args = {'sbatch_args': '--mem=6G -c 2'}

    """
    Assign all the output files
    """

    register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
    register.connect(warpall, 'output_image', outputnode, 'transformed_files')

    register.connect(inputnode,'target_image', warpmean,'reference_image')
    register.connect(inputnode, 'mean_image', warpmean, 'input_image')
    register.connect(merge, 'out', warpmean, 'transforms')
    register.connect(inputnode,'target_image', warpall,'reference_image')
    register.connect(inputnode,'source_files', warpall, 'input_image')
    register.connect(merge, 'out', warpall, 'transforms')

    """
    Assign all the output files
    """

    register.connect(reg, 'warped_image', outputnode, 'anat2target')
    register.connect(aparcxfm, 'transformed_file',
                     outputnode, 'aparc')
    register.connect(bbregister, 'out_fsl_file',
                     outputnode, 'func2anat_transform')
    register.connect(bbregister, 'out_reg_file',
                     outputnode, 'out_reg_file')
    register.connect(bbregister, 'min_cost_file',
                     outputnode, 'min_cost_file')
    register.connect(mean2anat_mask, 'mask_file',
                     outputnode, 'mean2anat_mask')
    register.connect(reg, 'composite_transform',
                     outputnode, 'anat2target_transform')
    register.connect(merge, 'out', outputnode, 'transforms')

    return register
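A usage sketch under the assumption that FreeSurfer, FSL, ANTs and c3d are installed and that get_aparc_aseg is defined elsewhere in the source module; all paths are placeholders.
# placeholder inputs, for illustration only
reg_wf = create_fs_reg_workflow(name='registration')
reg_wf.inputs.inputspec.subject_id = 'sub-01'
reg_wf.inputs.inputspec.subjects_dir = '/data/freesurfer'
reg_wf.inputs.inputspec.mean_image = 'mean_func.nii.gz'
reg_wf.inputs.inputspec.source_files = ['run1_bold.nii.gz']
reg_wf.inputs.inputspec.target_image = 'MNI152_T1_2mm_brain.nii.gz'
reg_wf.run(plugin='MultiProc')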
Example #13
"""

FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource')
"""
Use :class:`nipype.interfaces.freesurfer.ApplyVolTransform` to convert the
brainmask generated by freesurfer into the realigned functional space.
"""

ApplyVolTransform = pe.Node(interface=fs.ApplyVolTransform(), name='applyreg')
ApplyVolTransform.inputs.inverse = True
"""
Use :class:`nipype.interfaces.freesurfer.Binarize` to extract a binary brain
mask.
"""

Threshold = pe.Node(interface=fs.Binarize(), name='threshold')
Threshold.inputs.min = 10
Threshold.inputs.out_type = 'nii'
"""
Two different types of functional data smoothing are performed in this
workflow. The volume smoothing option performs a standard SPM smoothing using
:class:`nipype.interfaces.spm.Smooth`. In addition, we use a smoothing routine
from freesurfer (:class:`nipype.interfaces.freesurfer.Smooth`) to project the
functional data from the volume to the subjects' surface, smooth it on the
surface and fit it back into the volume forming the cortical ribbon. The
projection uses the average value along a "cortical column". In addition to the
surface smoothing, the rest of the volume is smoothed with a 3d gaussian kernel.

.. note::

    It is very important to note that the projection to the surface takes a 3d
Example #14
                                   out_fsl_file=True),
                     name='coregister')
preproc_wf.connect(subj_iterable, 'subject_id', coregister, 'subject_id')
preproc_wf.connect(motion_correct, ('out_file', pickfirst), coregister,
                   'source_file')
preproc_wf.connect(coregister, 'out_reg_file', outputspec, 'reg_file')
preproc_wf.connect(coregister, 'out_fsl_file', outputspec, 'fsl_reg_file')
preproc_wf.connect(coregister, 'min_cost_file', outputspec, 'reg_cost')

# Register a source file to fs space
fssource = pe.Node(nio.FreeSurferSource(subjects_dir=subjects_dir),
                   name='fssource')
preproc_wf.connect(subj_iterable, 'subject_id', fssource, 'subject_id')

# Extract aparc+aseg brain mask and binarize
fs_threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'),
                       name='fs_threshold')
preproc_wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), fs_threshold,
                   'in_file')

# Transform the binarized aparc+aseg file to the 1st volume of 1st run space
fs_voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True,
                                                  subjects_dir=subjects_dir),
                             iterfield=['source_file', 'reg_file'],
                             name='fs_transform')
preproc_wf.connect(extractref, 'roi_file', fs_voltransform, 'source_file')
preproc_wf.connect(coregister, 'out_reg_file', fs_voltransform, 'reg_file')
preproc_wf.connect(fs_threshold, 'binary_file', fs_voltransform, 'target_file')

# Dilate the binarized mask by 1 voxel that is now in the EPI space
fs_threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii', dilate=1),
Example #15
def create_struct_preproc_pipeline(working_dir,
                                   freesurfer_dir,
                                   ds_dir,
                                   use_fs_brainmask,
                                   name='struct_preproc'):
    """

    """

    # initiate workflow
    struct_preproc_wf = Workflow(name=name)
    struct_preproc_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting',
                                              'rsfMRI_preprocessing')
    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['t1w', 'subject_id']),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        't1w_brain', 'struct_brain_mask', 'fast_partial_volume_files',
        'wm_mask', 'csf_mask', 'wm_mask_4_bbr', 'gm_mask'
    ]),
                      name='outputnode')

    ds = Node(nio.DataSink(base_directory=ds_dir), name='ds')
    ds.inputs.substitutions = [('_TR_id_', 'TR_')]

    # CREATE BRAIN MASK
    if use_fs_brainmask:
        # brainmask with fs
        fs_source = Node(interface=nio.FreeSurferSource(), name='fs_source')
        fs_source.inputs.subjects_dir = freesurfer_dir
        struct_preproc_wf.connect(inputnode, 'subject_id', fs_source,
                                  'subject_id')

        # get aparc+aseg from list
        def get_aparc_aseg(files):
            for name in files:
                if 'aparc+aseg' in name:
                    return name

        aseg = Node(fs.MRIConvert(out_type='niigz', out_file='aseg.nii.gz'),
                    name='aseg')
        struct_preproc_wf.connect(fs_source, ('aparc_aseg', get_aparc_aseg),
                                  aseg, 'in_file')

        fs_brainmask = Node(
            fs.Binarize(
                min=0.5,  #dilate=1,
                out_type='nii.gz'),
            name='fs_brainmask')
        struct_preproc_wf.connect(aseg, 'out_file', fs_brainmask, 'in_file')

        # fill holes in mask, smooth, rebinarize
        fillholes = Node(fsl.maths.MathsCommand(
            args='-fillh -s 3 -thr 0.1 -bin', out_file='T1_brain_mask.nii.gz'),
                         name='fillholes')

        struct_preproc_wf.connect(fs_brainmask, 'binary_file', fillholes,
                                  'in_file')

        fs_2_struct_mat = Node(util.Function(
            input_names=['moving_image', 'target_image'],
            output_names=['fsl_file'],
            function=tkregister2_fct),
                               name='fs_2_struct_mat')

        struct_preproc_wf.connect([(fs_source, fs_2_struct_mat,
                                    [('T1', 'moving_image'),
                                     ('rawavg', 'target_image')])])

        struct_brain_mask = Node(fsl.ApplyXfm(interp='nearestneighbour'),
                                 name='struct_brain_mask_fs')
        struct_preproc_wf.connect(fillholes, 'out_file', struct_brain_mask,
                                  'in_file')
        struct_preproc_wf.connect(inputnode, 't1w', struct_brain_mask,
                                  'reference')
        struct_preproc_wf.connect(fs_2_struct_mat, 'fsl_file',
                                  struct_brain_mask, 'in_matrix_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', outputnode,
                                  'struct_brain_mask')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', ds,
                                  'struct_prep.struct_brain_mask')

        # multiply t1w with fs brain mask
        t1w_brain = Node(fsl.maths.BinaryMaths(operation='mul'),
                         name='t1w_brain')
        struct_preproc_wf.connect(inputnode, 't1w', t1w_brain, 'in_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', t1w_brain,
                                  'operand_file')
        struct_preproc_wf.connect(t1w_brain, 'out_file', outputnode,
                                  't1w_brain')
        struct_preproc_wf.connect(t1w_brain, 'out_file', ds,
                                  'struct_prep.t1w_brain')

    else:  # use bet
        t1w_brain = Node(fsl.BET(mask=True, outline=True, surfaces=True),
                         name='t1w_brain')
        struct_preproc_wf.connect(inputnode, 't1w', t1w_brain, 'in_file')
        struct_preproc_wf.connect(t1w_brain, 'out_file', outputnode,
                                  't1w_brain')

        def struct_brain_mask_bet_fct(in_file):
            return in_file

        struct_brain_mask = Node(util.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=struct_brain_mask_bet_fct),
                                 name='struct_brain_mask')
        struct_preproc_wf.connect(t1w_brain, 'mask_file', struct_brain_mask,
                                  'in_file')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', outputnode,
                                  'struct_brain_mask')
        struct_preproc_wf.connect(struct_brain_mask, 'out_file', ds,
                                  'struct_prep.struct_brain_mask')

    # SEGMENTATION WITH FAST
    fast = Node(fsl.FAST(), name='fast')
    struct_preproc_wf.connect(t1w_brain, 'out_file', fast, 'in_files')
    struct_preproc_wf.connect(fast, 'partial_volume_files', outputnode,
                              'fast_partial_volume_files')
    struct_preproc_wf.connect(fast, 'partial_volume_files', ds,
                              'struct_prep.fast')

    # functions to select tissue classes
    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())

    def selectsingle(files, idx):
        return files[idx]

    # pve0: CSF
    # pve1: GM
    # pve2: WM
    # binarize tissue classes
    binarize_tissue = MapNode(
        fsl.ImageMaths(op_string='-nan -thr 0.99 -ero -bin'),
        iterfield=['in_file'],
        name='binarize_tissue')

    struct_preproc_wf.connect(fast,
                              ('partial_volume_files', selectindex, [0, 2]),
                              binarize_tissue, 'in_file')

    # OUTPUT  WM AND CSF MASKS FOR CPAC DENOISING
    struct_preproc_wf.connect([(binarize_tissue, outputnode,
                                [(('out_file', selectsingle, 0), 'csf_mask'),
                                 (('out_file', selectsingle, 1), 'wm_mask')])])

    # WRITE WM MASK WITH P > .5 FOR FSL BBR
    # use threshold of .5 like FSL's epi_reg script
    wm_mask_4_bbr = Node(fsl.ImageMaths(op_string='-thr 0.5 -bin'),
                         name='wm_mask_4_bbr')
    struct_preproc_wf.connect(fast, ('partial_volume_files', selectindex, [2]),
                              wm_mask_4_bbr, 'in_file')
    struct_preproc_wf.connect(wm_mask_4_bbr, 'out_file', outputnode,
                              'wm_mask_4_bbr')

    struct_preproc_wf.write_graph(dotfilename=struct_preproc_wf.name,
                                  graph2use='flat',
                                  format='pdf')

    return struct_preproc_wf
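A construction sketch with placeholder directories; tkregister2_fct is a helper from the source module, and FreeSurfer plus FSL are assumed to be installed.
# placeholder directories, for illustration only
sp_wf = create_struct_preproc_pipeline(working_dir='/tmp/work',
                                        freesurfer_dir='/data/freesurfer',
                                        ds_dir='/data/derivatives',
                                        use_fs_brainmask=True)
sp_wf.inputs.inputnode.t1w = 'sub-01_T1w.nii.gz'
sp_wf.inputs.inputnode.subject_id = 'sub-01'
sp_wf.run()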
Example #16
def create_getmask_flow(name='getmask', dilate_mask=True):
    """Registers a source file to freesurfer space and create a brain mask in
source space

Requires fsl tools for initializing registration

Parameters
----------

name : string
name of workflow
dilate_mask : boolean
indicates whether to dilate mask or not

Example
-------

>>> getmask = create_getmask_flow()
>>> getmask.inputs.inputspec.source_file = 'mean.nii'
>>> getmask.inputs.inputspec.subject_id = 's1'
>>> getmask.inputs.inputspec.subjects_dir = '.'
>>> getmask.inputs.inputspec.contrast_type = 't2'


Inputs::

inputspec.source_file : reference image for mask generation
inputspec.subject_id : freesurfer subject id
inputspec.subjects_dir : freesurfer subjects directory
inputspec.contrast_type : MR contrast of reference image

Outputs::

outputspec.mask_file : binary mask file in reference image space
outputspec.reg_file : registration file that maps reference image to
freesurfer space
outputspec.reg_cost : cost of registration (useful for detecting misalignment)
"""
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.io as nio
    """
Initialize the workflow
"""

    getmask = pe.Workflow(name=name)

    """
Define the inputs to the workflow.
"""

    inputnode = pe.Node(niu.IdentityInterface(fields=['source_file',
                                                      'subject_id',
                                                      'subjects_dir',
                                                      'contrast_type']),
        name='inputspec')

    """
Define all the nodes of the workflow:

fssource: used to retrieve aseg.mgz
threshold : binarize aseg
register : coregister source file to freesurfer space
voltransform: convert binarized aseg to source file space

"""

    fssource = pe.Node(nio.FreeSurferSource(),
        name = 'fssource')
    threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'),
        name='threshold')
    register = pe.MapNode(fs.BBRegister(init='fsl'),
        iterfield=['source_file'],
        name='register')
    voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True),
        iterfield=['source_file', 'reg_file'],
        name='transform')

    """
Connect the nodes
"""

    getmask.connect([
        (inputnode, fssource, [('subject_id','subject_id'),
            ('subjects_dir','subjects_dir')]),
        (inputnode, register, [('source_file', 'source_file'),
            ('subject_id', 'subject_id'),
            ('subjects_dir', 'subjects_dir'),
            ('contrast_type', 'contrast_type')]),
        (inputnode, voltransform, [('subjects_dir', 'subjects_dir'),
            ('source_file', 'source_file')]),
        (fssource, threshold, [(('aparc_aseg', get_aparc_aseg), 'in_file')]),
        (register, voltransform, [('out_reg_file','reg_file')]),
        (threshold, voltransform, [('binary_file','target_file')])
    ])


    """
Add remaining nodes and connections

dilate : dilate the transformed file in source space
threshold2 : binarize transformed file
"""

    threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii'),
        iterfield=['in_file'],
        name='threshold2')
    if dilate_mask:
        threshold2.inputs.dilate = 1
    getmask.connect([
        (voltransform, threshold2, [('transformed_file', 'in_file')])
    ])

    """
Setup an outputnode that defines relevant outputs of the workflow.
"""

    outputnode = pe.Node(niu.IdentityInterface(fields=["mask_file",
                                                       "reg_file",
                                                       "reg_cost"
    ]),
        name="outputspec")
    getmask.connect([
        (register, outputnode, [("out_reg_file", "reg_file")]),
        (register, outputnode, [("min_cost_file", "reg_cost")]),
        (threshold2, outputnode, [("binary_file", "mask_file")]),
    ])
    return getmask
Example #17
def create_confound_removal_workflow(workflow_name="confound_removal"):

    inputnode = pe.Node(util.IdentityInterface(
        fields=["subject_id", "timeseries", "reg_file", "motion_parameters"]),
                        name="inputs")

    # Get the Freesurfer aseg volume from the Subjects Directory
    getaseg = pe.Node(io.FreeSurferSource(subjects_dir=fs.Info.subjectsdir()),
                      name="getaseg")

    # Binarize the Aseg to use as a whole brain mask
    asegmask = pe.Node(fs.Binarize(min=0.5, dilate=2), name="asegmask")

    # Extract and erode a mask of the deep cerebral white matter
    extractwm = pe.Node(fs.Binarize(match=[2, 41], erode=3), name="extractwm")

    # Extract and erode a mask of the ventricles and CSF
    extractcsf = pe.Node(fs.Binarize(match=[4, 5, 14, 15, 24, 31, 43, 44, 63],
                                     erode=1),
                         name="extractcsf")

    # Mean the timeseries across the fourth dimension
    meanfunc = pe.MapNode(fsl.MeanImage(),
                          iterfield=["in_file"],
                          name="meanfunc")

    # Invert the anatomical coregistration and resample the masks
    regwm = pe.MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                       iterfield=["source_file", "reg_file"],
                       name="regwm")

    regcsf = pe.MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                        iterfield=["source_file", "reg_file"],
                        name="regcsf")

    regbrain = pe.MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                          iterfield=["source_file", "reg_file"],
                          name="regbrain")

    # Convert to Nifti for FSL tools
    convertwm = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                           iterfield=["in_file"],
                           name="convertwm")

    convertcsf = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                            iterfield=["in_file"],
                            name="convertcsf")

    convertbrain = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                              iterfield=["in_file"],
                              name="convertbrain")

    # Add the mask images together for a report image
    addconfmasks = pe.MapNode(fsl.ImageMaths(suffix="conf",
                                             op_string="-mul 2 -add",
                                             out_data_type="char"),
                              iterfield=["in_file", "in_file2"],
                              name="addconfmasks")

    # Overlay and slice the confound mask overlaied on mean func for reporting
    confoverlay = pe.MapNode(fsl.Overlay(auto_thresh_bg=True,
                                         stat_thresh=(.7, 2)),
                             iterfield=["background_image", "stat_image"],
                             name="confoverlay")

    confslice = pe.MapNode(fsl.Slicer(image_width=800, label_slices=False),
                           iterfield=["in_file"],
                           name="confslice")
    confslice.inputs.sample_axial = 2

    # Extract the mean signal from white matter and CSF masks
    wmtcourse = pe.MapNode(fs.SegStats(exclude_id=0, avgwf_txt_file=True),
                           iterfield=["segmentation_file", "in_file"],
                           name="wmtcourse")

    csftcourse = pe.MapNode(fs.SegStats(exclude_id=0, avgwf_txt_file=True),
                            iterfield=["segmentation_file", "in_file"],
                            name="csftcourse")

    # Extract the mean signal from over the whole brain
    globaltcourse = pe.MapNode(fs.SegStats(exclude_id=0, avgwf_txt_file=True),
                               iterfield=["segmentation_file", "in_file"],
                               name="globaltcourse")

    # Build the confound design matrix
    conf_inputs = [
        "motion_params", "global_waveform", "wm_waveform", "csf_waveform"
    ]
    confmatrix = pe.MapNode(util.Function(input_names=conf_inputs,
                                          output_names=["confound_matrix"],
                                          function=make_confound_matrix),
                            iterfield=conf_inputs,
                            name="confmatrix")

    # Regress the confounds out of the timeseries
    confregress = pe.MapNode(fsl.FilterRegressor(filter_all=True),
                             iterfield=["in_file", "design_file", "mask"],
                             name="confregress")

    # Rename the confound mask png
    renamepng = pe.MapNode(util.Rename(format_string="confound_sources.png"),
                           iterfield=["in_file"],
                           name="renamepng")

    # Define the outputs
    outputnode = pe.Node(
        util.IdentityInterface(fields=["timeseries", "confound_sources"]),
        name="outputs")

    # Define and connect the confound workflow
    confound = pe.Workflow(name=workflow_name)

    confound.connect([
        (inputnode, meanfunc, [("timeseries", "in_file")]),
        (inputnode, getaseg, [("subject_id", "subject_id")]),
        (getaseg, extractwm, [("aseg", "in_file")]),
        (getaseg, extractcsf, [("aseg", "in_file")]),
        (getaseg, asegmask, [("aseg", "in_file")]),
        (extractwm, regwm, [("binary_file", "target_file")]),
        (extractcsf, regcsf, [("binary_file", "target_file")]),
        (asegmask, regbrain, [("binary_file", "target_file")]),
        (meanfunc, regwm, [("out_file", "source_file")]),
        (meanfunc, regcsf, [("out_file", "source_file")]),
        (meanfunc, regbrain, [("out_file", "source_file")]),
        (inputnode, regwm, [("reg_file", "reg_file")]),
        (inputnode, regcsf, [("reg_file", "reg_file")]),
        (inputnode, regbrain, [("reg_file", "reg_file")]),
        (regwm, convertwm, [("transformed_file", "in_file")]),
        (regcsf, convertcsf, [("transformed_file", "in_file")]),
        (regbrain, convertbrain, [("transformed_file", "in_file")]),
        (convertwm, addconfmasks, [("out_file", "in_file")]),
        (convertcsf, addconfmasks, [("out_file", "in_file2")]),
        (addconfmasks, confoverlay, [("out_file", "stat_image")]),
        (meanfunc, confoverlay, [("out_file", "background_image")]),
        (confoverlay, confslice, [("out_file", "in_file")]),
        (confslice, renamepng, [("out_file", "in_file")]),
        (regwm, wmtcourse, [("transformed_file", "segmentation_file")]),
        (inputnode, wmtcourse, [("timeseries", "in_file")]),
        (regcsf, csftcourse, [("transformed_file", "segmentation_file")]),
        (inputnode, csftcourse, [("timeseries", "in_file")]),
        (regbrain, globaltcourse, [("transformed_file", "segmentation_file")]),
        (inputnode, globaltcourse, [("timeseries", "in_file")]),
        (inputnode, confmatrix, [("motion_parameters", "motion_params")]),
        (wmtcourse, confmatrix, [("avgwf_txt_file", "wm_waveform")]),
        (csftcourse, confmatrix, [("avgwf_txt_file", "csf_waveform")]),
        (globaltcourse, confmatrix, [("avgwf_txt_file", "global_waveform")]),
        (confmatrix, confregress, [("confound_matrix", "design_file")]),
        (inputnode, confregress, [("timeseries", "in_file")]),
        (convertbrain, confregress, [("out_file", "mask")]),
        (confregress, outputnode, [("out_file", "timeseries")]),
        (renamepng, outputnode, [("out_file", "confound_sources")]),
    ])

    return confound
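A driving sketch, assuming SUBJECTS_DIR is configured (fs.Info.subjectsdir() reads it) and that make_confound_matrix is defined in the source module; the inputs below are placeholder lists, one entry per run.
# placeholder inputs, for illustration only
cr_wf = create_confound_removal_workflow(workflow_name='confound_removal')
cr_wf.inputs.inputs.subject_id = 'sub-01'
cr_wf.inputs.inputs.timeseries = ['run1_bold.nii.gz', 'run2_bold.nii.gz']
cr_wf.inputs.inputs.reg_file = ['run1_register.dat', 'run2_register.dat']
cr_wf.inputs.inputs.motion_parameters = ['run1.par', 'run2.par']
cr_wf.run()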
Example #18
def localizer(name='localizer'):
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.io as nio
    wf = pe.Workflow(name=name)
    inputspec = pe.Node(niu.IdentityInterface(fields=["subject_id",
                                                      "subjects_dir",
                                                      "overlay",
                                                      'reg',
                                                      'mean',
                                                      'thresh',
                                                      'roi',
                                                      "mask_overlay",
                                                      "use_mask_overlay","uthresh"]),name='inputspec')
    surf_label = pe.MapNode(niu.Function(input_names=['vertex',
                                                   'hemi',
                                                   'subject',
                                                   'overlay',
                                                   'reg',
                                                   'sd',
                                                   'thresh'],
                                      output_names=['filename','labels'],
                                      function=get_surface_label),
        name='get_surface_label', iterfield=['hemi','vertex'])
    surf_label.inputs.hemi=['lh','rh']
    #surf_label.inputs.vertex = [61091, 60437]
    #surf_label.inputs.thresh = 1.5

    masker = pe.Node(niu.Function(input_names=['mask',
                                               'overlay',
                                               'use_mask_overlay',
                                               'thresh'],
                                  output_names=['outfile'],function=mask_overlay),
        name='mask_overlay')

    bg = pe.Node(niu.Function(input_names=['overlay','uthresh'],output_names=['outfile'],function=background),name='background')
    wf.connect(inputspec,'overlay',bg,'overlay')
    wf.connect(inputspec,'uthresh',bg,'uthresh')
    wf.connect(inputspec,'overlay',masker,'overlay')
    wf.connect(inputspec,'mask_overlay',masker,'mask')
    wf.connect(inputspec,'use_mask_overlay',masker,'use_mask_overlay')
    wf.connect(inputspec,'thresh',masker,'thresh')
    wf.connect(masker,'outfile',surf_label,'overlay')

    wf.connect(inputspec,"subject_id",surf_label,"subject")
    wf.connect(inputspec,"subjects_dir",surf_label,"sd")
    #wf.connect(inputspec,"overlay",surf_label,"overlay")
    wf.connect(inputspec,"reg",surf_label,"reg")

    label2vol = pe.Node(fs.Label2Vol(),name='labels2vol')
    wf.connect(inputspec,'subjects_dir',label2vol,'subjects_dir')
    wf.connect(inputspec,'mean',label2vol,'template_file')
    wf.connect(inputspec,'reg',label2vol,'reg_file')
    wf.connect(surf_label,'filename',label2vol,'label_file')

    verts = pe.MapNode(niu.Function(input_names=['sub',
                                              'sd',
                                              'overlay',
                                              'reg',
                                              'mean',
                                              'hemi',
                                              'roi',
                                              'thresh'],
                                 output_names=['vertex'],
                                 function=get_vertices),
        name='get_verts',iterfield=['hemi'])
    verts.inputs.hemi = ['lh','rh']
    wf.connect(inputspec,'subject_id',verts,'sub')
    wf.connect(inputspec,'subjects_dir',verts,'sd')
    #wf.connect(inputspec,'overlay',verts,'overlay')
    wf.connect(masker,'outfile',verts,'overlay')
    wf.connect(inputspec,'reg',verts,'reg')
    wf.connect(inputspec,'mean',verts,'mean')
    wf.connect(inputspec,'thresh',verts,'thresh')
    wf.connect(inputspec,'roi',verts,'roi')
    wf.connect(verts,'vertex',surf_label,'vertex')
    wf.connect(inputspec,'thresh',surf_label,'thresh')

    from ...smri.freesurfer_brain_masks import pickaparc

    fssource = pe.Node(nio.FreeSurferSource(),name='fssource')
    wf.connect(inputspec,"subjects_dir",fssource,"subjects_dir")
    wf.connect(inputspec,"subject_id", fssource,"subject_id")

    bg_mask = pe.Node(fs.Binarize(wm_ven_csf=True, erode=2),name="bg_mask")

    wf.connect(fssource,("aparc_aseg",pickaparc),bg_mask,"in_file")

    warp_mask = pe.Node(fs.ApplyVolTransform(inverse=True,interp='nearest'),name="warp_to_func")
    wf.connect(inputspec,"mean",warp_mask,"source_file")
    wf.connect(bg_mask,"binary_file",warp_mask,"target_file")
    wf.connect(inputspec,"reg", warp_mask,"reg_file")
    

    do_bg_mask = pe.Node(fs.ApplyMask(),name="do_bg_mask")
    wf.connect(warp_mask,"transformed_file",do_bg_mask,"mask_file")

    studyref = pe.Node(niu.Function(input_names=['mean'],output_names=['study_ref'], function=study_ref),name='studyref')
    wf.connect(inputspec,'mean',studyref,'mean')

    outputspec = pe.Node(niu.IdentityInterface(fields=['rois','reference','study_ref','labels']),name='outputspec')

    wf.connect(studyref,'study_ref', outputspec, 'study_ref')
    bin = pe.Node(fsl.ImageMaths(op_string = '-bin'),name="binarize_roi")
    changetype = pe.Node(fsl.ChangeDataType(output_datatype='short'),name='to_short')

    wf.connect(bg,'outfile',do_bg_mask,"in_file")
    wf.connect(do_bg_mask,("out_file",shorty), outputspec,'reference')
    wf.connect(label2vol,'vol_label_file',bin,'in_file')
    wf.connect(bin,'out_file', changetype, 'in_file')
    wf.connect(changetype, 'out_file', outputspec, 'rois')
    wf.connect(surf_label,'labels',outputspec,'labels')
    return wf
Example #19
        3, 8, 42, 17, 18, 53, 54, 11, 12, 13, 26, 50, 51, 52, 58, 9, 10, 47,
        48, 49, 16, 28, 60
    ]

    # get the freesurfer recon_all segmentation output
    aseg_mgz = os.path.join(freesurfer_dir, subject_id, 'mri', 'aseg.mgz')

    # convert *mgz into *nii.gz
    mricon = fs.MRIConvert(in_file=aseg_mgz,
                           out_file='aseg.nii.gz',
                           out_type='niigz').run()

    aseg_nifti = os.path.abspath('aseg.nii.gz')

    ###### Step # 1: get gray matter mask by binarizing aseg with gm labels
    coolBinarize = fs.Binarize()
    coolBinarize.inputs.in_file = aseg_nifti
    coolBinarize.inputs.match = gm_labels
    coolBinarize.inputs.out_type = 'nii.gz'
    coolBinarize.inputs.binary_file = 'brain_gmseg.nii.gz'
    coolBinarize.run()

    ###### Step #2: normalize GM mask to MNI (1mm) space
    os.chdir(work_dir_con)

    gm_mask = os.path.join(data_dir, subject_id, 'preprocessed/anat',
                           'brain_gmseg.nii.gz')

    at = ants.ApplyTransforms()
    at.inputs.input_image = gm_mask
    at.inputs.reference_image = os.path.join(mni_dir,
Example #20
def create_workflow(func_runs,
                    subject_id,
                    subjects_dir,
                    fwhm,
                    slice_times,
                    highpass_frequency,
                    lowpass_frequency,
                    TR,
                    sink_directory,
                    use_fsl_bp,
                    num_components,
                    whichvol,
                    name='wmaze'):
    
    wf = pe.Workflow(name=name)

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run'],
                                         outfields=['func']),
                         name='datasource')
    datasource.inputs.subject_id = subject_id
    datasource.inputs.run = func_runs
    datasource.inputs.template = '/home/data/madlab/data/mri/wmaze/%s/bold/bold_%03d/bold.nii.gz'
    datasource.inputs.sort_filelist = True
    
    # Rename files in case they are named identically
    name_unique = pe.MapNode(util.Rename(format_string='wmaze_%(run)02d'),
                             iterfield = ['in_file', 'run'],
                             name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = func_runs
    wf.connect(datasource, 'func', name_unique, 'in_file')

    # Define the outputs for the preprocessing workflow
    output_fields = ['reference',
                     'motion_parameters',
                     'motion_parameters_plusDerivs',
                     'motionandoutlier_noise_file',
                     'noise_components',
                     'realigned_files',
                     'motion_plots',
                     'mask_file',
                     'smoothed_files',
                     'bandpassed_files',
                     'reg_file',
                     'reg_cost',
                     'reg_fsl_file',
                     'artnorm_files',
                     'artoutlier_files',
                     'artdisplacement_files',
                     'tsnr_file']
        
    outputnode = pe.Node(util.IdentityInterface(fields=output_fields),
                         name='outputspec')

    # Convert functional images to float representation
    img2float = pe.MapNode(fsl.ImageMaths(out_data_type='float',
                                          op_string='',
                                          suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')
    wf.connect(name_unique, 'out_file', img2float, 'in_file')

    # Run AFNI's despike. This is always run, however, whether this is fed to
    # realign depends on the input configuration
    despiker = pe.MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                          iterfield=['in_file'],
                          name='despike')
    num_threads = 4
    despiker.inputs.environ = {'OMP_NUM_THREADS': '%d' % num_threads}
    # request the thread count and host span in a single bsub_args string;
    # assigning plugin_args twice would overwrite the first setting
    despiker.plugin_args = {'bsub_args': '-n %d -R "span[hosts=1]"' % num_threads}
    wf.connect(img2float, 'out_file', despiker, 'in_file')

    # Extract the first volume of the first run as the reference 
    extractref = pe.Node(fsl.ExtractROI(t_size=1),
                         name="extractref")
    wf.connect(despiker, ('out_file', pickfirst), extractref, 'in_file')
    wf.connect(despiker, ('out_file', pickvol, 0, whichvol), extractref, 't_min')
    wf.connect(extractref, 'roi_file', outputnode, 'reference')

    if slice_times is not None:
        # Simultaneous motion and slice timing correction with Nipy algorithm
        motion_correct = pe.Node(nipy.SpaceTimeRealigner(), name='motion_correct')
        motion_correct.inputs.tr = TR
        motion_correct.inputs.slice_times = slice_times
        motion_correct.inputs.slice_info = 2
        motion_correct.plugin_args = {
            'bsub_args': '-n %s -R "span[hosts=1]"' % os.environ['MKL_NUM_THREADS']}
        wf.connect(despiker, 'out_file', motion_correct, 'in_file')
        wf.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
        wf.connect(motion_correct, 'out_file', outputnode, 'realigned_files')
    else:
        # Motion correct functional runs to the reference (1st volume of 1st run)
        motion_correct =  pe.MapNode(fsl.MCFLIRT(save_mats = True,
                                                 save_plots = True,
                                                 interpolation = 'sinc'),
                                     name = 'motion_correct',
                                     iterfield = ['in_file'])
        wf.connect(despiker, 'out_file', motion_correct, 'in_file')
        wf.connect(extractref, 'roi_file', motion_correct, 'ref_file')
        wf.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
        wf.connect(motion_correct, 'out_file', outputnode, 'realigned_files')

    # Compute TSNR on realigned data regressing polynomials up to order 2
    tsnr = pe.MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(motion_correct, 'out_file', tsnr, 'in_file')
    wf.connect(tsnr, 'tsnr_file', outputnode, 'tsnr_file')

    # Plot the estimated motion parameters
    plot_motion = pe.MapNode(fsl.PlotMotionParams(in_source='fsl'),
                             name='plot_motion',
                             iterfield=['in_file'])
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    wf.connect(motion_correct, 'par_file', plot_motion, 'in_file')
    wf.connect(plot_motion, 'out_file', outputnode, 'motion_plots')

    # Register a source file to fs space and create a brain mask in source space
    fssource = pe.Node(nio.FreeSurferSource(),
                       name ='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = subjects_dir

    # Extract aparc+aseg brain mask and binarize
    fs_threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'),
                           name ='fs_threshold')
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), fs_threshold, 'in_file')

    # Calculate the transformation matrix from EPI space to FreeSurfer space
    # using the BBRegister command
    fs_register = pe.MapNode(fs.BBRegister(init='fsl'),
                             iterfield=['source_file'],
                             name ='fs_register')
    fs_register.inputs.contrast_type = 't2'
    fs_register.inputs.out_fsl_file = True
    fs_register.inputs.subject_id = subject_id
    fs_register.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', fs_register, 'source_file')
    wf.connect(fs_register, 'out_reg_file', outputnode, 'reg_file')
    wf.connect(fs_register, 'min_cost_file', outputnode, 'reg_cost')
    wf.connect(fs_register, 'out_fsl_file', outputnode, 'reg_fsl_file')

    # Extract wm and csf masks by eroding freesurfer labels
    wmcsf = pe.MapNode(fs.Binarize(), 
                       iterfield=['match', 'binary_file', 'erode'], name='wmcsfmask')
    #wmcsf.inputs.wm_ven_csf = True
    wmcsf.inputs.match = [[2, 41], [4, 5, 14, 15, 24, 31, 43, 44, 63]]
    wmcsf.inputs.binary_file = ['wm.nii.gz', 'csf.nii.gz']
    wmcsf.inputs.erode = [2, 2] #int(np.ceil(slice_thickness))
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file')

    # Now transform the wm and csf masks to 1st volume of 1st run
    wmcsftransform = pe.MapNode(fs.ApplyVolTransform(inverse=True,
                                                     interp='nearest'),
                                iterfield=['target_file'],
                                name='wmcsftransform')
    wmcsftransform.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', wmcsftransform, 'source_file')
    wf.connect(fs_register, ('out_reg_file', pickfirst), wmcsftransform, 'reg_file')
    wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file')

    # Transform the binarized aparc+aseg file to the 1st volume of 1st run space
    fs_voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True),
                                 iterfield = ['source_file', 'reg_file'],
                                 name='fs_transform')
    fs_voltransform.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', fs_voltransform, 'source_file')
    wf.connect(fs_register, 'out_reg_file', fs_voltransform, 'reg_file')
    wf.connect(fs_threshold, 'binary_file', fs_voltransform, 'target_file')

    # Dilate the binarized mask by 1 voxel that is now in the EPI space
    fs_threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii'),
                               iterfield=['in_file'],
                               name='fs_threshold2')
    fs_threshold2.inputs.dilate = 1
    wf.connect(fs_voltransform, 'transformed_file', fs_threshold2, 'in_file')
    wf.connect(fs_threshold2, 'binary_file', outputnode, 'mask_file')
    
    # Use RapidART to detect motion/intensity outliers
    art = pe.MapNode(ra.ArtifactDetect(use_differences = [True, False],
                                       use_norm = True,
                                       zintensity_threshold = 3,
                                       norm_threshold = 1,
                                       bound_by_brainmask=True,
                                       mask_type = "file"),
                     iterfield=["realignment_parameters","realigned_files"],
                     name="art")
    if slice_times is not None:
        art.inputs.parameter_source = "NiPy"
    else:
        art.inputs.parameter_source = "FSL"
    wf.connect(motion_correct, 'par_file', art, 'realignment_parameters')
    wf.connect(motion_correct, 'out_file', art, 'realigned_files')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), art, 'mask_file')
    wf.connect(art, 'norm_files', outputnode, 'artnorm_files')
    wf.connect(art, 'outlier_files', outputnode, 'artoutlier_files')
    wf.connect(art, 'displacement_files', outputnode, 'artdisplacement_files')

    # Compute motion regressors (save file with 1st and 2nd derivatives)
    motreg = pe.Node(util.Function(input_names=['motion_params', 'order',
                                                'derivatives'],
                                   output_names=['out_files'],
                                   function=motion_regressors,
                                   imports=imports),
                     name='getmotionregress')
    wf.connect(motion_correct, 'par_file', motreg, 'motion_params')
    wf.connect(motreg, 'out_files', outputnode, 'motion_parameters_plusDerivs')

    # Create a filter text file to remove motion (+ derivatives), art confounds,
    # and 1st, 2nd, and 3rd order legendre polynomials.
    createfilter1 = pe.Node(util.Function(input_names=['motion_params', 'comp_norm',
                                                       'outliers', 'detrend_poly'],
                                          output_names=['out_files'],
                                          function=build_filter1,
                                          imports=imports),
                            name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 3
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')
    wf.connect(createfilter1, 'out_files', outputnode, 'motionandoutlier_noise_file')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = pe.MapNode(util.Function(input_names=['realigned_file', 'mask_file',
                                                          'num_components',
                                                          'extra_regressors'],
                                             output_names=['out_files'],
                                             function=extract_noise_components,
                                             imports=imports),
                               iterfield=['realigned_file', 'extra_regressors'],
                               name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(motion_correct, 'out_file', createfilter2, 'realigned_file')
    wf.connect(wmcsftransform, 'transformed_file', createfilter2, 'mask_file')
    wf.connect(createfilter2, 'out_files', outputnode, 'noise_components')

    # Mask the functional runs with the extracted mask
    maskfunc = pe.MapNode(fsl.ImageMaths(suffix='_bet',
                                         op_string='-mas'),
                          iterfield=['in_file'],
                          name = 'maskfunc')
    wf.connect(motion_correct, 'out_file', maskfunc, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), maskfunc, 'in_file2')
    
    # Smooth each run using SUSAn with the brightness threshold set to 75%
    # of the median value for each run and a mask constituting the mean functional
    smooth_median = pe.MapNode(fsl.ImageStats(op_string='-k %s -p 50'),
                               iterfield = ['in_file'],
                               name='smooth_median')
    wf.connect(maskfunc, 'out_file', smooth_median, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), smooth_median, 'mask_file')
    
    smooth_meanfunc = pe.MapNode(fsl.ImageMaths(op_string='-Tmean',
                                                suffix='_mean'),
                                 iterfield=['in_file'],
                                 name='smooth_meanfunc')
    wf.connect(maskfunc, 'out_file', smooth_meanfunc, 'in_file')

    smooth_merge = pe.Node(util.Merge(2, axis='hstack'),
                           name='smooth_merge')
    wf.connect(smooth_meanfunc, 'out_file', smooth_merge, 'in1')
    wf.connect(smooth_median, 'out_stat', smooth_merge, 'in2')

    smooth = pe.MapNode(fsl.SUSAN(),
                        iterfield=['in_file', 'brightness_threshold', 'usans'],
                        name='smooth')
    smooth.inputs.fwhm=fwhm
    wf.connect(maskfunc, 'out_file', smooth, 'in_file')
    wf.connect(smooth_median, ('out_stat', getbtthresh), smooth, 'brightness_threshold')
    wf.connect(smooth_merge, ('out', getusans), smooth, 'usans')
    
    # Mask the smoothed data with the dilated mask
    maskfunc2 = pe.MapNode(fsl.ImageMaths(suffix='_mask',
                                          op_string='-mas'),
                           iterfield=['in_file'],
                           name='maskfunc2')
    wf.connect(smooth, 'smoothed_file', maskfunc2, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), maskfunc2, 'in_file2')
    wf.connect(maskfunc2, 'out_file', outputnode, 'smoothed_files')

    # Band-pass filter the timeseries
    if use_fsl_bp == 'True':
        determine_bp_sigmas = pe.Node(util.Function(input_names=['tr',
                                                                 'highpass_freq',
                                                                 'lowpass_freq'],
                                                    output_names = ['out_sigmas'],
                                                    function=calc_fslbp_sigmas),
                                      name='determine_bp_sigmas')
        determine_bp_sigmas.inputs.tr = float(TR)
        determine_bp_sigmas.inputs.highpass_freq = float(highpass_frequency)
        determine_bp_sigmas.inputs.lowpass_freq = float(lowpass_frequency)

        bandpass = pe.MapNode(fsl.ImageMaths(suffix='_tempfilt'),
                              iterfield=["in_file"],
                              name="bandpass")
        wf.connect(determine_bp_sigmas, ('out_sigmas', highpass_operand), bandpass, 'op_string')
        wf.connect(maskfunc2, 'out_file', bandpass, 'in_file')
        wf.connect(bandpass, 'out_file', outputnode, 'bandpassed_files')
    else:
        bandpass = pe.Node(util.Function(input_names=['files',
                                                      'lowpass_freq',
                                                      'highpass_freq',
                                                      'fs'],
                                         output_names=['out_files'],
                                         function=bandpass_filter,
                                         imports=imports),
                           name='bandpass')
        bandpass.inputs.fs = 1./TR
        if highpass_frequency < 0:
            bandpass.inputs.highpass_freq = -1
        else:
            bandpass.inputs.highpass_freq = highpass_frequency
        if lowpass_frequency < 0:
            bandpass.inputs.lowpass_freq = -1
        else:
            bandpass.inputs.lowpass_freq = lowpass_frequency
        wf.connect(maskfunc2, 'out_file', bandpass, 'files')
        wf.connect(bandpass, 'out_files', outputnode, 'bandpassed_files')

    # Save the relevant data into an output directory
    datasink = pe.Node(nio.DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    wf.connect(outputnode, 'reference', datasink, 'ref')
    wf.connect(outputnode, 'motion_parameters', datasink, 'motion')
    wf.connect(outputnode, 'realigned_files', datasink, 'func.realigned')
    wf.connect(outputnode, 'motion_plots', datasink, 'motion.@plots')
    wf.connect(outputnode, 'mask_file', datasink, 'ref.@mask')
    wf.connect(outputnode, 'smoothed_files', datasink, 'func.smoothed_fullspectrum')
    wf.connect(outputnode, 'bandpassed_files', datasink, 'func.smoothed_bandpassed')
    wf.connect(outputnode, 'reg_file', datasink, 'bbreg.@reg')
    wf.connect(outputnode, 'reg_cost', datasink, 'bbreg.@cost')
    wf.connect(outputnode, 'reg_fsl_file', datasink, 'bbreg.@regfsl')
    wf.connect(outputnode, 'artnorm_files', datasink, 'art.@norm_files')
    wf.connect(outputnode, 'artoutlier_files', datasink, 'art.@outlier_files')
    wf.connect(outputnode, 'artdisplacement_files', datasink, 'art.@displacement_files')
    wf.connect(outputnode, 'motion_parameters_plusDerivs', datasink, 'noise.@motionplusDerivs')
    wf.connect(outputnode, 'motionandoutlier_noise_file', datasink, 'noise.@motionplusoutliers')
    wf.connect(outputnode, 'noise_components', datasink, 'compcor')
    wf.connect(outputnode, 'tsnr_file', datasink, 'tsnr')    

    return wf
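
A hedged invocation sketch for create_workflow(); every path, the run list, and the acquisition parameters below are placeholders, and the LSF plugin is only one option that happens to match the bsub_args set on the nodes inside:

# Hypothetical driver; all values are placeholders.
import os

preproc_wf = create_workflow(func_runs=[1, 2, 3],
                             subject_id='WMAZE_001',
                             subjects_dir='/home/data/madlab/surfaces/wmaze',
                             fwhm=6.0,
                             slice_times=None,        # skip Nipy realignment, use MCFLIRT
                             highpass_frequency=0.008,
                             lowpass_frequency=-1,    # negative disables the lowpass
                             TR=2.0,
                             sink_directory='/home/data/madlab/wmaze_preproc',
                             use_fsl_bp='False',
                             num_components=3,
                             whichvol='first',
                             name='wmaze_preproc')
preproc_wf.base_dir = os.path.join('/scratch', 'wmaze_workingdir')
preproc_wf.run(plugin='LSF', plugin_args={'bsub_args': '-q PQ_madlab'})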
Example #21
def create_skullstrip_workflow(name="skullstrip"):
    """Remove non-brain voxels from the timeseries."""

    # Define the workflow inputs
    inputnode = Node(
        IdentityInterface(["subject_id", "timeseries", "reg_file"]), "inputs")

    # Mean the timeseries across the fourth dimension
    origmean = MapNode(fsl.MeanImage(), "in_file", "origmean")

    # Grab the Freesurfer aparc+aseg file as an anatomical brain mask
    getaseg = Node(
        io.SelectFiles({"aseg": "{subject_id}/mri/aparc+aseg.mgz"},
                       base_directory=os.environ["SUBJECTS_DIR"]), "getaseg")

    # Threshold the aseg volume to get a boolean mask
    makemask = Node(fs.Binarize(dilate=4, min=0.5), "makemask")

    # Transform the brain mask into functional space
    transform = MapNode(fs.ApplyVolTransform(inverse=True, interp="nearest"),
                        ["reg_file", "source_file"], "transform")

    # Convert the mask to nifti and rename
    convertmask = MapNode(fs.MRIConvert(out_file="functional_mask.nii.gz"),
                          "in_file", "convertmask")

    # Use the mask to skullstrip the timeseries
    stripts = MapNode(fs.ApplyMask(), ["in_file", "mask_file"], "stripts")

    # Use the mask to skullstrip the mean image
    stripmean = MapNode(fs.ApplyMask(), ["in_file", "mask_file"], "stripmean")

    # Generate images summarizing the skullstrip and resulting data
    reportmask = MapNode(MaskReport(), ["mask_file", "orig_file", "mean_file"],
                         "reportmask")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface(["timeseries", "mean_file", "mask_file", "report"]),
        "outputs")

    # Define and connect the workflow
    skullstrip = Workflow(name)

    skullstrip.connect([
        (inputnode, origmean, [("timeseries", "in_file")]),
        (inputnode, getaseg, [("subject_id", "subject_id")]),
        (origmean, transform, [("out_file", "source_file")]),
        (getaseg, makemask, [("aseg", "in_file")]),
        (makemask, transform, [("binary_file", "target_file")]),
        (inputnode, transform, [("reg_file", "reg_file")]),
        (transform, stripts, [("transformed_file", "mask_file")]),
        (transform, stripmean, [("transformed_file", "mask_file")]),
        (inputnode, stripts, [("timeseries", "in_file")]),
        (origmean, stripmean, [("out_file", "in_file")]),
        (stripmean, reportmask, [("out_file", "mean_file")]),
        (origmean, reportmask, [("out_file", "orig_file")]),
        (transform, reportmask, [("transformed_file", "mask_file")]),
        (transform, convertmask, [("transformed_file", "in_file")]),
        (stripts, outputnode, [("out_file", "timeseries")]),
        (stripmean, outputnode, [("out_file", "mean_file")]),
        (convertmask, outputnode, [("out_file", "mask_file")]),
        (reportmask, outputnode, [("out_files", "report")]),
    ])

    return skullstrip
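
A usage sketch for the skullstrip workflow; the subject id, file names, and scratch directory are placeholders, and SUBJECTS_DIR must be set before the factory is called because getaseg reads it at construction time:

# Hypothetical driver; values are placeholders.
import os
os.environ.setdefault("SUBJECTS_DIR", "/data/freesurfer_subjects")

skullstrip = create_skullstrip_workflow(name="skullstrip")
skullstrip.base_dir = "/scratch/skullstrip_work"
skullstrip.inputs.inputs.subject_id = "subj01"
skullstrip.inputs.inputs.timeseries = ["run1_timeseries.nii.gz",
                                       "run2_timeseries.nii.gz"]
skullstrip.inputs.inputs.reg_file = ["run1_register.dat",
                                     "run2_register.dat"]
skullstrip.run()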
Example #22
# go into work dir
os.chdir(work_dir)

#### Step1 # convert (skull-stripped brain) *mgz to nifti
img_struc = os.path.join(data_dir, subject_id, 'freesurfer/mri', 'brain.mgz')

mricon = fs.MRIConvert(in_file=img_struc,
                       out_file='brain.nii.gz',
                       out_type='niigz').run()

brain_nifti = os.path.abspath('brain.nii.gz')

#### Step2 # generate anatomical masks as nifti
# get the skull-stripped brain mask
get_mask = fs.Binarize()
get_mask.inputs.in_file = brain_nifti  # use the absolute path computed above
get_mask.inputs.min = 0.5
get_mask.inputs.dilate = 1
get_mask.inputs.out_type = 'nii.gz'
get_mask.inputs.binary_file = 'brain_mask.nii.gz'
get_mask.run()

# get the wm segmentation from aparc+aseg
aparc_aseg = os.path.join(data_dir, subject_id, 'freesurfer/mri',
                          'aparc+aseg.mgz')

mricon = fs.MRIConvert(in_file=aparc_aseg,
                       out_file='aparc+aseg.nii.gz',
                       out_type='niigz').run()
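
The snippet stops right after converting aparc+aseg to NIfTI; a plausible next step, sketched here as an assumption rather than the original continuation, would binarize that segmentation into a white-matter mask the same way the brain mask was built above:

# Assumed continuation, not the original script's code.
aparc_aseg_nifti = os.path.abspath('aparc+aseg.nii.gz')

get_wm = fs.Binarize()
get_wm.inputs.in_file = aparc_aseg_nifti
get_wm.inputs.wm = True                       # cerebral white-matter labels
get_wm.inputs.out_type = 'nii.gz'
get_wm.inputs.binary_file = 'wm_mask.nii.gz'
get_wm.run()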
Example #23
def create_nonlinear_pipeline(name='nonlinear'):

    # workflow
    nonlinear = Workflow(name='nonlinear')

    # inputnode
    inputnode = Node(
        util.IdentityInterface(fields=[
            't1_highres',
            'epi2highres_lin',
            'epi2highres_lin_itk',
            'fov_mask',
            'brain_mask',
            #'highres2lowres_itk'
        ]),
        name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'epi2highres_warp',
        'epi2highres_invwarp',
        'epi2highres_nonlin',
    ]),
                      name='outputnode')

    #
    #     brainmask = Node(ants.ApplyTransforms(dimension=3,
    #                                           invert_transform_flags=[True],
    #                                           interpolation = 'NearestNeighbor'),
    #                      name='brainmask')
    #
    dil_brainmask = Node(fs.Binarize(min=0.5, out_type='nii.gz', dilate=15),
                         name='dil_brainmask')

    mask_epi = Node(fsl.ApplyMask(out_file='epi2highres_lin_masked.nii.gz'),
                    name='mask_epi')

    nonlinear.connect([  #(inputnode, brainmask, [('brain_mask', 'input_image'),
        #                        ('t1_highres', 'reference_image'),
        #                        ('highres2lowres_itk', 'transforms')]),
        #(brainmask, dil_brainmask, [('output_image', 'in_file')]),
        (inputnode, dil_brainmask, [('brain_mask', 'in_file')]),
        (dil_brainmask, mask_epi, [('binary_file', 'mask_file')]),
        (inputnode, mask_epi, [('epi2highres_lin', 'in_file')])
    ])

    # transform fov mask and apply to t1
    transform_fov = Node(
        ants.ApplyTransforms(
            dimension=3,
            #invert_transform_flags=[True, False],
            output_image='fov_mask_highres.nii.gz',
            interpolation='NearestNeighbor'),
        'transform_fov')

    dilate_fov = Node(fs.Binarize(min=0.5,
                                  dilate=5,
                                  binary_file='fov_mask_highres_dil.nii.gz'),
                      name='dilate_fov')

    mask_t1 = Node(fsl.ApplyMask(out_file='t1_fov_masked.nii.gz'),
                   name='mask_t1')

    nonlinear.connect([
        (inputnode, transform_fov, [('fov_mask', 'input_image'),
                                    ('t1_highres', 'reference_image'),
                                    ('epi2highres_lin_itk', 'transforms')]),
        (transform_fov, dilate_fov, [('output_image', 'in_file')]),
        (dilate_fov, mask_t1, [('binary_file', 'mask_file')]),
        (inputnode, mask_t1, [('t1_highres', 'in_file')]),
    ])

    # normalization with ants
    antsreg = Node(interface=ants.registration.Registration(
        dimension=3,
        metric=['CC'],
        metric_weight=[1.0],
        radius_or_number_of_bins=[4],
        sampling_strategy=['None'],
        transforms=['SyN'],
        args='-g 0.1x1x0.1',
        transform_parameters=[(0.10, 3, 0)],
        number_of_iterations=[[50, 20, 10]],
        convergence_threshold=[1e-06],
        convergence_window_size=[10],
        shrink_factors=[[4, 2, 1]],
        smoothing_sigmas=[[2, 1, 0]],
        sigma_units=['vox'],
        use_estimate_learning_rate_once=[True],
        use_histogram_matching=[True],
        collapse_output_transforms=True,
        output_inverse_warped_image=True,
        output_warped_image=True,
        interpolation='BSpline'),
                   name='antsreg')
    antsreg.plugin_args = {'submit_specs': 'request_memory = 20000'}

    nonlinear.connect([(mask_epi, antsreg, [('out_file', 'moving_image')]),
                       (mask_t1, antsreg, [('out_file', 'fixed_image')]),
                       (antsreg, outputnode,
                        [('reverse_transforms', 'epi2highres_invwarp'),
                         ('forward_transforms', 'epi2highres_warp'),
                         ('warped_image', 'epi2highres_nonlin')])])

    return nonlinear


# test_nonlinear=create_nonlinear_pipeline('nonlinear')
# test_nonlinear.base_dir='/scr/kansas1/huntenburg/7tresting/working/highres_bias_bspline_maskepi/'
# test_nonlinear.config['execution']['crashdump_dir'] = test_nonlinear.base_dir + "/crash_files"
# test_nonlinear.config['execution']['remove_unnecessary_outputs'] = False
# test_nonlinear.inputs.inputnode.anat='/scr/kansas1/huntenburg/7tresting/sub021/preprocessed/coregister/t1_resampled.nii.gz'
# test_nonlinear.inputs.inputnode.epi= '/scr/kansas1/huntenburg/7tresting/sub021/preprocessed/coregister/rest_coregistered_mean.nii.gz'
# #test_nonlinear.inputs.inputnode.anat='/scr/kansas1/huntenburg/7tresting/sub021/highres/t1.nii.gz'
# #test_nonlinear.inputs.inputnode.epi= '/scr/kansas1/huntenburg/7tresting/sub021/coreg_testing/flirt_epi2highres_t1_cr_bbr_onestep.nii.gz'
# test_nonlinear.run()#plugin='CondorDAGMan')
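
The commented-out test block above still refers to older inputnode fields (anat, epi); a sketch that matches the current inputnode, with placeholder paths, would look like this:

# Hypothetical driver; all paths are placeholders.
nonlin = create_nonlinear_pipeline('nonlinear')
nonlin.base_dir = '/scratch/nonlinear_work'
nonlin.inputs.inputnode.t1_highres = 't1_highres.nii.gz'
nonlin.inputs.inputnode.epi2highres_lin = 'epi2highres_lin.nii.gz'
nonlin.inputs.inputnode.epi2highres_lin_itk = 'epi2highres_lin.txt'
nonlin.inputs.inputnode.fov_mask = 'fov_mask.nii.gz'
nonlin.inputs.inputnode.brain_mask = 'brain_mask.nii.gz'
nonlin.run()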
Example #24
def run_dmriprep(dwi_file, bvec_file, bval_file, subjects_dir, working_dir,
                 out_dir):
    """
    Runs dmriprep for acquisitions with just one PE direction.

    """
    from glob import glob
    from shutil import copyfile
    import os.path as op
    import nibabel as nib
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as niu
    import nipype.pipeline.engine as pe
    import numpy as np
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces.dipy import DTI
    from nipype.interfaces.fsl.utils import AvScale
    from nipype.utils.filemanip import fname_presuffix
    from nipype.workflows.dmri.fsl.epi import create_dmri_preprocessing

    wf = create_dmri_preprocessing(name='dmriprep',
                                   use_fieldmap=False,
                                   fieldmap_registration=False)
    wf.inputs.inputnode.ref_num = 0
    wf.inputs.inputnode.in_file = dwi_file
    wf.inputs.inputnode.in_bvec = bvec_file

    dwi_fname = op.split(dwi_file)[1].split(".nii.gz")[0]
    bids_sub_name = dwi_fname.split("_")[0]
    assert bids_sub_name.startswith("sub-")

    # inputnode = wf.get_node("inputnode")
    outputspec = wf.get_node("outputnode")

    # QC: FLIRT translation and rotation parameters
    flirt = wf.get_node("motion_correct.coregistration")
    # flirt.inputs.save_mats = True

    get_tensor = pe.Node(DTI(), name="dipy_tensor")
    wf.connect(outputspec, "dmri_corrected", get_tensor, "in_file")
    # wf.connect(inputspec2,"bvals", get_tensor, "in_bval")
    get_tensor.inputs.in_bval = bval_file
    wf.connect(outputspec, "bvec_rotated", get_tensor, "in_bvec")

    scale_tensor = pe.Node(name='scale_tensor', interface=fsl.BinaryMaths())
    scale_tensor.inputs.operation = 'mul'
    scale_tensor.inputs.operand_value = 1000
    wf.connect(get_tensor, 'out_file', scale_tensor, 'in_file')

    fslroi = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name="fslroi")
    wf.connect(outputspec, "dmri_corrected", fslroi, "in_file")

    bbreg = pe.Node(fs.BBRegister(contrast_type="t2",
                                  init="fsl",
                                  out_fsl_file=True,
                                  subjects_dir=subjects_dir,
                                  epi_mask=True),
                    name="bbreg")
    # wf.connect(inputspec2,"fsid", bbreg,"subject_id")
    bbreg.inputs.subject_id = 'freesurfer'  # bids_sub_name
    wf.connect(fslroi, "roi_file", bbreg, "source_file")

    voltransform = pe.Node(fs.ApplyVolTransform(inverse=True),
                           name='transform')
    voltransform.inputs.subjects_dir = subjects_dir

    vt2 = voltransform.clone("transform_aparcaseg")
    vt2.inputs.interp = "nearest"

    def binarize_aparc(aparc_aseg):
        img = nib.load(aparc_aseg)
        data, aff = img.get_fdata(), img.affine
        outfile = fname_presuffix(aparc_aseg,
                                  suffix="bin.nii.gz",
                                  newpath=op.abspath("."),
                                  use_ext=False)
        nib.Nifti1Image((data > 0).astype(float), aff).to_filename(outfile)
        return outfile

    # wf.connect(inputspec2, "mask_nii", voltransform, "target_file")
    create_mask = pe.Node(niu.Function(input_names=["aparc_aseg"],
                                       output_names=["outfile"],
                                       function=binarize_aparc),
                          name="bin_aparc")

    def get_aparc_aseg(subjects_dir, sub):
        return op.join(subjects_dir, sub, "mri", "aparc+aseg.mgz")

    create_mask.inputs.aparc_aseg = get_aparc_aseg(subjects_dir, 'freesurfer')
    wf.connect(create_mask, "outfile", voltransform, "target_file")

    wf.connect(fslroi, "roi_file", voltransform, "source_file")
    wf.connect(bbreg, "out_reg_file", voltransform, "reg_file")

    vt2.inputs.target_file = get_aparc_aseg(subjects_dir, 'freesurfer')
    # wf.connect(inputspec2, "aparc_aseg", vt2, "target_file")
    wf.connect(fslroi, "roi_file", vt2, "source_file")
    wf.connect(bbreg, "out_reg_file", vt2, "reg_file")

    # AK (2017): THIS NODE MIGHT NOT BE NECESSARY
    # AK (2018) doesn't know why she said that above..
    threshold2 = pe.Node(fs.Binarize(min=0.5, out_type='nii.gz', dilate=1),
                         name='threshold2')
    wf.connect(voltransform, "transformed_file", threshold2, "in_file")

    # wf.connect(getmotion, "motion_params", datasink, "dti.@motparams")

    def get_flirt_motion_parameters(flirt_out_mats):
        def get_params(A):
            """This is a copy of spm's spm_imatrix where
            we already know the rotations and translations matrix,
            shears and zooms (as outputs from fsl FLIRT/avscale)
            Let A = the 4x4 rotation and translation matrix
            R = [          c5*c6,           c5*s6, s5]
                [-s4*s5*c6-c4*s6, -s4*s5*s6+c4*c6, s4*c5]
                [-c4*s5*c6+s4*s6, -c4*s5*s6-s4*c6, c4*c5]
            """
            def rang(b):
                a = min(max(b, -1), 1)
                return a

            Ry = np.arcsin(A[0, 2])
            # Rx = np.arcsin(A[1, 2] / np.cos(Ry))
            # Rz = np.arccos(A[0, 1] / np.sin(Ry))

            if (abs(Ry) - np.pi / 2)**2 < 1e-9:
                Rx = 0
                Rz = np.arctan2(-rang(A[1, 0]), rang(-A[2, 0] / A[0, 2]))
            else:
                c = np.cos(Ry)
                Rx = np.arctan2(rang(A[1, 2] / c), rang(A[2, 2] / c))
                Rz = np.arctan2(rang(A[0, 1] / c), rang(A[0, 0] / c))

            rotations = [Rx, Ry, Rz]
            translations = [A[0, 3], A[1, 3], A[2, 3]]

            return rotations, translations

        motion_params = open(op.abspath('motion_parameters.par'), 'w')
        for mat in flirt_out_mats:
            res = AvScale(mat_file=mat).run()
            A = np.asarray(res.outputs.rotation_translation_matrix)
            rotations, translations = get_params(A)
            for i in rotations + translations:
                motion_params.write('%f ' % i)
            motion_params.write('\n')
        motion_params.close()
        motion_params = op.abspath('motion_parameters.par')
        return motion_params

    getmotion = pe.Node(niu.Function(input_names=["flirt_out_mats"],
                                     output_names=["motion_params"],
                                     function=get_flirt_motion_parameters),
                        name="get_motion_parameters",
                        iterfield="flirt_out_mats")

    wf.connect(flirt, "out_matrix_file", getmotion, "flirt_out_mats")

    art = pe.Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.save_plot = False
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 3
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'FSL'

    wf.connect(getmotion, "motion_params", art, "realignment_parameters")
    wf.connect(outputspec, "dmri_corrected", art, "realigned_files")

    datasink = pe.Node(nio.DataSink(), name="sinker")
    datasink.inputs.base_directory = out_dir
    datasink.inputs.substitutions = [
        ("vol0000_flirt_merged.nii.gz", dwi_fname + '.nii.gz'),
        ("stats.vol0000_flirt_merged.txt", dwi_fname + ".art.json"),
        ("motion_parameters.par", dwi_fname + ".motion.txt"),
        ("_rotated.bvec", ".bvec"),
        ("aparc+aseg_warped_out", dwi_fname.replace("_dwi", "_aparc+aseg")),
        ("art.vol0000_flirt_merged_outliers.txt", dwi_fname + ".outliers.txt"),
        ("vol0000_flirt_merged", dwi_fname),
        ("_roi_bbreg_freesurfer", "_register"),
        ("aparc+asegbin_warped_thresh", dwi_fname.replace("_dwi", "_mask")),
        ("derivatives/dmriprep",
         "derivatives/{}/dmriprep".format(bids_sub_name))
    ]

    wf.connect(art, "statistic_files", datasink, "dmriprep.art.@artstat")
    wf.connect(art, "outlier_files", datasink, "dmriprep.art.@artoutlier")
    wf.connect(outputspec, "dmri_corrected", datasink,
               "dmriprep.dwi.@corrected")
    wf.connect(outputspec, "bvec_rotated", datasink, "dmriprep.dwi.@rotated")
    wf.connect(getmotion, "motion_params", datasink, "dmriprep.art.@motion")

    wf.connect(get_tensor, "out_file", datasink, "dmriprep.dti.@tensor")
    wf.connect(get_tensor, "fa_file", datasink, "dmriprep.dti.@fa")
    wf.connect(get_tensor, "md_file", datasink, "dmriprep.dti.@md")
    wf.connect(get_tensor, "ad_file", datasink, "dmriprep.dti.@ad")
    wf.connect(get_tensor, "rd_file", datasink, "dmriprep.dti.@rd")
    wf.connect(get_tensor, "out_file", datasink, "dmriprep.dti.@scaled_tensor")

    wf.connect(bbreg, "min_cost_file", datasink, "dmriprep.reg.@mincost")
    wf.connect(bbreg, "out_fsl_file", datasink, "dmriprep.reg.@fslfile")
    wf.connect(bbreg, "out_reg_file", datasink, "dmriprep.reg.@reg")
    wf.connect(threshold2, "binary_file", datasink, "dmriprep.anat.@mask")
    # wf.connect(vt2, "transformed_file", datasink, "dwi.@aparc_aseg")

    convert = pe.Node(fs.MRIConvert(out_type="niigz"), name="convert2nii")
    wf.connect(vt2, "transformed_file", convert, "in_file")
    wf.connect(convert, "out_file", datasink, "dmriprep.anat.@aparc_aseg")

    wf.base_dir = working_dir
    wf.run()

    copyfile(
        bval_file,
        op.join(out_dir, bids_sub_name, "dmriprep", "dwi",
                op.split(bval_file)[1]))

    dmri_corrected = glob(op.join(out_dir, '*/dmriprep/dwi', '*.nii.gz'))[0]
    bvec_rotated = glob(op.join(out_dir, '*/dmriprep/dwi', '*.bvec'))[0]
    bval_file = glob(op.join(out_dir, '*/dmriprep/dwi', '*.bval'))[0]
    art_file = glob(op.join(out_dir, '*/dmriprep/art', '*.art.json'))[0]
    motion_file = glob(op.join(out_dir, '*/dmriprep/art', '*.motion.txt'))[0]
    outlier_file = glob(op.join(out_dir, '*/dmriprep/art',
                                '*.outliers.txt'))[0]
    return dmri_corrected, bvec_rotated, art_file, motion_file, outlier_file
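
A hedged call sketch for run_dmriprep(); every path is a placeholder, and subjects_dir is expected to contain a recon-all output named 'freesurfer' because that subject id is hard-coded above:

# Hypothetical invocation; all paths are placeholders.
run_dmriprep(dwi_file='/data/bids/sub-01/dwi/sub-01_dwi.nii.gz',
             bvec_file='/data/bids/sub-01/dwi/sub-01_dwi.bvec',
             bval_file='/data/bids/sub-01/dwi/sub-01_dwi.bval',
             subjects_dir='/data/derivatives/sub-01',  # must hold a 'freesurfer' recon
             working_dir='/scratch/dmriprep_work',
             out_dir='/data/derivatives')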
Example #25
def create_nonlinear_pipeline(name='nonlinear'):

    # workflow
    nonlinear = Workflow(name='nonlinear')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=[
        't1_highres', 'epi2highres_lin', 'epi2highres_lin_itk', 'fov_mask',
        'brain_mask', 'wmcsf_mask', 'highres2lowres_itk'
    ]),
                     name='inputnode')

    # outputnode
    outputnode = Node(util.IdentityInterface(fields=[
        'epi2highres_warp', 'epi2highres_invwarp', 'epi2highres_nonlin',
        'brainmask_highres', 'wmcsfmask_highres'
    ]),
                      name='outputnode')

    # project brainmask and wmcsf mask from lowres to highres mp2rage space
    brainmask = Node(ants.ApplyTransforms(dimension=3,
                                          invert_transform_flags=[True],
                                          interpolation='NearestNeighbor'),
                     name='brainmask')

    wmcsf_mask = Node(ants.ApplyTransforms(dimension=3,
                                           invert_transform_flags=[True],
                                           interpolation='NearestNeighbor'),
                      name='wmcsf_mask')

    # mask t1

    #dilate brainmask
    dil_brainmask = Node(fs.Binarize(min=0.5, out_type='nii.gz', dilate=15),
                         name='dil_brainmask')

    mask_epi = Node(fsl.ApplyMask(out_file='epi2highres_lin_masked.nii.gz'),
                    name='mask_epi')

    nonlinear.connect([
        (inputnode, brainmask, [('brain_mask', 'input_image'),
                                ('t1_highres', 'reference_image'),
                                ('highres2lowres_itk', 'transforms')]),
        (brainmask, outputnode, [('output_image', 'brainmask_highres')]),
        (inputnode, wmcsf_mask, [('wmcsf_mask', 'input_image'),
                                 ('t1_highres', 'reference_image'),
                                 ('highres2lowres_itk', 'transforms')]),
        (wmcsf_mask, outputnode, [('output_image', 'wmcsfmask_highres')]),
        (brainmask, dil_brainmask, [('output_image', 'in_file')]),
        (dil_brainmask, mask_epi, [('binary_file', 'mask_file')]),
        (inputnode, mask_epi, [('epi2highres_lin', 'in_file')])
    ])

    # transform fov mask, dilate and apply to t1
    transform_fov = Node(
        ants.ApplyTransforms(dimension=3,
                             output_image='fov_mask_highres.nii.gz',
                             interpolation='NearestNeighbor'), 'transform_fov')

    dilate_fov = Node(fs.Binarize(min=0.5,
                                  dilate=5,
                                  binary_file='fov_mask_highres_dil.nii.gz'),
                      name='dilate_fov')

    #mask t1 twice
    mask_t1_1 = Node(fsl.ApplyMask(out_file='t1_brain_masked.nii.gz'),
                     name='mask_t1_1')

    mask_t1_2 = Node(fsl.ApplyMask(out_file='t1_brain_fov_masked.nii.gz'),
                     name='mask_t1_2')

    nonlinear.connect([
        (inputnode, transform_fov, [('fov_mask', 'input_image'),
                                    ('t1_highres', 'reference_image'),
                                    ('epi2highres_lin_itk', 'transforms')]),
        (transform_fov, dilate_fov, [('output_image', 'in_file')]),
        (brainmask, mask_t1_1, [('output_image', 'mask_file')]),
        (inputnode, mask_t1_1, [('t1_highres', 'in_file')]),
        (dilate_fov, mask_t1_2, [('binary_file', 'mask_file')]),
        (mask_t1_1, mask_t1_2, [('out_file', 'in_file')]),
    ])

    # normalization with ants
    antsreg = Node(interface=ants.registration.Registration(
        dimension=3,
        metric=['CC'],
        metric_weight=[1.0],
        radius_or_number_of_bins=[4],
        sampling_strategy=['None'],
        transforms=['SyN'],
        args='-g 0.1x1x0.1',
        transform_parameters=[(0.10, 3, 0)],
        number_of_iterations=[[50, 20, 10]],
        convergence_threshold=[1e-06],
        convergence_window_size=[10],
        shrink_factors=[[4, 2, 1]],
        smoothing_sigmas=[[2, 1, 0]],
        sigma_units=['vox'],
        use_estimate_learning_rate_once=[True],
        use_histogram_matching=[True],
        collapse_output_transforms=True,
        output_inverse_warped_image=True,
        output_warped_image=True,
        interpolation='BSpline'),
                   name='antsreg')
    antsreg.plugin_args = {'override_specs': 'request_memory = 40000'}

    nonlinear.connect([(mask_epi, antsreg, [('out_file', 'moving_image')]),
                       (mask_t1_2, antsreg, [('out_file', 'fixed_image')]),
                       (antsreg, outputnode,
                        [('reverse_transforms', 'epi2highres_invwarp'),
                         ('forward_transforms', 'epi2highres_warp'),
                         ('warped_image', 'epi2highres_nonlin')])])

    return nonlinear
Example #26
def create_ants_workflow(data_dir=None, subjects=None, name="antswarp"):
    """Set up the anatomical normalzation workflow using ANTS.

    Your anatomical data must have been processed in Freesurfer.
    Unlike most lyman workflows, the DataGrabber and DataSink
    nodes are hardwired within the returned workflow, as this
    tightly integrates with the Freesurfer subjects directory
    structure.

    Parameters
    ----------
    data_dir : path
        top level of data hierarchy/FS subjects directory
    subjects : list of strings
        list of subject IDs
    name : alphanumeric string, optional
        workflow name

    """
    if data_dir is None:
        data_dir = os.environ["SUBJECTS_DIR"]
    if subjects is None:
        subjects = []

    # Subject source node
    subjectsource = Node(IdentityInterface(fields=["subject_id"]),
                         iterables=("subject_id", subjects),
                         name="subjectsource")

    # Grab recon-all outputs
    templates = dict(aseg="{subject_id}/mri/aparc+aseg.mgz",
                     head="{subject_id}/mri/brain.mgz")
    datasource = Node(SelectFiles(templates, base_directory=data_dir),
                      "datasource")

    # Convert images to nifti storage and float representation
    cvtaseg = Node(fs.MRIConvert(out_type="niigz"), "convertaseg")

    cvtbrain = Node(fs.MRIConvert(out_type="niigz", out_datatype="float"),
                    "convertbrain")

    # Turn the aparc+aseg into a brainmask
    makemask = Node(fs.Binarize(dilate=4, erode=3, min=0.5), "makemask")

    # Extract the brain from the orig.mgz using the mask
    skullstrip = Node(fsl.ApplyMask(), "skullstrip")

    # Normalize using ANTS
    antswarp = Node(ANTSIntroduction(), "antswarp")

    # Generate a png summarizing the registration
    warpreport = Node(WarpReport(), "warpreport")

    # Save relevant files to the data directory
    datasink = Node(DataSink(base_directory=data_dir, parameterization=False),
                    name="datasink")

    # Define and connect the workflow
    # -------------------------------

    normalize = Workflow(name=name)

    normalize.connect([
        (subjectsource, datasource, [("subject_id", "subject_id")]),
        (datasource, cvtaseg, [("aseg", "in_file")]),
        (datasource, cvtbrain, [("head", "in_file")]),
        (cvtaseg, makemask, [("out_file", "in_file")]),
        (cvtbrain, skullstrip, [("out_file", "in_file")]),
        (makemask, skullstrip, [("binary_file", "mask_file")]),
        (skullstrip, antswarp, [("out_file", "in_file")]),
        (antswarp, warpreport, [("brain_file", "in_file")]),
        (subjectsource, datasink, [("subject_id", "container")]),
        (antswarp, datasink, [("warp_file", "normalization.@warpfield"),
                              ("inv_warp_file",
                               "normalization.@inverse_warpfield"),
                              ("affine_file", "normalization.@affine"),
                              ("brain_file", "normalization.@brain")]),
        (warpreport, datasink, [("out_file", "normalization.@report")]),
    ])

    return normalize
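
A placeholder driver for the ANTS normalization workflow; the subject list and scratch directory are assumptions, and SUBJECTS_DIR is only consulted when data_dir is left unset:

# Hypothetical driver; subject ids and paths are placeholders.
antswarp = create_ants_workflow(data_dir='/data/freesurfer_subjects',
                                subjects=['subj01', 'subj02'],
                                name='antswarp')
antswarp.base_dir = '/scratch/antswarp_work'
antswarp.run(plugin='MultiProc', plugin_args={'n_procs': 4})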
Example #27
def create_mgzconvert_pipeline(name='mgzconvert'):

    # workflow
    mgzconvert = Workflow(name='mgzconvert')

    #inputnode
    inputnode = Node(util.IdentityInterface(fields=[
        'fs_subjects_dir',
        'fs_subject_id',
    ]),
                     name='inputnode')

    #outputnode
    outputnode = Node(util.IdentityInterface(
        fields=['anat_head', 'anat_brain', 'func_mask', 'wmseg', 'wmedge']),
                      name='outputnode')

    # import files from freesurfer
    fs_import = Node(interface=nio.FreeSurferSource(), name='fs_import')

    # convert Freesurfer T1 file to nifti
    head_convert = Node(fs.MRIConvert(out_type='niigz', out_file='T1.nii.gz'),
                        name='head_convert')

    # convert Freesurfer brain.finalsurf file to nifti
    # grab finalsurf file
    def grab_brain(fs_subjects_dir, fs_subject_id):
        import os
        brainfile = os.path.join(fs_subjects_dir, fs_subject_id, 'mri',
                                 'brain.finalsurfs.mgz')
        return os.path.abspath(brainfile)

    brain_grab = Node(util.Function(
        input_names=['fs_subjects_dir', 'fs_subject_id'],
        output_names=['brain_file'],
        function=grab_brain),
                      name='brain_grab')

    brain_convert = Node(fs.MRIConvert(out_type='niigz',
                                       out_file='T1_brain.nii.gz'),
                         name='brain_convert')

    # create brainmask from aparc+aseg with single dilation for functional data
    # DIFFERENT APPROACHES TO MASK THE FUNCTIONAL AND STRUCTURAL DATA
    # ARE USED FOR HISTORIC REASONS
    def get_aparc_aseg(files):
        for name in files:
            if 'aparc+aseg' in name:
                return name

    funcmask = Node(fs.Binarize(min=0.5, dilate=1, out_type='nii.gz'),
                    name='funcmask')

    # fill holes in mask, smooth, rebinarize
    fillholes = Node(fsl.maths.MathsCommand(args='-fillh -s 3 -thr 0.1 -bin',
                                            out_file='func_mask.nii.gz'),
                     name='fillholes')

    # cortical and cerebellar white matter volumes to construct wm edge
    # [lh cerebral wm, lh cerebellar wm, rh cerebral wm, rh cerebellar wm, brain stem]
    wmseg = Node(fs.Binarize(out_type='nii.gz',
                             match=[2, 7, 41, 46, 16],
                             binary_file='T1_brain_wmseg.nii.gz'),
                 name='wmseg')

    # make edge from wmseg  to visualize coregistration quality
    edge = Node(fsl.ApplyMask(args='-edge -bin',
                              out_file='T1_brain_wmedge.nii.gz'),
                name='edge')

    # connections
    mgzconvert.connect([
        (inputnode, fs_import, [('fs_subjects_dir', 'subjects_dir'),
                                ('fs_subject_id', 'subject_id')]),
        (fs_import, head_convert, [('T1', 'in_file')]),
        (inputnode, brain_grab, [('fs_subjects_dir', 'fs_subjects_dir'),
                                 ('fs_subject_id', 'fs_subject_id')]),
        (brain_grab, brain_convert, [('brain_file', 'in_file')]),
        (fs_import, wmseg, [(('aparc_aseg', get_aparc_aseg), 'in_file')]),
        (fs_import, funcmask, [(('aparc_aseg', get_aparc_aseg), 'in_file')]),
        (funcmask, fillholes, [('binary_file', 'in_file')]),
        (wmseg, edge, [('binary_file', 'in_file'),
                       ('binary_file', 'mask_file')]),
        (head_convert, outputnode, [('out_file', 'anat_head')]),
        (fillholes, outputnode, [('out_file', 'func_mask')]),
        (brain_convert, outputnode, [('out_file', 'anat_brain')]),
        (wmseg, outputnode, [('binary_file', 'wmseg')]),
        (edge, outputnode, [('out_file', 'wmedge')])
    ])

    return mgzconvert
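
All of this pipeline's inputs arrive through its inputnode, so it can be driven directly or wired into a larger workflow; a direct-driving sketch with placeholder values:

# Hypothetical driver; values are placeholders.
mgzconvert = create_mgzconvert_pipeline(name='mgzconvert')
mgzconvert.base_dir = '/scratch/mgzconvert_work'
mgzconvert.inputs.inputnode.fs_subjects_dir = '/data/freesurfer_subjects'
mgzconvert.inputs.inputnode.fs_subject_id = 'subj01'
mgzconvert.run()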
Example #28
def create_fsl_workflow(data_dir=None, subjects=None, name="fslwarp"):
    """Set up the anatomical normalzation workflow using FNIRT.

    Your anatomical data must have been processed in Freesurfer.
    Unlike most lyman workflows, the DataGrabber and DataSink
    nodes are hardwired within the returned workflow, as this
    tightly integrates with the Freesurfer subjects directory
    structure.

    Parameters
    ----------
    data_dir : path
        top level of data hierarchy/FS subjects directory
    subjects : list of strings
        list of subject IDs
    name : alphanumeric string, optional
        workflow name

    """
    if data_dir is None:
        data_dir = os.environ["SUBJECTS_DIR"]
    if subjects is None:
        subjects = []

    # Get target images
    target_brain = fsl.Info.standard_image("avg152T1_brain.nii.gz")
    target_head = fsl.Info.standard_image("avg152T1.nii.gz")
    hires_head = fsl.Info.standard_image("MNI152_T1_1mm.nii.gz")
    target_mask = fsl.Info.standard_image(
        "MNI152_T1_2mm_brain_mask_dil.nii.gz")
    fnirt_cfg = os.path.join(os.environ["FSLDIR"],
                             "etc/flirtsch/T1_2_MNI152_2mm.cnf")

    # Subject source node
    subjectsource = Node(IdentityInterface(fields=["subject_id"]),
                         iterables=("subject_id", subjects),
                         name="subjectsource")

    # Grab recon-all outputs
    head_image = "T1"
    templates = dict(aseg="{subject_id}/mri/aparc+aseg.mgz",
                     head="{subject_id}/mri/" + head_image + ".mgz")
    datasource = Node(SelectFiles(templates, base_directory=data_dir),
                      "datasource")

    # Convert images to nifti storage and float representation
    cvtaseg = Node(fs.MRIConvert(out_type="niigz"), "convertaseg")

    cvthead = Node(fs.MRIConvert(out_type="niigz", out_datatype="float"),
                   "converthead")

    # Turn the aparc+aseg into a brainmask
    makemask = Node(fs.Binarize(dilate=1, min=0.5), "makemask")

    # Extract the brain from the orig.mgz using the mask
    skullstrip = Node(fsl.ApplyMask(), "skullstrip")

    # FLIRT brain to MNI152_brain
    flirt = Node(fsl.FLIRT(reference=target_brain), "flirt")

    sw = [-180, 180]
    for dim in ["x", "y", "z"]:
        setattr(flirt.inputs, "searchr_%s" % dim, sw)

    # FNIRT head to MNI152
    fnirt = Node(
        fsl.FNIRT(ref_file=target_head,
                  refmask_file=target_mask,
                  config_file=fnirt_cfg,
                  fieldcoeff_file=True), "fnirt")

    # Warp and rename the images
    warpbrain = Node(
        fsl.ApplyWarp(ref_file=target_head,
                      interp="spline",
                      out_file="brain_warp.nii.gz"), "warpbrain")

    warpbrainhr = Node(
        fsl.ApplyWarp(ref_file=hires_head,
                      interp="spline",
                      out_file="brain_warp_hires.nii.gz"), "warpbrainhr")

    # Generate a png summarizing the registration
    warpreport = Node(WarpReport(), "warpreport")

    # Save relevant files to the data directory
    fnirt_subs = [(head_image + "_out_masked_flirt.mat", "affine.mat"),
                  (head_image + "_out_fieldwarp", "warpfield"),
                  (head_image + "_out_masked", "brain"),
                  (head_image + "_out", "T1")]
    datasink = Node(
        DataSink(base_directory=data_dir,
                 parameterization=False,
                 substitutions=fnirt_subs), "datasink")

    # Define and connect the workflow
    # -------------------------------

    normalize = Workflow(name=name)

    normalize.connect([
        (subjectsource, datasource, [("subject_id", "subject_id")]),
        (datasource, cvtaseg, [("aseg", "in_file")]),
        (datasource, cvthead, [("head", "in_file")]),
        (cvtaseg, makemask, [("out_file", "in_file")]),
        (cvthead, skullstrip, [("out_file", "in_file")]),
        (makemask, skullstrip, [("binary_file", "mask_file")]),
        (skullstrip, flirt, [("out_file", "in_file")]),
        (flirt, fnirt, [("out_matrix_file", "affine_file")]),
        (cvthead, fnirt, [("out_file", "in_file")]),
        (skullstrip, warpbrain, [("out_file", "in_file")]),
        (fnirt, warpbrain, [("fieldcoeff_file", "field_file")]),
        (skullstrip, warpbrainhr, [("out_file", "in_file")]),
        (fnirt, warpbrainhr, [("fieldcoeff_file", "field_file")]),
        (warpbrain, warpreport, [("out_file", "in_file")]),
        (subjectsource, datasink, [("subject_id", "container")]),
        (skullstrip, datasink, [("out_file", "normalization.@brain")]),
        (cvthead, datasink, [("out_file", "normalization.@t1")]),
        (flirt, datasink, [("out_file", "normalization.@brain_flirted")]),
        (flirt, datasink, [("out_matrix_file", "normalization.@affine")]),
        (warpbrain, datasink, [("out_file", "normalization.@brain_warped")]),
        (warpbrainhr, datasink, [("out_file", "normalization.@brain_hires")]),
        (fnirt, datasink, [("fieldcoeff_file", "normalization.@warpfield")]),
        (warpreport, datasink, [("out_file", "normalization.@report")]),
    ])

    return normalize
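
create_fsl_workflow reads FSLDIR (for the FNIRT config file) at construction time and falls back to SUBJECTS_DIR when data_dir is omitted, so the environment has to be prepared before the factory is called; a placeholder sketch:

# Hypothetical driver; subjects and scratch path are placeholders.
import os
assert "FSLDIR" in os.environ        # needed for the FNIRT config file
fslwarp = create_fsl_workflow(data_dir='/data/freesurfer_subjects',
                              subjects=['subj01'],
                              name='fslwarp')
fslwarp.base_dir = '/scratch/fslwarp_work'
fslwarp.run()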
Example #29
def create_epi_t1_nonlinear_pipeline(name='epi_t1_nonlinear'):
    """Creates a pipeline that performs nonlinear EPI to T1 registration using 
    the antsRegistration tool. Beforehand, the T1 image has to be processed in 
    freesurfer and the EPI timeseries should be realigned.

    Example
    -------

    >>> nipype_epi_t1_nonlin = create_epi_t1_nonlinear_pipeline('nipype_epi_t1_nonlin')
    >>> nipype_epi_t1_nonlin.inputs.inputnode.fs_subject_id = '123456'
    >>> nipype_epi_t1_nonlin.inputs.inputnode.fs_subjects_dir = '/project/data/freesurfer'
    >>> nipype_epi_t1_nonlin.inputs.inputnode.realigned_epi = 'mcflirt.nii.gz'
    >>> nipype_epi_t1_nonlin.run()

    Inputs::

        inputnode.fs_subject_id    # subject id used in freesurfer
        inputnode.fs_subjects_dir  # path to freesurfer output
        inputnode.realigned_epi    # realigned EPI timeseries

    Outputs::

        outputnode.lin_epi2anat     # ITK format
        outputnode.lin_anat2epi     # ITK format
        outputnode.nonlin_epi2anat  # ANTs specific 5D deformation field
        outputnode.nonlin_anat2epi  # ANTs specific 5D deformation field

    """

    nonreg = Workflow(name='epi_t1_nonlinear')

    # input
    inputnode = Node(interface=util.IdentityInterface(
        fields=['fs_subject_id', 'fs_subjects_dir', 'realigned_epi']),
                     name='inputnode')

    # calculate the temporal mean image of the realigned timeseries
    tmean = Node(interface=fsl.maths.MeanImage(dimension='T',
                                               output_type='NIFTI_GZ'),
                 name='tmean')

    nonreg.connect(inputnode, 'realigned_epi', tmean, 'in_file')

    # import brain.mgz and ribbon.mgz from freesurfer directory
    fs_import = Node(interface=nio.FreeSurferSource(),
                     name='freesurfer_import')

    nonreg.connect(inputnode, 'fs_subjects_dir', fs_import, 'subjects_dir')
    nonreg.connect(inputnode, 'fs_subject_id', fs_import, 'subject_id')

    # convert brain.mgz to niigz
    mriconvert = Node(interface=fs.MRIConvert(out_type='niigz'),
                      name='mriconvert')

    nonreg.connect(fs_import, 'brain', mriconvert, 'in_file')

    # calculate rigid transformation of mean epi to t1 with bbregister
    bbregister = Node(interface=fs.BBRegister(init='fsl',
                                              contrast_type='t2',
                                              out_fsl_file=True),
                      name='bbregister')

    nonreg.connect(inputnode, 'fs_subjects_dir', bbregister, 'subjects_dir')
    nonreg.connect(inputnode, 'fs_subject_id', bbregister, 'subject_id')
    nonreg.connect(tmean, 'out_file', bbregister, 'source_file')

    # convert linear transformation to itk format compatible with ants
    itk = Node(interface=c3.C3dAffineTool(fsl2ras=True,
                                          itk_transform='epi2anat_affine.txt'),
               name='itk')

    nonreg.connect(tmean, 'out_file', itk, 'source_file')
    nonreg.connect(mriconvert, 'out_file', itk, 'reference_file')
    nonreg.connect(bbregister, 'out_fsl_file', itk, 'transform_file')

    # get aparc aseg mask
    # create brainmask from aparc+aseg
    def get_aparc_aseg(files):
        for name in files:
            if 'aparc+aseg' in name:
                return name

    aparc_aseg_mask = Node(fs.Binarize(min=0.1,
                                       dilate=10,
                                       erode=7,
                                       out_type='nii.gz',
                                       binary_file='aparc_aseg_mask.nii.gz'),
                           name='aparc_aseg_mask')

    # fill holes in mask
    fillholes = Node(fsl.maths.MathsCommand(args='-fillh'), name='fillholes')

    nonreg.connect([(fs_import, aparc_aseg_mask, [
        (('aparc_aseg', get_aparc_aseg), 'in_file')
    ]), (aparc_aseg_mask, fillholes, [('binary_file', 'in_file')])])

    #create bounding box mask and rigidly transform into anatomical (fs) space
    fov = Node(interface=fs.model.Binarize(min=0.0, out_type='nii.gz'),
               name='fov')

    nonreg.connect(tmean, 'out_file', fov, 'in_file')

    fov_trans = Node(interface=ants.resampling.ApplyTransforms(
        dimension=3, interpolation='NearestNeighbor'),
                     name='fov_trans')

    nonreg.connect(itk, ('itk_transform', filename_to_list), fov_trans,
                   'transforms')
    nonreg.connect(fov, 'binary_file', fov_trans, 'input_image')
    nonreg.connect(fillholes, 'out_file', fov_trans, 'reference_image')
    #nonreg.connect(ribbon, 'binary_file', fov_trans, 'reference_image')

    # intersect both masks
    intersect = Node(interface=fsl.maths.BinaryMaths(operation='mul'),
                     name='intersect')

    nonreg.connect(fillholes, 'out_file', intersect, 'in_file')
    #nonreg.connect(ribbon, 'binary_file', intersect, 'in_file')
    nonreg.connect(fov_trans, 'output_image', intersect, 'operand_file')

    # inversely transform the combined mask into EPI space and mask the mean EPI
    mask_trans = Node(interface=ants.resampling.ApplyTransforms(
        dimension=3,
        interpolation='NearestNeighbor',
        invert_transform_flags=[True]),
                      name='mask_trans')

    nonreg.connect(itk, ('itk_transform', filename_to_list), mask_trans,
                   'transforms')
    nonreg.connect(intersect, 'out_file', mask_trans, 'input_image')
    nonreg.connect(tmean, 'out_file', mask_trans, 'reference_image')

    maskepi = Node(interface=fs.utils.ApplyMask(), name='maskepi')

    nonreg.connect(mask_trans, 'output_image', maskepi, 'mask_file')
    nonreg.connect(tmean, 'out_file', maskepi, 'in_file')

    # mask anatomical image (brain)
    maskanat = Node(interface=fs.utils.ApplyMask(), name='maskanat')

    nonreg.connect(intersect, 'out_file', maskanat, 'mask_file')
    nonreg.connect(mriconvert, 'out_file', maskanat, 'in_file')

    # invert the intensity of the masked anatomical image so that its contrast
    # roughly matches the EPI before nonlinear registration
    anat_min_max = Node(interface=fsl.utils.ImageStats(op_string='-R'),
                        name='anat_min_max')  # absolute min/max
    epi_min_max = Node(interface=fsl.utils.ImageStats(op_string='-r'),
                       name='epi_min_max')  # robust min/max

    nonreg.connect(maskanat, 'out_file', anat_min_max, 'in_file')
    nonreg.connect(tmean, 'out_file', epi_min_max, 'in_file')

    def calc_inversion(anat_min_max, epi_min_max):
        mul = -(epi_min_max[1] - epi_min_max[0]) / (anat_min_max[1] -
                                                    anat_min_max[0])
        add = abs(anat_min_max[1] * mul) + epi_min_max[0]
        return mul, add
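    # Worked example (illustrative numbers, not from the original code): with
    # anat_min_max = [0, 150] and epi_min_max = [20, 800],
    # mul = -(800 - 20) / (150 - 0) = -5.2 and add = |150 * -5.2| + 20 = 800,
    # so anatomical intensity 0 maps to 800 and 150 maps to 20, i.e. the
    # anatomical contrast is inverted and rescaled into the EPI intensity range.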

    calcinv = Node(interface=Function(
        input_names=['anat_min_max', 'epi_min_max'],
        output_names=['mul', 'add'],
        function=calc_inversion),
                   name='calcinv')

    nonreg.connect(anat_min_max, 'out_stat', calcinv, 'anat_min_max')
    nonreg.connect(epi_min_max, 'out_stat', calcinv, 'epi_min_max')

    mulinv = Node(interface=fsl.maths.BinaryMaths(operation='mul'),
                  name='mulinv')
    addinv = Node(interface=fsl.maths.BinaryMaths(operation='add'),
                  name='addinv')

    nonreg.connect(maskanat, 'out_file', mulinv, 'in_file')
    nonreg.connect(calcinv, 'mul', mulinv, 'operand_value')
    nonreg.connect(mulinv, 'out_file', addinv, 'in_file')
    nonreg.connect(calcinv, 'add', addinv, 'operand_value')

    # nonlinear registration (ANTs SyN) of the intensity-inverted, masked anat
    # to the masked epi
    antsreg = Node(interface=ants.registration.Registration(
        dimension=3,
        invert_initial_moving_transform=True,
        metric=['CC'],
        metric_weight=[1.0],
        radius_or_number_of_bins=[4],
        sampling_strategy=['None'],
        transforms=['SyN'],
        args='-g .1x1x.1',
        transform_parameters=[(0.10, 3, 0)],
        number_of_iterations=[[10, 5]],
        convergence_threshold=[1e-06],
        convergence_window_size=[10],
        shrink_factors=[[2, 1]],
        smoothing_sigmas=[[1, 0.5]],
        sigma_units=['vox'],
        use_estimate_learning_rate_once=[True],
        use_histogram_matching=[True],
        collapse_output_transforms=True,
        output_inverse_warped_image=True,
        output_warped_image=True),
                   name='antsreg')

    nonreg.connect(itk, 'itk_transform', antsreg, 'initial_moving_transform')
    nonreg.connect(maskepi, 'out_file', antsreg, 'fixed_image')
    nonreg.connect(addinv, 'out_file', antsreg, 'moving_image')

    # output

    def second_element(file_list):
        return file_list[1]

    def first_element(file_list):
        return file_list[0]

    outputnode = Node(interface=util.IdentityInterface(fields=[
        'lin_epi2anat', 'lin_anat2epi', 'nonlin_epi2anat', 'nonlin_anat2epi'
    ]),
                      name='outputnode')

    nonreg.connect(itk, 'itk_transform', outputnode, 'lin_epi2anat')
    nonreg.connect(antsreg, ('forward_transforms', first_element), outputnode,
                   'lin_anat2epi')
    nonreg.connect(antsreg, ('forward_transforms', second_element), outputnode,
                   'nonlin_anat2epi')
    nonreg.connect(antsreg, ('reverse_transforms', second_element), outputnode,
                   'nonlin_epi2anat')

    return nonreg
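
# A minimal usage sketch (not part of the original snippet). The factory name
# and all paths below are hypothetical placeholders; substitute the actual
# function name defined above and your own data locations.
if __name__ == '__main__':
    wf = create_epi_t1_nonlinear_pipeline()        # hypothetical factory name
    wf.base_dir = '/tmp/epi_t1_nonlinear_work'     # hypothetical working directory
    wf.inputs.inputnode.fs_subjects_dir = '/data/freesurfer'  # hypothetical SUBJECTS_DIR
    wf.inputs.inputnode.fs_subject_id = 'sub-01'              # hypothetical subject id
    wf.inputs.inputnode.realigned_epi = '/data/sub-01/epi_realigned.nii.gz'  # hypothetical EPI
    wf.run()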
示例#30
0
# Reslice
reslice = Node(afni.Resample(voxel_size=(0.1, 0.1, 0.1),
                             resample_mode='Cu',
                             out_file='struct_resliced.nii.gz'),
               name='reslice')
preproc_struct.connect([(selectfiles, reslice, [('struct', 'in_file')])])

# Skull stripping
skullstrip = Node(afni.SkullStrip(outputtype='NIFTI_GZ',
                                  args='-rat -surface_coil'),
                  name='skullstrip')
preproc_struct.connect([(reslice, skullstrip, [('out_file', 'in_file')])])

# Binarize mask
struct_mask = Node(fs.Binarize(out_type='nii.gz', min=0.1, erode=1),
                   name='struct_mask')
preproc_struct.connect([(skullstrip, struct_mask, [('out_file', 'in_file')])])

# Reslice mask
reslice_mask = Node(afni.Resample(resample_mode='NN',
                                  out_file='struct_mask.nii.gz'),
                    name='reslice_mask')
preproc_struct.connect([
    (struct_bias, reslice_mask, [('output_image', 'master')]),
    (struct_mask, reslice_mask, [('binary_file', 'in_file')])
])

# Apply mask
apply_struct = Node(fsl.ApplyMask(out_file='struct_masked.nii.gz'),
                    name='apply_struct')