Example #1
import os.path as op
from glob import glob

import numpy as np
from nipype.interfaces.fsl import MeanImage, Merge, MultiImageMaths, Threshold


def cale(input_dir, output_dir):
    # Collect the individual ALE maps to be combined.
    fns = glob(op.join(input_dir, '*.nii.gz'))

    # Concatenate all maps along the time axis.
    merger = Merge()
    merger.inputs.in_files = fns
    merger.inputs.dimension = 't'
    merger.inputs.merged_file = op.join(output_dir, 'cALE.nii.gz')

    # Average across the time axis.
    meanimg = MeanImage()
    meanimg.inputs.in_file = op.join(output_dir, 'cALE.nii.gz')
    meanimg.inputs.dimension = 'T'
    meanimg.inputs.out_file = op.join(output_dir, 'cALE.nii.gz')

    # Multiply the mean by the number of maps to recover the voxel-wise sum.
    # Caveat: MultiImageMaths expects one operand file per '%s' in op_string
    # (operand_files is mandatory), so for a constant multiplier like this
    # fsl.maths.MathsCommand (or BinaryMaths with operand_value) also works.
    maths = MultiImageMaths()
    maths.inputs.in_file = op.join(output_dir, 'cALE.nii.gz')
    maths.inputs.op_string = '-mul {0}'.format(len(fns))
    maths.inputs.out_file = op.join(output_dir, 'cALE.nii.gz')

    # Zero out voxels to which fewer than half of the maps contribute.
    thresh = Threshold()
    thresh.inputs.in_file = op.join(output_dir, 'cALE.nii.gz')
    thresh.inputs.thresh = np.floor(len(fns) / 2)
    thresh.inputs.direction = 'below'
    thresh.inputs.out_file = op.join(
        output_dir, 'cALE_thresh-{0}.nii.gz'.format(np.floor(len(fns) / 2)))
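A minimal invocation sketch; the directories are hypothetical, and each interface above would still need to be executed (e.g. via .run()) for any files to be written:

# hypothetical paths: input_dir holds one ALE map per meta-analysis
cale('/data/ale_maps', '/data/cale_output')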
Example #2
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import MultiImageMaths


def test_MultiImageMaths_outputs():
    output_map = dict(out_file=dict(), )
    outputs = MultiImageMaths.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #3
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import MultiImageMaths


def test_MultiImageMaths_inputs():
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
            position=2,
        ),
        internal_datatype=dict(
            argstr='-dt %s',
            position=1,
        ),
        nan2zeros=dict(
            argstr='-nan',
            position=3,
        ),
        op_string=dict(
            argstr='%s',
            mandatory=True,
            position=4,
        ),
        operand_files=dict(mandatory=True, ),
        out_file=dict(
            argstr='%s',
            genfile=True,
            hash_files=False,
            position=-2,
        ),
        output_datatype=dict(
            argstr='-odt %s',
            position=-1,
        ),
        output_type=dict(),
        terminal_output=dict(
            mandatory=True,
            nohash=True,
        ),
    )
    inputs = MultiImageMaths.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
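The spec exercised by this test maps one-to-one onto the fslmaths command line. A short sketch mirroring the interface's documented doctest (filenames are placeholders); each '%s' in op_string is substituted, in order, by an entry of operand_files:

from nipype.interfaces.fsl.maths import MultiImageMaths

maths = MultiImageMaths()
maths.inputs.in_file = 'functional.nii'
maths.inputs.op_string = '-add %s -mul -1 -div %s'
maths.inputs.operand_files = ['functional2.nii', 'functional3.nii']
maths.inputs.out_file = 'functional4.nii'
print(maths.cmdline)
# fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii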
Example #4
from nipype.testing import assert_equal
from nipype.interfaces.fsl.maths import MultiImageMaths


def test_MultiImageMaths_inputs():
    input_map = dict(
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        nan2zeros=dict(
            position=3,
            argstr='-nan',
        ),
        op_string=dict(
            position=4,
            mandatory=True,
            argstr='%s',
        ),
        out_file=dict(
            hash_files=False,
            genfile=True,
            position=-2,
            argstr='%s',
        ),
        args=dict(argstr='%s', ),
        internal_datatype=dict(
            position=1,
            argstr='-dt %s',
        ),
        terminal_output=dict(
            mandatory=True,
            nohash=True,
        ),
        environ=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(
            position=2,
            mandatory=True,
            argstr='%s',
        ),
        output_type=dict(),
        output_datatype=dict(
            position=-1,
            argstr='-odt %s',
        ),
        operand_files=dict(mandatory=True, ),
    )
    inputs = MultiImageMaths.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #5
    def backwrap_to_ute_pipeline(self, **kwargs):

        pipeline = self.create_pipeline(
            name='backwrap_to_ute',
            inputs=[
                DatasetSpec('ute1_registered', nifti_gz_format),
                DatasetSpec('ute_echo1', dicom_format),
                DatasetSpec('umap_ute', dicom_format),
                DatasetSpec('template_to_ute_mat', text_matrix_format),
                DatasetSpec('sute_cont_template', nifti_gz_format),
                DatasetSpec('sute_fix_template', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('sute_cont_ute', nifti_gz_format),
                DatasetSpec('sute_fix_ute', nifti_gz_format)
            ],
            desc="Moving umaps back to the UTE space",
            version=1,
            citations=[matlab_cite],
            **kwargs)

        echo1_conv = pipeline.create_node(MRConvert(), name='echo1_conv')
        echo1_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('ute_echo1', echo1_conv, 'in_file')

        umap_conv = pipeline.create_node(MRConvert(), name='umap_conv')
        umap_conv.inputs.out_ext = '.nii.gz'
        pipeline.connect_input('umap_ute', umap_conv, 'in_file')

        zero_template_mask = pipeline.create_node(BinaryMaths(),
                                                  name='zero_template_mask',
                                                  requirements=[fsl5_req],
                                                  wall_time=3)
        pipeline.connect_input('ute1_registered', zero_template_mask,
                               'in_file')
        zero_template_mask.inputs.operation = "mul"
        zero_template_mask.inputs.operand_value = 0
        zero_template_mask.inputs.output_type = 'NIFTI_GZ'

        region_template_mask = pipeline.create_node(
            FLIRT(),
            name='region_template_mask',
            requirements=[fsl5_req],
            wall_time=5)
        region_template_mask.inputs.apply_xfm = True
        region_template_mask.inputs.bgvalue = 1
        region_template_mask.inputs.interp = 'nearestneighbour'
        region_template_mask.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(zero_template_mask, 'out_file', region_template_mask,
                         'in_file')
        pipeline.connect(echo1_conv, 'out_file', region_template_mask,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', region_template_mask,
                               'in_matrix_file')

        fill_in_umap = pipeline.create_node(MultiImageMaths(),
                                            name='fill_in_umap',
                                            requirements=[fsl5_req],
                                            wall_time=3)
        fill_in_umap.inputs.op_string = "-mul %s "
        fill_in_umap.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(region_template_mask, 'out_file', fill_in_umap,
                         'in_file')
        pipeline.connect(umap_conv, 'out_file', fill_in_umap, 'operand_files')

        sute_fix_ute_space = pipeline.create_node(FLIRT(),
                                                  name='sute_fix_ute_space',
                                                  requirements=[fsl5_req],
                                                  wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_fix_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_fix_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_fix_template', sute_fix_ute_space,
                               'in_file')
        sute_fix_ute_space.inputs.apply_xfm = True
        sute_fix_ute_space.inputs.bgvalue = 0
        sute_fix_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_cont_ute_space = pipeline.create_node(FLIRT(),
                                                   name='sute_cont_ute_space',
                                                   requirements=[fsl5_req],
                                                   wall_time=5)
        pipeline.connect(echo1_conv, 'out_file', sute_cont_ute_space,
                         'reference')
        pipeline.connect_input('template_to_ute_mat', sute_cont_ute_space,
                               'in_matrix_file')
        pipeline.connect_input('sute_cont_template', sute_cont_ute_space,
                               'in_file')
        sute_cont_ute_space.inputs.apply_xfm = True
        sute_cont_ute_space.inputs.bgvalue = 0
        sute_cont_ute_space.inputs.output_type = 'NIFTI_GZ'

        sute_fix_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_fix_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_fix_ute_space, 'out_file',
                         sute_fix_ute_background, 'in_file')
        sute_fix_ute_background.inputs.op_string = "-add %s "
        sute_fix_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_fix_ute_background,
                         'operand_files')

        sute_cont_ute_background = pipeline.create_node(
            MultiImageMaths(),
            name='sute_cont_ute_background',
            requirements=[fsl5_req],
            wall_time=5)
        pipeline.connect(sute_cont_ute_space, 'out_file',
                         sute_cont_ute_background, 'in_file')
        sute_cont_ute_background.inputs.op_string = "-add %s "
        sute_cont_ute_background.inputs.output_type = 'NIFTI_GZ'
        pipeline.connect(fill_in_umap, 'out_file', sute_cont_ute_background,
                         'operand_files')

        smooth_sute_fix = pipeline.create_node(Smooth(),
                                               name='smooth_sute_fix',
                                               requirements=[fsl5_req],
                                               wall_time=5)
        smooth_sute_fix.inputs.sigma = 2.
        pipeline.connect(sute_fix_ute_background, 'out_file', smooth_sute_fix,
                         'in_file')

        smooth_sute_cont = pipeline.create_node(Smooth(),
                                                name='smooth_sute_cont',
                                                requirements=[fsl5_req],
                                                wall_time=5)
        smooth_sute_cont.inputs.sigma = 2.
        pipeline.connect(sute_cont_ute_background, 'out_file',
                         smooth_sute_cont, 'in_file')

        pipeline.connect_output('sute_fix_ute', smooth_sute_fix,
                                'smoothed_file')
        pipeline.connect_output('sute_cont_ute', smooth_sute_cont,
                                'smoothed_file')
        pipeline.assert_connected()

        return pipeline
Example #6
def create_subject_ffx_wf(
        sub_id, bet_fracthr, spatial_fwhm, susan_brightthresh, hp_vols,
        lp_vols, remove_hemi, film_thresh, film_model_autocorr, use_derivs, tr,
        tcon_subtractive, cluster_threshold, cluster_thresh_frac, cluster_p,
        dilate_clusters_voxel, cond_ids, dsdir, work_basedir):
    # todo: new mapnode inputs: cluster_threshold, cluster_p
    """
    Make a workflow including preprocessing, first level, and second level GLM analysis for a given subject.
    This pipeline includes:
    - skull stripping
    - spatial smoothing
    - removing the irrelevant hemisphere
    - temporal band pass filter
    - 1st level GLM
    - averaging f-contrasts from 1st level GLM
    - clustering run-wise f-tests, dilating clusters, and returning binary roi mask
    """

    from nipype.algorithms.modelgen import SpecifyModel
    from nipype.interfaces.fsl import BET, SUSAN, ImageMaths
    from nipype.interfaces.fsl.model import SmoothEstimate, Cluster
    from nipype.interfaces.fsl.maths import TemporalFilter, MathsCommand
    from nipype.interfaces.utility import Function
    from nipype.pipeline.engine import Workflow, Node, MapNode
    from nipype.workflows.fmri.fsl import create_modelfit_workflow
    from nipype.interfaces.fsl.maths import MultiImageMaths
    from nipype.interfaces.utility import IdentityInterface
    import sys
    from os.path import join as pjoin
    import os
    # make the custom node functions importable (sys.path entries must be
    # directories, not .py files)
    sys.path.insert(0, "/data/project/somato/raw/code/roi_glm")
    # TODO: don't hardcode this
    import custom_node_functions

    # set up sub-workflow
    sub_wf = Workflow(name='subject_%s_wf' % sub_id)
    # set up sub-working-directory
    subwf_wd = pjoin(work_basedir, 'subject_ffx_wfs',
                     'subject_%s_ffx_workdir' % sub_id)
    if not os.path.exists(subwf_wd):
        os.makedirs(subwf_wd)
    sub_wf.base_dir = subwf_wd

    # Grab bold files for all four runs of one subject.
    # in the order [d1_d5, d5_d1, blocked_design1, blocked_design2]
    grab_boldfiles = Node(Function(
        function=custom_node_functions.grab_boldfiles_subject,
        input_names=['sub_id', 'cond_ids', 'ds_dir'],
        output_names=['boldfiles']),
                          name='grab_boldfiles')
    grab_boldfiles.inputs.sub_id = sub_id
    grab_boldfiles.inputs.cond_ids = cond_ids
    grab_boldfiles.inputs.ds_dir = dsdir

    getonsets = Node(Function(
        function=custom_node_functions.grab_blocked_design_onsets_subject,
        input_names=['sub_id', 'prepped_ds_dir'],
        output_names=['blocked_design_onsets_dicts']),
                     name='getonsets')
    getonsets.inputs.sub_id = sub_id
    getonsets.inputs.prepped_ds_dir = dsdir

    # pass bold files through preprocessing pipeline
    bet = MapNode(BET(frac=bet_fracthr, functional=True, mask=True),
                  iterfield=['in_file'],
                  name='bet')

    pick_mask = Node(Function(function=custom_node_functions.pick_first_mask,
                              input_names=['mask_files'],
                              output_names=['first_mask']),
                     name='pick_mask')

    # SUSAN smoothing node
    susan = MapNode(SUSAN(fwhm=spatial_fwhm,
                          brightness_threshold=susan_brightthresh),
                    iterfield=['in_file'],
                    name='susan')

    # bandpass filter node
    bpf = MapNode(TemporalFilter(highpass_sigma=hp_vols / 2.3548,
                                 lowpass_sigma=lp_vols / 2.3548),
                  iterfield=['in_file'],
                  name='bpf')

    # cut away hemisphere node
    if remove_hemi == 'r':
        roi_args = '-roi 96 -1 0 -1 0 -1 0 -1'
    elif remove_hemi == 'l':
        roi_args = '-roi 0 96 0 -1 0 -1 0 -1'
    else:
        raise IOError('did not recognize value of remove_hemi %s' %
                      remove_hemi)

    cut_hemi_func = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_func')
    cut_hemi_func.inputs.args = roi_args

    cut_hemi_mask = MapNode(MathsCommand(),
                            iterfield=['in_file'],
                            name='cut_hemi_mask')
    cut_hemi_mask.inputs.args = roi_args

    # Make design and contrasts for that subject.
    # subject_info is a list of two "Bunches", one per run, containing conditions, onsets, durations.
    designgen = Node(Function(
        input_names=['subtractive_contrast', 'blocked_design_onsets_dicts'],
        output_names=['subject_info', 'contrasts'],
        function=custom_node_functions.make_bunch_and_contrasts),
                     name='designgen')
    # name must match 'subtractive_contrast' declared in input_names above
    designgen.inputs.subtractive_contrast = tcon_subtractive

    # create 'session_info' for modelfit
    modelspec = MapNode(SpecifyModel(input_units='secs'),
                        name='modelspec',
                        iterfield=['functional_runs', 'subject_info'])
    modelspec.inputs.high_pass_filter_cutoff = hp_vols * tr
    modelspec.inputs.time_repetition = tr

    flatten_session_infos = Node(Function(
        input_names=['nested_list'],
        output_names=['flat_list'],
        function=custom_node_functions.flatten_nested_list),
                                 name='flatten_session_infos')

    # First-level workflow
    modelfit = create_modelfit_workflow(f_contrasts=True)
    modelfit.inputs.inputspec.interscan_interval = tr
    modelfit.inputs.inputspec.film_threshold = film_thresh
    modelfit.inputs.inputspec.model_serial_correlations = film_model_autocorr
    modelfit.inputs.inputspec.bases = {'dgamma': {'derivs': use_derivs}}

    # node that reshapes list of copes returned from modelfit
    cope_sorter = Node(Function(input_names=['copes', 'varcopes', 'contrasts'],
                                output_names=['copes', 'varcopes', 'n_runs'],
                                function=custom_node_functions.sort_copes),
                       name='cope_sorter')

    # average zfstats from both runs
    split_zfstats = Node(Function(
        function=custom_node_functions.split_zfstats_runs,
        input_names=['zfstats_list'],
        output_names=['zfstat_run1', 'zfstat_run2']),
                         name='split_zfstats')
    average_zfstats = Node(MultiImageMaths(op_string='-add %s -div 2'),
                           name='mean_images')

    # estimate smoothness of 1st lvl zf-files
    smoothest = MapNode(SmoothEstimate(),
                        name='smoothest',
                        iterfield=['mask_file', 'zstat_file'])

    cluster = MapNode(Cluster(),
                      name='cluster',
                      iterfield=['in_file', 'volume', 'dlh'])
    cluster.inputs.threshold = cluster_threshold
    cluster.inputs.pthreshold = cluster_p
    cluster.inputs.fractional = cluster_thresh_frac
    cluster.inputs.no_table = True
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_pval_file = True
    cluster.inputs.out_localmax_vol_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_size_file = True

    # dilate clusters
    dilate = MapNode(MathsCommand(args='-kernel sphere %i -dilD' %
                                  dilate_clusters_voxel),
                     iterfield=['in_file'],
                     name='dilate')

    # binarize the result to a mask
    binarize_roi = MapNode(ImageMaths(op_string='-nan -thr 0.001 -bin'),
                           iterfield=['in_file'],
                           name='binarize_roi')

    # connect preprocessing
    sub_wf.connect(grab_boldfiles, 'boldfiles', bet, 'in_file')
    sub_wf.connect(bet, 'out_file', susan, 'in_file')
    sub_wf.connect(susan, 'smoothed_file', bpf, 'in_file')
    sub_wf.connect(bpf, 'out_file', cut_hemi_func, 'in_file')
    sub_wf.connect(bet, 'mask_file', cut_hemi_mask, 'in_file')
    # connect to 1st level model
    sub_wf.connect(cut_hemi_func, 'out_file', modelspec, 'functional_runs')
    sub_wf.connect(getonsets, 'blocked_design_onsets_dicts', designgen,
                   'blocked_design_onsets_dicts')
    sub_wf.connect(designgen, 'subject_info', modelspec, 'subject_info')
    sub_wf.connect(modelspec, 'session_info', flatten_session_infos,
                   'nested_list')
    sub_wf.connect(flatten_session_infos, 'flat_list', modelfit,
                   'inputspec.session_info')
    sub_wf.connect(designgen, 'contrasts', modelfit, 'inputspec.contrasts')
    sub_wf.connect(cut_hemi_func, 'out_file', modelfit,
                   'inputspec.functional_data')
    # connect to cluster thresholding
    sub_wf.connect(cut_hemi_mask, 'out_file', smoothest, 'mask_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', smoothest,
                   'zstat_file')
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats', cluster,
                   'in_file')
    sub_wf.connect(smoothest, 'dlh', cluster, 'dlh')
    sub_wf.connect(smoothest, 'volume', cluster, 'volume')
    sub_wf.connect(cluster, 'threshold_file', dilate, 'in_file')
    sub_wf.connect(dilate, 'out_file', binarize_roi, 'in_file')
    # connect to averaging f-contrasts
    sub_wf.connect(modelfit.get_node('modelestimate'), 'zfstats',
                   split_zfstats, 'zfstats_list')
    sub_wf.connect(split_zfstats, 'zfstat_run1', average_zfstats, 'in_file')
    sub_wf.connect(split_zfstats, 'zfstat_run2', average_zfstats,
                   'operand_files')
    # redirect to outputspec
    # TODO: redirect outputspec to datasink in meta-wf
    outputspec = Node(IdentityInterface(fields=[
        'threshold_file', 'index_file', 'pval_file', 'localmax_txt_file',
        'roi'
    ]),
                      name='outputspec')
    sub_wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    sub_wf.connect(cluster, 'index_file', outputspec, 'index_file')
    sub_wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    sub_wf.connect(cluster, 'localmax_txt_file', outputspec,
                   'localmax_txt_file')
    sub_wf.connect(binarize_roi, 'out_file', outputspec, 'roi')

    # run subject-lvl workflow
    # sub_wf.write_graph(graph2use='colored', dotfilename='./subwf_graph.dot')
    # sub_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})
    # sub_wf.run(plugin='CondorDAGMan')
    # sub_wf.run()

    return sub_wf
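A hedged invocation sketch; all parameter values are hypothetical placeholders, and the run call mirrors the commented-out lines above:

sub_wf = create_subject_ffx_wf(
    sub_id='01', bet_fracthr=0.2, spatial_fwhm=3, susan_brightthresh=1000,
    hp_vols=30.0, lp_vols=2.0, remove_hemi='r', film_thresh=0.001,
    film_model_autocorr=True, use_derivs=True, tr=2.0,
    tcon_subtractive=False, cluster_threshold=3.1, cluster_thresh_frac=True,
    cluster_p=0.05, dilate_clusters_voxel=2,
    cond_ids=['d1_d5', 'd5_d1', 'blocked_design1', 'blocked_design2'],
    dsdir='/data/project/somato/ds', work_basedir='/data/project/somato/work')
sub_wf.run(plugin='MultiProc', plugin_args={'n_procs': 6})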
Example #7
def sdc_t2b(name='SDC_T2B', icorr=True, num_threads=1):
    """
    The T2w-registration based method (T2B) implements an SDC by nonlinear
    registration of the anatomically correct *T2w* image to the *b0* image
    of the *dMRI* dataset. The implementation here tries to reproduce the one
    included in ExploreDTI `(Leemans et al., 2009)
    <http://www.exploredti.com/ref/ExploreDTI_ISMRM_2009.pdf>`_, which is
    also used by `(Irfanoglu et al., 2012)
    <http://dx.doi.org/10.1016/j.neuroimage.2012.02.054>`_.

    :param str name: a unique name for the workflow.
    :param bool icorr: whether the unwarped DWIs should be modulated by the
        Jacobian determinant of the deformation field (intensity correction).
    :param int num_threads: number of threads for the elastix registration.

    :inputs:

        * inputnode.in_dwi: the input dMRI dataset
        * inputnode.in_bval: the b-values of the dMRI dataset
        * inputnode.in_t2w: the reference T2w image
        * inputnode.dwi_mask: a brain mask in dMRI space
        * inputnode.t2w_mask: a brain mask in T2w space
        * inputnode.in_param: a JSON file with acquisition parameters
          (e.g. the phase-encoding direction ``enc_dir``)
        * inputnode.in_surf: surfaces to be warped along with the dMRI data

    :outputs:

        * outputnode.dwi: the dMRI dataset after correction
        * outputnode.dwi_mask: the brain mask unwarped to the corrected dMRI
        * outputnode.jacobian: the Jacobian determinant map of the deformation
        * outputnode.out_surf: the input surfaces after unwarping


    Example::

    >>> t2b = sdc_t2b()
    >>> t2b.inputs.inputnode.in_dwi = 'dwi_brain.nii'
    >>> t2b.inputs.inputnode.in_bval = 'dwi.bval'
    >>> t2b.inputs.inputnode.dwi_mask = 'b0_mask.nii'
    >>> t2b.inputs.inputnode.in_t2w = 't2w_brain.nii'
    >>> t2b.inputs.inputnode.t2w_mask = 't2w_mask.nii'
    >>> t2b.inputs.inputnode.in_param = 'parameters.txt'
    >>> t2b.run() # doctest: +SKIP

    """
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_dwi', 'in_bval', 'in_t2w', 'dwi_mask', 't2w_mask',
                'in_param', 'in_surf']), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['dwi', 'dwi_mask', 'jacobian', 'out_surf']),
        name='outputnode')

    avg_b0 = pe.Node(niu.Function(
        input_names=['in_dwi', 'in_bval'], output_names=['out_file'],
        function=b0_average), name='AverageB0')
    n4_b0 = pe.Node(N4BiasFieldCorrection(dimension=3), name='BiasB0')
    n4_t2 = pe.Node(N4BiasFieldCorrection(dimension=3), name='BiasT2')

    getparam = pe.Node(nio.JSONFileGrabber(defaults={'enc_dir': 'y'}),
                       name='GetEncDir')
    reg = pe.Node(nex.Registration(num_threads=num_threads), name='Elastix')
    tfx_b0 = pe.Node(nex.EditTransform(), name='tfm_b0')
    split_dwi = pe.Node(fsl.utils.Split(dimension='t'), name='split_dwi')
    warp = pe.MapNode(nex.ApplyWarp(), iterfield=['moving_image'],
                      name='UnwarpDWIs')
    warp_prop = pe.Node(nex.AnalyzeWarp(), name='DisplFieldAnalysis')
    warpbuff = pe.Node(niu.IdentityInterface(fields=['unwarped']),
                       name='UnwarpedCache')
    mskdwis = pe.MapNode(fs.ApplyMask(), iterfield='in_file', name='MaskDWIs')
    thres = pe.MapNode(Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegs')
    merge_dwi = pe.Node(fsl.utils.Merge(dimension='t'), name='merge_dwis')
    tfx_msk = pe.Node(nex.EditTransform(
        interpolation='nearest', output_type='unsigned char'),
        name='MSKInterpolator')
    corr_msk = pe.Node(nex.ApplyWarp(), name='UnwarpMsk')
    closmsk = pe.Node(fsl.maths.MathsCommand(
        nan2zeros=True, args='-kernel sphere 3 -dilM -kernel sphere 2 -ero'),
        name='MaskClosing')

    swarp = pe.MapNode(nex.PointsWarp(), iterfield=['points_file'],
                       name='UnwarpSurfs')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,     avg_b0, [('in_dwi', 'in_dwi'),
                                 ('in_bval', 'in_bval')]),
        (inputnode,   getparam, [('in_param', 'in_file')]),
        (inputnode,  split_dwi, [('in_dwi', 'in_file')]),
        (inputnode,   corr_msk, [('dwi_mask', 'moving_image')]),
        (inputnode,      swarp, [('in_surf', 'points_file')]),
        (inputnode,        reg, [('t2w_mask', 'fixed_mask'),
                                 ('dwi_mask', 'moving_mask')]),
        (inputnode,      n4_t2, [('in_t2w', 'input_image'),
                                 ('t2w_mask', 'mask_image')]),
        (inputnode,      n4_b0, [('dwi_mask', 'mask_image')]),
        (avg_b0,         n4_b0, [('out_file', 'input_image')]),
        (getparam,         reg, [
            (('enc_dir', _default_params), 'parameters')]),
        (n4_t2,            reg, [('output_image', 'fixed_image')]),
        (n4_b0,            reg, [('output_image', 'moving_image')]),
        (reg,           tfx_b0, [
            (('transform', _get_last), 'transform_file')]),
        (avg_b0,        tfx_b0, [('out_file', 'reference_image')]),
        (tfx_b0,     warp_prop, [('output_file', 'transform_file')]),
        (tfx_b0,          warp, [('output_file', 'transform_file')]),
        (split_dwi,       warp, [('out_files', 'moving_image')]),
        (warpbuff,     mskdwis, [('unwarped', 'in_file')]),
        (closmsk,      mskdwis, [('out_file', 'mask_file')]),
        (mskdwis,        thres, [('out_file', 'in_file')]),
        (thres,      merge_dwi, [('out_file', 'in_files')]),
        (reg,          tfx_msk, [
            (('transform', _get_last), 'transform_file')]),
        (tfx_b0,         swarp, [('output_file', 'transform_file')]),
        (avg_b0,       tfx_msk, [('out_file', 'reference_image')]),
        (tfx_msk,     corr_msk, [('output_file', 'transform_file')]),
        (corr_msk,     closmsk, [('warped_file', 'in_file')]),
        (merge_dwi, outputnode, [('merged_file', 'dwi')]),
        (closmsk,   outputnode, [('out_file', 'dwi_mask')]),
        (warp_prop, outputnode, [('jacdet_map', 'jacobian')]),
        (swarp,     outputnode, [('warped_file', 'out_surf')])
    ])

    if icorr:
        jac_mask = pe.Node(fs.ApplyMask(), name='mask_jac')
        mult = pe.MapNode(MultiImageMaths(op_string='-mul %s'),
                          iterfield=['in_file'], name='ModulateDWIs')
        wf.connect([
            (closmsk,      jac_mask, [('out_file', 'mask_file')]),
            (warp_prop,    jac_mask, [('jacdet_map', 'in_file')]),
            (warp,             mult, [('warped_file', 'in_file')]),
            (jac_mask,         mult, [('out_file', 'operand_files')]),
            (mult,         warpbuff, [('out_file', 'unwarped')])
        ])
    else:
        wf.connect([
            (warp,         warpbuff, [('warped_file', 'unwarped')])
        ])

    return wf
Example #8
##Part 3: Brain extraction:
#Prepare the tissue maps, merge them, fill holes, then apply the resulting mask to the output images.
from nipype import Node
from nipype.interfaces.utility import Function
from nipype.interfaces.fsl.maths import (ApplyMask, MultiImageMaths,
                                         Threshold, UnaryMaths)


def extract_tissue_c123(c1, c2, c3):
    #Split the tissue maps: the first is the main input, the rest become operand files.
    first_tissue = c1
    string_list = [c2, c3]
    return (first_tissue, string_list)


pre_merge = Node(Function(input_names=['c1', 'c2', 'c3'],
                          output_names=['first_tissue', 'string_list'],
                          function=extract_tissue_c123),
                 name='Pre_Merge_Tissues')

merge_tissues = Node(MultiImageMaths(), name="Merge_C1_C2_C3")
merge_tissues.inputs.op_string = "-add %s -add %s -thr 0.05 -bin"

fill_mask = Node(UnaryMaths(), name="FillHoles_Mask")
fill_mask.inputs.operation = "fillh"

apply_mask_t1 = Node(ApplyMask(), name="ApplyMask_T1")
apply_mask_flair = Node(ApplyMask(), name="ApplyMask_FLAIR")
apply_mask_swi = Node(ApplyMask(), name="ApplyMask_SWI")
apply_mask_bct1 = Node(ApplyMask(), name="ApplyMask_BiasCorrect_T1")
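
#A minimal wiring sketch for the nodes above (workflow name is hypothetical);
#the two '%s' placeholders in merge_tissues' op_string are filled, in order,
#by the two entries of operand_files.
from nipype import Workflow

wf = Workflow(name='brain_extraction')
wf.connect(pre_merge, 'first_tissue', merge_tissues, 'in_file')
wf.connect(pre_merge, 'string_list', merge_tissues, 'operand_files')
wf.connect(merge_tissues, 'out_file', fill_mask, 'in_file')
wf.connect(fill_mask, 'out_file', apply_mask_t1, 'mask_file')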

###SNR
#Tissue 1-3 mask construction and HeadMask construction.
con_tissue_mask_1 = Node(Threshold(), name="Tissue1_Mask")
con_tissue_mask_1.inputs.thresh = 0.1
con_tissue_mask_1.inputs.args = "-bin"