Example 1
def create_connectome(name='connectome'):

    wf = pe.Workflow(name=name)

    inputspec = pe.Node(
        util.IdentityInterface(fields=[
            'time_series',
            'method'
        ]),
        name='inputspec'
    )

    outputspec = pe.Node(
        util.IdentityInterface(fields=[
            'connectome',
        ]),
        name='outputspec'
    )

    node = pe.Node(Function(input_names=['time_series', 'method'],
                            output_names=['connectome'],
                            function=compute_correlation,
                            as_module=True),
                   name='connectome')

    wf.connect([
        (inputspec, node, [('time_series', 'time_series')]),
        (inputspec, node, [('method', 'method')]),
        (node, outputspec, [('connectome', 'connectome')]),
    ])

    return wf
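
A minimal usage sketch for this fragment, assuming `compute_correlation` is importable alongside the workflow; the working directory, input path and method string below are hypothetical:

conn_wf = create_connectome(name='connectome')
conn_wf.base_dir = '/tmp/connectome_work'  # hypothetical working directory
# hypothetical inputs; the accepted method values depend on compute_correlation
conn_wf.inputs.inputspec.time_series = '/data/sub01_roi_timeseries.csv'
conn_wf.inputs.inputspec.method = 'Pearson'
conn_wf.run()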
    
Example 2
def create_qc_skullstrip(wf_name='qc_skullstrip'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(
        fields=['anatomical_brain', 'anatomical_reorient']),
                         name='inputspec')

    output_node = pe.Node(
        util.IdentityInterface(fields=['axial_image', 'sagittal_image']),
        name='outputspec')

    skull_edge = pe.Node(Function(input_names=['in_file'],
                                  output_names=['out_file'],
                                  function=afni_Edge3,
                                  as_module=True),
                         name='skull_edge')

    montage_skull = create_montage('montage_skull', 'red', 'skull_vis')

    wf.connect(input_node, 'anatomical_reorient', skull_edge, 'in_file')
    wf.connect(input_node, 'anatomical_brain', montage_skull,
               'inputspec.underlay')
    wf.connect(skull_edge, 'out_file', montage_skull, 'inputspec.overlay')

    wf.connect(montage_skull, 'outputspec.axial_png', output_node,
               'axial_image')
    wf.connect(montage_skull, 'outputspec.sagittal_png', output_node,
               'sagittal_image')

    return wf
Example 3
def create_qc_motion(wf_name='qc_motion'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=['motion_parameters']),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(
        fields=['motion_translation_plot', 'motion_rotation_plot']),
                          name='outputspec')

    mov_plot = pe.Node(Function(
        input_names=['motion_parameters'],
        output_names=['translation_plot', 'rotation_plot'],
        function=gen_motion_plt,
        as_module=True),
                       name='motion_plot')

    wf.connect(input_node, 'motion_parameters', mov_plot, 'motion_parameters')

    wf.connect(mov_plot, 'translation_plot', output_node,
               'motion_translation_plot')
    wf.connect(mov_plot, 'rotation_plot', output_node, 'motion_rotation_plot')

    return wf
Example 4
def test_function_str():

    f = pe.Node(Function(input_names=['scan', 'rest_dict', 'resource'],
                         output_names=['file_path'],
                         function=get_rest),
                name='get_rest')

    f.inputs.set(resource=resource, rest_dict=rest_dict, scan=scan)

    results = f.run()
    assert rest_dict['rest_acq-1_run-1']['scan'] == results.outputs.file_path
Example 5
def test_iterable_selector():

    selector_test = yaml.safe_load(selector)['Regressors']

    nuisance_wf = pe.Workflow(name='iterable_selector')
    nuisance_wf.base_dir = '/tmp/iter_working_dir'

    import shutil
    shutil.rmtree(nuisance_wf.base_dir, ignore_errors=True)

    inputspec = pe.Node(util.IdentityInterface(fields=['selector']),
                        name='inputspec')

    summarize_timeseries_node = pe.Node(Function(
        input_names=['selector'],
        output_names=['residual_file_path', 'regressors_file_path'],
        function=summarize_timeseries,
        as_module=True,
    ),
                                        name='summarize_timeseries')

    outputspec = pe.Node(util.IdentityInterface(
        fields=['residual_file_path', 'regressors_file_path']),
                         name='outputspec')

    nuisance_wf.connect(inputspec, 'selector', summarize_timeseries_node,
                        'selector')
    nuisance_wf.connect(summarize_timeseries_node, 'residual_file_path',
                        outputspec, 'residual_file_path')
    nuisance_wf.connect(summarize_timeseries_node, 'regressors_file_path',
                        outputspec, 'regressors_file_path')

    nuisance_wf.get_node('inputspec').iterables = (('selector', [
        NuisanceRegressor(s) for s in selector_test
    ]))

    nuisance_wf.run()
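
Setting `iterables` on the inputspec, as above, makes nipype expand the graph into one parametrized copy per selector at execution time. A minimal illustration of the same mechanism with a bare IdentityInterface, independent of the nuisance code (names and base_dir are illustrative):

demo_wf = pe.Workflow(name='iterable_demo', base_dir='/tmp/iterable_demo')
values = pe.Node(util.IdentityInterface(fields=['value']), name='inputspec')
values.iterables = ('value', [1, 2, 3])  # one parametrized execution per value
demo_wf.add_nodes([values])
demo_wf.run()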
Example 6
def create_qc_fd(wf_name='qc_fd'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(
        util.IdentityInterface(fields=['fd', 'excluded_volumes']),
        name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=['fd_histogram_plot']),
                          name='outputspec')

    fd_plot = pe.Node(Function(input_names=['arr', 'measure', 'ex_vol'],
                               output_names=['hist_path'],
                               function=gen_plot_png,
                               as_module=True),
                      name='fd_plot')

    fd_plot.inputs.measure = 'FD'

    wf.connect(input_node, 'fd', fd_plot, 'arr')
    wf.connect(input_node, 'excluded_volumes', fd_plot, 'ex_vol')
    wf.connect(fd_plot, 'hist_path', output_node, 'fd_histogram_plot')

    return wf
Example 7
def create_montage_gm_wm_csf(wf_name, png_name):

    wf = pe.Workflow(name=wf_name)

    inputNode = pe.Node(util.IdentityInterface(
        fields=['underlay', 'overlay_csf', 'overlay_wm', 'overlay_gm']),
                        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=[
        'axial_png', 'sagittal_png', 'resampled_underlay',
        'resampled_overlay_csf', 'resampled_overlay_wm', 'resampled_overlay_gm'
    ]),
                         name='outputspec')

    resample_u = pe.Node(Function(input_names=['file_'],
                                  output_names=['new_fname'],
                                  function=resample_1mm,
                                  as_module=True),
                         name='resample_u')

    resample_o_csf = resample_u.clone('resample_o_csf')
    resample_o_wm = resample_u.clone('resample_o_wm')
    resample_o_gm = resample_u.clone('resample_o_gm')

    wf.connect(inputNode, 'underlay', resample_u, 'file_')
    wf.connect(inputNode, 'overlay_csf', resample_o_csf, 'file_')
    wf.connect(inputNode, 'overlay_gm', resample_o_gm, 'file_')
    wf.connect(inputNode, 'overlay_wm', resample_o_wm, 'file_')

    montage_a = pe.Node(Function(input_names=[
        'overlay_csf', 'overlay_wm', 'overlay_gm', 'underlay', 'png_name'
    ],
                                 output_names=['png_name'],
                                 function=montage_gm_wm_csf_axial,
                                 as_module=True),
                        name='montage_a')

    wf.connect(resample_u, 'new_fname', montage_a, 'underlay')
    wf.connect(resample_o_csf, 'new_fname', montage_a, 'overlay_csf')
    wf.connect(resample_o_gm, 'new_fname', montage_a, 'overlay_gm')
    wf.connect(resample_o_wm, 'new_fname', montage_a, 'overlay_wm')
    montage_a.inputs.png_name = png_name + '_a.png'

    montage_s = pe.Node(Function(input_names=[
        'overlay_csf', 'overlay_wm', 'overlay_gm', 'underlay', 'png_name'
    ],
                                 output_names=['png_name'],
                                 function=montage_gm_wm_csf_sagittal,
                                 as_module=True),
                        name='montage_s')
    montage_s.inputs.png_name = png_name + '_s.png'

    wf.connect(resample_u, 'new_fname', montage_s, 'underlay')
    wf.connect(resample_o_csf, 'new_fname', montage_s, 'overlay_csf')
    wf.connect(resample_o_gm, 'new_fname', montage_s, 'overlay_gm')
    wf.connect(resample_o_wm, 'new_fname', montage_s, 'overlay_wm')

    wf.connect(resample_u, 'new_fname', outputNode, 'resampled_underlay')
    wf.connect(resample_o_csf, 'new_fname', outputNode,
               'resampled_overlay_csf')
    wf.connect(resample_o_wm, 'new_fname', outputNode, 'resampled_overlay_wm')
    wf.connect(resample_o_gm, 'new_fname', outputNode, 'resampled_overlay_gm')
    wf.connect(montage_a, 'png_name', outputNode, 'axial_png')
    wf.connect(montage_s, 'png_name', outputNode, 'sagittal_png')

    return wf
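
Because the montage builders return nipype sub-workflows, they are usually embedded in a parent workflow and addressed through the dotted 'inputspec.*' / 'outputspec.*' notation (as `qa_montages` does further below for `create_montage`). A sketch of that pattern for this GM/WM/CSF variant; the parent workflow and the stand-in file source are illustrative only:

parent = pe.Workflow(name='qc_segmentation')
files = pe.Node(util.IdentityInterface(
    fields=['underlay', 'csf', 'wm', 'gm']), name='files')  # stand-in source node
seg_montage = create_montage_gm_wm_csf('montage_gm_wm_csf', 'seg')
parent.connect(files, 'underlay', seg_montage, 'inputspec.underlay')
parent.connect(files, 'csf', seg_montage, 'inputspec.overlay_csf')
parent.connect(files, 'wm', seg_montage, 'inputspec.overlay_wm')
parent.connect(files, 'gm', seg_montage, 'inputspec.overlay_gm')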
Example 8
def create_cwas(name='cwas', working_dir=None, crash_dir=None):
    """
    Connectome Wide Association Studies
    
    This workflow performs CWAS on a group of subjects.
    
    Parameters
    ----------
    name : string, optional
        Name of the workflow.
        
    Returns
    -------
    cwas : nipype.pipeline.engine.Workflow
        CWAS workflow.
        
    Notes
    -----
    
    Workflow Inputs::
    
        inputspec.subjects : dict (subject id: nifti files)
            4-D timeseries of a group of subjects normalized to MNI space
        inputspec.roi : string (nifti file)
            Mask of region(s) of interest
        inputspec.regressor : list (float)
            Corresponding list of the regressor variable of shape (`N`) or (`N`,`1`), `N` subjects
        inputspec.participant_column : string
            Name of the column in the regressor file containing participant IDs
        inputspec.columns : list (int)
            todo
        inputspec.permutations : int
            Number of permutation samples to draw from the pseudo F distribution
        inputspec.parallel_nodes : integer
            Number of nodes to create and potentially parallelize over
        
    Workflow Outputs::

        outputspec.F_map : string (nifti file)
            Pseudo F values of CWAS
        outputspec.p_map : string (nifti file)
            Significance p values calculated from permutation tests
            
    CWAS Procedure:
    
    1. Calculate spatial correlation of a voxel
    2. Correlate spatial z-score maps for every subject pair
    3. Convert matrix to distance matrix, `1-r`
    4. Calculate MDMR statistics for the voxel
    5. Determine significance of MDMR statistics with permutation tests
    
    Workflow Graph:
    
    .. image:: ../images/cwas.dot.png
        :width: 500
        
    Detailed Workflow Graph:
    
    .. image:: ../images/cwas_detailed.dot.png
        :width: 500
    
    References
    ----------
    .. [1] Shehzad Z, Kelly C, Reiss PT, Emerson JW, McMahon K, Copland DA, Castellanos FX, Milham MP. An Analytic Framework for Connectome-Wide Association Studies. Under Review.
    
    """

    if not working_dir:
        working_dir = os.path.join(os.getcwd(), 'MDMR_work_dir')
    if not crash_dir:
        crash_dir = os.path.join(os.getcwd(), 'MDMR_crash_dir')

    workflow = pe.Workflow(name=name)
    workflow.base_dir = working_dir
    workflow.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(crash_dir)
    }

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'roi', 'subjects', 'regressor', 'participant_column', 'columns',
        'permutations', 'parallel_nodes'
    ]),
                        name='inputspec')

    outputspec = pe.Node(
        util.IdentityInterface(fields=['F_map', 'p_map', 'neglog_p_map']),
        name='outputspec')

    ccb = pe.Node(Function(input_names=['mask_file', 'batches'],
                           output_names='batch_list',
                           function=create_cwas_batches,
                           as_module=True),
                  name='cwas_batches')

    ncwas = pe.MapNode(Function(input_names=[
        'subjects', 'mask_file', 'regressor_file', 'participant_column',
        'columns_string', 'permutations', 'voxel_range'
    ],
                                output_names=['result_batch'],
                                function=nifti_cwas,
                                as_module=True),
                       name='cwas_batch',
                       iterfield='voxel_range')

    jmask = pe.Node(Function(input_names=['subjects', 'mask_file'],
                             output_names=['joint_mask'],
                             function=joint_mask,
                             as_module=True),
                    name='joint_mask')

    mcwasb = pe.Node(Function(
        input_names=['cwas_batches', 'mask_file'],
        output_names=['F_file', 'p_file', 'neglog_p_file'],
        function=merge_cwas_batches,
        as_module=True),
                     name='cwas_volumes')

    #Compute the joint mask
    workflow.connect(inputspec, 'subjects', jmask, 'subjects')
    workflow.connect(inputspec, 'roi', jmask, 'mask_file')

    #Create batches based on the joint mask
    workflow.connect(jmask, 'joint_mask', ccb, 'mask_file')
    workflow.connect(inputspec, 'parallel_nodes', ccb, 'batches')

    #Compute CWAS over batches of voxels
    workflow.connect(jmask, 'joint_mask', ncwas, 'mask_file')
    workflow.connect(inputspec, 'subjects', ncwas, 'subjects')
    workflow.connect(inputspec, 'regressor', ncwas, 'regressor_file')
    workflow.connect(inputspec, 'permutations', ncwas, 'permutations')
    workflow.connect(inputspec, 'participant_column', ncwas,
                     'participant_column')
    workflow.connect(inputspec, 'columns', ncwas, 'columns_string')

    workflow.connect(ccb, 'batch_list', ncwas, 'voxel_range')

    #Merge the computed CWAS data
    workflow.connect(ncwas, 'result_batch', mcwasb, 'cwas_batches')
    workflow.connect(jmask, 'joint_mask', mcwasb, 'mask_file')

    workflow.connect(mcwasb, 'F_file', outputspec, 'F_map')
    workflow.connect(mcwasb, 'p_file', outputspec, 'p_map')
    workflow.connect(mcwasb, 'neglog_p_file', outputspec, 'neglog_p_map')

    return workflow
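
A hedged usage sketch for this workflow; every input value below is hypothetical, and the exact formats expected (e.g. whether `regressor` is a CSV path and `columns` a comma-separated string) are dictated by `nifti_cwas`:

cwas_wf = create_cwas(name='cwas', working_dir='/tmp/cwas_work')
cwas_wf.inputs.inputspec.roi = '/data/gm_mask_mni.nii.gz'
cwas_wf.inputs.inputspec.subjects = {'sub01': '/data/sub01_func_mni.nii.gz',
                                     'sub02': '/data/sub02_func_mni.nii.gz'}
cwas_wf.inputs.inputspec.regressor = '/data/phenotype.csv'
cwas_wf.inputs.inputspec.participant_column = 'participant_id'
cwas_wf.inputs.inputspec.columns = 'age'
cwas_wf.inputs.inputspec.permutations = 1000
cwas_wf.inputs.inputspec.parallel_nodes = 4
# the voxel-range batches (MapNode) can run concurrently under a parallel plugin
cwas_wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})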
Example 9
def create_montage(wf_name, cbar_name, png_name):

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['underlay', 'overlay']),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'axial_png', 'sagittal_png', 'resampled_underlay', 'resampled_overlay'
    ]),
                         name='outputspec')

    # node for resampling create_montage images to 1mm for QC pages
    resample_u = pe.Node(Function(input_names=['file_'],
                                  output_names=['new_fname'],
                                  function=resample_1mm,
                                  as_module=True),
                         name='resample_u')

    wf.connect(inputnode, 'underlay', resample_u, 'file_')
    wf.connect(resample_u, 'new_fname', outputnode, 'resampled_underlay')

    # same for overlays (resampling to 1mm)
    resample_o = pe.Node(Function(input_names=['file_'],
                                  output_names=['new_fname'],
                                  function=resample_1mm,
                                  as_module=True),
                         name='resample_o')

    wf.connect(inputnode, 'overlay', resample_o, 'file_')
    wf.connect(resample_o, 'new_fname', outputnode, 'resampled_overlay')

    # node for axial montages
    montage_a = pe.MapNode(Function(
        input_names=['overlay', 'underlay', 'png_name', 'cbar_name'],
        output_names=['png_name'],
        function=montage_axial,
        as_module=True),
                           name='montage_a',
                           iterfield=['overlay'])
    montage_a.inputs.cbar_name = cbar_name
    montage_a.inputs.png_name = png_name + '_a.png'

    wf.connect(resample_u, 'new_fname', montage_a, 'underlay')

    wf.connect(resample_o, 'new_fname', montage_a, 'overlay')

    # node for sagittal montages
    montage_s = pe.MapNode(Function(
        input_names=['overlay', 'underlay', 'png_name', 'cbar_name'],
        output_names=['png_name'],
        function=montage_sagittal,
        as_module=True),
                           name='montage_s',
                           iterfield=['overlay'])
    montage_s.inputs.cbar_name = cbar_name
    montage_s.inputs.png_name = png_name + '_s.png'

    wf.connect(resample_u, 'new_fname', montage_s, 'underlay')
    wf.connect(resample_o, 'new_fname', montage_s, 'overlay')

    wf.connect(montage_a, 'png_name', outputnode, 'axial_png')
    wf.connect(montage_s, 'png_name', outputnode, 'sagittal_png')

    return wf
Example 10
def qa_montages(workflow, c, strat, num_strat, qc_montage_id_a,
                qc_montage_id_s, qc_hist_id, measure, idx):

    try:
        overlay, out_file = strat[measure]

        overlay_drop_percent = pe.MapNode(
            Function(input_names=['measure_file', 'percent'],
                     output_names=['modified_measure_file'],
                     function=drop_percent,
                     as_module=True),
            name='dp_%s_%d' % (measure, num_strat),
            iterfield=['measure_file'])

        overlay_drop_percent.inputs.percent = 99.999

        workflow.connect(overlay, out_file, overlay_drop_percent,
                         'measure_file')

        montage = create_montage('montage_%s_%d' % (measure, num_strat),
                                 'cyan_to_yellow', measure)
        montage.inputs.inputspec.underlay = c.template_brain_only_for_func

        workflow.connect(overlay_drop_percent, 'modified_measure_file',
                         montage, 'inputspec.overlay')

        if 'centrality' in measure:
            histogram = pe.MapNode(
                Function(input_names=['measure_file', 'measure'],
                         output_names=['hist_path'],
                         function=gen_histogram,
                         as_module=True),
                name='hist_{0}_{1}'.format(measure, num_strat),
                iterfield=['measure_file'])
        else:
            histogram = pe.Node(Function(
                input_names=['measure_file', 'measure'],
                output_names=['hist_path'],
                function=gen_histogram,
                as_module=True),
                                name='hist_{0}_{1}'.format(measure, num_strat))

        histogram.inputs.measure = measure

        workflow.connect(overlay, out_file, histogram, 'measure_file')

        strat.update_resource_pool({
            'qc___%s_a' % measure: (montage, 'outputspec.axial_png'),
            'qc___%s_s' % measure: (montage, 'outputspec.sagittal_png'),
            'qc___%s_hist' % measure: (histogram, 'hist_path')
        })

        if idx not in qc_montage_id_a:
            qc_montage_id_a[idx] = '%s_a' % measure
            qc_montage_id_s[idx] = '%s_s' % measure
            qc_hist_id[idx] = '%s_hist' % measure

    except Exception as e:
        print("[!] Connection of QA montages workflow for %s "
              "has failed.\n" % measure)
        print("Error: %s" % e)
Example 11
def create_isfc(name='isfc', working_dir=None, crash_dir=None):
    """
    Inter-Subject Functional Correlation
    
    Parameters
    ----------
    name : string, optional
        Name of the workflow.
        
    Returns
    -------
    workflow : nipype.pipeline.engine.Workflow
        ISFC workflow.
        
    Notes
    -----
    
    Workflow Inputs::
    
        
    Workflow Outputs::

    
    References
    ----------
    .. [1] Simony, E., Honey, C. J., Chen, J., Lositsky, O., Yeshurun,
           Y., Wiesel, A., & Hasson, U. (2016). Dynamic reconfiguration of the
           default mode network during narrative comprehension.
           Nature Communications, 7(May 2015), 1-13.
           https://doi.org/10.1038/ncomms12141
    
    """

    if not working_dir:
        working_dir = os.path.join(os.getcwd(), 'ISC_work_dir')
    if not crash_dir:
        crash_dir = os.path.join(os.getcwd(), 'ISC_crash_dir')

    wf = pe.Workflow(name=name)
    wf.base_dir = working_dir
    wf.config['execution'] = {'hash_method': 'timestamp',
                              'crashdump_dir': os.path.abspath(crash_dir)}

    inputspec = pe.Node(
        util.IdentityInterface(fields=[
            'subjects',
            'permutations',
            'collapse_subj',
            'std',
            'two_sided',
            'random_state'
        ]),
        name='inputspec'
    )

    data_node = pe.Node(Function(input_names=['subjects'],
                                 output_names=['subject_ids', 'D', 'voxel_masker'],
                                 function=load_data,
                                 as_module=True),
                        name='data')

    save_node = pe.Node(Function(input_names=['subject_ids', 'ISFC', 'p', 'collapse_subj'],
                                 output_names=['subject_ids', 'correlations', 'significance'],
                                 function=save_data_isfc,
                                 as_module=True),
                        name='save')

    outputspec = pe.Node(
        util.IdentityInterface(fields=['correlations', 'significance']),
        name='outputspec'
    )

    isfc_node = pe.Node(Function(input_names=['D',
                                             'std',
                                             'collapse_subj'],
                                output_names=['ISFC', 'masked'],
                                function=node_isfc,
                                as_module=True),
                       name='ISFC')

    permutations_node = pe.MapNode(Function(input_names=['permutation',
                                                         'D',
                                                         'masked',
                                                         'collapse_subj',
                                                         'random_state'],
                                            output_names=['permutation',
                                                          'min_null',
                                                          'max_null'],
                                            function=node_isfc_permutation,
                                            as_module=True),
                                   name='ISFC_permutation', iterfield='permutation')

    significance_node = pe.Node(Function(input_names=['ISFC',
                                                      'min_null',
                                                      'max_null',
                                                      'two_sided'],
                                         output_names=['p'],
                                         function=node_isfc_significance,
                                         as_module=True),
                                name='ISFC_p')

    wf.connect([
        (inputspec, data_node, [('subjects', 'subjects')]),
        (inputspec, isfc_node, [('collapse_subj', 'collapse_subj')]),
        (inputspec, isfc_node, [('std', 'std')]),
        (data_node, isfc_node, [('D', 'D')]),

        (isfc_node, significance_node, [('ISFC', 'ISFC')]),

        (data_node, permutations_node, [('D', 'D')]),
        (isfc_node, permutations_node, [('masked', 'masked')]),
        (inputspec, permutations_node, [('collapse_subj', 'collapse_subj')]),
        (inputspec, permutations_node, [(('permutations', _permutations), 'permutation')]),
        (inputspec, permutations_node, [('random_state', 'random_state')]),

        (permutations_node, significance_node, [('min_null', 'min_null')]),
        (permutations_node, significance_node, [('max_null', 'max_null')]),
        (inputspec, significance_node, [('two_sided', 'two_sided')]),

        (data_node, save_node, [('subject_ids', 'subject_ids')]),
        (inputspec, save_node, [('collapse_subj', 'collapse_subj')]),
        (isfc_node, save_node, [('ISFC', 'ISFC')]),
        (significance_node, save_node, [('p', 'p')]),

        (save_node, outputspec, [('subject_ids', 'subject_ids')]),
        (save_node, outputspec, [('correlations', 'correlations')]),
        (save_node, outputspec, [('significance', 'significance')]),
    ])

    return wf
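
Note the `(('permutations', _permutations), 'permutation')` entry above: nipype's connection-function form passes the `permutations` value through `_permutations` before it reaches the MapNode's `permutation` iterfield. A minimal sketch of what such a helper could look like (the body is an assumption; the real `_permutations` is defined elsewhere in this module):

def _permutations(perm):
    # hypothetical helper: expand a permutation count into the list of
    # indices the ISFC_permutation MapNode iterates over
    return list(range(perm))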
Example 12
File: qc.py Project: tbweng/C-PAC
def create_qc_carpet(wf_name='qc_carpet', output_image='qc_carpet'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=[
        'functional_to_standard', 'mean_functional_to_standard',
        'anatomical_gm_mask', 'anatomical_wm_mask', 'anatomical_csf_mask'
    ]),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=['carpet_plot']),
                          name='outputspec')

    gm_resample = pe.Node(afni.Resample(), name='gm_resample')
    gm_resample.inputs.outputtype = 'NIFTI'
    wf.connect(input_node, 'anatomical_gm_mask', gm_resample, 'in_file')
    wf.connect(input_node, 'mean_functional_to_standard', gm_resample,
               'master')

    gm_mask = pe.Node(afni.Calc(), name="gm_mask")
    gm_mask.inputs.expr = 'astep(a, 0.5)'
    gm_mask.inputs.outputtype = 'NIFTI'
    wf.connect(gm_resample, 'out_file', gm_mask, 'in_file_a')

    wm_resample = pe.Node(afni.Resample(), name='wm_resample')
    wm_resample.inputs.outputtype = 'NIFTI'
    wf.connect(input_node, 'anatomical_wm_mask', wm_resample, 'in_file')
    wf.connect(input_node, 'mean_functional_to_standard', wm_resample,
               'master')

    wm_mask = pe.Node(afni.Calc(), name="wm_mask")
    wm_mask.inputs.expr = 'astep(a, 0.5)'
    wm_mask.inputs.outputtype = 'NIFTI'
    wf.connect(wm_resample, 'out_file', wm_mask, 'in_file_a')

    csf_resample = pe.Node(afni.Resample(), name='csf_resample')
    csf_resample.inputs.outputtype = 'NIFTI'
    wf.connect(input_node, 'anatomical_csf_mask', csf_resample, 'in_file')
    wf.connect(input_node, 'mean_functional_to_standard', csf_resample,
               'master')

    csf_mask = pe.Node(afni.Calc(), name="csf_mask")
    csf_mask.inputs.expr = 'astep(a, 0.5)'
    csf_mask.inputs.outputtype = 'NIFTI'
    wf.connect(csf_resample, 'out_file', csf_mask, 'in_file_a')

    carpet_plot = pe.Node(Function(input_names=[
        'gm_mask', 'wm_mask', 'csf_mask', 'functional_to_standard', 'output'
    ],
                                   output_names=['carpet_plot'],
                                   function=gen_carpet_plt,
                                   as_module=True),
                          name='carpet_plot')

    carpet_plot.inputs.output = output_image
    wf.connect(gm_mask, 'out_file', carpet_plot, 'gm_mask')
    wf.connect(wm_mask, 'out_file', carpet_plot, 'wm_mask')
    wf.connect(csf_mask, 'out_file', carpet_plot, 'csf_mask')
    wf.connect(input_node, 'functional_to_standard', carpet_plot,
               'functional_to_standard')
    wf.connect(carpet_plot, 'carpet_plot', output_node, 'carpet_plot')

    return wf
Example 13
def generate_summarize_tissue_mask(nuisance_wf,
                                   pipeline_resource_pool,
                                   regressor_descriptor,
                                   regressor_selector,
                                   use_ants=True):
    """
    Add tissue mask generation into pipeline according to the selector.

    :param nuisance_wf: Nuisance regressor workflow.
    :param pipeline_resource_pool: dictionary of available resources.
    :param regressor_descriptor: dictionary of steps to build, including keys:
        'tissue', 'resolution', 'erosion'
    :param regressor_selector: dictionary with the original selector
    :param use_ants: flag indicating whether ANTS (True) or FSL FLIRT (False)
        is used to bring the ventricle mask into anatomical space

    :return: tuple of the updated pipeline_resource_pool and the key of the
        tissue mask resource added to the pool by this operation.
    """

    steps = [
        key
        for key in ['tissue', 'resolution', 'erosion']
        if key in regressor_descriptor
    ]

    full_mask_key = "_".join(
        regressor_descriptor[s]
        for s in steps
    )

    for step_i, step in enumerate(steps):

        mask_key = "_".join(
            regressor_descriptor[s]
            for s in steps[:step_i+1]
        )

        if mask_key in pipeline_resource_pool:
            continue

        node_mask_key = re.sub(r"[^\w]", "_", mask_key)

        prev_mask_key = "_".join(
            regressor_descriptor[s]
            for s in steps[:step_i]
        )

        if step == 'tissue':

            if mask_key.startswith('FunctionalVariance'):

                create_variance_mask_node = pe.Node(
                    Function(
                        input_names=[
                            'functional_file_path',
                            'mask_file_path',
                            'threshold',
                            'by_slice'
                        ],
                        output_names=['mask_file_path'],
                        function=create_temporal_variance_mask,
                        as_module=True,
                    ),
                    name='create_temporal_variance_mask_{}'
                         .format(node_mask_key)
                )

                nuisance_wf.connect(*(
                    pipeline_resource_pool['Functional'] +
                    (create_variance_mask_node, 'functional_file_path')
                ))

                nuisance_wf.connect(*(
                    pipeline_resource_pool['GlobalSignal'] +
                    (create_variance_mask_node, 'mask_file_path')
                ))

                create_variance_mask_node.inputs.threshold = \
                    regressor_selector['threshold']

                create_variance_mask_node.inputs.by_slice = \
                    regressor_selector['by_slice']

                pipeline_resource_pool[mask_key] = \
                    (create_variance_mask_node, 'mask_file_path')

        elif step == 'resolution':

            mask_to_epi = pe.Node(interface=fsl.FLIRT(),
                                  name='{}_flirt'
                                       .format(node_mask_key))

            mask_to_epi.inputs.interp = 'nearestneighbour'

            if regressor_selector['extraction_resolution'] == "Functional":
                nuisance_wf.connect(*(
                    pipeline_resource_pool['Functional'] +
                    (mask_to_epi, 'reference')
                ))
            else:

                resolution = regressor_selector['extraction_resolution']
                mask_to_epi.inputs.apply_isoxfm = \
                    resolution

                nuisance_wf.connect(*(
                    pipeline_resource_pool['Anatomical_{}mm'
                                           .format(resolution)] +
                    (mask_to_epi, 'reference')
                ))

            nuisance_wf.connect(*(
                pipeline_resource_pool[prev_mask_key] +
                (mask_to_epi, 'in_file')
            ))

            pipeline_resource_pool[mask_key] = \
                (mask_to_epi, 'out_file')


        elif step == 'erosion':

            erode_mask_node = pe.Node(interface=afni.Calc(),
                                      name='{}'.format(node_mask_key))
            erode_mask_node.inputs.args = "-b a+i -c a-i -d a+j " + \
                                          "-e a-j -f a+k -g a-k"
            erode_mask_node.inputs.expr = 'a*(1-amongst(0,b,c,d,e,f,g))'
            erode_mask_node.inputs.outputtype = 'NIFTI_GZ'
            erode_mask_node.inputs.out_file = 'erode_mask_node.nii.gz'

            nuisance_wf.connect(*(
                pipeline_resource_pool[prev_mask_key] +
                (erode_mask_node, 'in_file_a')
            ))

            pipeline_resource_pool[mask_key] = \
                (erode_mask_node, 'out_file')

    # Mask CSF with Ventricles
    if full_mask_key.startswith('CerebrospinalFluid'):

        if '{}_Unmasked'.format(full_mask_key) not in pipeline_resource_pool:

            # reduce CSF mask to the lateral ventricles
            mask_csf_with_lat_ven = pe.Node(interface=afni.Calc(), name='{}_Ventricles'.format(full_mask_key))
            mask_csf_with_lat_ven.inputs.expr = 'a*b'
            mask_csf_with_lat_ven.inputs.outputtype = 'NIFTI_GZ'
            mask_csf_with_lat_ven.inputs.out_file = 'csf_lat_ven_mask.nii.gz'

            ventricles_key = 'VentriclesToAnat'
            if 'resolution' in regressor_descriptor:
                ventricles_key += '_{}'.format(regressor_descriptor['resolution'])

            if ventricles_key not in pipeline_resource_pool:

                transforms = pipeline_resource_pool['Transformations']
                
                if use_ants is True:

                    # perform the transform using ANTS
                    collect_linear_transforms = pe.Node(util.Merge(3), name='{}_ants_transforms'.format(ventricles_key))

                    nuisance_wf.connect(*(transforms['anat_to_mni_initial_xfm'] + (collect_linear_transforms, 'in1')))
                    nuisance_wf.connect(*(transforms['anat_to_mni_rigid_xfm'] + (collect_linear_transforms, 'in2')))
                    nuisance_wf.connect(*(transforms['anat_to_mni_affine_xfm'] + (collect_linear_transforms, 'in3')))

                    lat_ven_mni_to_anat = pe.Node(interface=ants.ApplyTransforms(), name='{}_ants'.format(ventricles_key))
                    lat_ven_mni_to_anat.inputs.invert_transform_flags = [True, True, True]
                    lat_ven_mni_to_anat.inputs.interpolation = 'NearestNeighbor'
                    lat_ven_mni_to_anat.inputs.dimension = 3

                    nuisance_wf.connect(collect_linear_transforms, 'out', lat_ven_mni_to_anat, 'transforms')

                    nuisance_wf.connect(*(pipeline_resource_pool['Ventricles'] + (lat_ven_mni_to_anat, 'input_image')))
                    nuisance_wf.connect(*(pipeline_resource_pool[full_mask_key] + (lat_ven_mni_to_anat, 'reference_image')))

                    pipeline_resource_pool[ventricles_key] = (lat_ven_mni_to_anat, 'output_image')


                else:

                    # perform the transform using FLIRT
                    lat_ven_mni_to_anat = pe.Node(interface=fsl.FLIRT(), name='{}_flirt'.format(ventricles_key))
                    lat_ven_mni_to_anat.inputs.interp = 'nearestneighbour'

                    resolution = regressor_selector['extraction_resolution']
                    lat_ven_mni_to_anat.inputs.apply_isoxfm = \
                        resolution

                    nuisance_wf.connect(*(transforms['mni_to_anat_linear_xfm'] + (lat_ven_mni_to_anat, 'in_matrix_file')))
                    nuisance_wf.connect(*(pipeline_resource_pool['Ventricles'] + (lat_ven_mni_to_anat, 'in_file')))
                    nuisance_wf.connect(*(pipeline_resource_pool[full_mask_key] + (lat_ven_mni_to_anat, 'reference')))

                    pipeline_resource_pool[ventricles_key] = (lat_ven_mni_to_anat, 'out_file')


            nuisance_wf.connect(*(pipeline_resource_pool[ventricles_key] + (mask_csf_with_lat_ven, 'in_file_a')))
            nuisance_wf.connect(*(pipeline_resource_pool[full_mask_key] + (mask_csf_with_lat_ven, 'in_file_b')))

            pipeline_resource_pool['{}_Unmasked'.format(full_mask_key)] = pipeline_resource_pool[full_mask_key]
            pipeline_resource_pool[full_mask_key] = (mask_csf_with_lat_ven, 'out_file')

    return pipeline_resource_pool, full_mask_key
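
To make the key-building above concrete, here is the same cumulative underscore-join applied to a hypothetical descriptor; each intermediate string is a resource-pool key produced by one step, and the last one is `full_mask_key`:

regressor_descriptor = {'tissue': 'CerebrospinalFluid',
                        'resolution': '2mm',
                        'erosion': 'Eroded'}  # hypothetical descriptor
steps = [k for k in ['tissue', 'resolution', 'erosion'] if k in regressor_descriptor]
for i in range(1, len(steps) + 1):
    print("_".join(regressor_descriptor[s] for s in steps[:i]))
# CerebrospinalFluid
# CerebrospinalFluid_2mm
# CerebrospinalFluid_2mm_Eroded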
Example 14
def create_randomise(name='randomise', working_dir=None, crash_dir=None):
    """
    Parameters
    ----------
        
    Returns
    -------
    workflow : nipype.pipeline.engine.Workflow
        Randomise workflow.
        
    Notes
    -----
    
    Workflow Inputs::
    
        
    Workflow Outputs::

    
    References
    ----------
    
    """

    if not working_dir:
        working_dir = os.path.join(os.getcwd(), 'Randomise_work_dir')
    if not crash_dir:
        crash_dir = os.path.join(os.getcwd(), 'Randomise_crash_dir')

    wf = pe.Workflow(name=name)
    wf.base_dir = working_dir
    wf.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(crash_dir)
    }

    # 'subjects', 'design_matrix_file' and 'contrast_file' are required by the
    # connections to the randomise node below
    inputspec = pe.Node(util.IdentityInterface(fields=[
        'subjects_list', 'pipeline_output_folder', 'subjects',
        'design_matrix_file', 'contrast_file', 'permutations',
        'mask_boolean', 'demean', 'c_thresh'
    ]),
                        name='inputspec')

    outputspec = pe.Node(util.IdentityInterface(fields=[
        'tstat_files', 't_corrected_p_files', 'out_tcorr_corrected',
        'out_tstat_corrected', 'thresholded_tstat_file', 'thresh_bin_out',
        'index_file', 'threshold_file', 'localmax_txt_file',
        'localmax_vol_file', 'max_file', 'mean_file', 'pval_file', 'size_file'
    ]),
                         name='outputspec')

    #merge = pe.Node(interface=fsl.Merge(), name='fsl_merge')
    #merge.inputs.dimension = 't'
    #merge.inputs.merged_file = "randomise_merged.nii.gz"

    #wf.connect(inputspec, 'subjects', merge, 'in_files')

    #mask = pe.Node(interface=fsl.maths.MathsCommand(), name='fsl_maths')
    #mask.inputs.args = '-abs -Tmin -bin'
    #mask.inputs.out_file = "randomise_mask.nii.gz"
    #wf.connect(inputspec, 'subjects', mask, 'in_file')

    randomise = pe.Node(interface=fsl.Randomise(), name='randomise')
    randomise.inputs.base_name = "randomise"
    randomise.inputs.demean = True
    randomise.inputs.tfce = True
    wf.connect([(inputspec, randomise, [
        ('subjects', 'in_file'),
        ('design_matrix_file', 'design_mat'),
        ('contrast_file', 'tcon'),
        ('permutations', 'num_perm'),
    ])])
    wf.connect(randomise, 'tstat_files', outputspec, 'tstat_files')
    wf.connect(randomise, 't_corrected_p_files', outputspec,
               't_corrected_p_files')
    #------------- issue here arises while using tfce. By not using tfce, you don't get t_corrected_p files. R V in a conundrum? --------------------#

    select_tcorrp_files = pe.Node(Function(input_names=['input_list'],
                                           output_names=['out_file'],
                                           function=select),
                                  name='select_t_corrp')

    wf.connect(randomise, 't_corrected_p_files', select_tcorrp_files,
               'input_list')
    wf.connect(select_tcorrp_files, 'out_file', outputspec,
               'out_tcorr_corrected')

    select_tstat_files = pe.Node(Function(input_names=['input_list'],
                                          output_names=['out_file'],
                                          function=select),
                                 name='select_t_stat')

    wf.connect(randomise, 'tstat_files', select_tstat_files, 'input_list')
    wf.connect(select_tstat_files, 'out_file', outputspec,
               'out_tstat_corrected')

    thresh = pe.Node(interface=fsl.Threshold(), name='fsl_threshold_contrast')
    thresh.inputs.thresh = 0.95
    thresh.inputs.out_file = 'rando_pipe_thresh_tstat.nii.gz'
    wf.connect(select_tstat_files, 'out_file', thresh, 'in_file')
    wf.connect(thresh, 'out_file', outputspec, 'thresholded_tstat_file')

    thresh_bin = pe.Node(interface=fsl.UnaryMaths(),
                         name='fsl_threshold_bin_contrast')
    thresh_bin.inputs.operation = 'bin'
    wf.connect(thresh, 'out_file', thresh_bin, 'in_file')
    wf.connect(thresh_bin, 'out_file', outputspec, 'thresh_bin_out')

    apply_mask = pe.Node(interface=fsl.ApplyMask(),
                         name='fsl_applymask_contrast')
    wf.connect(select_tstat_files, 'out_file', apply_mask, 'in_file')
    wf.connect(thresh_bin, 'out_file', apply_mask, 'mask_file')

    cluster = pe.Node(interface=fsl.Cluster(), name='cluster_contrast')
    cluster.inputs.threshold = 0.0001
    cluster.inputs.out_index_file = "index_file"
    cluster.inputs.out_localmax_txt_file = "lmax_contrast.txt"
    cluster.inputs.out_size_file = "cluster_size_contrast"
    cluster.inputs.out_threshold_file = True
    cluster.inputs.out_max_file = True
    cluster.inputs.out_mean_file = True
    cluster.inputs.out_pval_file = True
    cluster.inputs.out_size_file = True

    wf.connect(apply_mask, 'out_file', cluster, 'in_file')

    wf.connect(cluster, 'index_file', outputspec, 'index_file')
    wf.connect(cluster, 'threshold_file', outputspec, 'threshold_file')
    wf.connect(cluster, 'localmax_txt_file', outputspec, 'localmax_txt_file')
    wf.connect(cluster, 'localmax_vol_file', outputspec, 'localmax_vol_file')
    wf.connect(cluster, 'max_file', outputspec, 'max_file')
    wf.connect(cluster, 'mean_file', outputspec, 'mean_file')
    wf.connect(cluster, 'pval_file', outputspec, 'pval_file')
    wf.connect(cluster, 'size_file', outputspec, 'size_file')

    return wf
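
A hedged usage sketch; the merged 4-D input, design matrix and contrast file are hypothetical FSL-style paths matching the fields the randomise node is wired to above:

rando_wf = create_randomise(name='randomise', working_dir='/tmp/randomise_work')
rando_wf.inputs.inputspec.subjects = '/data/all_subjects_merged.nii.gz'  # hypothetical 4-D merged file
rando_wf.inputs.inputspec.design_matrix_file = '/data/design.mat'        # hypothetical FSL design matrix
rando_wf.inputs.inputspec.contrast_file = '/data/design.con'             # hypothetical FSL contrast file
rando_wf.inputs.inputspec.permutations = 500
rando_wf.run()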
Example 15
def motion_power_statistics(name='motion_stats'):
    """
    The main purpose of this workflow is to get various statistical measures
     from the movement/motion parameters obtained in functional preprocessing.

    Parameters
    ----------
    :param str name: Name of the workflow, defaults to 'motion_stats'
    :return: Nuisance workflow.
    :rtype: nipype.pipeline.engine.Workflow

    Notes
    -----

    Workflow Inputs::

        inputspec.subject_id : string
            Subject name or id

        inputspec.scan_id : string
            Functional Scan id or name

        inputspec.motion_correct : string (func/rest file or a list of func/rest nifti file)
            Path to motion corrected functional data

        inputspec.mask : string (nifti file)
            Path to file containing brain-only mask for the functional data

        inputspec.max_displacement : string (Mat file)
            maximum displacement (in mm) vector for brain voxels in each volume.
            This file is obtained in functional preprocessing step

        inputspec.movement_parameters : string (Mat file)
            1D file containing six movement/motion parameters(3 Translation, 3 Rotations)
            in different columns (roll pitch yaw dS  dL  dP), obtained in functional preprocessing step


    Workflow Outputs::

        outputspec.FDP_1D : 1D file
            mean Framewise Displacement (FD)

        outputspec.power_params : txt file
            Text file containing various power parameters for scrubbing

        outputspec.motion_params : txt file
            Text file containing various movement parameters


    Order of commands:

    - Calculate Framewise Displacement FD as per Power et al., 2012

      Differentiating head realignment parameters across frames yields a six dimensional timeseries that represents instantaneous head motion.
      Rotational displacements are converted from degrees to millimeters by calculating displacement on the surface of a sphere of radius 50 mm.[R5]

    - Calculate Framewise Displacement FD as per Jenkinson et al., 2002

    - Calculate DVARS

      DVARS (D temporal derivative of timecourses, VARS referring to RMS variance over voxels) indexes
      the rate of change of BOLD signal across the entire brain at each frame of data. To calculate
      DVARS, the volumetric timeseries is differentiated (by backwards differences) and RMS signal
      change is calculated over the whole brain. DVARS is thus a measure of how much the intensity
      of a brain image changes in comparison to the previous timepoint (as opposed to the global
      signal, which is the average value of a brain image at a timepoint).[R5]

    - Calculate Power parameters::

        MeanFD : Mean (across time/frames) of the absolute values for Framewise Displacement (FD),
        computed as described in Power et al., Neuroimage, 2012

        rootMeanSquareFD : Root mean square (RMS; across time/frames) of the absolute values for FD

        rmsFD : Root mean square (RMS; across time/frames) of the absolute values for FD

        FDquartile(top 1/4th FD) : Mean of the top 25% highest FD values

        MeanDVARS : Mean of voxel DVARS

    - Calculate Motion Parameters

      Following motion parameters are calculated::

        Subject
        Scan
        Mean Relative RMS Displacement
        Max Relative RMS Displacement
        Movements > threshold
        Mean Relative Mean Rotation
        Mean Relative Maxdisp
        Max Relative Maxdisp
        Max Abs Maxdisp
        Max Relative Roll
        Max Relative Pitch
        Max Relative Yaw
        Max Relative dS-I
        Max Relative dL-R
        Max Relative dP-A
        Mean Relative Roll
        Mean Relative Pitch
        Mean Relative Yaw
        Mean Relative dS-I
        Mean Relative dL-R
        Mean Relative dP-A
        Max Abs Roll
        Max Abs Pitch
        Max Abs Yaw
        Max Abs dS-I
        Max Abs dL-R
        Max Abs dP-A
        Mean Abs Roll
        Mean Abs Pitch
        Mean Abs Yaw
        Mean Abs dS-I
        Mean Abs dL-R
        Mean Abs dP-A

    High Level Workflow Graph:

    .. exec::
        from CPAC.generate_motion_statistics import motion_power_statistics
        wf = motion_power_statistics()
        wf.write_graph(
            graph2use='orig',
            dotfilename='./images/parameters.dot'
        )

    .. image:: ../images/parameters.png
       :width: 1000

    Detailed Workflow Graph:

    .. image:: ../images/parameters_detailed.png
       :width: 1000

    Examples
    --------
    >>> import generate_motion_statistics
    >>> wf = generate_motion_statistics.motion_power_statistics("generate_statistics")
    >>> wf.inputs.inputspec.movement_parameters = 'CPAC_outputs/sub01/func/movement_parameters/rest_mc.1D'
    >>> wf.inputs.inputspec.max_displacement = 'CPAC_outputs/sub01/func/max_displacement/max_disp.1D'
    >>> wf.inputs.inputspec.motion_correct = 'CPAC_outputs/sub01/func/motion_correct/rest_mc.nii.gz'
    >>> wf.inputs.inputspec.mask = 'CPAC_outputs/sub01/func/func_mask/rest_mask.nii.gz'
    >>> wf.inputs.inputspec.transformations = 'CPAC_outputs/sub01/func/coordinate_transformation/rest_mc.aff12.1D'
    >>> wf.inputs.inputspec.subject_id = 'sub01'
    >>> wf.inputs.inputspec.scan_id = 'rest_1'
    >>> wf.base_dir = './working_dir'
    >>> wf.run()

    References
    ----------

    .. [1] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Spurious
           but systematic correlations in functional connectivity MRI networks arise from subject motion. NeuroImage, 59(3),
           2142-2154. doi:10.1016/j.neuroimage.2011.10.018

    .. [2] Power, J. D., Barnes, K. A., Snyder, A. Z., Schlaggar, B. L., & Petersen, S. E. (2012). Steps
           toward optimizing motion artifact removal in functional connectivity MRI; a reply to Carp.
           NeuroImage. doi:10.1016/j.neuroimage.2012.03.017

    .. [3] Jenkinson, M., Bannister, P., Brady, M., Smith, S., 2002. Improved optimization for the robust
           and accurate linear registration and motion correction of brain images. Neuroimage 17, 825-841.

    """

    wf = pe.Workflow(name=name)
    input_node = pe.Node(util.IdentityInterface(fields=[
        'subject_id', 'scan_id', 'movement_parameters', 'max_displacement',
        'motion_correct', 'mask', 'transformations'
    ]),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=[
        'FDP_1D', 'FDJ_1D', 'DVARS_1D', 'power_params', 'motion_params'
    ]),
                          name='outputspec')

    cal_DVARS = pe.Node(Function(input_names=['rest', 'mask'],
                                 output_names=['out_file'],
                                 function=calculate_DVARS,
                                 as_module=True),
                        name='cal_DVARS')

    # calculate mean DVARS
    wf.connect(input_node, 'motion_correct', cal_DVARS, 'rest')
    wf.connect(input_node, 'mask', cal_DVARS, 'mask')

    wf.connect(cal_DVARS, 'out_file', output_node, 'DVARS_1D')

    # Calculating mean Framewise Displacement as per power et al., 2012
    calculate_FDP = pe.Node(Function(input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=calculate_FD_P,
                                     as_module=True),
                            name='calculate_FD')

    wf.connect(input_node, 'movement_parameters', calculate_FDP, 'in_file')
    wf.connect(calculate_FDP, 'out_file', output_node, 'FDP_1D')

    # Calculating mean Framewise Displacement as per jenkinson et al., 2002
    calculate_FDJ = pe.Node(Function(input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=calculate_FD_J,
                                     as_module=True),
                            name='calculate_FDJ')

    wf.connect(input_node, 'transformations', calculate_FDJ, 'in_file')
    wf.connect(calculate_FDJ, 'out_file', output_node, 'FDJ_1D')

    calc_motion_parameters = pe.Node(Function(input_names=[
        "subject_id", "scan_id", "movement_parameters", "max_displacement"
    ],
                                              output_names=['out_file'],
                                              function=gen_motion_parameters,
                                              as_module=True),
                                     name='calc_motion_parameters')

    wf.connect(input_node, 'subject_id', calc_motion_parameters, 'subject_id')
    wf.connect(input_node, 'scan_id', calc_motion_parameters, 'scan_id')
    wf.connect(input_node, 'movement_parameters', calc_motion_parameters,
               'movement_parameters')
    wf.connect(input_node, 'max_displacement', calc_motion_parameters,
               'max_displacement')

    wf.connect(calc_motion_parameters, 'out_file', output_node,
               'motion_params')

    calc_power_parameters = pe.Node(Function(
        input_names=["subject_id", "scan_id", "fdp", "fdj", "dvars"],
        output_names=['out_file'],
        function=gen_power_parameters,
        as_module=True),
                                    name='calc_power_parameters')

    wf.connect(input_node, 'subject_id', calc_power_parameters, 'subject_id')
    wf.connect(input_node, 'scan_id', calc_power_parameters, 'scan_id')
    wf.connect(cal_DVARS, 'out_file', calc_power_parameters, 'dvars')
    wf.connect(calculate_FDP, 'out_file', calc_power_parameters, 'fdp')
    wf.connect(calculate_FDJ, 'out_file', calc_power_parameters, 'fdj')

    wf.connect(calc_power_parameters, 'out_file', output_node, 'power_params')

    return wf
Example 16
def create_nuisance_workflow(nuisance_selectors,
                             use_ants,
                             name='nuisance'):
    """
    Workflow for the removal of various signals considered to be noise from resting state
    fMRI data.  Linear regression denoising is performed in a single model, so the residual
    time-series will be orthogonal to all of the nuisance signals.

    Parameters
    ----------
    :param nuisance_selectors: dictionary describing nuisance regression to be performed
    :param use_ants: flag indicating whether FNIRT or ANTS is used
    :param name: Name of the workflow, defaults to 'nuisance'
    :return: nuisance : nipype.pipeline.engine.Workflow
        Nuisance workflow.
        
    Notes
    -----

    Workflow Inputs
    ---------------
    Workflow Inputs::
    
        inputspec.functional_file_path : string (nifti file)
            Path to realigned and motion corrected functional image (nifti) file.

        inputspec.functional_brain_mask_file_path : string (nifti file)
            Whole brain mask corresponding to the functional data.

        inputspec.anatomical_file_path : string (nifti file)
            Corresponding preprocessed anatomical.
        inputspec.wm_mask_file_path : string (nifti file)
            Corresponding white matter mask.
        inputspec.csf_mask_file_path : string (nifti file)
            Corresponding cerebral spinal fluid mask.
        inputspec.gm_mask_file_path : string (nifti file)
            Corresponding grey matter mask.
        inputspec.lat_ventricles_mask_file_path : string (nifti file)
            Mask of lateral ventricles calculated from the Harvard Oxford Atlas.

        inputspec.mni_to_anat_linear_xfm_file_path: string (nifti file)
            FLIRT Linear MNI to Anat transform
        inputspec.anat_to_mni_initial_xfm_file_path: string (nifti file)
            ANTS initial transform from anat to MNI
        inputspec.anat_to_mni_rigid_xfm_file_path: string (nifti file)
            ANTS rigid (6 parameter, no scaling) transform from anat to MNI
        inputspec.anat_to_mni_affine_xfm_file_path: string (nifti file)
            ANTS affine (13 parameter, scales and shears) transform from anat to MNI

        inputspec.func_to_anat_linear_xfm_file_path: string (nifti file)
            FLIRT Linear Transform between functional and anatomical spaces 

        inputspec.motion_parameter_file_path : string (text file)
            Corresponding rigid-body motion parameters. Matrix in the file should be of shape 
            (`T`, `R`), `T` time points and `R` motion parameters.
        inputspec.fd_j_file_path : string (text file)
            Framewise displacement calculated from the volume alignment.
        inputspec.fd_p_file_path : string (text file)
            Framewise displacement calculated from the motion parameters.
        inputspec.dvars_file_path : string (text file)
            DVARS calculated from the functional data.

        inputspec.selector : Dictionary containing configuration parameters for nuisance regression.
            To not run a type of nuisance regression, it may be omitted from the dictionary.
            selector = {
                aCompCor: {
                    summary: {
                        method: 'DetrendPC', aCompCor will always extract the principal components from
                            detrended tissues signal,
                        components: number of components to retain,
                    },
                    tissues: list of tissues to extract regressors.
                        Valid values are: 'WhiteMatter', 'CerebrospinalFluid',
                    extraction_resolution: None | floating point value indicating isotropic
                        resolution (ex. 2 for 2mm x 2mm x 2mm) that data should be extracted at;
                        the corresponding tissue mask will be resampled to this resolution. The
                        functional data will also be resampled to this resolution, and the
                        extraction will occur at this new resolution. The goal is to avoid
                        contamination from undesired tissue components when extracting nuisance
                        regressors,
                    erode_mask: True | False, whether or not the mask should be eroded to
                        further avoid a mask overlapping with a different tissue class,
                    include_delayed: True | False, whether or not to include a one-frame delay regressor,
                        defaults to False,
                    include_squared: True | False, whether or not to include a squared regressor,
                        defaults to False,
                    include_delayed_squared: True | False, whether or not to include a squared one-frame
                        delay regressor, defaults to False,
                },
                tCompCor: {
                    summary: {
                        method: 'PC', tCompCor will always extract the principal components from
                            BOLD signal,
                        components: number of components to retain,
                    },
                    threshold:
                        floating point number = cutoff as raw variance value,
                        floating point number followed by SD (ex. 1.5SD) = mean + a multiple of the SD,
                        floating point number followed by PCT (ex. 2PCT) = percentile from the top (ex is top 2%),
                    by_slice: True | False, whether or not the threshold criterion should be applied
                        by slice or across the entire volume, makes most sense for thresholds
                        using SD or PCT,
                    include_delayed: True | False,
                    include_squared: True | False,
                    include_delayed_squared: True | False,
                },
                WhiteMatter: {
                    summary: {
                        method: 'PC', 'DetrendPC', 'Mean', 'NormMean' or 'DetrendNormMean',
                        components: number of components to retain, if PC,
                    },
                    extraction_resolution: None | floating point value (same as for aCompCor),
                    erode_mask: True | False (same as for aCompCor),
                    include_delayed: True | False (same as for aCompCor),
                    include_squared: True | False (same as for aCompCor),
                    include_delayed_squared: True | False (same as for aCompCor),
                },
                CerebrospinalFluid: {
                    summary: {
                        method: 'PC', 'DetrendPC', 'Mean', 'NormMean' or 'DetrendNormMean',
                        components: number of components to retain, if PC,
                    },
                    extraction_resolution: None | floating point value (same as for aCompCor),
                    erode_mask: True | False (same as for aCompCor),
                    include_delayed: True | False (same as for aCompCor),
                    include_squared: True | False (same as for aCompCor),
                    include_delayed_squared: True | False (same as for aCompCor),
                },
                GreyMatter: {
                    summary: {
                        method: 'PC', 'DetrendPC', 'Mean', 'NormMean' or 'DetrendNormMean',
                        components: number of components to retain, if PC,
                    },
                    extraction_resolution: None | floating point value (same as for aCompCor),
                    erode_mask: True | False (same as for aCompCor),
                    include_delayed: True | False (same as for aCompCor),
                    include_squared: True | False (same as for aCompCor),
                    include_delayed_squared: True | False (same as for aCompCor),
                },
                GlobalSignal: {
                    summary: {
                        method: 'PC', 'DetrendPC', 'Mean', 'NormMean' or 'DetrendNormMean',
                        components: number of components to retain, if PC,
                    },
                    include_delayed: True | False (same as for aCompCor),
                    include_squared: True | False (same as for aCompCor),
                    include_delayed_squared: True | False (same as for aCompCor),
                },
                Motion: None | { 
                    include_delayed: True | False (same as for aCompCor),
                    include_squared: True | False (same as for aCompCor),
                    include_delayed_squared: True | False (same as for aCompCor),
                },
                Censor: {
                    method: 'Kill', 'Zero', 'Interpolate', 'SpikeRegression',
                    thresholds: list of dictionaries, {
                        type: 'FD_J', 'FD_P', 'DVARS',
                        value: threshold value to be applied to metric
                    },
                    number_of_previous_trs_to_censor: integer, number of previous
                        TRs to censor (remove or regress, if spike regression)
                    number_of_subsequent_trs_to_censor: integer, number of
                        subsequent TRs to censor (remove or regress, if spike
                        regression)
                },
                PolyOrt: {
                    degree: integer, polynomial degree up to which polynomials will be removed,
                        e.g. 2 means constant + linear + quadratic; in practice
                        that is probably the most that will be needed, especially
                        if band-pass filtering is also applied
                },
                Bandpass: {
                    bottom_frequency: floating point value, frequency in hertz of
                        the highpass part of the pass band, frequencies below this
                        will be removed,
                    top_frequency: floating point value, frequency in hertz of the
                        lowpass part of the pass band, frequencies above this
                        will be removed
                }
            }
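
            For illustration only, a minimal selector might look like::

                selector = {
                    'WhiteMatter': {'summary': {'method': 'DetrendPC', 'components': 5}},
                    'CerebrospinalFluid': {'summary': 'Mean', 'erode_mask': True},
                    'Motion': {'include_delayed': True},
                    'PolyOrt': {'degree': 2},
                    'Bandpass': {'bottom_frequency': 0.01, 'top_frequency': 0.1},
                }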

    Workflow Outputs::

        outputspec.residual_file_path : string (nifti file)
            Path of residual file in nifti format
        outputspec.regressors_file_path : string (TSV file)
            Path of TSV file of regressors used. Column names indicate the regressors included.

    Nuisance Procedure:

    1. Compute nuisance regressors based on input selections.
    2. Calculate residuals with respect to these nuisance regressors in a
       single model for every voxel.

    High Level Workflow Graph:

    .. exec::
        from CPAC.nuisance import create_nuisance_workflow
        wf = create_nuisance_workflow({
            'PolyOrt': {'degree': 2},
            'tCompCor': {'summary': {'method': 'PC', 'components': 5}, 'threshold': '1.5SD', 'by_slice': True},
            'aCompCor': {'summary': {'method': 'PC', 'components': 5}, 'tissues': ['WhiteMatter', 'CerebrospinalFluid'], 'extraction_resolution': 2},
            'WhiteMatter': {'summary': {'method': 'PC', 'components': 5}, 'extraction_resolution': 2},
            'CerebrospinalFluid': {'summary': {'method': 'PC', 'components': 5}, 'extraction_resolution': 2, 'erode_mask': True},
            'GreyMatter': {'summary': {'method': 'PC', 'components': 5}, 'extraction_resolution': 2, 'erode_mask': True},
            'GlobalSignal': {'summary': 'Mean', 'include_delayed': True, 'include_squared': True, 'include_delayed_squared': True},
            'Motion': {'include_delayed': True, 'include_squared': True, 'include_delayed_squared': True},
            'Censor': {'method': 'Interpolate', 'thresholds': [{'type': 'FD_J', 'value': 0.5}, {'type': 'DVARS', 'value': 0.7}]}
        }, use_ants=False)

        wf.write_graph(
            graph2use='orig',
            dotfilename='./images/nuisance.dot'
        )

    .. image:: ../images/nuisance.png
       :width: 1000

    Detailed Workflow Graph:

    .. image:: ../images/nuisance_detailed.png
       :width: 1000

    """

    nuisance_wf = pe.Workflow(name=name)

    inputspec = pe.Node(util.IdentityInterface(fields=[
        'selector',
        'functional_file_path',

        'anatomical_file_path',
        'gm_mask_file_path',
        'wm_mask_file_path',
        'csf_mask_file_path',
        'lat_ventricles_mask_file_path',

        'functional_brain_mask_file_path',

        'func_to_anat_linear_xfm_file_path',
        'mni_to_anat_linear_xfm_file_path',
        'anat_to_mni_initial_xfm_file_path',
        'anat_to_mni_rigid_xfm_file_path',
        'anat_to_mni_affine_xfm_file_path',

        'motion_parameters_file_path',
        'fd_j_file_path',
        'fd_p_file_path',
        'dvars_file_path',
    ]), name='inputspec')

    outputspec = pe.Node(util.IdentityInterface(fields=['residual_file_path',
                                                        'regressors_file_path']),
                         name='outputspec')

    # Resources to create regressors
    pipeline_resource_pool = {
        "Anatomical": (inputspec, 'anatomical_file_path'),
        "Functional": (inputspec, 'functional_file_path'),
        "GlobalSignal": (inputspec, 'functional_brain_mask_file_path'),
        "WhiteMatter": (inputspec, 'wm_mask_file_path'),
        "CerebrospinalFluid": (inputspec, 'csf_mask_file_path'),
        "GreyMatter": (inputspec, 'gm_mask_file_path'),
        "Ventricles": (inputspec, 'lat_ventricles_mask_file_path'),

        "Transformations": {
            "func_to_anat_linear_xfm": (inputspec, "func_to_anat_linear_xfm_file_path"),
            "mni_to_anat_linear_xfm": (inputspec, "mni_to_anat_linear_xfm_file_path"),
            "anat_to_mni_initial_xfm": (inputspec, "anat_to_mni_initial_xfm_file_path"),
            "anat_to_mni_rigid_xfm": (inputspec, "anat_to_mni_rigid_xfm_file_path"),
            "anat_to_mni_affine_xfm": (inputspec, "anat_to_mni_affine_xfm_file_path"),
        }
    }

    # Regressor map to simplify construction of the needed regressors
    regressors = {
        'GreyMatter': ['grey_matter_summary_file_path', ()],
        'WhiteMatter': ['white_matter_summary_file_path', ()],
        'CerebrospinalFluid': ['csf_summary_file_path', ()],
        'aCompCor': ['acompcor_file_path', ()],
        'tCompCor': ['tcompcor_file_path', ()],
        'GlobalSignal': ['global_summary_file_path', ()],
        'DVARS': ['dvars_file_path', (inputspec, 'dvars_file_path')],
        'FD_J': ['framewise_displacement_j_file_path', (inputspec, 'framewise_displacement_j_file_path')],
        'FD_P': ['framewise_displacement_p_file_path', (inputspec, 'framewise_displacement_p_file_path')],
        'Motion': ['motion_parameters_file_path', (inputspec, 'motion_parameters_file_path')]
    }

    derived = ['tCompCor', 'aCompCor']
    tissues = ['GreyMatter', 'WhiteMatter', 'CerebrospinalFluid']

    for regressor_type, regressor_resource in regressors.items():

        if regressor_type not in nuisance_selectors:
            continue

        regressor_selector = nuisance_selectors[regressor_type]

        # Set summary method for tCompCor and aCompCor
        if regressor_type in derived:

            if 'summary' not in regressor_selector:
                regressor_selector['summary'] = {}

            if type(regressor_selector['summary']) is not dict:
                raise ValueError("Regressor {0} requires PC summary method, "
                                 "but {1} specified"
                                 .format(regressor_type,
                                         regressor_selector['summary']))

            regressor_selector['summary']['method'] = \
                'DetrendPC' if regressor_type == 'aCompCor' else 'PC'

            if not regressor_selector['summary'].get('components'):
                regressor_selector['summary']['components'] = 1

        # If regressor is not present, build up the regressor
        if not regressor_resource[1]:

            # We don't have the regressor, look for it in the resource pool,
            # build a corresponding key, this is separated into a mask key
            # and an extraction key, which when concatenated provide the
            # resource key for the regressor
            regressor_descriptor = {'tissue': regressor_type}

            if regressor_type == 'aCompCor':
                if not regressor_selector.get('tissues'):
                    raise ValueError("Tissue type required for aCompCor, "
                                     "but none specified")

                regressor_descriptor = {
                    'tissue': regressor_selector['tissues']
                }


            if regressor_type == 'tCompCor':
                if not regressor_selector.get('threshold'):
                    raise ValueError("Threshold required for tCompCor, "
                                     "but none specified.")

                regressor_descriptor = {
                    'tissue': 'FunctionalVariance-{}'
                              .format(regressor_selector['threshold'])
                }

                if regressor_selector.get('by_slice'):
                    regressor_descriptor['tissue'] += '-BySlice'
                else:
                    regressor_selector['by_slice'] = False


            # Add selector into regressor description

            if regressor_selector.get('extraction_resolution'):
                regressor_descriptor['resolution'] = \
                    str(regressor_selector['extraction_resolution']) + "mm"

            elif regressor_type in tissues:
                regressor_selector['extraction_resolution'] = "Functional"
                regressor_descriptor['resolution'] = "Functional"

            if regressor_selector.get('erode_mask'):
                regressor_descriptor['erosion'] = 'Eroded'

            if not regressor_selector.get('summary'):
                raise ValueError("Summary method required for {0}, "
                                 "but none specified".format(regressor_type))

            if type(regressor_selector['summary']) is dict:
                regressor_descriptor['extraction'] = \
                    regressor_selector['summary']['method']
            else:
                regressor_descriptor['extraction'] = \
                    regressor_selector['summary']

            if regressor_descriptor['extraction'] in ['DetrendPC', 'PC']:
                if not regressor_selector['summary'].get('components'):
                    raise ValueError("Summary method PC requires components, "
                                     "but received none.")

                regressor_descriptor['extraction'] += \
                    '_{0}'.format(regressor_selector['summary']['components'])

            if type(regressor_descriptor['tissue']) is not list:
                regressor_descriptor['tissue'] = \
                    [regressor_descriptor['tissue']]


            if regressor_selector.get('extraction_resolution') and \
                    regressor_selector["extraction_resolution"] != "Functional":

                functional_at_resolution_key = "Functional_{0}mm".format(
                    regressor_selector["extraction_resolution"]
                )

                anatomical_at_resolution_key = "Anatomical_{0}mm".format(
                    regressor_selector["extraction_resolution"]
                )

                if anatomical_at_resolution_key not in pipeline_resource_pool:
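                    # Resample the anatomical image to the requested isotropic resolution (FLIRT apply_isoxfm, self-referenced)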

                    anat_resample = pe.Node(
                        interface=fsl.FLIRT(),
                        name='{}_flirt'
                             .format(anatomical_at_resolution_key)
                    )
                    anat_resample.inputs.apply_isoxfm = regressor_selector["extraction_resolution"]

                    nuisance_wf.connect(*(
                        pipeline_resource_pool['Anatomical'] +
                        (anat_resample, 'in_file')
                    ))

                    nuisance_wf.connect(*(
                        pipeline_resource_pool['Anatomical'] +
                        (anat_resample, 'reference')
                    ))

                    pipeline_resource_pool[anatomical_at_resolution_key] = \
                        (anat_resample, 'out_file')

                if functional_at_resolution_key not in pipeline_resource_pool:
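                    # Resample the functional into anatomical space at the same resolution, applying the func-to-anat affine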

                    func_resample = pe.Node(
                        interface=fsl.FLIRT(),
                        name='{}_flirt'
                             .format(functional_at_resolution_key)
                    )
                    func_resample.inputs.apply_xfm = True

                    nuisance_wf.connect(*(
                        pipeline_resource_pool['Transformations']['func_to_anat_linear_xfm'] +
                        (func_resample, 'in_matrix_file')
                    ))

                    nuisance_wf.connect(*(
                        pipeline_resource_pool['Functional'] +
                        (func_resample, 'in_file')
                    ))

                    nuisance_wf.connect(*(
                        pipeline_resource_pool[anatomical_at_resolution_key] +
                        (func_resample, 'reference')
                    ))

                    pipeline_resource_pool[functional_at_resolution_key] = \
                        (func_resample, 'out_file')

            # Create merger to summarize the functional timeseries
            regressor_mask_file_resource_keys = []
            for tissue in regressor_descriptor['tissue']:

                # Ignore non-tissue masks
                if tissue not in tissues and \
                    not tissue.startswith('FunctionalVariance'):
                    regressor_mask_file_resource_keys += [tissue]
                    continue

                tissue_regressor_descriptor = regressor_descriptor.copy()
                tissue_regressor_descriptor['tissue'] = tissue

                # Generate resource masks
                (pipeline_resource_pool,
                 regressor_mask_file_resource_key) = \
                    generate_summarize_tissue_mask(
                        nuisance_wf,
                        pipeline_resource_pool,
                        tissue_regressor_descriptor,
                        regressor_selector,
                        use_ants=use_ants
                    )

                regressor_mask_file_resource_keys += \
                    [regressor_mask_file_resource_key]

            # Keep tissues ordered, to avoid duplicates
            regressor_mask_file_resource_keys = \
                list(sorted(regressor_mask_file_resource_keys))

            # Create key for the final regressors
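            # e.g. 'WhiteMatter_2mm_Eroded_DetrendPC_5' (illustrative)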
            regressor_file_resource_key = "_".join([
                "-".join(regressor_descriptor[key])
                if type(regressor_descriptor[key]) == list
                else regressor_descriptor[key]

                for key in ['tissue', 'resolution', 'erosion', 'extraction']
                if key in regressor_descriptor
            ])

            if regressor_file_resource_key not in pipeline_resource_pool:

                # Retrieve summary from voxels at provided mask
                summarize_timeseries_node = pe.Node(
                    Function(
                        input_names=[
                            'functional_path',
                            'masks_path',
                            'summary'
                        ],
                        output_names=['components_path'],
                        function=summarize_timeseries,
                        as_module=True,
                    ),
                    name='{}_summarization'.format(regressor_type)
                )

                summarize_timeseries_node.inputs.summary = \
                    regressor_selector['summary']

                # Merge mask paths to extract voxel timeseries
                merge_masks_paths = pe.Node(
                    util.Merge(len(regressor_mask_file_resource_keys)),
                    name='{}_merge_masks'.format(regressor_type)
                )
                for i, regressor_mask_file_resource_key in \
                        enumerate(regressor_mask_file_resource_keys):

                    node, node_output = \
                        pipeline_resource_pool[regressor_mask_file_resource_key]

                    nuisance_wf.connect(
                        node, node_output,
                        merge_masks_paths, "in{}".format(i + 1)
                    )

                nuisance_wf.connect(
                    merge_masks_paths, 'out',
                    summarize_timeseries_node, 'masks_path'
                )

                functional_key = 'Functional'
                if regressor_selector.get('extraction_resolution') and \
                        regressor_selector["extraction_resolution"] != "Functional":

                    functional_key = 'Functional_{}mm'.format(
                        regressor_selector['extraction_resolution']
                    )

                nuisance_wf.connect(*(
                    pipeline_resource_pool[functional_key] +
                    (summarize_timeseries_node, 'functional_path')
                ))

                pipeline_resource_pool[regressor_file_resource_key] = \
                    (summarize_timeseries_node, 'components_path')

                # Add it to internal resource pool
                regressor_resource[1] = \
                    pipeline_resource_pool[regressor_file_resource_key]

    # Build regressors and combine them into a single file
    build_nuisance_regressors = pe.Node(Function(
        input_names=['functional_file_path',
                     'selector',
                     'grey_matter_summary_file_path',
                     'white_matter_summary_file_path',
                     'csf_summary_file_path',
                     'acompcor_file_path',
                     'tcompcor_file_path',
                     'global_summary_file_path',
                     'motion_parameters_file_path',
                     'censor_file_path'],
        output_names=['out_file'],
        function=gather_nuisance,
        as_module=True
    ), name="build_nuisance_regressors")

    nuisance_wf.connect(
        inputspec, 'functional_file_path',
        build_nuisance_regressors, 'functional_file_path'
    )

    nuisance_wf.connect(
        inputspec, 'selector',
        build_nuisance_regressors, 'selector'
    )

    # Check for any regressors to combine into files
    has_nuisance_regressors = any(
        regressor_resource[1]
        for regressor_key, regressor_resource
        in regressors.items()
    )

    if has_nuisance_regressors:
        for regressor_key, (regressor_arg, regressor_node) in regressors.items():
            if regressor_key in nuisance_selectors:
                nuisance_wf.connect(
                    regressor_node[0], regressor_node[1],
                    build_nuisance_regressors, regressor_arg
                )

    if nuisance_selectors.get('Censor'):

        censor_methods = ['Kill', 'Zero', 'Interpolate', 'SpikeRegression']

        censor_selector = nuisance_selectors.get('Censor')
        if censor_selector.get('method') not in censor_methods:
            raise ValueError("Improper censoring method specified ({0}), "
                             "should be one of {1}."
                             .format(censor_selector.get('method'),
                                     censor_methods))

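        # Flag time points exceeding the configured FD/DVARS thresholds; the resulting censor file drives scrubbing or spike regression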
        find_censors = pe.Node(Function(
            input_names=['fd_j_file_path',
                         'fd_j_threshold',
                         'fd_p_file_path',
                         'fd_p_threshold',
                         'dvars_file_path',
                         'dvars_threshold',
                         'number_of_previous_trs_to_censor',
                         'number_of_subsequent_trs_to_censor'],
            output_names=['out_file'],
            function=find_offending_time_points,
            as_module=True
        ), name="find_offending_time_points")

        if not censor_selector.get('thresholds'):
            raise ValueError(
                'Censoring requested, but thresholds not provided.'
            )

        for threshold in censor_selector['thresholds']:

            if 'type' not in threshold or threshold['type'] not in ['DVARS', 'FD_J', 'FD_P']:
                raise ValueError(
                    'Censoring requested, but with invalid threshold type.'
                )

            if 'value' not in threshold:
                raise ValueError(
                    'Censoring requested, but threshold not provided.'
                )

            if threshold['type'] == 'FD_J':
                find_censors.inputs.fd_j_threshold = threshold['value']
                nuisance_wf.connect(inputspec, "fd_j_file_path",
                                    find_censors, "fd_j_file_path")

            if threshold['type'] == 'FD_P':
                find_censors.inputs.fd_p_threshold = threshold['value']
                nuisance_wf.connect(inputspec, "fd_p_file_path",
                                    find_censors, "fd_p_file_path")

            if threshold['type'] == 'DVARS':
                find_censors.inputs.dvars_threshold = threshold['value']
                nuisance_wf.connect(inputspec, "dvars_file_path",
                                    find_censors, "dvars_file_path")

        if censor_selector.get('number_of_previous_trs_to_censor') and \
                censor_selector['method'] != 'SpikeRegression':

            find_censors.inputs.number_of_previous_trs_to_censor = \
                censor_selector['number_of_previous_trs_to_censor']

        else:
            find_censors.inputs.number_of_previous_trs_to_censor = 0

        if censor_selector.get('number_of_subsequent_trs_to_censor') and \
                censor_selector['method'] != 'SpikeRegression':

            find_censors.inputs.number_of_subsequent_trs_to_censor = \
                censor_selector['number_of_subsequent_trs_to_censor']

        else:
            find_censors.inputs.number_of_subsequent_trs_to_censor = 0

    # Use 3dTproject to perform nuisance variable regression
    nuisance_regression = pe.Node(interface=afni.TProject(),
                                  name='nuisance_regression')

    nuisance_regression.inputs.out_file = 'residuals.nii.gz'
    nuisance_regression.inputs.outputtype = 'NIFTI_GZ'
    nuisance_regression.inputs.norm = False

    if nuisance_selectors.get('Censor'):
        if nuisance_selectors['Censor']['method'] == 'SpikeRegression':
            nuisance_wf.connect(find_censors, 'out_file',
                                build_nuisance_regressors, 'censor_file_path')
        else:
            if nuisance_selectors['Censor']['method'] == 'Interpolate':
                nuisance_regression.inputs.cenmode = 'NTRP'
            else:
                nuisance_regression.inputs.cenmode = \
                    nuisance_selectors['Censor']['method'].upper()

            nuisance_wf.connect(find_censors, 'out_file',
                                nuisance_regression, 'censor')

    if nuisance_selectors.get('PolyOrt'):
        if not nuisance_selectors['PolyOrt'].get('degree'):
            raise ValueError("Polynomial orthogonalization requested, "
                             "but degree not provided.")

        nuisance_regression.inputs.polort = \
            nuisance_selectors['PolyOrt']['degree']

    else:
        nuisance_regression.inputs.polort = 0

    nuisance_wf.connect([
        (inputspec, nuisance_regression, [
            ('functional_file_path', 'in_file'),
            ('functional_brain_mask_file_path', 'mask'),
        ]),
    ])

    if has_nuisance_regressors:
        nuisance_wf.connect(build_nuisance_regressors, 'out_file',
                            nuisance_regression, 'ort')

    nuisance_wf.connect(nuisance_regression, 'out_file',
                        outputspec, 'residual_file_path')

    nuisance_wf.connect(build_nuisance_regressors, 'out_file',
                        outputspec, 'regressors_file_path')

    return nuisance_wf
Esempio n. 17
0
def create_qc_workflow(workflow, c, strategies, qc_outputs):

    qc_montage_id_a = {}
    qc_montage_id_s = {}
    qc_plot_id = {}
    qc_hist_id = {}

    for num_strat, strat in enumerate(strategies):

        nodes = strat.get_nodes_names()

        preproc, out_file = strat['functional_preprocessed']
        brain_mask, mask_file = strat['functional_brain_mask']
        func_to_anat_xfm, xfm_file = strat['functional_to_anat_linear_xfm']
        anat_ref, ref_file = strat['anatomical_brain']
        mfa, mfa_file = strat['mean_functional_in_anat']

        # make SNR plot
        qc_workflow = create_qc_snr('qc_snr_{0}'.format(num_strat))
        workflow.connect(preproc, out_file, qc_workflow,
                         'inputspec.functional_preprocessed')
        workflow.connect(brain_mask, mask_file, qc_workflow,
                         'inputspec.functional_brain_mask')
        workflow.connect(func_to_anat_xfm, xfm_file, qc_workflow,
                         'inputspec.functional_to_anat_linear_xfm')
        workflow.connect(anat_ref, ref_file, qc_workflow,
                         'inputspec.anatomical_brain')
        workflow.connect(mfa, mfa_file, qc_workflow,
                         'inputspec.mean_functional_in_anat')

        strat.update_resource_pool({
            'qc___snr_a': (qc_workflow, 'outputspec.snr_axial_image'),
            'qc___snr_s': (qc_workflow, 'outputspec.snr_sagittal_image'),
            'qc___snr_hist': (qc_workflow, 'outputspec.snr_histogram_image'),
            'qc___snr_val': (qc_workflow, 'outputspec.snr_mean')
        })

        if 3 not in qc_montage_id_a:
            qc_montage_id_a[3] = 'snr_a'
            qc_montage_id_s[3] = 'snr_s'
            qc_hist_id[3] = 'snr_hist'

        # make motion parameters plot
        mov_param, out_file = strat['movement_parameters']

        qc_workflow = create_qc_motion('qc_motion_{0}'.format(num_strat))
        workflow.connect(mov_param, out_file, qc_workflow,
                         'inputspec.motion_parameters')

        strat.update_resource_pool({
            'qc___movement_trans_plot':
            (qc_workflow, 'outputspec.motion_translation_plot'),
            'qc___movement_rot_plot':
            (qc_workflow, 'outputspec.motion_rotation_plot')
        })

        if 6 not in qc_plot_id:
            qc_plot_id[6] = 'movement_trans_plot'

        if 7 not in qc_plot_id:
            qc_plot_id[7] = 'movement_rot_plot'

        # make FD plot and volumes removed
        if 'gen_motion_stats' in nodes and 1 in c.runNuisance:
            if c.fdCalc == 'Power':
                fd, out_file = strat['frame_wise_displacement_power']
            else:
                fd, out_file = strat['frame_wise_displacement_jenkinson']

            qc_workflow = create_qc_fd('qc_fd_{0}'.format(num_strat))

            workflow.connect(fd, out_file, qc_workflow, 'inputspec.fd')

            if "De-Spiking" in c.runMotionSpike:
                excluded, out_file_ex = strat['despiking_frames_excluded']
                workflow.connect(excluded, out_file_ex, qc_workflow,
                                 'inputspec.excluded_volumes')

            elif "Scrubbing" in c.runMotionSpike:
                excluded, out_file_ex = strat['scrubbing_frames_excluded']
                workflow.connect(excluded, out_file_ex, qc_workflow,
                                 'inputspec.excluded_volumes')

            strat.update_resource_pool({
                'qc___fd_plot': (qc_workflow, 'outputspec.fd_histogram_plot')
            })

            if 8 not in qc_plot_id:
                qc_plot_id[8] = 'fd_plot'

        # make QC montages for Skull Stripping Visualization
        anat_underlay, out_file = strat['anatomical_brain']
        skull, out_file_s = strat['anatomical_reorient']

        qc_workflow = create_qc_skullstrip(
            'qc_skullstrip_{0}'.format(num_strat))
        workflow.connect(anat_underlay, out_file, qc_workflow,
                         'inputspec.anatomical_brain')
        workflow.connect(skull, out_file_s, qc_workflow,
                         'inputspec.anatomical_reorient')

        strat.update_resource_pool({
            'qc___skullstrip_vis_a': (qc_workflow, 'outputspec.axial_image'),
            'qc___skullstrip_vis_s': (qc_workflow, 'outputspec.sagittal_image')
        })

        if 1 not in qc_montage_id_a:
            qc_montage_id_a[1] = 'skullstrip_vis_a'
            qc_montage_id_s[1] = 'skullstrip_vis_s'

        # make QC montages for mni normalized anatomical image
        mni_anat_underlay, out_file = strat['mean_functional_in_anat']

        montage_mni_anat = create_montage(
            'montage_mni_anat_{0}'.format(num_strat), 'red', 'mni_anat')

        workflow.connect(mni_anat_underlay, out_file, montage_mni_anat,
                         'inputspec.underlay')
        montage_mni_anat.inputs.inputspec.overlay = os.path.abspath(
            p.resource_filename(
                'CPAC', 'resources/templates/MNI152_Edge_AllTissues.nii.gz'))

        strat.update_resource_pool({
            'qc___mni_normalized_anatomical_a':
            (montage_mni_anat, 'outputspec.axial_png'),
            'qc___mni_normalized_anatomical_s':
            (montage_mni_anat, 'outputspec.sagittal_png')
        })

        if 6 not in qc_montage_id_a:
            qc_montage_id_a[6] = 'mni_normalized_anatomical_a'
            qc_montage_id_s[6] = 'mni_normalized_anatomical_s'

        # make QC montages for CSF WM GM
        if 'seg_preproc' in nodes:

            anat_underlay, out_file = strat['anatomical_brain']
            csf_overlay, out_file_csf = strat['anatomical_csf_mask']
            wm_overlay, out_file_wm = strat['anatomical_wm_mask']
            gm_overlay, out_file_gm = strat['anatomical_gm_mask']

            montage_csf_gm_wm = create_montage_gm_wm_csf(
                'montage_csf_gm_wm_%d' % num_strat, 'montage_csf_gm_wm')

            workflow.connect(anat_underlay, out_file, montage_csf_gm_wm,
                             'inputspec.underlay')
            workflow.connect(csf_overlay, out_file_csf, montage_csf_gm_wm,
                             'inputspec.overlay_csf')
            workflow.connect(wm_overlay, out_file_wm, montage_csf_gm_wm,
                             'inputspec.overlay_wm')
            workflow.connect(gm_overlay, out_file_gm, montage_csf_gm_wm,
                             'inputspec.overlay_gm')

            strat.update_resource_pool({
                'qc___csf_gm_wm_a':
                (montage_csf_gm_wm, 'outputspec.axial_png'),
                'qc___csf_gm_wm_s':
                (montage_csf_gm_wm, 'outputspec.sagittal_png')
            })

            if 2 not in qc_montage_id_a:
                qc_montage_id_a[2] = 'csf_gm_wm_a'
                qc_montage_id_s[2] = 'csf_gm_wm_s'

        # make QC montage for Mean Functional in T1 with T1 edge
        anat, out_file = strat['anatomical_brain']
        m_f_a, out_file_mfa = strat['mean_functional_in_anat']

        anat_edge = pe.Node(Function(input_names=['in_file'],
                                     output_names=['out_file'],
                                     function=afni_Edge3,
                                     as_module=True),
                            name='anat_edge_%d' % num_strat)

        montage_anat = create_montage('montage_anat_%d' % num_strat, 'red',
                                      't1_edge_on_mean_func_in_t1')

        workflow.connect(anat, out_file, anat_edge, 'in_file')
        workflow.connect(anat_edge, 'out_file', montage_anat,
                         'inputspec.overlay')
        workflow.connect(m_f_a, out_file_mfa, montage_anat,
                         'inputspec.underlay')

        strat.update_resource_pool({
            'qc___mean_func_with_t1_edge_a':
            (montage_anat, 'outputspec.axial_png'),
            'qc___mean_func_with_t1_edge_s':
            (montage_anat, 'outputspec.sagittal_png')
        })

        if 4 not in qc_montage_id_a:
            qc_montage_id_a[4] = 'mean_func_with_t1_edge_a'
            qc_montage_id_s[4] = 'mean_func_with_t1_edge_s'

        # make QC montage for Mean Functional in MNI with MNI edge
        m_f_i, out_file = strat['mean_functional_to_standard']

        montage_mfi = create_montage('montage_mfi_%d' % num_strat, 'red',
                                     'MNI_edge_on_mean_func_mni')
        workflow.connect(m_f_i, out_file, montage_mfi, 'inputspec.underlay')
        montage_mfi.inputs.inputspec.overlay = os.path.abspath(
            p.resource_filename(
                'CPAC', 'resources/templates/MNI152_Edge_AllTissues.nii.gz'))

        strat.update_resource_pool({
            'qc___mean_func_with_mni_edge_a':
            (montage_mfi, 'outputspec.axial_png'),
            'qc___mean_func_with_mni_edge_s':
            (montage_mfi, 'outputspec.sagittal_png')
        })

        if 5 not in qc_montage_id_a:
            qc_montage_id_a[5] = 'mean_func_with_mni_edge_a'
            qc_montage_id_s[5] = 'mean_func_with_mni_edge_s'

        # Link all the derivatives to the QC pages
        idx = 7
        rp = strat.get_resource_pool()
        for key in sorted(rp.keys()):
            # qc_outputs is from the outputs CSV
            if key in qc_outputs:
                qa_montages(workflow, c, strat, num_strat, qc_montage_id_a,
                            qc_montage_id_s, qc_hist_id, key, idx)
                idx += 1

    return qc_montage_id_a, qc_montage_id_s, qc_hist_id, qc_plot_id
Esempio n. 18
0
def create_qc_snr(wf_name='qc_snr'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=[
        'functional_preprocessed', 'functional_brain_mask',
        'functional_to_anat_linear_xfm', 'anatomical_brain',
        'mean_functional_in_anat'
    ]),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=[
        'snr_axial_image', 'snr_sagittal_image', 'snr_histogram_image',
        'snr_mean'
    ]),
                          name='outputspec')

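    # Voxel-wise temporal standard deviation of the preprocessed functional, restricted to the brain mask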
    std_dev = pe.Node(afni.TStat(args='-stdev'), name='std_dev')

    std_dev.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(input_node, 'functional_preprocessed', std_dev, 'in_file')
    wf.connect(input_node, 'functional_brain_mask', std_dev, 'mask')

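    # Resample the standard-deviation map into anatomical space using the functional-to-anatomical affine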
    std_dev_anat = pe.Node(fsl.ApplyWarp(interp='trilinear'),
                           name='std_dev_anat')
    wf.connect(input_node, 'functional_to_anat_linear_xfm', std_dev_anat,
               'premat')
    wf.connect(std_dev, 'out_file', std_dev_anat, 'in_file')

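    # SNR map: mean functional (already in anatomical space) divided by the temporal standard deviation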
    snr = pe.Node(afni.Calc(expr='b/a'), name='snr')
    snr.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(input_node, 'anatomical_brain', std_dev_anat, 'ref_file')
    wf.connect(input_node, 'mean_functional_in_anat', snr, 'in_file_b')
    wf.connect(std_dev_anat, 'out_file', snr, 'in_file_a')

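    # Summarize the SNR map as a single value, reported downstream as snr_mean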
    snr_val = pe.Node(Function(input_names=['measure_file'],
                               output_names=['snr_storefl'],
                               function=cal_snr_val,
                               as_module=True),
                      name='snr_val')

    wf.connect(snr, 'out_file', snr_val, 'measure_file')

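    # Generate a histogram of the SNR values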
    hist_snr = pe.Node(Function(input_names=['measure_file', 'measure'],
                                output_names=['hist_path'],
                                function=gen_histogram,
                                as_module=True),
                       name='hist_snr')

    hist_snr.inputs.measure = 'snr'

    wf.connect(snr, 'out_file', hist_snr, 'measure_file')

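    # Trim the SNR map at the 99th percentile before creating the montage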
    snr_drop_percent = pe.Node(Function(
        input_names=['measure_file', 'percent'],
        output_names=['modified_measure_file'],
        function=drop_percent,
        as_module=True),
                               name='dp_snr')

    snr_drop_percent.inputs.percent = 99

    wf.connect(snr, 'out_file', snr_drop_percent, 'measure_file')

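    # Overlay the trimmed SNR map on the anatomical brain to produce axial and sagittal montages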
    montage_snr = create_montage('montage_snr', 'red_to_blue', 'snr')

    wf.connect(snr_drop_percent, 'modified_measure_file', montage_snr,
               'inputspec.overlay')
    wf.connect(input_node, 'anatomical_brain', montage_snr,
               'inputspec.underlay')

    wf.connect(montage_snr, 'outputspec.axial_png', output_node,
               'snr_axial_image')
    wf.connect(montage_snr, 'outputspec.sagittal_png', output_node,
               'snr_sagittal_image')
    wf.connect(hist_snr, 'hist_path', output_node, 'snr_histogram_image')
    wf.connect(snr_val, 'snr_storefl', output_node, 'snr_mean')

    return wf