Example no. 1
def create_check_for_s3_node(name,
                             file_path,
                             img_type='other',
                             creds_path=None,
                             dl_dir=None,
                             map_node=False):
    if map_node:
        check_s3_node = pe.MapNode(function.Function(
            input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
            output_names=['local_path'],
            function=check_for_s3,
            as_module=True),
                                   iterfield=['file_path'],
                                   name='check_for_s3_%s' % name)
    else:
        check_s3_node = pe.Node(function.Function(
            input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
            output_names=['local_path'],
            function=check_for_s3,
            as_module=True),
                                name='check_for_s3_%s' % name)

    check_s3_node.inputs.set(file_path=file_path,
                             creds_path=creds_path,
                             dl_dir=dl_dir,
                             img_type=img_type)

    return check_s3_node
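A minimal usage sketch (not from the source): the bucket path and download directory below are hypothetical, and the module-level names pe, function and check_for_s3 are assumed to be in scope as in the CPAC source.

wf = pe.Workflow(name='demo_s3_check')
check_node = create_check_for_s3_node(
    name='anat',
    file_path='s3://example-bucket/sub-01/anat.nii.gz',  # hypothetical path
    img_type='anat',
    creds_path=None,             # no AWS credentials file in this sketch
    dl_dir='/tmp/s3_downloads',  # hypothetical download directory
    map_node=False)
wf.add_nodes([check_node])
# after wf.run(), the node's 'local_path' output holds the local file path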
Example no. 2
def create_grp_analysis_dataflow(wf_name='gp_dataflow'):

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    from CPAC.utils import select_model_files

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['ftest', 'grp_model', 'model_name'], mandatory_inputs=True),
                        name='inputspec')

    selectmodel = pe.Node(function.Function(
        input_names=['model', 'ftest', 'model_name'],
        output_names=['fts_file', 'con_file', 'grp_file', 'mat_file'],
        function=select_model_files,
        as_module=True),
                          name='selectnode')

    wf.connect(inputnode, 'ftest', selectmodel, 'ftest')
    wf.connect(inputnode, 'grp_model', selectmodel, 'model')
    wf.connect(inputnode, 'model_name', selectmodel, 'model_name')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['fts', 'grp', 'mat', 'con'], mandatory_inputs=True),
                         name='outputspec')

    wf.connect(selectmodel, 'mat_file', outputnode, 'mat')
    wf.connect(selectmodel, 'grp_file', outputnode, 'grp')
    wf.connect(selectmodel, 'fts_file', outputnode, 'fts')
    wf.connect(selectmodel, 'con_file', outputnode, 'con')

    return wf
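A usage sketch with hypothetical paths and model name: set the three inputspec fields and run; the selected FSL model files appear on outputspec.

gp_wf = create_grp_analysis_dataflow(wf_name='gp_dataflow')
gp_wf.base_dir = '/tmp/work'                       # hypothetical working dir
gp_wf.inputs.inputspec.grp_model = '/models/grp1'  # hypothetical model path
gp_wf.inputs.inputspec.ftest = True
gp_wf.inputs.inputspec.model_name = 'design1'      # hypothetical model name
# gp_wf.run()  # outputs land on outputspec.mat/.grp/.fts/.con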
Example no. 3
def create_anat_datasource(wf_name='anat_datasource'):

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['subject', 'anat', 'creds_path', 'dl_dir'],
        mandatory_inputs=True),
                        name='inputnode')

    check_s3_node = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                            name='check_for_s3')

    wf.connect(inputnode, 'anat', check_s3_node, 'file_path')
    wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path')
    wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir')
    check_s3_node.inputs.img_type = 'anat'

    outputnode = pe.Node(util.IdentityInterface(fields=['subject', 'anat']),
                         name='outputspec')

    wf.connect(inputnode, 'subject', outputnode, 'subject')
    wf.connect(check_s3_node, 'local_path', outputnode, 'anat')

    # Return the workflow
    return wf
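A usage sketch with hypothetical values; once the workflow has run, outputspec.anat carries the local path.

anat_wf = create_anat_datasource('anat_gather')
anat_wf.inputs.inputnode.subject = 'sub-01'        # hypothetical subject id
anat_wf.inputs.inputnode.anat = 's3://example-bucket/sub-01/anat.nii.gz'
anat_wf.inputs.inputnode.creds_path = None
anat_wf.inputs.inputnode.dl_dir = '/tmp/s3_downloads'
# anat_wf.run()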
Example no. 4
def create_general_datasource(wf_name):
    from CPAC.pipeline import nipype_pipeline_engine as pe
    import nipype.interfaces.utility as util

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['unique_id', 'data', 'creds_path', 'dl_dir'],
        mandatory_inputs=True),
                        name='inputnode')

    check_s3_node = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                            name='check_for_s3')
    check_s3_node.inputs.img_type = "other"

    wf.connect(inputnode, 'data', check_s3_node, 'file_path')
    wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path')
    wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir')

    outputnode = pe.Node(util.IdentityInterface(fields=['unique_id', 'data']),
                         name='outputspec')

    wf.connect(inputnode, 'unique_id', outputnode, 'unique_id')
    wf.connect(check_s3_node, 'local_path', outputnode, 'data')

    return wf
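The same pattern generalized to any file; a sketch with hypothetical values.

gen_wf = create_general_datasource('gather_template')
gen_wf.inputs.inputnode.unique_id = 'sub-01_ses-1'  # hypothetical id
gen_wf.inputs.inputnode.data = 's3://example-bucket/template.nii.gz'
gen_wf.inputs.inputnode.creds_path = None
gen_wf.inputs.inputnode.dl_dir = '/tmp/s3_downloads'
# gen_wf.run()  # outputspec.data then carries the local file path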
Example no. 5
def calc_avg(workflow, output_name, strat, num_strat, map_node=False):
    """Calculate the average of an output using AFNI 3dmaskave."""

    if map_node:
        calc_average = pe.MapNode(interface=preprocess.Maskave(),
                                  name='{0}_mean_{1}'.format(output_name,
                                                             num_strat),
                                  iterfield=['in_file'])

        mean_to_csv = pe.MapNode(function.Function(
            input_names=['in_file', 'output_name'],
            output_names=['output_mean'],
            function=extract_output_mean,
            as_module=True),
                                 name='{0}_mean_to_txt_{1}'.format(output_name,
                                                                   num_strat),
                                 iterfield=['in_file'])
    else:
        calc_average = pe.Node(interface=preprocess.Maskave(),
                               name='{0}_mean_{1}'.format(output_name,
                                                          num_strat))

        mean_to_csv = pe.Node(function.Function(
            input_names=['in_file', 'output_name'],
            output_names=['output_mean'],
            function=extract_output_mean,
            as_module=True),
                              name='{0}_mean_to_txt_{1}'.format(output_name,
                                                                num_strat))

    mean_to_csv.inputs.output_name = output_name

    node, out_file = strat[output_name]
    workflow.connect(node, out_file, calc_average, 'in_file')
    workflow.connect(calc_average, 'out_file', mean_to_csv, 'in_file')

    strat.append_name(calc_average.name)
    strat.update_resource_pool({
        'output_means.@{0}_average'.format(output_name): (mean_to_csv, 'output_mean')
    })

    return strat
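The averaging itself is AFNI 3dmaskave. A standalone sketch of that interface outside the strategy machinery, assuming AFNI is installed and on the PATH; the input path is hypothetical.

from nipype.interfaces.afni import preprocess

maskave = preprocess.Maskave()
maskave.inputs.in_file = '/data/func_mean.nii.gz'  # hypothetical input
maskave.inputs.quiet = True                        # emit only the mean values
# result = maskave.run()  # result.outputs.out_file is a text file of means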
Example no. 6
def create_log(wf_name="log", scan_id=None):
    """
    Workflow to create log
    """

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import CPAC.utils.function as function

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=['workflow',
                                                        'log_dir',
                                                        'index',
                                                        'inputs']),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=['out_file']),
                          name='outputspec')

    write_log = pe.Node(function.Function(input_names=['workflow',
                                                       'log_dir',
                                                       'index',
                                                       'inputs',
                                                       'scan_id'],
                                          output_names=['out_file'],
                                          function=write_to_log,
                                          as_module=True),
                        name='write_log')

    write_log.inputs.scan_id = scan_id

    wf.connect([
        (
            input_node, write_log, [
                ('workflow', 'workflow'),
                ('log_dir', 'log_dir'),
                ('index', 'index'),
                ('inputs', 'inputs')
            ]
        ),
        (
            write_log, output_node, [
                ('out_file', 'out_file')
            ]
        )
    ])

    return wf
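A usage sketch; the values are hypothetical, and the exact types write_to_log expects are not shown here.

log_wf = create_log(wf_name='log', scan_id='scan_1')  # hypothetical scan id
log_wf.inputs.inputspec.workflow = 'func_preproc'     # hypothetical values
log_wf.inputs.inputspec.log_dir = '/tmp/logs'
log_wf.inputs.inputspec.index = 1
log_wf.inputs.inputspec.inputs = {}
# log_wf.run()  # outputspec.out_file points at the written log file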
Example no. 7
def blip_distcor_wf(wf_name='blip_distcor'):
    """Execute AFNI 3dQWarp to calculate the distortion "unwarp" for phase
    encoding direction EPI field map distortion correction.

        1. Skull-strip the opposite-direction phase encoding EPI.
        2. Transform the opposite-direction phase encoding EPI to the
           skull-stripped functional and pass this as the base_file to
           AFNI 3dQWarp (plus-minus).
        3. If there is a same-direction phase encoding EPI, also skull-strip
           this, and transform it to the skull-stripped functional. Then, pass
           this as the in_file to AFNI 3dQWarp (plus-minus).
        4. If there is no same-direction phase encoding EPI, pass the
           functional in as the in_file of AFNI 3dQWarp (plus-minus).
        5. Convert the 3dQWarp transforms to ANTs/ITK format.
        6. Use antsApplyTransforms, with the original functional as both the
           input and the reference, and apply the warp from 3dQWarp. The
           output of this can then proceed to func_preproc.

    :param wf_name: name for the workflow.
    :return: the configured Nipype workflow.
    """

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(
        fields=['func_mean', 'opposite_pe_epi', 'same_pe_epi']),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=[
        'blip_warp', 'blip_warp_inverse', 'new_func_mean', 'new_func_mask'
    ]),
                          name='outputspec')

    skullstrip_opposite_pe = skullstrip_functional(
        skullstrip_tool='afni',
        wf_name="{0}_skullstrip_opp_pe".format(wf_name))

    wf.connect(input_node, 'opposite_pe_epi', skullstrip_opposite_pe,
               'inputspec.func')

    opp_pe_to_func = pe.Node(interface=fsl.FLIRT(), name='opp_pe_to_func')
    opp_pe_to_func.inputs.cost = 'corratio'

    wf.connect(skullstrip_opposite_pe, 'outputspec.func_brain', opp_pe_to_func,
               'in_file')
    wf.connect(input_node, 'func_mean', opp_pe_to_func, 'reference')

    prep_qwarp_input_imports = ['import os', 'import subprocess']
    prep_qwarp_input = \
        pe.Node(function.Function(input_names=['same_pe_epi',
                                               'func_mean'],
                                  output_names=['qwarp_input'],
                                  function=same_pe_direction_prep,
                                  imports=prep_qwarp_input_imports),
                name='prep_qwarp_input')

    wf.connect(input_node, 'same_pe_epi', prep_qwarp_input, 'same_pe_epi')
    wf.connect(input_node, 'func_mean', prep_qwarp_input, 'func_mean')

    calc_blip_warp = pe.Node(afni.QwarpPlusMinus(), name='calc_blip_warp')
    calc_blip_warp.inputs.plusminus = True
    calc_blip_warp.inputs.outputtype = "NIFTI_GZ"
    calc_blip_warp.inputs.out_file = os.path.abspath("Qwarp.nii.gz")

    wf.connect(opp_pe_to_func, 'out_file', calc_blip_warp, 'base_file')
    wf.connect(prep_qwarp_input, 'qwarp_input', calc_blip_warp, 'in_file')

    convert_afni_warp_imports = ['import os', 'import nibabel as nb']
    convert_afni_warp = \
        pe.Node(function.Function(input_names=['afni_warp'],
                                  output_names=['ants_warp'],
                                  function=convert_afni_to_ants,
                                  imports=convert_afni_warp_imports),
                name='convert_afni_warp')

    wf.connect(calc_blip_warp, 'source_warp', convert_afni_warp, 'afni_warp')

    # TODO: inverse source_warp (node:source_warp_inverse)
    # wf.connect(###
    # output_node, 'blip_warp_inverse')

    undistort_func_mean = pe.Node(interface=ants.ApplyTransforms(),
                                  name='undistort_func_mean',
                                  mem_gb=.1)

    undistort_func_mean.inputs.out_postfix = '_antswarp'
    undistort_func_mean.interface.num_threads = 1
    undistort_func_mean.inputs.interpolation = "LanczosWindowedSinc"
    undistort_func_mean.inputs.dimension = 3
    undistort_func_mean.inputs.input_image_type = 0

    wf.connect(input_node, 'func_mean', undistort_func_mean, 'input_image')
    wf.connect(input_node, 'func_mean', undistort_func_mean, 'reference_image')
    wf.connect(convert_afni_warp, 'ants_warp', undistort_func_mean,
               'transforms')

    create_new_mask = skullstrip_functional(
        skullstrip_tool='afni', wf_name="{0}_new_func_mask".format(wf_name))
    wf.connect(undistort_func_mean, 'output_image', create_new_mask,
               'inputspec.func')

    wf.connect(convert_afni_warp, 'ants_warp', output_node, 'blip_warp')

    wf.connect(undistort_func_mean, 'output_image', output_node,
               'new_func_mean')
    wf.connect(create_new_mask, 'outputspec.func_brain_mask', output_node,
               'new_func_mask')

    return wf
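A usage sketch with hypothetical file paths; AFNI, FSL and ANTs must be installed for the underlying interfaces.

blip_wf = blip_distcor_wf(wf_name='blip_distcor')
blip_wf.inputs.inputspec.func_mean = '/data/func_mean.nii.gz'
blip_wf.inputs.inputspec.opposite_pe_epi = '/data/epi_opp_pe.nii.gz'
blip_wf.inputs.inputspec.same_pe_epi = '/data/epi_same_pe.nii.gz'  # optional
# blip_wf.run()  # outputs: blip_warp, new_func_mean, new_func_mask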
Example no. 8
def create_func_datasource(rest_dict, wf_name='func_datasource'):
    """Return the functional timeseries-related file paths for each
    series/scan, from the dictionary of functional files described in the data
    configuration (sublist) YAML file.

    Scan input (from inputnode) is an iterable.
    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['subject', 'scan', 'creds_path', 'dl_dir'],
        mandatory_inputs=True),
                        name='inputnode')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'subject', 'rest', 'scan', 'scan_params', 'phase_diff', 'magnitude'
    ]),
                         name='outputspec')

    # have this here for now because of the big change in the data
    # configuration format
    check_scan = pe.Node(function.Function(
        input_names=['func_scan_dct', 'scan'],
        output_names=[],
        function=check_func_scan,
        as_module=True),
                         name='check_func_scan')

    check_scan.inputs.func_scan_dct = rest_dict
    wf.connect(inputnode, 'scan', check_scan, 'scan')

    # get the functional scan itself
    selectrest = pe.Node(function.Function(
        input_names=['scan', 'rest_dict', 'resource'],
        output_names=['file_path'],
        function=get_rest,
        as_module=True),
                         name='selectrest')
    selectrest.inputs.rest_dict = rest_dict
    selectrest.inputs.resource = "scan"
    wf.connect(inputnode, 'scan', selectrest, 'scan')

    # check whether the file is on an Amazon AWS S3 bucket and download it
    # if so - otherwise, just return the local file path
    check_s3_node = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                            name='check_for_s3')

    wf.connect(selectrest, 'file_path', check_s3_node, 'file_path')
    wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path')
    wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir')
    check_s3_node.inputs.img_type = 'func'

    wf.connect(inputnode, 'subject', outputnode, 'subject')
    wf.connect(check_s3_node, 'local_path', outputnode, 'rest')
    wf.connect(inputnode, 'scan', outputnode, 'scan')

    # scan parameters CSV
    select_scan_params = pe.Node(function.Function(
        input_names=['scan', 'rest_dict', 'resource'],
        output_names=['file_path'],
        function=get_rest,
        as_module=True),
                                 name='select_scan_params')
    select_scan_params.inputs.rest_dict = rest_dict
    select_scan_params.inputs.resource = "scan_parameters"
    wf.connect(inputnode, 'scan', select_scan_params, 'scan')

    # if the scan parameters file is on AWS S3, download it
    s3_scan_params = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                             name='s3_scan_params')

    wf.connect(select_scan_params, 'file_path', s3_scan_params, 'file_path')
    wf.connect(inputnode, 'creds_path', s3_scan_params, 'creds_path')
    wf.connect(inputnode, 'dl_dir', s3_scan_params, 'dl_dir')
    wf.connect(s3_scan_params, 'local_path', outputnode, 'scan_params')

    # field map phase file, for field map distortion correction
    select_fmap_phase = pe.Node(function.Function(
        input_names=['scan', 'rest_dict', 'resource'],
        output_names=['file_path'],
        function=get_rest,
        as_module=True),
                                name='select_fmap_phase')
    select_fmap_phase.inputs.rest_dict = rest_dict
    select_fmap_phase.inputs.resource = "fmap_phase"
    wf.connect(inputnode, 'scan', select_fmap_phase, 'scan')

    s3_fmap_phase = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                            name='s3_fmap_phase')
    s3_fmap_phase.inputs.img_type = "other"
    wf.connect(select_fmap_phase, 'file_path', s3_fmap_phase, 'file_path')
    wf.connect(inputnode, 'creds_path', s3_fmap_phase, 'creds_path')
    wf.connect(inputnode, 'dl_dir', s3_fmap_phase, 'dl_dir')
    wf.connect(s3_fmap_phase, 'local_path', outputnode, 'phase_diff')

    # field map magnitude file, for field map distortion correction
    select_fmap_mag = pe.Node(function.Function(
        input_names=['scan', 'rest_dict', 'resource'],
        output_names=['file_path'],
        function=get_rest,
        as_module=True),
                              name='select_fmap_mag')
    select_fmap_mag.inputs.rest_dict = rest_dict
    select_fmap_mag.inputs.resource = "fmap_mag"
    wf.connect(inputnode, 'scan', select_fmap_mag, 'scan')

    s3_fmap_mag = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                          name='s3_fmap_mag')
    s3_fmap_mag.inputs.img_type = "other"
    wf.connect(select_fmap_mag, 'file_path', s3_fmap_mag, 'file_path')
    wf.connect(inputnode, 'creds_path', s3_fmap_mag, 'creds_path')
    wf.connect(inputnode, 'dl_dir', s3_fmap_mag, 'dl_dir')
    wf.connect(s3_fmap_mag, 'local_path', outputnode, 'magnitude')

    return wf
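A sketch of how this might be driven. The rest_dict layout below is an assumption inferred from the resource names used in this example, and all ids and paths are hypothetical; 'scan' is set up as an iterable, as the docstring notes.

rest_dict = {                     # hypothetical data-config entries
    'rest_run-1': {
        'scan': 's3://example-bucket/sub-01/func/rest_run-1.nii.gz',
        'scan_parameters': '/data/sub-01/scan_params.csv',
    },
}
func_wf = create_func_datasource(rest_dict, wf_name='func_gather')
func_wf.get_node('inputnode').iterables = ('scan', list(rest_dict.keys()))
func_wf.inputs.inputnode.subject = 'sub-01'
func_wf.inputs.inputnode.creds_path = None
func_wf.inputs.inputnode.dl_dir = '/tmp/s3_downloads'
# func_wf.run()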
Example no. 9
def create_spatial_map_dataflow(spatial_maps, wf_name='datasource_maps'):

    import os

    wf = pe.Workflow(name=wf_name)

    spatial_map_dict = {}

    for spatial_map_file in spatial_maps:

        spatial_map_file = spatial_map_file.rstrip('\r\n')
        base_file = os.path.basename(spatial_map_file)

        try:
            valid_extensions = ['.nii', '.nii.gz']

            base_name = [
                base_file[:-len(ext)] for ext in valid_extensions
                if base_file.endswith(ext)
            ][0]

            if base_name in spatial_map_dict:
                raise ValueError(
                    'Files with same name not allowed: %s %s' %
                    (spatial_map_file, spatial_map_dict[base_name]))

            spatial_map_dict[base_name] = spatial_map_file

        except IndexError:
            raise Exception('Error in spatial_map_dataflow: '
                            'file extension must be .nii or .nii.gz: '
                            '%s' % spatial_map_file)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['spatial_map', 'spatial_map_file', 'creds_path', 'dl_dir'],
        mandatory_inputs=True),
                        name='inputspec')

    spatial_map_keys, spatial_map_values = \
        zip(*spatial_map_dict.items())

    inputnode.synchronize = True
    inputnode.iterables = [
        ('spatial_map', spatial_map_keys),
        ('spatial_map_file', spatial_map_values),
    ]

    check_s3_node = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                            name='check_for_s3')

    wf.connect(inputnode, 'spatial_map_file', check_s3_node, 'file_path')
    wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path')
    wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir')
    check_s3_node.inputs.img_type = 'mask'

    select_spatial_map = pe.Node(util.IdentityInterface(fields=['out_file'],
                                                        mandatory_inputs=True),
                                 name='select_spatial_map')

    wf.connect(check_s3_node, 'local_path', select_spatial_map, 'out_file')

    return wf
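A usage sketch with hypothetical map files; the synchronized iterables fan the workflow out over each map.

spatial_maps = ['/templates/map_a.nii.gz',  # hypothetical map files
                '/templates/map_b.nii.gz']
maps_wf = create_spatial_map_dataflow(spatial_maps)
maps_wf.inputs.inputspec.creds_path = None
maps_wf.inputs.inputspec.dl_dir = '/tmp/s3_downloads'
# maps_wf.run()  # select_spatial_map.out_file carries each local path in turn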
Example no. 10
def create_fmap_datasource(fmap_dct, wf_name='fmap_datasource'):
    """Return the field map files, from the dictionary of functional files
    described in the data configuration (sublist) YAML file.
    """

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['subject', 'scan', 'creds_path', 'dl_dir'],
        mandatory_inputs=True),
                        name='inputnode')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'subject', 'rest', 'scan', 'scan_params', 'phase_diff', 'magnitude'
    ]),
                         name='outputspec')

    # get the functional scan itself
    selectrest = pe.Node(function.Function(
        input_names=['scan', 'rest_dict', 'resource'],
        output_names=['file_path'],
        function=get_rest,
        as_module=True),
                         name='selectrest')
    selectrest.inputs.rest_dict = fmap_dct
    selectrest.inputs.resource = "scan"
    wf.connect(inputnode, 'scan', selectrest, 'scan')

    # check whether the file is on an Amazon AWS S3 bucket and download it
    # if so - otherwise, just return the local file path
    check_s3_node = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                            name='check_for_s3')

    wf.connect(selectrest, 'file_path', check_s3_node, 'file_path')
    wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path')
    wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir')
    check_s3_node.inputs.img_type = 'other'

    wf.connect(inputnode, 'subject', outputnode, 'subject')
    wf.connect(check_s3_node, 'local_path', outputnode, 'rest')
    wf.connect(inputnode, 'scan', outputnode, 'scan')

    # scan parameters CSV
    select_scan_params = pe.Node(function.Function(
        input_names=['scan', 'rest_dict', 'resource'],
        output_names=['file_path'],
        function=get_rest,
        as_module=True),
                                 name='select_scan_params')
    select_scan_params.inputs.rest_dict = fmap_dct
    select_scan_params.inputs.resource = "scan_parameters"
    wf.connect(inputnode, 'scan', select_scan_params, 'scan')

    # if the scan parameters file is on AWS S3, download it
    s3_scan_params = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                             name='s3_scan_params')

    wf.connect(select_scan_params, 'file_path', s3_scan_params, 'file_path')
    wf.connect(inputnode, 'creds_path', s3_scan_params, 'creds_path')
    wf.connect(inputnode, 'dl_dir', s3_scan_params, 'dl_dir')
    wf.connect(s3_scan_params, 'local_path', outputnode, 'scan_params')

    return wf
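A sketch mirroring the functional datasource above; the fmap_dct layout is the same assumed schema, with hypothetical paths.

fmap_dct = {                      # hypothetical field-map entries
    'fmap1': {
        'scan': 's3://example-bucket/sub-01/fmap/phasediff.nii.gz',
        'scan_parameters': '/data/sub-01/fmap_params.csv',
    },
}
fmap_wf = create_fmap_datasource(fmap_dct, wf_name='fmap_gather')
fmap_wf.get_node('inputnode').iterables = ('scan', list(fmap_dct.keys()))
fmap_wf.inputs.inputnode.subject = 'sub-01'
fmap_wf.inputs.inputnode.creds_path = None
fmap_wf.inputs.inputnode.dl_dir = '/tmp/s3_downloads'
# fmap_wf.run()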
Example no. 11
def create_network_centrality_workflow(workflow, c, strategies, s3_config):

    # TODO ASH redo?
    # Check for the existence of AFNI 3dDegreeCentrality/LFCD binaries
    import subprocess
    try:
        ret_code = subprocess.check_call(['which', '3dDegreeCentrality'],
                                         stdout=open(os.devnull, 'wb'))
        if ret_code == 0:
            afni_centrality_found = True
    except subprocess.CalledProcessError:
        afni_centrality_found = False

    try:
        ret_code = subprocess.check_call(['which', '3dLFCD'],
                                         stdout=open(os.devnull, 'wb'))
        if ret_code == 0:
            afni_lfcd_found = True
    except subprocess.CalledProcessError:
        afni_lfcd_found = False


    if not any((
        True in c.degWeightOptions,
        True in c.eigWeightOptions,
        True in c.lfcdWeightOptions
    )):
        return strategies


    for num_strat, strat in enumerate(strategies[:]):

        # Resample the functional mni to the centrality mask resolution
        resample_functional_to_template = pe.Node(
            interface=fsl.FLIRT(),
            name='resample_functional_to_template_%d' % num_strat)
        resample_functional_to_template.inputs.set(
            interp='trilinear',
            in_matrix_file=c.identityMatrix,
            apply_xfm=True
        )

        # Get the nipype node and output file of the functional MNI image
        node, out_file = strat['functional_to_standard']

        # Resample the input functional file to template(roi/mask)
        workflow.connect(node, out_file,
                         resample_functional_to_template, 'in_file')

        workflow.connect(c.templateSpecificationFile, 'local_path',
                         resample_functional_to_template, 'reference')

        # Init merge node for appending method output lists to one another
        merge_node = pe.Node(function.Function(input_names=['deg_list',
                                                            'eig_list',
                                                            'lfcd_list'],
                                               output_names=['merged_list'],
                                               function=merge_lists,
                                               as_module=True),
                             name='merge_node_%d' % num_strat)

        # Connect the appropriate centrality implementation into the
        # pipeline, preferring the AFNI binaries when they are available

        # Degree/eigen check
        if afni_centrality_found:
            if True in c.degWeightOptions:
                connect_afni_centrality_workflow(
                    workflow, c, strat, num_strat,
                    resample_functional_to_template, c.templateSpecificationFile, merge_node,
                    'degree',
                    c.degCorrelationThresholdOption,
                    c.degCorrelationThreshold
                )
            if True in c.eigWeightOptions:
                connect_afni_centrality_workflow(
                    workflow, c, strat, num_strat,
                    resample_functional_to_template, c.templateSpecificationFile, merge_node,
                    'eigenvector',
                    c.eigCorrelationThresholdOption,
                    c.eigCorrelationThreshold
                )
        # Otherwise run the CPAC python workflow
        else:
            # If we're calculating degree centrality
            if True in c.degWeightOptions:
                connect_centrality_workflow(
                    workflow, c, strat, num_strat,
                    resample_functional_to_template, c.templateSpecificationFile, merge_node,
                    'degree',
                    c.degCorrelationThresholdOption,
                    c.degCorrelationThreshold,
                    c.degWeightOptions,
                    'deg_list'
                )
            # If we're calculating eigenvector centrality
            if True in c.eigWeightOptions:
                connect_centrality_workflow(
                    workflow, c, strat, num_strat,
                    resample_functional_to_template, c.templateSpecificationFile, merge_node,
                    'eigenvector',
                    c.eigCorrelationThresholdOption,
                    c.eigCorrelationThreshold,
                    c.eigWeightOptions,
                    'eig_list'
                )
        # LFCD check
        if afni_lfcd_found:
            # If we're calculating lFCD
            if True in c.lfcdWeightOptions:
                connect_afni_centrality_workflow(
                    workflow, c, strat, num_strat,
                    resample_functional_to_template, c.templateSpecificationFile, merge_node,
                    'lfcd',
                    c.lfcdCorrelationThresholdOption,
                    c.lfcdCorrelationThreshold
                )
        # Otherwise run the CPAC python workflow
        else:
            # If we're calculating lFCD
            if True in c.lfcdWeightOptions:
                connect_centrality_workflow(
                    workflow, c, strat, num_strat,
                    resample_functional_to_template, c.templateSpecificationFile, merge_node,
                    'lfcd',
                    c.lfcdCorrelationThresholdOption,
                    c.lfcdCorrelationThreshold,
                    c.lfcdWeightOptions,
                    'lfcd_list'
                )

        # Update resource pool with centrality outputs
        strat.update_resource_pool({
            'centrality': (merge_node, 'merged_list')
        })

        if 0 in c.runNetworkCentrality:
            strat = strat.fork()
            strategies += [strat]

    return strategies
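The binary-availability probe at the top of this function can be written more compactly with the standard library; a behaviorally equivalent sketch that also avoids spawning a subprocess:

import shutil

# shutil.which returns the executable's full path, or None if it is absent
afni_centrality_found = shutil.which('3dDegreeCentrality') is not None
afni_lfcd_found = shutil.which('3dLFCD') is not None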