Example #1
def connect_afni_centrality_workflow(workflow, c, strat, num_strat,
                                     resample_functional_to_template,
                                     template, merge_node,
                                     method_option, threshold_option,
                                     threshold):

    # Import packages
    from CPAC.network_centrality.afni_network_centrality \
        import create_afni_centrality_wf
    import CPAC.network_centrality.utils as cent_utils

    # Init variables
    # Set method_options variables
    if method_option == 'degree':
        out_list = 'deg_list'
    elif method_option == 'eigenvector':
        out_list = 'eig_list'
    elif method_option == 'lfcd':
        out_list = 'lfcd_list'

    # Init workflow name and resource limits
    wf_name = 'afni_centrality_%d_%s' % (num_strat, method_option)
    num_threads = c.maxCoresPerParticipant
    memory = c.memoryAllocatedForDegreeCentrality

    # Format method and threshold options properly and check for
    # errors
    method_option, threshold_option = \
        cent_utils.check_centrality_params(method_option,
                                           threshold_option,
                                           threshold)

    # Change sparsity thresholding to % to work with afni
    if threshold_option == 'sparsity':
        threshold = threshold * 100

    # Init the workflow
    afni_centrality_wf = \
        create_afni_centrality_wf(wf_name, method_option,
                                  threshold_option,
                                  threshold, num_threads, memory)

    # Connect pipeline resources to workflow
    workflow.connect(resample_functional_to_template, 'out_file',
                     afni_centrality_wf, 'inputspec.in_file')

    # Mask
    workflow.connect(template, 'local_path',
                     afni_centrality_wf, 'inputspec.template')

    # Connect outputs to merge node
    workflow.connect(afni_centrality_wf,
                     'outputspec.outfile_list',
                     merge_node,
                     out_list)
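For context, a minimal sketch of the legacy pipeline-configuration attributes this wrapper reads; the attribute names come from the accesses above, while the values are hypothetical:

# Hypothetical legacy configuration object carrying only the attributes
# used by connect_afni_centrality_workflow above
class LegacyConfig:
    maxCoresPerParticipant = 4                 # threads per AFNI command
    memoryAllocatedForDegreeCentrality = 6.0   # GB for the centrality node

c = LegacyConfig()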
Example #2
File: pipeline.py  Project: gkiar/C-PAC
def connect_centrality_workflow(workflow, c, resample_functional_to_template,
                                template_node, template_out, merge_node,
                                method_option, pipe_num):
    # Imports implied by the surrounding module; module paths assumed from
    # C-PAC's package layout
    from CPAC.network_centrality.network_centrality import create_centrality_wf
    from CPAC.network_centrality.utils import check_centrality_params

    template = c.network_centrality['template_specification_file']

    # Set method_options variables
    if method_option == 'degree_centrality':
        out_list = 'deg_list'
    elif method_option == 'eigenvector_centrality':
        out_list = 'eig_list'
    elif method_option == 'local_functional_connectivity_density':
        out_list = 'lfcd_list'

    threshold_option = c.network_centrality[method_option][
        'correlation_threshold_option']
    threshold = c.network_centrality[method_option]['correlation_threshold']

    # Init workflow name and resource limits
    wf_name = f'afni_centrality_{method_option}_{pipe_num}'
    num_threads = c.pipeline_setup['system_config'][
        'max_cores_per_participant']
    memory = c.network_centrality['memory_allocation']

    # Format method and threshold options properly and check for
    # errors
    method_option, threshold_option = check_centrality_params(
        method_option, threshold_option, threshold)

    # Change sparsity thresholding to % to work with afni
    if threshold_option == 'sparsity':
        threshold = threshold * 100

    afni_centrality_wf = \
        create_centrality_wf(wf_name, method_option,
                             c.network_centrality[method_option][
                                 'weight_options'], threshold_option,
                             threshold, num_threads, memory)

    workflow.connect(resample_functional_to_template, 'out_file',
                     afni_centrality_wf, 'inputspec.in_file')

    workflow.connect(template_node, template_out, afni_centrality_wf,
                     'inputspec.template')

    workflow.connect(afni_centrality_wf, 'outputspec.outfile_list', merge_node,
                     out_list)
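For reference, a sketch of the nested configuration block this newer wrapper reads; the key names are taken from the dictionary accesses above, and the values are hypothetical:

# Hypothetical network_centrality configuration fragment; a real C-PAC
# pipeline config carries many more settings
network_centrality = {
    'template_specification_file': '/path/to/mask_template.nii.gz',
    'memory_allocation': 6.0,  # GB
    'degree_centrality': {
        'weight_options': ['Binarized', 'Weighted'],
        'correlation_threshold_option': 'Sparsity threshold',
        'correlation_threshold': 0.001,
    },
}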
Example #3
def create_centrality_wf(wf_name, method_option, threshold_option,
                         threshold, num_threads=1, memory_gb=1.0):
    """
    Function to create the afni-based centrality workflow

    Parameters
    ----------
    wf_name : string
        the name of the workflow
    method_option : string
        'degree', 'eigenvector', or 'lfcd'
    threshold_option : string
        'significance', 'sparsity', or 'correlation'
    threshold : float
        the threshold value for thresholding the similarity matrix
    num_threads : integer (optional); default=1
        the number of threads to utilize for centrality computation
    memory_gb : float (optional); default=1.0
        the amount of memory the centrality calculation will take (GB)

    Returns
    -------
    centrality_wf : nipype Workflow
        the initialized nipype workflow for the afni centrality command
    """

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    # DegreeCentrality, ECM, and LFCD wrap AFNI's 3dDegreeCentrality,
    # 3dECM, and 3dLFCD command-line tools
    from nipype.interfaces.afni import DegreeCentrality, ECM, LFCD
    import CPAC.network_centrality.utils as utils

    test_thresh = threshold

    if threshold_option == 'sparsity':
        test_thresh = threshold / 100.0

    method_option, threshold_option = \
        utils.check_centrality_params(method_option, threshold_option, test_thresh)

    centrality_wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=['in_file',
                                                        'template',
                                                        'threshold']),
                         name='inputspec')

    input_node.inputs.threshold = threshold

    # Degree centrality
    if method_option == 'degree':
        afni_centrality_node = \
            pe.Node(DegreeCentrality(environ={'OMP_NUM_THREADS' : str(num_threads)}),
                    name='afni_centrality', mem_gb=memory_gb)
        afni_centrality_node.inputs.out_file = 'degree_centrality_merged.nii.gz'
        out_names = ('degree_centrality_binarize', 'degree_centrality_weighted')

    # Eigenvector centrality
    elif method_option == 'eigenvector':
        afni_centrality_node = \
            pe.Node(ECM(environ={'OMP_NUM_THREADS': str(num_threads)}),
                    name='afni_centrality', mem_gb=memory_gb)
        afni_centrality_node.inputs.out_file = 'eigenvector_centrality_merged.nii.gz'
        afni_centrality_node.inputs.memory = memory_gb # 3dECM input only
        out_names = ('eigenvector_centrality_binarize',
                     'eigenvector_centrality_weighted')

    # lFCD
    elif method_option == 'lfcd':
        afni_centrality_node = \
            pe.Node(LFCD(environ={'OMP_NUM_THREADS': str(num_threads)}),
                    name='afni_centrality', mem_gb=memory_gb)
        afni_centrality_node.inputs.out_file = 'lfcd_merged.nii.gz'
        out_names = ('lfcd_binarize', 'lfcd_weighted')

    afni_centrality_node.interface.num_threads = num_threads

    # Connect input image and mask template
    centrality_wf.connect(input_node, 'in_file',
                          afni_centrality_node, 'in_file')
    centrality_wf.connect(input_node, 'template',
                          afni_centrality_node, 'mask')

    # If we're doing significance thresholding, convert p-value to correlation
    if threshold_option == 'significance':
        # Check and (possibly) convert the threshold
        convert_thr_node = pe.Node(util.Function(input_names=['datafile',
                                                              'p_value',
                                                              'two_tailed'],
                                                 output_names=['rvalue_threshold'],
                                                 function=utils.convert_pvalue_to_r),
                                   name='convert_threshold')
        # Wire workflow to connect in conversion node
        centrality_wf.connect(input_node, 'in_file',
                              convert_thr_node, 'datafile')
        centrality_wf.connect(input_node, 'threshold',
                              convert_thr_node, 'p_value')
        centrality_wf.connect(convert_thr_node, 'rvalue_threshold',
                              afni_centrality_node, 'thresh')

    # Sparsity thresholding
    elif threshold_option == 'sparsity':
        # Check to make sure it's not lFCD
        if method_option == 'lfcd':
            raise Exception('Sparsity thresholding is not supported for lFCD')

        # Otherwise, connect threshold to sparsity input
        centrality_wf.connect(input_node, 'threshold',
                              afni_centrality_node, 'sparsity')

    # Correlation thresholding
    elif threshold_option == 'correlation':
        centrality_wf.connect(input_node, 'threshold',
                              afni_centrality_node, 'thresh')

    # Need to separate sub-bricks
    sep_subbriks_node = \
        pe.Node(util.Function(input_names=['nifti_file', 'out_names'],
                              output_names=['output_niftis'],
                              function=utils.sep_nifti_subbriks),
                name='sep_nifti_subbriks')

    sep_subbriks_node.inputs.out_names = out_names

    centrality_wf.connect(afni_centrality_node, 'out_file',
                          sep_subbriks_node, 'nifti_file')

    output_node = pe.Node(util.IdentityInterface(fields=['outfile_list',
                                                         'oned_output']),
                          name='outputspec')

    centrality_wf.connect(sep_subbriks_node, 'output_niftis',
                          output_node, 'outfile_list')

    return centrality_wf
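To make the interface concrete, a minimal usage sketch against the definition above; the file paths are placeholders, and AFNI must be installed for the underlying commands to run:

# Build and run a degree-centrality workflow with significance (p-value)
# thresholding; paths below are hypothetical
wf = create_centrality_wf('afni_centrality_degree', 'degree',
                          'significance', 0.001,
                          num_threads=4, memory_gb=6.0)
wf.base_dir = '/tmp/centrality_work'
inputspec = wf.get_node('inputspec')
inputspec.inputs.in_file = '/path/to/functional_in_template_space.nii.gz'
inputspec.inputs.template = '/path/to/mask_template.nii.gz'
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})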
Example #4
def create_afni_centrality_wf(wf_name, method_option, threshold_option, threshold, num_threads=1, memory_gb=1.0):
    """
    Function to create the afni-based centrality workflow

    Parameters
    ----------
    wf_name : string
        the name of the workflow
    method_option : string
        'degree', 'eigenvector', or 'lfcd'
    threshold_option : string
        'significance', 'sparsity', or 'correlation'
    threshold : float
        the threshold value for thresholding the similarity matrix
    num_threads : integer (optional); default=1
        the number of threads to utilize for centrality computation
    memory_gb : float (optional); default=1.0
        the amount of memory the centrality calculation will take (GB)

    Returns
    -------
    centrality_wf : nipype Workflow
        the initialized nipype workflow for the afni centrality command
    """

    # Import packages
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    from nipype.interfaces.afni import DegreeCentrality, ECM, LFCD
    import CPAC.network_centrality.utils as utils

    # Check the centrality parameters
    test_thresh = threshold
    if threshold_option == "sparsity":
        test_thresh = threshold / 100.0
    method_option, threshold_option = utils.check_centrality_params(method_option, threshold_option, test_thresh)

    # Init variables
    centrality_wf = pe.Workflow(name=wf_name)

    # Create inputspec node
    input_node = pe.Node(util.IdentityInterface(fields=["in_file", "template", "threshold"]), name="inputspec")

    # Input threshold
    input_node.inputs.threshold = threshold

    # Define main input/function node
    # Degree centrality
    if method_option == "degree":
        afni_centrality_node = pe.Node(
            DegreeCentrality(environ={"OMP_NUM_THREADS": str(num_threads)}), name="afni_centrality"
        )
        afni_centrality_node.inputs.out_file = "degree_centrality_merged.nii.gz"
        out_names = ("degree_centrality_binarize", "degree_centrality_weighted")
    # Eigenvector centrality
    elif method_option == "eigenvector":
        afni_centrality_node = pe.Node(ECM(environ={"OMP_NUM_THREADS": str(num_threads)}), name="afni_centrality")
        afni_centrality_node.inputs.out_file = "eigenvector_centrality_merged.nii.gz"
        afni_centrality_node.inputs.memory = memory_gb  # 3dECM input only
        out_names = ("eigenvector_centrality_binarize", "eigenvector_centrality_weighted")
    # lFCD
    elif method_option == "lfcd":
        afni_centrality_node = pe.Node(LFCD(environ={"OMP_NUM_THREADS": str(num_threads)}), name="afni_centrality")
        afni_centrality_node.inputs.out_file = "lfcd_merged.nii.gz"
        out_names = ("lfcd_binarize", "lfcd_weighted")

    # Limit its num_threads and memory via MultiProc plugin
    afni_centrality_node.interface.num_threads = num_threads
    afni_centrality_node.interface.estimated_memory_gb = memory_gb

    # Connect input image and mask template
    centrality_wf.connect(input_node, "in_file", afni_centrality_node, "in_file")
    centrality_wf.connect(input_node, "template", afni_centrality_node, "mask")

    # If we're doing significance thresholding, convert p-value to correlation
    if threshold_option == "significance":
        # Check and (possibly) convert the threshold
        convert_thr_node = pe.Node(
            util.Function(
                input_names=["datafile", "p_value", "two_tailed"],
                output_names=["rvalue_threshold"],
                function=utils.convert_pvalue_to_r,
            ),
            name="convert_threshold",
        )
        # Wire workflow to connect in conversion node
        centrality_wf.connect(input_node, "in_file", convert_thr_node, "datafile")
        centrality_wf.connect(input_node, "threshold", convert_thr_node, "p_value")
        centrality_wf.connect(convert_thr_node, "rvalue_threshold", afni_centrality_node, "thresh")
    # Sparsity thresholding
    elif threshold_option == "sparsity":
        # Check to make sure it's not lFCD
        if method_option == "lfcd":
            err_msg = "Sparsity thresholding is not supported for lFCD"
            raise Exception(err_msg)
        # Otherwise, connect threshold to sparsity input
        centrality_wf.connect(input_node, "threshold", afni_centrality_node, "sparsity")
    # Correlation thresholding
    elif threshold_option == "correlation":
        centrality_wf.connect(input_node, "threshold", afni_centrality_node, "thresh")

    # Need to separate sub-bricks
    sep_subbriks_node = pe.Node(
        util.Function(
            input_names=["nifti_file", "out_names"], output_names=["output_niftis"], function=utils.sep_nifti_subbriks
        ),
        name="sep_nifti_subbriks",
    )
    sep_subbriks_node.inputs.out_names = out_names

    # Connect the centrality output image to the separate-sub-bricks node
    centrality_wf.connect(afni_centrality_node, "out_file", sep_subbriks_node, "nifti_file")

    # Define outputs node
    output_node = pe.Node(util.IdentityInterface(fields=["outfile_list", "oned_output"]), name="outputspec")

    centrality_wf.connect(sep_subbriks_node, "output_niftis", output_node, "outfile_list")

    # Return the centrality workflow
    return centrality_wf
Example #5
def create_afni_centrality_wf(wf_name, method_option, threshold_option,
                              threshold, num_threads=1, memory_gb=1.0):
    '''
    Function to create the afni-based centrality workflow

    Parameters
    ----------
    wf_name : string
        the name of the workflow
    method_option : string
        'degree', 'eigenvector', or 'lfcd'
    threshold_option : string
        'significance', 'sparsity', or 'correlation'
    threshold : float
        the threshold value for thresholding the similarity matrix
    num_threads : integer (optional); default=1
        the number of threads to utilize for centrality computation
    memory_gb : float (optional); default=1.0
        the amount of memory the centrality calculation will take (GB)

    Returns
    -------
    centrality_wf : nipype Workflow
        the initialized nipype workflow for the afni centrality command
    '''

    # Import packages
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    from nipype.interfaces.afni import DegreeCentrality, ECM, LFCD
    import CPAC.network_centrality.utils as utils

    # Check the centrality parameters
    test_thresh = threshold
    if threshold_option == 'sparsity':
        test_thresh = threshold/100.0
    method_option, threshold_option = \
        utils.check_centrality_params(method_option, threshold_option, test_thresh)

    # Init variables
    centrality_wf = pe.Workflow(name=wf_name)

    # Create inputspec node
    input_node = pe.Node(util.IdentityInterface(fields=['in_file',
                                                        'template',
                                                        'threshold']),
                         name='inputspec')

    # Input threshold
    input_node.inputs.threshold = threshold

    # Define main input/function node
    # Degree centrality
    if method_option == 'degree':
        afni_centrality_node = \
            pe.Node(DegreeCentrality(environ={'OMP_NUM_THREADS' : str(num_threads)}),
                    name='afni_centrality', mem_gb=memory_gb)
        afni_centrality_node.inputs.out_file = 'degree_centrality_merged.nii.gz'
        out_names = ('degree_centrality_binarize', 'degree_centrality_weighted')
    # Eigenvector centrality
    elif method_option == 'eigenvector':
        afni_centrality_node = \
            pe.Node(ECM(environ={'OMP_NUM_THREADS': str(num_threads)}),
                    name='afni_centrality', mem_gb=memory_gb)
        afni_centrality_node.inputs.out_file = 'eigenvector_centrality_merged.nii.gz'
        afni_centrality_node.inputs.memory = memory_gb # 3dECM input only
        out_names = ('eigenvector_centrality_binarize',
                     'eigenvector_centrality_weighted')
    # lFCD
    elif method_option == 'lfcd':
        afni_centrality_node = \
            pe.Node(LFCD(environ={'OMP_NUM_THREADS' : str(num_threads)}),
                    name='afni_centrality', mem_gb=memory_gb)
        afni_centrality_node.inputs.out_file = 'lfcd_merged.nii.gz'
        out_names = ('lfcd_binarize', 'lfcd_weighted')

    # Limit its num_threads and memory via MultiProc plugin
    afni_centrality_node.interface.num_threads = num_threads

    # Connect input image and mask template
    centrality_wf.connect(input_node, 'in_file',
                          afni_centrality_node, 'in_file')
    centrality_wf.connect(input_node, 'template',
                          afni_centrality_node, 'mask')

    # If we're doing significance thresholding, convert p-value to correlation
    if threshold_option == 'significance':
        # Check and (possibly) convert the threshold
        convert_thr_node = pe.Node(util.Function(input_names=['datafile',
                                                              'p_value',
                                                              'two_tailed'],
                                                 output_names=['rvalue_threshold'],
                                                 function=utils.convert_pvalue_to_r),
                                   name='convert_threshold')
        # Wire workflow to connect in conversion node
        centrality_wf.connect(input_node, 'in_file',
                              convert_thr_node, 'datafile')
        centrality_wf.connect(input_node, 'threshold',
                              convert_thr_node, 'p_value')
        centrality_wf.connect(convert_thr_node, 'rvalue_threshold',
                              afni_centrality_node, 'thresh')
    # Sparsity thresholding
    elif threshold_option == 'sparsity':
        # Check to make sure it's not lFCD
        if method_option == 'lfcd':
            err_msg = 'Sparsity thresholding is not supported for lFCD'
            raise Exception(err_msg)
        # Otherwise, connect threshold to sparsity input
        centrality_wf.connect(input_node, 'threshold',
                              afni_centrality_node, 'sparsity')
    # Correlation thresholding
    elif threshold_option == 'correlation':
        centrality_wf.connect(input_node, 'threshold',
                              afni_centrality_node, 'thresh')

    # Need to separate sub-bricks
    sep_subbriks_node = \
        pe.Node(util.Function(input_names=['nifti_file', 'out_names'],
                              output_names=['output_niftis'],
                              function=utils.sep_nifti_subbriks),
                name='sep_nifti_subbriks')
    sep_subbriks_node.inputs.out_names = out_names

    # Connect the centrality output image to the separate-sub-bricks node
    centrality_wf.connect(afni_centrality_node, 'out_file',
                          sep_subbriks_node, 'nifti_file')

    # Define outputs node
    output_node = pe.Node(util.IdentityInterface(fields=['outfile_list',
                                                         'oned_output']),
                          name='outputspec')

    centrality_wf.connect(sep_subbriks_node, 'output_niftis',
                          output_node, 'outfile_list')

    # Return the centrality workflow
    return centrality_wf
Example #6
def calc_centrality(in_file, template, method_option, threshold_option,
                    threshold, allocated_memory):
    '''
    Function to calculate centrality and map the results to NIfTI images
    
    Parameters
    ----------
    in_file : string (nifti file)
        path to subject data file
    template : string (nifti file)
        path to mask/parcellation unit
    method_option : string
        accepted values are 'degree centrality', 'eigenvector centrality', and
        'lfcd'
    threshold_option : string
        accepted values are: 'significance', 'sparsity', and 'correlation'
    threshold : float
        pvalue/sparsity_threshold/threshold value
    allocated_memory : string
        amount of memory allocated to degree centrality
    
    Returns
    -------
    out_list : list
        list containing the mapped centrality images
    '''

    # Import packages
    from CPAC.network_centrality import load,\
                                        get_centrality_by_rvalue,\
                                        get_centrality_by_sparsity,\
                                        get_centrality_fast,\
                                        map_centrality_matrix,\
                                        calc_blocksize,\
                                        convert_pvalue_to_r
    from CPAC.network_centrality.utils import check_centrality_params
    from CPAC.cwas.subdist import norm_cols

    # First check input parameters and get proper formatted method/thr options
    method_option, threshold_option = \
        check_centrality_params(method_option, threshold_option, threshold)

    # Init variables
    out_list = []
    ts, aff, mask, t_type, scans = load(in_file, template)

    # Degree centrality with sparsity thresholding sizes its blocks from
    # the sparsity threshold
    if method_option == 'degree' and threshold_option == 'sparsity':
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    sparsity_thresh=threshold)
    # Eigenvector centrality needs the full similarity matrix in memory
    elif method_option == 'eigenvector':
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    include_full_matrix=True)
    # Otherwise, compute block size with regard to available memory only
    else:
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    include_full_matrix=False)
    # Normalize the timeseries for easy dot-product correlation calc.
    ts_normd = norm_cols(ts.T)

    # P-value threshold centrality
    if threshold_option == 'significance':
        r_value = convert_pvalue_to_r(in_file, threshold, two_tailed=False)
        centrality_matrix = get_centrality_by_rvalue(ts_normd,
                                                     mask,
                                                     method_option,
                                                     r_value,
                                                     block_size)
    # Sparsity threshold
    elif threshold_option == 'sparsity':
        centrality_matrix = get_centrality_by_sparsity(ts_normd,
                                                       method_option,
                                                       threshold,
                                                       block_size)
    # R-value threshold centrality
    elif threshold_option == 'correlation':
        centrality_matrix = get_centrality_by_rvalue(ts_normd,
                                                     mask,
                                                     method_option,
                                                     threshold,
                                                     block_size)
    # For the fast approach (no thresholding); legacy integer option code
    elif threshold_option == 3:
        centrality_matrix = get_centrality_fast(ts, method_option)
    # Otherwise, incorrect input for threshold_option
    else:
        err_msg = 'Threshold option: %s not supported for network centrality '\
                  'measure: %s; fix this in the pipeline config'\
                  % (str(threshold_option), str(method_option))
        raise Exception(err_msg)
 
    # Map the arrays back to images
    for mat in centrality_matrix:
        centrality_image = map_centrality_matrix(mat, aff, mask, t_type)
        out_list.append(centrality_image)

    # Finally return
    return out_list
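Finally, a hedged usage sketch of this pure-Python path, assuming the functional image has already been resampled to the template grid and that allocated_memory takes a numeric GB value (paths are hypothetical):

# Degree centrality with sparsity thresholding; the threshold value is
# passed through to get_centrality_by_sparsity unchanged
centrality_images = calc_centrality(
    in_file='/path/to/functional_in_template_space.nii.gz',
    template='/path/to/mask_template.nii.gz',
    method_option='degree',
    threshold_option='sparsity',
    threshold=0.001,
    allocated_memory=6.0)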