def get_image(matrix, template_type):
    # Map a centrality matrix back to a NIfTI image and collect it.
    # Note: `affine`, `template_data`, and `out_list` are not defined here;
    # this helper is intended to run inside an enclosing scope that provides them.
    centrality_image = map_centrality_matrix(matrix,
                                             affine,
                                             template_data,
                                             template_type)
    out_list.append(centrality_image)
Example #3
def calc_centrality(datafile,
                    template,
                    method_option,
                    threshold_option,
                    threshold,
                    weight_options,
                    allocated_memory):
    '''
    Method to calculate centrality measures and map them to NIfTI images
    
    Parameters
    ----------
    datafile : string (nifti file)
        path to subject data file
    template : string (nifti file)
        path to mask/parcellation unit
    method_option : integer
        0 - degree centrality calculation, 1 - eigenvector centrality
        calculation, 2 - lFCD calculation
    threshold_option : integer
        0 for probability p-value, 1 for sparsity threshold,
        2 for actual threshold value, and 3 for no threshold and fast approach
    threshold : a float
        pvalue/sparsity_threshold/threshold value
    weight_options : list (boolean)
        list of booleans, where, weight_options[0] corresponds to binary counting 
        and weight_options[1] corresponds to weighted counting (e.g. [True,False]) 
    allocated_memory : string
        amount of memory allocated to degree centrality
    
    Returns
    -------
    out_list : list
        list of the mapped centrality images
    '''
    
    # Import packages
    from CPAC.network_centrality import load,\
                                        get_centrality_by_rvalue,\
                                        get_centrality_by_sparsity,\
                                        get_centrality_fast,\
                                        map_centrality_matrix,\
                                        calc_blocksize,\
                                        convert_pvalue_to_r
    from CPAC.cwas.subdist import norm_cols
    
    # Check for input errors
    if weight_options.count(True) == 0:
        raise Exception("Invalid values in weight options" \
                        "At least one True value is required")
    # If it's sparsity thresholding, check for (0,1]
    if threshold_option == 1:
        if threshold <= 0 or threshold > 1:
            raise Exception('Threshold value must be greater than 0 and '\
                            'less than or equal to 1.'\
                            '\nCurrently it is set at %f' % threshold)
    if method_option == 2 and threshold_option != 2:
        raise Exception('lFCD must use correlation-type thresholding. '\
                        'Check that the pipeline configuration has this setting')
    import time
    start = time.clock()
    
    # Init variables
    out_list = []
    ts, aff, mask, t_type, scans = load(datafile, template)
    
    # Degree centrality with sparsity thresholding needs a sparsity-aware block size
    if method_option == 0 and threshold_option == 1:
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    sparsity_thresh=threshold)
    # Eigenvector centrality needs the entire correlation matrix
    elif method_option == 1:
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    include_full_matrix=True)
    # Otherwise, compute blocksize with regards to available memory
    else:
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    include_full_matrix=False)
    # Normalize the timeseries for easy dot-product correlation calc.
    ts_normd = norm_cols(ts.T)
    
    # P-value threshold centrality
    if threshold_option == 0:
        r_value = convert_pvalue_to_r(scans, threshold)
        centrality_matrix = get_centrality_by_rvalue(ts_normd, 
                                                     mask, 
                                                     method_option, 
                                                     weight_options, 
                                                     r_value, 
                                                     block_size)
    # Sparsity threshold
    elif threshold_option == 1:
        centrality_matrix = get_centrality_by_sparsity(ts_normd, 
                                                       method_option, 
                                                       weight_options, 
                                                       threshold, 
                                                       block_size)
    # R-value threshold centrality
    elif threshold_option == 2:
        centrality_matrix = get_centrality_by_rvalue(ts_normd, 
                                                     mask, 
                                                     method_option, 
                                                     weight_options, 
                                                     threshold, 
                                                     block_size)
    # For fast approach (no thresholding)
    elif threshold_option == 3:
        centrality_matrix = get_centrality_fast(ts, method_option)
    # Otherwise, incorrect input for threshold_option
    else:
        raise Exception('Option must be between 0-3 and not %s, check your '\
                        'pipeline config file' % str(threshold_option))
    
    # Print timing info
    print 'Timing:', time.clock() - start
 
    # Map the arrays back to images
    for mat in centrality_matrix:
        centrality_image = map_centrality_matrix(mat, aff, mask, t_type)
        out_list.append(centrality_image)
    
    # Finally return
    return out_list
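
A minimal usage sketch for this integer-coded interface. The file paths, memory figure, and option codes below are hypothetical placeholders chosen to match the docstring above, not values taken from the source.

# Hypothetical call: degree centrality (method 0) with a p-value threshold
# (option 0) of 0.001, producing both binarized and weighted maps; the
# memory budget is passed as a string, as the docstring describes.
centrality_imgs = calc_centrality(datafile='/path/to/subject_func.nii.gz',
                                  template='/path/to/mask.nii.gz',
                                  method_option=0,
                                  threshold_option=0,
                                  threshold=0.001,
                                  weight_options=[True, True],
                                  allocated_memory='2.0')
for img in centrality_imgs:
    print img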
def calc_centrality(in_file, template, method_option, threshold_option,
                    threshold, allocated_memory):
    '''
    Function to calculate centrality measures and map them to NIfTI images
    
    Parameters
    ----------
    in_file : string (nifti file)
        path to subject data file
    template : string (nifti file)
        path to mask/parcellation unit
    method_option : string
        accepted values are 'degree centrality', 'eigenvector centrality', and
        'lfcd'
    threshold_option : string
        accepted values are: 'significance', 'sparsity', and 'correlation'
    threshold : float
        pvalue/sparsity_threshold/threshold value
    allocated_memory : string
        amount of memory allocated to degree centrality
    
    Returns
    -------
    out_list : list
        list of the mapped centrality images
    '''

    # Import packages
    from CPAC.network_centrality import load,\
                                        get_centrality_by_rvalue,\
                                        get_centrality_by_sparsity,\
                                        get_centrality_fast,\
                                        map_centrality_matrix,\
                                        calc_blocksize,\
                                        convert_pvalue_to_r
    from CPAC.network_centrality.utils import check_centrality_params
    from CPAC.cwas.subdist import norm_cols

    # First check input parameters and get proper formatted method/thr options
    method_option, threshold_option = \
        check_centrality_params(method_option, threshold_option, threshold)

    # Init variables
    out_list = []
    ts, aff, mask, t_type, scans = load(in_file, template)

    # Degree centrality with sparsity thresholding needs a sparsity-aware block size
    if method_option == 'degree' and threshold_option == 'sparsity':
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    sparsity_thresh=threshold)
    # Eigenvector centrality requires the full correlation matrix
    elif method_option == 'eigenvector':
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    include_full_matrix=True)
    # Otherwise, compute blocksize with regards to available memory
    else:
        block_size = calc_blocksize(ts, memory_allocated=allocated_memory,
                                    include_full_matrix=False)
    # Normalize the timeseries for easy dot-product correlation calc.
    ts_normd = norm_cols(ts.T)

    # P-value threshold centrality
    if threshold_option == 'significance':
        r_value = convert_pvalue_to_r(in_file, threshold, two_tailed=False)
        centrality_matrix = get_centrality_by_rvalue(ts_normd,
                                                     mask,
                                                     method_option,
                                                     r_value,
                                                     block_size)
    # Sparsity threshold
    elif threshold_option == 'sparsity':
        centrality_matrix = get_centrality_by_sparsity(ts_normd,
                                                       method_option,
                                                       threshold,
                                                       block_size)
    # R-value threshold centrality
    elif threshold_option == 'correlation':
        centrality_matrix = get_centrality_by_rvalue(ts_normd,
                                                     mask,
                                                     method_option,
                                                     threshold,
                                                     block_size)
    # For fast approach (no thresholding)
    elif threshold_option == 3:
        centrality_matrix = get_centrality_fast(ts, method_option)
    # Otherwise, incorrect input for threshold_option
    else:
        err_msg = 'Threshold option: %s not supported for network centrality '\
                  'measure: %s; fix this in the pipeline config'\
                  % (str(threshold_option), str(method_option))
        raise Exception(err_msg)
 
    # Map the arrays back to images
    for mat in centrality_matrix:
        centrality_image = map_centrality_matrix(mat, aff, mask, t_type)
        out_list.append(centrality_image)

    # Finally return
    return out_list
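
A corresponding sketch for the newer string-based interface. The option strings follow the docstring above; the paths, threshold, and memory value are hypothetical placeholders.

# Hypothetical call: eigenvector centrality thresholded at a correlation of 0.6.
centrality_imgs = calc_centrality(in_file='/path/to/subject_func.nii.gz',
                                  template='/path/to/mask.nii.gz',
                                  method_option='eigenvector centrality',
                                  threshold_option='correlation',
                                  threshold=0.6,
                                  allocated_memory='2.0')
for img in centrality_imgs:
    print img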