def threshold_rmatrix(corr_matrix, option, threshold, scans):
    """
    Threshold a correlation matrix using one of three threshold options.

    Two-step process: first derive the correlation cutoff (r_value) from
    the chosen option, then apply it element-wise to the matrix.

    Parameters
    ----------
    corr_matrix : string (numpy npy file)
        path to file containing the correlation matrix
    option : int
        threshold option: 0 -> p-value, 1 -> sparsity,
        any other value -> `threshold` is used directly as the r cutoff
    threshold : float
        p-value / sparsity threshold / correlation threshold,
        interpreted according to `option`
    scans : int
        total number of scans in the input data (degrees of freedom
        for the p-value -> r conversion)

    Returns
    -------
    threshold_file : string (numpy npy file)
        path to file containing the thresholded (binary, stored as float)
        correlation matrix, written to the current working directory

    Raises
    ------
    Exception
        re-raised after logging if loading, converting, or saving fails
    """
    import numpy as np
    import os
    from CPAC.network_centrality import load_mat,\
                                        convert_pvalue_to_r,\
                                        convert_sparsity_to_r

    try:
        r_matrix = load_mat(corr_matrix)
        print("threshold_option --> %s" % option)
        try:
            if option == 0:
                r_value = convert_pvalue_to_r(scans, threshold)
            # BUG FIX: this was a second independent `if`, so for option == 0
            # the `else` branch clobbered the p-value-derived r_value with the
            # raw threshold. `elif` makes the three options mutually exclusive.
            elif option == 1:
                r_value = convert_sparsity_to_r(r_matrix, threshold)
            else:
                r_value = threshold
        except:
            print("Exception in calculating threshold value")
            raise

        print("correlation threshold value -> %s" % r_value)
        print("thresholding the correlation matrix....")
        # Binary mask of supra-threshold correlations, saved as float
        threshold_matrix = r_matrix > r_value
        threshold_file = os.path.join(os.getcwd(), 'threshold_matrix.npy')
        # np.float was removed in NumPy 1.24; builtin float is the
        # documented equivalent (float64)
        np.save(threshold_file, threshold_matrix.astype(float))
    except Exception:
        print("Exception while thresholding correlation matrix")
        raise

    return threshold_file
def get_centrality_by_sparsity(timeseries, method_option, weight_options,
                               threshold, memory_allocated):
    """
    Calculate degree or eigenvector centrality using a sparsity-derived
    correlation threshold over the full voxel-wise correlation matrix.

    Parameters
    ----------
    timeseries : numpy array
        (nvoxels x ntimepoints) timeseries of the input subject
    method_option : int
        which measure to compute: 0 -> degree centrality,
        1 -> eigenvector centrality, 2 -> lFCD
        (note: no lFCD computation exists in this function body)
    weight_options : list of two booleans
        [binarize, weighted] output toggles
    threshold : float
        sparsity threshold for the correlation values
    memory_allocated : string
        amount of memory allocated for the correlation-matrix block
        computation (passed through to calc_blocksize)

    Returns
    -------
    out_list : list of tuples
        (output name, centrality vector) pairs, later used to store
        nifti images for each requested centrality variant

    Raises
    ------
    Exception
        re-raised after logging if any step of the computation fails
    """
    import os
    import numpy as np
    from CPAC.network_centrality import calc_blocksize,\
                                        convert_sparsity_to_r,\
                                        degree_centrality,\
                                        eigenvector_centrality
    from CPAC.cwas.subdist import norm_cols

    out_list = []
    try:
        # Calculate the block size (i.e., number of voxels) to compute part
        # of the connectivity matrix at once.
        #
        # A block size is still used to fill the *whole* correlation matrix
        # because of issues in numpy that lead to extra memory usage when
        # computing the dot product.
        # See https://cmi.hackpad.com/Numpy-Memory-Issues-BlV9Pg5nRDM.
        block_size = calc_blocksize(timeseries, memory_allocated,
                                    include_full_matrix=True)
        nvoxs = timeseries.shape[0]
        ntpts = timeseries.shape[1]

        calc_degree = False  # init degree measure flag to false
        calc_eigen = False   # init eigen measure flag to false
        calc_lfcd = False    # init lFCD measure flag to false

        # Select which method we're going to perform
        if method_option == 0:
            calc_degree = True
        elif method_option == 1:
            calc_eigen = True
        elif method_option == 2:
            # NOTE(review): calc_lfcd is set but no lFCD branch exists below,
            # so method_option == 2 returns an empty out_list — confirm lFCD
            # requests are routed elsewhere by the caller.
            calc_lfcd = True

        # Set weighting parameters
        out_binarize = weight_options[0]
        out_weighted = weight_options[1]

        # Full nvoxs x nvoxs correlation matrix, filled block by block
        corr_matrix = np.zeros((nvoxs, nvoxs), dtype=timeseries.dtype)

        print("Normalize TimeSeries")
        timeseries = norm_cols(timeseries.T)

        print("Computing centrality across %i voxels" % nvoxs)
        # [j, i) is the half-open voxel range of the current block; with
        # normalized columns, the dot product below yields correlations.
        j = 0
        i = block_size
        while i <= timeseries.shape[1]:
            print("running block -> %s %s" % (i, j))
            print("...correlating")
            np.dot(timeseries[:, j:i].T, timeseries, out=corr_matrix[j:i])
            j = i
            if i == nvoxs:
                break
            elif (i + block_size) > nvoxs:
                i = nvoxs
            else:
                i += block_size

        print("Calculating threshold")
        r_value = convert_sparsity_to_r(corr_matrix, threshold,
                                        full_matrix=True)
        print("r_value -> %s" % r_value)

        if calc_degree:
            if out_binarize:
                print("...calculating binarize degree")
                degree_binarize = degree_centrality(corr_matrix, r_value,
                                                    method="binarize")
                out_list.append(('degree_centrality_binarize',
                                 degree_binarize))
            if out_weighted:
                print("...calculating weighted degree")
                degree_weighted = degree_centrality(corr_matrix, r_value,
                                                    method="weighted")
                out_list.append(('degree_centrality_weighted',
                                 degree_weighted))

        if calc_eigen:
            if out_binarize:
                print("...calculating binarize eigenvector")
                eigen_binarize = eigenvector_centrality(corr_matrix, r_value,
                                                        method="binarize")
                out_list.append(('eigenvector_centrality_binarize',
                                 eigen_binarize))
            if out_weighted:
                print("...calculating weighted eigenvector")
                eigen_weighted = eigenvector_centrality(corr_matrix, r_value,
                                                        method="weighted")
                out_list.append(('eigenvector_centrality_weighted',
                                 eigen_weighted))
    except Exception:
        print("Error while calculating centrality")
        raise

    return out_list