def avg_correlation(ts_files, thr=None):
    import numpy as np
    import numexpr as ne
    import hcp_corr

    # make empty average correlation matrix (upper triangle as 1D array)
    get_size = np.load(ts_files[0]).shape[0]
    full_shape = (get_size, get_size)
    if (get_size**2 - get_size) % 2 == 0:
        avg_corr = np.zeros(int((get_size**2 - get_size) / 2))
    else:
        raise ValueError('size calculation no zero mod')

    count = 0
    for ts in ts_files:
        # load time series
        print('...load %s' % ts)
        rest = np.load(ts)
        # calculate upper-triangular correlation matrix
        print('...corrcoef')
        corr = hcp_corr.corrcoef_upper(rest)
        del rest
        # threshold / transform
        if thr is None:
            # r-to-z transform and add to average
            print('...transform')
            avg_corr += ne.evaluate('arctanh(corr)')
        else:
            # threshold and add to average; use a local cutoff so the
            # percentile parameter thr is not overwritten between sessions
            print('...threshold')
            cutoff = np.percentile(corr, 100 - thr)
            avg_corr[np.where(corr > cutoff)] += 1
        del corr
        count += 1
    # divide by number of sessions included
    print('...divide')
    avg_corr /= count
    # transform back if necessary
    if thr is None:
        print('...back transform')
        avg_corr = np.nan_to_num(ne.evaluate('tanh(avg_corr)'))
    return avg_corr, full_shape
import numpy as np
import numexpr as ne
import hcp_corr


def avg_correlation(ts_files, thr=None):
    '''
    Calculates the average connectivity matrix, using the hcp_corr package
    for memory optimization:
    https://github.com/NeuroanatomyAndConnectivity/hcp_corr
    '''
    # make empty average correlation matrix (upper triangle as 1D array)
    if isinstance(ts_files[0], str):
        get_size = np.load(ts_files[0]).shape[0]
    elif isinstance(ts_files[0], np.ndarray):
        get_size = ts_files[0].shape[0]
    full_shape = (get_size, get_size)
    if (get_size**2 - get_size) % 2 == 0:
        avg_corr = np.zeros(int((get_size**2 - get_size) / 2))
    else:
        raise ValueError('size calculation no zero mod')

    count = 0
    for rest in ts_files:
        # load time series if given as a file path; arrays pass through
        if isinstance(rest, str):
            rest = np.load(rest)
        # calculate upper-triangular correlation matrix
        print('...corrcoef')
        corr = hcp_corr.corrcoef_upper(rest)
        del rest
        # threshold / transform
        if thr is None:
            # r-to-z transform and add to average
            print('...transform')
            avg_corr += ne.evaluate('arctanh(corr)')
        else:
            # threshold and add to average; use a local cutoff so the
            # percentile parameter thr is not overwritten between sessions
            print('...threshold')
            cutoff = np.percentile(corr, 100 - thr)
            avg_corr[np.where(corr > cutoff)] += 1
        del corr
        count += 1
    # divide by number of sessions included
    print('...divide')
    avg_corr /= count
    # transform back if necessary
    if thr is None:
        print('...back transform')
        avg_corr = np.nan_to_num(ne.evaluate('tanh(avg_corr)'))
    return avg_corr, full_shape
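# Usage sketch of the thresholding branch above (the input file names are
# hypothetical): with thr=10, each session's top 10% of correlations are
# marked, so the returned array holds, per edge, the fraction of sessions
# in which that edge survives the threshold.
avg10, shape10 = avg_correlation(['rest1.npy', 'rest2.npy'], thr=10)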
import numpy as np
import numexpr as ne
import hcp_corr


def avg_correlation(ts_files, thr=None):
    '''
    Calculates the average connectivity matrix (r-to-z transformed before
    averaging), using the hcp_corr package for memory optimization:
    https://github.com/NeuroanatomyAndConnectivity/hcp_corr
    '''
    # make empty average correlation matrix (upper triangle as 1D array);
    # here the time-series files are stored as (timepoints x vertices)
    img0 = np.load(ts_files[0])
    get_size = img0.shape[1]
    del img0
    full_shape = (get_size, get_size)
    if (get_size**2 - get_size) % 2 == 0:
        avg_corr = np.zeros(int((get_size**2 - get_size) / 2))
    else:
        raise ValueError('size calculation no zero mod')

    count = 0
    for rest in ts_files:
        # load time series and transpose to (vertices x timepoints)
        rest = np.load(rest).T
        # calculate upper-triangular correlation matrix
        print('...corrcoef')
        corr = hcp_corr.corrcoef_upper(rest)
        del rest
        # r-to-z transform and add to average
        print('...transform')
        avg_corr += ne.evaluate('arctanh(corr)')
        del corr
        count += 1
    # divide by number of sessions included
    print('...divide')
    avg_corr /= count
    # transform back
    print('...back transform')
    avg_corr = np.nan_to_num(ne.evaluate('tanh(avg_corr)'))
    return avg_corr, full_shape
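# Usage sketch for the variant above (the session file names are
# hypothetical); the 1D upper-triangle output can be unpacked into the full
# symmetric matrix with the same hcp_corr helpers used in the scripts
# further below (N_original, upper_to_down).
sessions = ['rest1.npy', 'rest2.npy']            # hypothetical inputs
avg_corr, full_shape = avg_correlation(sessions)
n_orig = hcp_corr.N_original(avg_corr)
avg_corr.resize([n_orig, n_orig])
full = hcp_corr.upper_to_down(avg_corr)
print(full.shape)                                # == full_shape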
# list of all subjects as numpy array
subject_list = np.array(args.subject)   # e.g. /ptmp/sbayrak/hcp/*
N = len(subject_list)

for i in range(N):
    subject = subject_list[i]
    print("do loop %d/%d, %s" % (i + 1, N, subject))

    # load time-series matrix of the subject
    K = hcp_corr.t_series(subject,
                          hemisphere=args.hem,
                          N_first=args.N_first,
                          N_cnt=args.N_cnt)

    # get upper triangular of correlation matrix of time-series as 1D array
    K = hcp_corr.corrcoef_upper(K)
    print("corrcoef data upper triangular shape: ", K.shape)

    # # get the full corr matrix
    # N_orig = hcp_corr.N_original(K)
    # K.resize([N_orig, N_orig])
    # hcp_corr.upper_to_down(K)
    # print("corrcoef data full matrix shape: ", K.shape)

    # sum over all subjects
    if i == 0:
        SUM = K
    else:
        SUM = ne.evaluate('SUM + K')
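# A minimal sketch of the averaging and save step that would typically
# follow the accumulation loop above; the output path is a hypothetical
# placeholder, not the script's actual destination.
SUM = ne.evaluate('SUM / N')                      # average over subjects
np.save('/ptmp/sbayrak/avg_corr_upper.npy', SUM)  # hypothetical output file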
import numpy as np
import scipy as sp
import scipy.spatial
import h5py
import hcp_corr

# t1_file, euclid_file, corr_file, affinity_file, affine_method and the
# euclid/corr/affinity flags are defined upstream in this script
t1 = np.load(t1_file)
full_shape = (t1.shape[0], t1.shape[0])

if euclid:
    # pairwise euclidean distance between depth profiles (columns 3-7)
    print('euclid')
    t1_3_7_diff = sp.spatial.distance.pdist(t1[:, 3:8], 'euclidean')
    f = h5py.File(euclid_file % ('rh', '3_7'), 'w')
    f.create_dataset('upper', data=t1_3_7_diff)
    f.create_dataset('shape', data=full_shape)
    f.close()
    del t1_3_7_diff

if corr:
    # upper-triangular correlation matrix of the depth profiles
    print('corr')
    t1_3_7_corr = hcp_corr.corrcoef_upper(t1[:, 3:8])
    f = h5py.File(corr_file % ('rh', '3_7'), 'w')
    f.create_dataset('upper', data=t1_3_7_corr)
    f.create_dataset('shape', data=full_shape)
    f.close()

if affinity:
    # fit Chebyshev polynomials to the depth profiles, then compute an
    # affinity matrix on the coefficients (chebapprox and dist are
    # project-local helpers)
    print('chebychev')
    t1_3_7 = t1[:, 3:8]
    coeff, poly = chebapprox(t1_3_7, degree=4)
    print('affinity')
    t1_3_7_affine = dist.compute_affinity(coeff, method=affine_method)
    t1_3_7_affine = t1_3_7_affine[np.triu_indices_from(t1_3_7_affine, k=1)]
    f = h5py.File(affinity_file, 'w')
    f.create_dataset('upper', data=t1_3_7_affine)
    f.create_dataset('shape', data=full_shape)
    f.close()
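# chebapprox above is a project-local helper; a minimal stand-in sketch,
# assuming it fits a degree-n Chebyshev series to each row (one depth
# profile per vertex) over a common normalized abscissa and returns the
# per-row coefficients plus the reconstructed profiles:
from numpy.polynomial import chebyshev as cheb

def chebapprox_sketch(profiles, degree=4):
    # profiles: (n_vertices x n_depths) array
    x = np.linspace(-1, 1, profiles.shape[1])
    coeff = cheb.chebfit(x, profiles.T, degree).T   # (n_vertices x degree+1)
    poly = cheb.chebval(x, coeff.T)                 # (n_vertices x n_depths)
    return coeff, poly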
import glob
import numpy as np
from nilearn import masking
from hcp_corr import corrcoef_upper, N_original, upper_to_down


def mask_check(rest, mask):
    # apply the brain mask and count all-zero voxel time series
    matrix = masking.apply_mask(rest, mask)
    matrix = matrix.T
    cnt_zeros = 0
    for i in range(matrix.shape[0]):
        if np.count_nonzero(matrix[i, :]) == 0:
            cnt_zeros += 1
    return cnt_zeros, matrix

#### Step 1, get all connectivity matrices of given subject #########

corr_All = []
for image_rest in glob.glob(data_dir + '/' + subject_id + '*' +
                            '/preprocessed/func/' +
                            'rest_preprocessed2mni_sm.nii.gz'):
    [voxel_zeros, t_series] = mask_check(image_rest, image_mask)
    corr_upper = corrcoef_upper(t_series)
    N_orig = N_original(corr_upper)
    corr_upper.resize([N_orig, N_orig])
    corr = upper_to_down(corr_upper)
    print(image_rest, corr.shape)
    corr_All.append(corr)

corr_All = np.array(corr_All)
corr_All = corr_All.T
print('input data size...', corr_All.shape)

##### Step 2, get concordance value per voxel ########################

W_voxels = []
p_voxels = []
Fdist_voxels = []
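# A minimal sketch of the per-voxel concordance step the lists above are
# set up for, assuming it computes Kendall's coefficient of concordance W
# across sessions (sessions as judges, connectivity targets as items);
# kendall_w_sketch is a hypothetical helper, not the project's actual
# implementation, and the p-value / F-distribution steps would follow
# analogously.
from scipy.stats import rankdata

def kendall_w_sketch(ratings):
    # ratings: (n_items x n_judges); returns Kendall's W in [0, 1]
    n, m = ratings.shape
    ranks = np.apply_along_axis(rankdata, 0, ratings)  # rank items per judge
    R = ranks.sum(axis=1)                              # rank sums per item
    S = ((R - R.mean())**2).sum()
    return 12.0 * S / (m**2 * (n**3 - n))

for i in range(corr_All.shape[0]):
    W_voxels.append(kendall_w_sketch(corr_All[i]))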