def mask_ts_coors(nifti_file, mask, outnii_filename=None, mask_thresh=0, scrub_trs_file=None):
    """Correlate a mask's mean timeseries with every suprathreshold voxel.

    Returns a 3D array of correlation values when outnii_filename is None;
    otherwise writes the correlation image to that NIfTI file.
    """
    img = nib.load(nifti_file)
    data = img.get_data()
    if scrub_trs_file:
        # Scrub file flags TRs to exclude; keep only volumes flagged 0.
        scrub_flags = np.array(core.file_reader(scrub_trs_file))
        data = data[:, :, :, np.nonzero(scrub_flags == 0)[0]]
    voxel_coords = core.get_nonzero_coords(nifti_file, mask_thresh)
    seed_coords = core.get_nonzero_coords(mask, mask_thresh)
    # Mean timeseries across all voxels inside the seed mask.
    seed_mean_ts = np.mean(
        [data[c[0], c[1], c[2], :] for c in seed_coords], axis=0)
    voxel_ts = np.array([data[c[0], c[1], c[2], :] for c in voxel_coords])
    del data
    # Off-diagonal element of the 2x2 correlation matrix = r(seed, voxel).
    corrs = [np.corrcoef([seed_mean_ts, ts])[1][0] for ts in voxel_ts]
    out_image = np.zeros(img.shape[0:3])
    for coord, r in zip(voxel_coords, corrs):
        out_image[coord[0], coord[1], coord[2]] = r
    if not outnii_filename:
        return out_image
    outnifti = nib.Nifti1Image(out_image, img.get_header().get_best_affine())
    outnifti.to_filename(outnii_filename)
def mask_mutualinfo_matrix(nifti_file, masks, outfile, mask_thresh=0, nbins=10):
    """Mutual-information matrix between mask mean timeseries.

    Each mask's mean timeseries is quantized into nbins discrete levels,
    then pairwise MI is computed with pyentropy. Saves the symmetrized
    matrix to '<outfile>.txt' and returns it.
    """
    from pyentropy import DiscreteSystem
    n_masks = len(masks)
    mi_mat = np.zeros((n_masks, n_masks))
    img = nib.load(nifti_file)
    data = img.get_data()
    ts_length = img.shape[3] if len(img.shape) > 3 else 1
    binned_ts = np.zeros((n_masks, ts_length), dtype='int')
    for idx, mask in enumerate(masks):
        coords = core.get_nonzero_coords(mask, mask_thresh)
        mean_ts = np.mean(
            [data[c[0], c[1], c[2], :] for c in coords], axis=0)
        # Quantize the mean timeseries into nbins levels, 0-based.
        edges = np.linspace(min(mean_ts), max(mean_ts), nbins)
        binned_ts[idx, :] = np.digitize(mean_ts, edges) - 1
        # MI against every previously processed mask (lower triangle).
        for prev in range(idx):
            sys = DiscreteSystem(binned_ts[idx], (1, nbins),
                                 binned_ts[prev], (1, nbins))
            sys.calculate_entropies(method='qe',
                                    calc=['HX', 'HXY', 'HiXY', 'HshXY'])
            mi_mat[idx, prev] = sys.I()
    mi_sym = core.symmetrize_mat(mi_mat, 'bottom')
    np.savetxt('%s.txt' % outfile, mi_sym)
    return mi_sym
def vox_ts_corrs(nifti_file, coord=None, covariate_file=None, outnii_filename=False, mask_thresh=0, scrub_trs_file=None):
    """Correlate one seed timeseries with every suprathreshold voxel.

    The seed is either the timeseries at an (x, y, z) coordinate, or a
    column covariate read from covariate_file. Returns a 3D correlation
    image unless outnii_filename is given, in which case it is written out.
    """
    img = nib.load(nifti_file)
    data = img.get_data()
    if scrub_trs_file:
        # Scrub file flags TRs to exclude; keep only volumes flagged 0.
        scrub_flags = np.array(core.file_reader(scrub_trs_file))
        data = data[:, :, :, np.nonzero(scrub_flags == 0)[0]]
    voxel_coords = core.get_nonzero_coords(nifti_file, mask_thresh)
    ts_array = np.array([data[c[0], c[1], c[2], :] for c in voxel_coords])
    if coord:
        seed_ts = ts_array[voxel_coords.index(coord)]
    else:
        rows = core.file_reader(covariate_file)
        seed_ts = [value for row in rows for value in row]  # flatten columns
    out_image = np.zeros(img.shape[0:3])
    for c, ts in zip(voxel_coords, ts_array):
        # Off-diagonal of the 2x2 correlation matrix = r(seed, voxel).
        out_image[c[0], c[1], c[2]] = np.corrcoef([seed_ts, ts])[1][0]
    if not outnii_filename:
        return out_image
    outnifti = nib.Nifti1Image(out_image, img.get_header().get_best_affine())
    outnifti.to_filename(outnii_filename)
def mask_connectivity_matrix(tracks,header,masks,outfile,nonzero_thresh=0,through=0,tracks_mm=0,length_thresh=0): """ Calculate the (symmetric) connectivity matrix for a set of tracks (from diffusion toolkit .trk file) and a set of masks """ # Leave third argument as 0 to count number of tracks that originate/terminate at # either end of a pair of masks, set through to 1 to count number of tracks that # intersect both of the masks. connect_mat=np.zeros((len(masks),len(masks))) masks_coords_list=[] tracknums=[[] for x in range(len(masks)*len(masks))] for mask in masks: masks_coords_list.append(set(core.get_nonzero_coords(mask,nonzero_thresh))) for tracknum,track in enumerate(tracks): if through == 0: cur_start=[] cur_end=[] track_start_set=set([track[0]]) track_end_set=set([track[-1]]) for count,mask_coords_set in enumerate(masks_coords_list): if track_start_set & mask_coords_set: if length_thresh: track_len = tracklength(np.array(tracks_mm[tracknum])) if track_len > length_thresh: cur_start.append(count) else: cur_start.append(count) elif track_end_set & mask_coords_set: if length_thresh: track_len = tracklength(np.array(tracks_mm[tracknum])) if track_len > length_thresh: cur_end.append(count) else: cur_end.append(count) for x in cur_start: for y in cur_end: # allow for fiber to start/end in multiple (overlapping) masks connect_mat[x,y] += 1 tracknums[(x*len(masks))+y].append(tracknum) elif through == 1: cur=[] track_set=set(track) for count,mask_coords_set in enumerate(masks_coords_list): if track_set & mask_coords_set: if length_thresh: track_len = tracklength(np.array(tracks_mm[tracknum])) if track_len > length_thresh: cur.append(count) else: cur.append(count) for x,y in list(core.combinations(cur,2)): connect_mat[x,y] += 1 tracknums[(x*len(masks))+y].append(tracknum) connect_mat_sym = core.symmetrize_mat_sum(connect_mat) tracknums_sym = core.symmetrize_tracknum_list(tracknums) np.savetxt('%s_connectmat.txt'%outfile,connect_mat_sym) return 
connect_mat_sym,tracknums_sym
def mask_ts_coors(nifti_file, mask, outnii_filename=None, mask_thresh=0, scrub_trs_file=None):
    """ Calculates correlations between a mask's mean timeseries and all other voxels.

    nifti_file: 4D NIfTI with the voxel timeseries
    mask: NIfTI mask defining the seed region
    outnii_filename: if given, write the correlation image there; else return it
    mask_thresh: lower threshold for nonzero-voxel selection
    scrub_trs_file: text file flagging TRs to exclude (nonzero = excluded)
    """
    input = nib.load(nifti_file)
    input_d = input.get_data()
    if scrub_trs_file:
        scrub_trs = np.array(
            core.file_reader(scrub_trs_file))  # array of TRs to exclude
        keep_trs = np.nonzero(scrub_trs == 0)[0]  # array of TRs to include
        input_d = input_d[:, :, :, keep_trs]
    nonzero_coords = core.get_nonzero_coords(nifti_file, mask_thresh)
    mask_coords = core.get_nonzero_coords(mask, mask_thresh)
    # Timeseries of every voxel inside the seed mask.
    mask_array = [
        input_d[mask_coord[0], mask_coord[1], mask_coord[2], :]
        for mask_coord in mask_coords
    ]
    mask_mean_ts = np.mean(mask_array, axis=0)
    del mask_array
    ts_array = np.array([
        input_d[nz_coord[0], nz_coord[1], nz_coord[2], :]
        for nz_coord in nonzero_coords
    ])
    del input_d  # free the full 4D volume before correlating
    # Off-diagonal of the 2x2 correlation matrix = r(seed mean, voxel).
    indiv_corr = [np.corrcoef([mask_mean_ts, ts])[1][0] for ts in ts_array]
    coord_corrs = zip(nonzero_coords, indiv_corr)
    xsize, ysize, zsize = input.shape[0:3]
    mask_corrs_image = np.zeros((xsize, ysize, zsize))
    for out_coord, out_corr in coord_corrs:
        mask_corrs_image[out_coord[0], out_coord[1], out_coord[2]] = out_corr
    if not outnii_filename:
        return mask_corrs_image
    else:
        outnifti = nib.Nifti1Image(mask_corrs_image,
                                   input.get_header().get_best_affine())
        outnifti.to_filename(outnii_filename)
def vox_ts_corrs(nifti_file, coord=None, covariate_file=None, outnii_filename=False, mask_thresh=0, scrub_trs_file=None):
    """ Take either: 1) an (x,y,z) coordinate or 2) an external covariate file (column)
    and calculate the correlation of that coordinate's timeseries with all other timeseries'.

    Returns a 3D correlation image unless outnii_filename is given, in which
    case the image is written to that file instead.
    """
    input = nib.load(nifti_file)
    input_d = input.get_data()
    if scrub_trs_file:
        scrub_trs = np.array(
            core.file_reader(scrub_trs_file))  # array of TRs to exclude
        keep_trs = np.nonzero(scrub_trs == 0)[0]  # array of TRs to include
        input_d = input_d[:, :, :, keep_trs]
    nonzero_coords = core.get_nonzero_coords(nifti_file, mask_thresh)
    ts_array = np.array([
        input_d[nz_coord[0], nz_coord[1], nz_coord[2], :]
        for nz_coord in nonzero_coords
    ])
    if coord:
        # Seed is the timeseries at the requested voxel coordinate.
        nonzero_index = nonzero_coords.index(coord)
        seed_ts = ts_array[nonzero_index]
    else:
        # Seed is an external covariate column read from file.
        seed_ts = core.file_reader(covariate_file)
        seed_ts = [item for sublist in seed_ts
                   for item in sublist]  # flatten list
    # Off-diagonal of the 2x2 correlation matrix = r(seed, voxel).
    indiv_corr = [np.corrcoef([seed_ts, ts])[1][0] for ts in ts_array]
    coord_corrs = zip(nonzero_coords, indiv_corr)
    vox_corrs_image = np.zeros((input.shape[0:3]))
    for out_coord, out_corr in coord_corrs:
        vox_corrs_image[out_coord[0], out_coord[1], out_coord[2]] = out_corr
    if not outnii_filename:
        return vox_corrs_image
    else:
        outnifti = nib.Nifti1Image(vox_corrs_image,
                                   input.get_header().get_best_affine())
        outnifti.to_filename(outnii_filename)
def mask_mutualinfo_matrix(nifti_file, masks, outfile, mask_thresh=0, nbins=10):
    """ Calculates mutual information matrix for a set of mask mean timeseries'.

    Each mask's mean timeseries is discretized into nbins levels before MI is
    computed with pyentropy. Saves the symmetrized matrix to '<outfile>.txt'
    and returns it.
    """
    from pyentropy import DiscreteSystem
    mutualinfo_mat = np.zeros((len(masks), len(masks)))
    input = nib.load(nifti_file)
    input_d = input.get_data()
    if len(input.shape) > 3:
        ts_length = input.shape[3]
    else:
        ts_length = 1
    mean_bin_ts_array = np.zeros((len(masks), ts_length), dtype='int')
    for count, mask in enumerate(masks):
        mask_coords = core.get_nonzero_coords(mask, mask_thresh)
        mask_array = [
            input_d[mask_coord[0], mask_coord[1], mask_coord[2], :]
            for mask_coord in mask_coords
        ]
        mean_ts = np.mean(mask_array, axis=0)
        # Discretize the mean timeseries into nbins evenly spaced levels.
        l = np.linspace(min(mean_ts), max(mean_ts), nbins)
        mean_bin_ts_array[count, :] = np.digitize(
            mean_ts, l) - 1  # set range to start at 0
        if count > 0:
            # MI against every previously binned mask (fills lower triangle).
            for prev in range(count):
                sys = DiscreteSystem(mean_bin_ts_array[count], (1, nbins),
                                     mean_bin_ts_array[prev], (1, nbins))
                sys.calculate_entropies(method='qe',
                                        calc=['HX', 'HXY', 'HiXY', 'HshXY'])
                mutualinfo_mat[count, prev] = sys.I()
    mutualinfo_mat_sym = core.symmetrize_mat(mutualinfo_mat, 'bottom')
    np.savetxt('%s.txt' % outfile, mutualinfo_mat_sym)
    return mutualinfo_mat_sym
def mask_funcconnec_matrix(nifti_file,masks_files,outfile=None,masks_threshes = [], multi_labels=[],partial=False,cov=False,zero_diag=True, scrub_trs_file=None,pca=False,ts_outfile=None):
    """ Calculates correlation/covariance matrix for a set of mask mean timeseries'

    masks_files:  list of mask filenames with full path, can either be one mask
                  per file (in which case multi_labels should be []) or one file
                  with multiple numerical labels (multi_labels = [num1, num2, ...])
    masks_threshes: list of numerical values to use as lower threshold for
                  separate mask files (one per entry in masks_files)
    output options: 1) correlation matrix 2) partial correlation matrix
                  3) covariance matrix

    Returns (matrix, masks_mean_ts_array); optionally saves both to text files.
    """
    # NOTE(review): superseded by the later definition of the same name in this
    # file; kept for reference.
    if multi_labels:
        masks_coords = core.get_mask_labels(masks_files[0], labels=multi_labels)
    else:
        if masks_threshes:
            masks_coords = []
            for count, mask in enumerate(masks_files):
                # BUGFIX: masks_threshes is a list — index it rather than
                # calling it (masks_threshes(count) raised TypeError).
                masks_coords.append(
                    core.get_nonzero_coords(mask, masks_threshes[count]))
        else:
            masks_coords = [core.get_nonzero_coords(mask) for mask in masks_files]
    input = nib.load(nifti_file)
    input_d = input.get_data()
    if scrub_trs_file:
        scrub_trs = np.array(core.file_reader(scrub_trs_file))  # array of TRs to exclude
        keep_trs = np.nonzero(scrub_trs == 0)[0]  # array of TRs to include
    if len(input.shape) > 3:
        if scrub_trs_file:
            ts_length = len(keep_trs)
        else:
            ts_length = input.shape[3]
    else:
        ts_length = 1
    masks_mean_ts_array = np.zeros((len(masks_coords), ts_length))
    for count, mask_coords in enumerate(masks_coords):
        if scrub_trs_file:
            mask_array = [input_d[mask_coord[0], mask_coord[1], mask_coord[2], keep_trs]
                          for mask_coord in mask_coords]
        else:
            mask_array = [input_d[mask_coord[0], mask_coord[1], mask_coord[2], :]
                          for mask_coord in mask_coords]
        if pca:
            # First principal component instead of the plain mean timeseries.
            [coeff, score, latent] = princomp(np.matrix(mask_array))
            masks_mean_ts_array[count, :] = score[0, :]
        else:
            masks_mean_ts_array[count, :] = np.mean(mask_array, axis=0)
    if partial:
        mat = core.partialcorr_matrix(masks_mean_ts_array)
    elif cov:
        mat = np.cov(masks_mean_ts_array)
    else:
        mat = np.corrcoef(masks_mean_ts_array)
    if zero_diag:
        mat = mat * abs(1 - np.eye(mat.shape[0]))  # zero matrix diagonal
    if outfile:
        np.savetxt('%s.txt' % outfile, mat)
    if ts_outfile:
        np.savetxt('%s.txt' % ts_outfile, masks_mean_ts_array)
    return mat, masks_mean_ts_array
def mask_tracks(tracks,header,masks,nonzero_thresh=0,through=1,write_nii=0,outprefix="mask",tracks_mm=0,length_thresh=0):
    """ Creates density files for all tracks passing through a set of masks.

    Each volume in vox_tracks_img is the density volume for a single mask.
    Leave 'through' argument as 0 to count number of tracks that
    originate/terminate within a mask, set through to 1 to count number of
    tracks that intersect a mask. Returns tracknums: one list of track
    indices per mask.
    """
    # NOTE(review): superseded by the later definition of the same name in
    # this file; kept for reference.
    xdim,ydim,zdim=header["dims"]
    mm_dims=np.array(header["vox_size"])*np.array(header["dims"])
    masks_coords_list=[]
    if write_nii == 1:
        vox_tracks_img=np.zeros((xdim,ydim,zdim,len(masks)))
    tracknums=[[] for x in range(len(masks))]
    for mask in masks:
        # Sets of voxel coords per mask for O(1) intersection tests below.
        masks_coords_list.append(set(core.get_nonzero_coords(mask,nonzero_thresh)))
    for tracknum,track in enumerate(tracks):
        if through == 0:
            # Endpoint mode: the track must start or end inside the mask.
            track_start_set=set([track[0]])
            track_end_set=set([track[-1]])
            for count,mask_coords_set in enumerate(masks_coords_list):
                if track_start_set & mask_coords_set or track_end_set & mask_coords_set:
                    if length_thresh:
                        # Only count fibers longer than length_thresh (mm coords).
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            tracknums[count].append(tracknum)
                            if write_nii==1:
                                for x,y,z in track:
                                    if all(np.array([x,y,z])<mm_dims):
                                        vox_tracks_img[x,y,z,count] += 1
                    else:
                        tracknums[count].append(tracknum)
                        if write_nii==1:
                            for x,y,z in track:
                                if all(np.array([x,y,z])<mm_dims):
                                    vox_tracks_img[x,y,z,count] += 1
        elif through == 1:
            # Pass-through mode: any voxel of the track inside the mask counts.
            track_set=set(track)
            for count,mask_coords_set in enumerate(masks_coords_list):
                if track_set & mask_coords_set:
                    if length_thresh:
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            tracknums[count].append(tracknum)
                            if write_nii==1:
                                for x,y,z in track:
                                    if all(np.array([x,y,z])<mm_dims):
                                        vox_tracks_img[x,y,z,count] += 1
                    else:
                        tracknums[count].append(tracknum)
                        if write_nii==1:
                            for x,y,z in track:
                                if all(np.array([x,y,z])<mm_dims):
                                    vox_tracks_img[x,y,z,count] += 1
    mask_density = [len(hits) for hits in tracknums]
    if write_nii == 0:
        np.savetxt('%s_density.txt'%outprefix,mask_density)
        return tracknums
    else:
        # BUGFIX: this branch referenced undefined names (vox_tracks_image,
        # input, outnii_filename) and raised NameError. Write the density
        # volume with an identity affine under '<outprefix>_density.nii',
        # matching the behavior of the later copy of this function.
        outnifti = nib.Nifti1Image(vox_tracks_img, np.eye(4))
        outnifti.to_filename('%s_density.nii'%outprefix)
        return tracknums
def mask_connectivity_matrix_dsi(tracks, masks, outfile, nonzero_thresh=0, through=0, tracks_mm=0, length_thresh=0, header=None):
    """Calculate the (symmetric) connectivity matrix for a set of tracks from a DSI studio .txt file and a set of masks.

    Saves the symmetrized matrix to '<outfile>_connectmat.txt' and returns
    (connect_mat_sym, tracknums_sym).
    """
    # Leave third argument as 0 to count number of tracks that originate/terminate at
    # either end of a pair of masks, set through to 1 to count number of tracks that
    # intersect both of the masks.
    # NOTE(review): `header` is accepted but unused here — TODO confirm intended.
    # tracknums is a flat len(masks)**2 list of per-pair track-index lists,
    # indexed by (row * len(masks)) + col.
    connect_mat = np.zeros((len(masks), len(masks)))
    masks_coords_list = []
    tracknums = [[] for x in range(len(masks) * len(masks))]
    for mask in masks:
        # Sets of voxel coords per mask for O(1) intersection tests below.
        masks_coords_list.append(
            set(core.get_nonzero_coords(mask, nonzero_thresh)))
    for tracknum, track in enumerate(tracks):
        if through == 0:
            # Endpoint mode: classify which masks contain the track's first
            # and last points, then count every start-mask/end-mask pair.
            cur_start = []
            cur_end = []
            track_start_set = set([track[0]])
            track_end_set = set([track[-1]])
            for count, mask_coords_set in enumerate(masks_coords_list):
                if track_start_set & mask_coords_set:
                    if length_thresh:
                        # Only count fibers longer than length_thresh (mm coords).
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            cur_start.append(count)
                    else:
                        cur_start.append(count)
                elif track_end_set & mask_coords_set:
                    if length_thresh:
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            cur_end.append(count)
                    else:
                        cur_end.append(count)
            for x in cur_start:
                for y in cur_end:
                    # allow for fiber to start/end in multiple (overlapping) masks
                    connect_mat[x, y] += 1
                    tracknums[(x * len(masks)) + y].append(tracknum)
        elif through == 1:
            # Pass-through mode: count every pair of masks the track intersects
            # anywhere along its path.
            cur = []
            track_set = set(track)
            for count, mask_coords_set in enumerate(masks_coords_list):
                if track_set & mask_coords_set:
                    if length_thresh:
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            cur.append(count)
                    else:
                        cur.append(count)
            for x, y in list(core.combinations(cur, 2)):
                connect_mat[x, y] += 1
                tracknums[(x * len(masks)) + y].append(tracknum)
    # Fold the triangular counts into a symmetric matrix and save.
    connect_mat_sym = core.symmetrize_mat_sum(connect_mat)
    tracknums_sym = core.symmetrize_tracknum_list(tracknums)
    np.savetxt('%s_connectmat.txt' % outfile, connect_mat_sym)
    return connect_mat_sym, tracknums_sym
def mask_connectivity_matrix(tracks, header, masks, outfile, nonzero_thresh=0, through=0, tracks_mm=0, length_thresh=0, mask_matrix_file=None, write_tracks=False, write_tracks_filename=None, track_file=None):
    """ Calculate the (symmetric) connectivity matrix for a set of tracks (from diffusion toolkit .trk file) and a set of masks.

    mask_matrix_file: optional text matrix; when given, only mask pairs with a
        nonzero entry in it are counted.
    write_tracks: when True, write out the mm-space tracks that contributed to
        any pair, via make_floats(...).
    Saves the symmetrized matrix to '<outfile>_connectmat.txt' and returns
    (connect_mat_sym, tracknums_sym).
    """
    # Leave third argument as 0 to count number of tracks that originate/terminate at
    # either end of a pair of masks, set through to 1 to count number of tracks that
    # intersect both of the masks.
    # tracknums is a flat len(masks)**2 list of per-pair track-index lists,
    # indexed by (row * len(masks)) + col.
    connect_mat = np.zeros((len(masks), len(masks)))
    masks_coords_list = []
    tracknums = [[] for x in range(len(masks) * len(masks))]
    for mask in masks:
        # Sets of voxel coords per mask for O(1) intersection tests below.
        masks_coords_list.append(
            set(core.get_nonzero_coords(mask, nonzero_thresh)))
    if mask_matrix_file:
        mask_matrix = core.file_reader(mask_matrix_file)
        mask_matrix_array = np.array(mask_matrix)
    for tracknum, track in enumerate(tracks):
        if through == 0:
            # Endpoint mode: classify which masks contain the track's first
            # and last points, then count every start-mask/end-mask pair.
            cur_start = []
            cur_end = []
            track_start_set = set([track[0]])
            track_end_set = set([track[-1]])
            for count, mask_coords_set in enumerate(masks_coords_list):
                if track_start_set & mask_coords_set:
                    if length_thresh:
                        # Only count fibers longer than length_thresh (mm coords).
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            cur_start.append(count)
                    else:
                        cur_start.append(count)
                elif track_end_set & mask_coords_set:
                    if length_thresh:
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            cur_end.append(count)
                    else:
                        cur_end.append(count)
            for x in cur_start:
                for y in cur_end:
                    # allow for fiber to start/end in multiple (overlapping) masks
                    if mask_matrix_file:
                        # Restrict counting to pairs enabled in the mask matrix.
                        if mask_matrix_array[x, y]:
                            connect_mat[x, y] += 1
                            tracknums[(x * len(masks)) + y].append(tracknum)
                    else:
                        connect_mat[x, y] += 1
                        tracknums[(x * len(masks)) + y].append(tracknum)
        elif through == 1:
            # Pass-through mode: count every pair of masks the track intersects
            # anywhere along its path.
            cur = []
            track_set = set(track)
            for count, mask_coords_set in enumerate(masks_coords_list):
                if track_set & mask_coords_set:
                    if length_thresh:
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            cur.append(count)
                    else:
                        cur.append(count)
            for x, y in list(core.combinations(cur, 2)):
                if mask_matrix_file:
                    if mask_matrix_array[x, y]:
                        connect_mat[x, y] += 1
                        tracknums[(x * len(masks)) + y].append(tracknum)
                else:
                    connect_mat[x, y] += 1
                    tracknums[(x * len(masks)) + y].append(tracknum)
    # Fold the triangular counts into a symmetric matrix and save.
    connect_mat_sym = core.symmetrize_mat_sum(connect_mat)
    tracknums_sym = core.symmetrize_tracknum_list(tracknums)
    np.savetxt('%s_connectmat.txt' % outfile, connect_mat_sym)
    if write_tracks:
        # Unique, sorted indices of every track that hit any mask pair.
        tracknum_list = list(
            set([item for sublist in tracknums for item in sublist]))
        tracknum_list_ordered = sorted(tracknum_list)
        track_list = [tracks_mm[n] for n in tracknum_list_ordered]
        make_floats(track_list, write_tracks_filename, track_file)
    return connect_mat_sym, tracknums_sym
def mask_tracks(tracks, header, masks, nonzero_thresh=0, through=1, write_nii=0, outprefix="mask", tracks_mm=0, length_thresh=0):
    """ Creates density files for all tracks passing through a set of masks.

    Returns tracknums: one list of contributing track indices per mask.
    With write_nii == 0 the per-mask hit counts are saved to
    '<outprefix>_density.txt'; otherwise a 4D density volume (one volume per
    mask) is written to '<outprefix>_density.nii'.
    """
    # Each volume in vox_tracks_img is the density volume for a single mask
    # Leave 'through' argument as 0 to count number of tracks that originate/terminate
    # within a mask, set through to 1 to count number of tracks that intersect a mask
    xdim, ydim, zdim = header["dims"]
    mm_dims = np.array(header["vox_size"]) * np.array(header["dims"])
    masks_coords_list = []
    if write_nii == 1:
        vox_tracks_img = np.zeros((xdim, ydim, zdim, len(masks)))
    tracknums = [[] for x in range(len(masks))]
    for mask in masks:
        # Sets of voxel coords per mask for O(1) intersection tests below.
        masks_coords_list.append(
            set(core.get_nonzero_coords(mask, nonzero_thresh)))
    for tracknum, track in enumerate(tracks):
        if through == 0:
            # Endpoint mode: the track must start or end inside the mask.
            track_start_set = set([track[0]])
            track_end_set = set([track[-1]])
            for count, mask_coords_set in enumerate(masks_coords_list):
                if track_start_set & mask_coords_set or track_end_set & mask_coords_set:
                    if length_thresh:
                        # Only count fibers longer than length_thresh (mm coords).
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            tracknums[count].append(tracknum)
                            if write_nii == 1:
                                for x, y, z in track:
                                    # Guard against points outside the volume.
                                    if all(np.array([x, y, z]) < mm_dims):
                                        vox_tracks_img[x, y, z, count] += 1
                    else:
                        tracknums[count].append(tracknum)
                        if write_nii == 1:
                            for x, y, z in track:
                                if all(np.array([x, y, z]) < mm_dims):
                                    vox_tracks_img[x, y, z, count] += 1
        elif through == 1:
            # Pass-through mode: any voxel of the track inside the mask counts.
            track_set = set(track)  # track_set = track
            for count, mask_coords_set in enumerate(masks_coords_list):
                if track_set & mask_coords_set:
                    if length_thresh:
                        track_len = tracklength(np.array(tracks_mm[tracknum]))
                        if track_len > length_thresh:
                            tracknums[count].append(tracknum)
                            if write_nii == 1:
                                for x, y, z in track:
                                    if all(np.array([x, y, z]) < mm_dims):
                                        vox_tracks_img[x, y, z, count] += 1
                    else:
                        tracknums[count].append(tracknum)
                        if write_nii == 1:
                            for x, y, z in track:
                                if all(np.array([x, y, z]) < mm_dims):
                                    vox_tracks_img[x, y, z, count] += 1
    mask_density = [len(hits) for hits in tracknums]
    if write_nii == 0:
        np.savetxt('%s_density.txt' % outprefix, mask_density)
        return tracknums
    else:
        # Identity affine: density volume is saved in voxel space.
        outnifti = nib.Nifti1Image(vox_tracks_img, np.eye(4))
        outnifti.to_filename('%s_density.nii' % outprefix)
        return tracknums
def mask_funcconnec_matrix_sliding(nifti_file, masks_files, outfile=None, masks_threshes=[], multi_labels=[], zero_diag=True, ts_outfile=None, covariate_ts_file=None, window_length=30):
    """ Calculates sliding-window correlation matrices for a set of mask mean timeseries'

    masks_files:  list of mask filenames with full path, can either be one mask
                  per file (in which case multi_labels should be []) or one file
                  with multiple numerical labels (multi_labels = [num1, num2, ...])
    masks_threshes: list of numerical values to use as lower threshold for
                  separate mask files (one per entry in masks_files)
    covariate_ts_file: text file with timeseries for nuisance covariates to partial out
    window_length: the number of volumes to include in a sliding window correlation

    Returns (mats, masks_mean_ts_array) where mats is
    (n_regions, n_regions, n_windows); optionally saves both to text files.
    """
    if multi_labels:
        masks_coords = core.get_mask_labels(masks_files[0], labels=multi_labels)
    else:
        if masks_threshes:
            masks_coords = []
            for count, mask in enumerate(masks_files):
                # BUGFIX: masks_threshes is a list — index it rather than
                # calling it (masks_threshes(count) raised TypeError).
                masks_coords.append(
                    core.get_nonzero_coords(mask, masks_threshes[count]))
        else:
            masks_coords = [
                core.get_nonzero_coords(mask) for mask in masks_files
            ]
    n_regions = len(masks_coords)
    input = nib.load(nifti_file)
    input_d = input.get_data()
    if len(input.shape) > 3:
        ts_length = input.shape[3]
    else:
        ts_length = 1
    masks_mean_ts_array = np.zeros((len(masks_coords), ts_length))
    for count, mask_coords in enumerate(masks_coords):
        mask_array = [
            input_d[mask_coord[0], mask_coord[1], mask_coord[2], :]
            for mask_coord in mask_coords
        ]
        masks_mean_ts_array[count, :] = np.mean(mask_array, axis=0)
    if covariate_ts_file:
        # Regress the nuisance timeseries out of each region's mean timeseries
        # and keep the residuals for the windowed correlations.
        nuis_reg = np.array(core.file_reader(covariate_ts_file))
        masks_mean_ts_array_resid = np.zeros((n_regions, ts_length))
        for i in range(n_regions):
            ts1 = np.atleast_2d(masks_mean_ts_array[i, :])
            reg = np.linalg.lstsq(nuis_reg, ts1.T)
            beta = reg[0]
            ts1_resid = np.squeeze(ts1.T - nuis_reg.dot(beta))
            masks_mean_ts_array_resid[i, :] = ts1_resid
    n_windows = ts_length - window_length  # was len(range(...)) — same value
    mats = np.zeros((n_regions, n_regions, n_windows))
    for k in range(n_windows):
        ts_start = k
        ts_stop = k + window_length
        if covariate_ts_file:
            mat = np.corrcoef(masks_mean_ts_array_resid[:, ts_start:ts_stop])
        else:
            mat = np.corrcoef(masks_mean_ts_array[:, ts_start:ts_stop])
        if zero_diag:
            mat = mat * abs(1 - np.eye(mat.shape[0]))  # zero matrix diagonal
        mats[:, :, k] = mat
    if outfile:
        mats_2d = np.reshape(mats, [n_regions, n_regions * n_windows],
                             'F').T  # stack matrices vertically
        np.savetxt('%s.txt' % outfile, mats_2d)
    if ts_outfile:
        np.savetxt('%s.txt' % ts_outfile, masks_mean_ts_array)
    return mats, masks_mean_ts_array
def mask_funcconnec_matrix(nifti_file, masks_files, outfile=None, masks_threshes=[], multi_labels=[], partial=False, cov=False, zero_diag=True, scrub_trs_file=None, pca=False, ts_outfile=None, covariate_ts_file=None):
    """ Calculates correlation/covariance matrix for a set of mask mean timeseries'

    masks_files:  list of mask filenames with full path, can either be one mask
                  per file (in which case multi_labels should be []) or one file
                  with multiple numerical labels (multi_labels = [num1, num2, ...])
    masks_threshes: list of numerical values to use as lower threshold for
                  separate mask files (one per entry in masks_files)
    covariate_ts_file: text file with timeseries for nuisance covariates to partial out
    output options: 1) correlation matrix 2) partial correlation matrix
                  3) covariance matrix

    Returns (matrix, masks_mean_ts_array); optionally saves both to text files.
    """
    if multi_labels:
        masks_coords = core.get_mask_labels(masks_files[0], labels=multi_labels)
    else:
        if masks_threshes:
            masks_coords = []
            for count, mask in enumerate(masks_files):
                # BUGFIX: masks_threshes is a list — index it rather than
                # calling it (masks_threshes(count) raised TypeError).
                masks_coords.append(
                    core.get_nonzero_coords(mask, masks_threshes[count]))
        else:
            masks_coords = [
                core.get_nonzero_coords(mask) for mask in masks_files
            ]
    n_regions = len(masks_coords)
    input = nib.load(nifti_file)
    input_d = input.get_data()
    if scrub_trs_file:
        scrub_trs = np.array(
            core.file_reader(scrub_trs_file))  # array of TRs to exclude
        keep_trs = np.nonzero(scrub_trs == 0)[0]  # array of TRs to include
    if len(input.shape) > 3:
        if scrub_trs_file:
            ts_length = len(keep_trs)
        else:
            ts_length = input.shape[3]
    else:
        ts_length = 1
    masks_mean_ts_array = np.zeros((len(masks_coords), ts_length))
    for count, mask_coords in enumerate(masks_coords):
        if scrub_trs_file:
            mask_array = [
                input_d[mask_coord[0], mask_coord[1], mask_coord[2], keep_trs]
                for mask_coord in mask_coords
            ]
        else:
            mask_array = [
                input_d[mask_coord[0], mask_coord[1], mask_coord[2], :]
                for mask_coord in mask_coords
            ]
        if pca:
            # First principal component instead of the plain mean timeseries.
            [coeff, score, latent] = princomp(np.matrix(mask_array))
            masks_mean_ts_array[count, :] = score[0, :]
        else:
            masks_mean_ts_array[count, :] = np.mean(mask_array, axis=0)
    if partial:
        mat = core.partialcorr_matrix(masks_mean_ts_array)
    elif cov:
        mat = np.cov(masks_mean_ts_array)
    elif covariate_ts_file:
        # Pairwise partial correlation of each region pair, controlling for
        # the nuisance timeseries; fills the upper triangle then mirrors it.
        nuis_reg = np.array(core.file_reader(covariate_ts_file))
        mat = np.zeros((n_regions, n_regions))
        for i in range(n_regions):
            for j in range(i + 1, n_regions):
                n1n2 = np.hstack((np.atleast_2d(masks_mean_ts_array[i, :]).T,
                                  np.atleast_2d(masks_mean_ts_array[j, :]).T))
                X = np.vstack((n1n2.T, nuis_reg.T))
                # Narrowed from a bare except: best-effort per pair — a
                # failed partial correlation (e.g. empty mask) is stored as 0.
                try:
                    pc_mat = core.partialcorr_matrix(X)
                    mat[i, j] = pc_mat[0, 1]
                except Exception:
                    mat[i, j] = 0
                    print('Mask %d is empty, correlation will be stored as 0' %
                          (j))
        mat = mat + mat.T
    else:
        mat = np.corrcoef(masks_mean_ts_array)
    if zero_diag:
        mat = mat * abs(1 - np.eye(mat.shape[0]))  # zero matrix diagonal
    if outfile:
        np.savetxt('%s.txt' % outfile, mat)
    if ts_outfile:
        np.savetxt('%s.txt' % ts_outfile, masks_mean_ts_array)
    return mat, masks_mean_ts_array