def dailyfc_visual(files):
    for onefile in files:
        lfpdata, chnAreas, fs = lfp_extract([onefile])

        if lfpdata.shape[2] < 80:
            continue
        print(onefile)

        ciCOHs = calc_ciCOHs_rest(lfpdata)

        # permutation test: use the lfp data whose ciCOHs are the largest to get distribution
        [i, j] = np.unravel_index(np.argmax(ciCOHs), shape=ciCOHs.shape)
        lfp1, lfp2 = lfpdata[i, :, :], lfpdata[j, :, :]
        _, mu, std = pval_permciCOH_rest(lfp1, lfp2, ciCOHs[i, j], shuffleN=1000)
        pvals = norm.sf(abs(ciCOHs), loc=mu, scale=std) * 2

        # multiple comparison correction, get weights
        reject, pval_corr = fdr_correction(pvals, alpha=0.05, method='indep')
        [rows, cols] = np.where(reject)
        weight = np.zeros(ciCOHs.shape)
        if len(rows) > 0:
            weight[rows, cols] = ciCOHs[rows, cols]

        # visual and save
        filename = os.path.basename(onefile)
        datestr = re.search('[0-9]{8}', filename).group()
        cond = re.search('_[a-z]*_[0-9]{8}', filename).group()[1:-9]
        freqstr = 'freq' + re.search('_filtered[0-9]*_[0-9]*', filename).group()[len('_filtered'):]

        save_prefix = 'all'
        saveFCGraph = os.path.join(savefolder, freqstr + '_' + cond + '_' + save_prefix + '_' + datestr + '.png')
        texts = dict()
        texts[cond + ',' + datestr] = [-80, 50, 15]
        weight_visual_save(weight,
                           chnInf=assign_coord2chnArea(area_coord_file=area_coord_file, chnAreas=chnAreas),
                           savefile=saveFCGraph, texts=texts, threds_edge=None)

        del texts, datestr, cond, weight
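
# Usage sketch (not part of the original module): a minimal driver for
# dailyfc_visual, assuming the module-level `savefolder` and `area_coord_file`
# are already configured and that the filtered resting LFP files follow the
# `*_filtered<fmin>_<fmax>..._<cond>_<yyyymmdd>...` naming convention that the
# regular expressions above rely on. `inputfolder` and the glob pattern are
# hypothetical.
def _example_run_dailyfc_visual(inputfolder):
    import glob

    # collect the per-day filtered LFP files and plot one FC graph per day
    files = sorted(glob.glob(os.path.join(inputfolder, '*_filtered26_28*')))
    dailyfc_visual(files)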

def subArea_dailyfc_visual(files):
    for onefile in files:
        lfpdata, chnAreas, fs = lfp_extract([onefile])

        if lfpdata.shape[2] < 80:
            continue
        print(onefile)

        ciCOHs = calc_ciCOHs_rest(lfpdata)

        # permutation test: use the lfp data whose ciCOHs are the largest to get distribution
        [i, j] = np.unravel_index(np.argmax(ciCOHs), shape=ciCOHs.shape)
        lfp1, lfp2 = lfpdata[i, :, :], lfpdata[j, :, :]
        _, mu, std = pval_permciCOH_rest(lfp1, lfp2, ciCOHs[i, j], shuffleN=1000)

        # parse condition and date from the current file name
        filename = os.path.basename(onefile)
        cond = re.search('_[a-z]*_[0-9]{8}', filename).group()[1:-9]
        datestr = re.search('[0-9]{8}', filename).group()

        ### left thalamus and SMA/M1 ###
        save_prefix = 'leftThaCor'
        areas_used = ['lVA', 'lVLo/VPLo', 'lSMA', 'rSMA', 'M1']

        # subareas selection
        ciCOH_new, chnAreas_new = ciCOH_select(ciCOHs, chnAreas, areas_used)

        # multiple comparison correction, get weight matrix
        pvals = norm.sf(abs(ciCOH_new), loc=mu, scale=std) * 2
        reject, pval_corr = fdr_correction(pvals, alpha=0.05, method='indep')
        [rows, cols] = np.where(reject)
        weight = np.zeros(ciCOH_new.shape)
        if len(rows) > 0:
            weight[rows, cols] = ciCOH_new[rows, cols]

        # visual and save
        saveFCGraph = os.path.join(savefolder, cond + '_' + save_prefix + '_' + datestr + '.png')
        texts = dict()
        texts[datestr] = [80, 50, 15]
        weight_visual_save(weight, chnInf=assign_coord2chnArea(area_coord_file, chnAreas_new),
                           savefile=saveFCGraph, texts=texts, threds_edge=None)

        del ciCOH_new, chnAreas_new, save_prefix, areas_used
        del saveFCGraph, weight, texts
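
# ciCOH_select is a project helper whose implementation is not shown here; the
# sketch below illustrates the kind of sub-area selection it is assumed to
# perform (keep only the rows/columns whose channel area appears in
# `areas_used`). The name `_ciCOH_select_sketch` is hypothetical, not the
# project's own function.
def _ciCOH_select_sketch(ciCOHs, chnAreas, areas_used):
    import numpy as np

    # indices of channels whose area is among the requested sub-areas
    idx = [i for i, area in enumerate(chnAreas) if area in areas_used]

    # restrict the ciCOH matrix and the area list to those channels
    ciCOH_new = ciCOHs[np.ix_(idx, idx)]
    chnAreas_new = [chnAreas[i] for i in idx]
    return ciCOH_new, chnAreas_new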

def imcohs_daily_calc(onefile):
    lfpdata, chnAreas, fs = lfp_extract([onefile])

    if lfpdata.shape[2] < 30:
        return None

    # imaginary coherence within the frequency band of interest (freqs = [fmin, fmax])
    [imcohs, _, _, _, _] = spectral_connectivity(data=np.transpose(lfpdata, axes=(2, 0, 1)),
                                                 method='imcoh', sfreq=fs,
                                                 fmin=freqs[0], fmax=freqs[1], faverage=True)
    imcohs = np.squeeze(imcohs)
    # spectral_connectivity fills only the lower triangle; symmetrize the matrix
    imcohs = imcohs + np.transpose(imcohs, axes=(1, 0))

    # permutation test: use the lfp data whose imcohs are the largest to get distribution
    [i, j] = np.unravel_index(np.argmax(imcohs), shape=imcohs.shape)
    lfp1, lfp2 = lfpdata[i, :, :], lfpdata[j, :, :]
    mu, std = pval_perm_imcohMNE_rest(lfp1, lfp2, fs=fs, fmin=freqs[0], fmax=freqs[1], shuffleN=300)
    pvals = norm.sf(abs(imcohs), loc=mu, scale=std) * 2
    del lfp1, lfp2

    fc = dict()
    fc['imcohs'] = imcohs
    fc['pvals'] = pvals
    fc['chnAreas'] = chnAreas

    # save
    filename = os.path.basename(onefile)
    datestr = re.search('[0-9]{8}', filename).group()
    cond = re.search('_[a-z]*_[0-9]{8}', filename).group()[1:-9]
    freqstr = 'freq' + str(freqs[0]) + '_' + str(freqs[1])
    fcfile_pickle = os.path.join(savefolder, freqstr + '_' + cond + '_' + datestr + '.pickle')
    with open(fcfile_pickle, 'wb') as f:
        pickle.dump(fc, f)

    return fcfile_pickle
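
# Reading the result back (a sketch, not from the original module): the pickle
# written by imcohs_daily_calc stores the raw imcoh matrix together with its
# permutation p-values, so the same FDR thresholding used in dailyfc_visual can
# be applied afterwards. The function name and the default alpha are
# assumptions.
def _example_threshold_saved_imcohs(fcfile_pickle, alpha=0.05):
    import pickle
    import numpy as np
    from mne.stats import fdr_correction

    with open(fcfile_pickle, 'rb') as f:
        fc = pickle.load(f)

    # keep only edges that survive FDR correction, weighted by their imcoh value
    reject, _ = fdr_correction(fc['pvals'], alpha=alpha, method='indep')
    weight = np.where(reject, fc['imcohs'], 0)
    return weight, fc['chnAreas']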

def segfc_visual(onefile):
    # lfpdata: nchns * ntemp * nsegs
    lfpdata, chnAreas, fs = lfp_extract([onefile])
    nchns, _, nsegs = lfpdata.shape

    # ciCOH of each individual segment
    seg_ciCOHs = np.zeros(shape=(nchns, nchns, nsegs))
    for segi in range(nsegs):
        seg_ciCOHs[:, :, segi] = calc_ciCOHs_rest(np.expand_dims(lfpdata[:, :, segi], axis=2))

    # assumed aggregation: average the per-segment ciCOHs across segments to get one graph per file
    ciCOHs = np.mean(seg_ciCOHs, axis=2)

    # permutation test: use the lfp data whose ciCOHs are the largest to get distribution
    [i, j] = np.unravel_index(np.argmax(ciCOHs), shape=ciCOHs.shape)
    lfp1, lfp2 = lfpdata[i, :, :], lfpdata[j, :, :]
    _, mu, std = pval_permciCOH_rest(lfp1, lfp2, ciCOHs[i, j], shuffleN=1000)
    pvals = norm.sf(abs(ciCOHs), loc=mu, scale=std) * 2

    # multiple comparison correction, get weights
    reject, pval_corr = fdr_correction(pvals, alpha=0.05, method='indep')
    [rows, cols] = np.where(reject)
    weight = np.zeros(ciCOHs.shape)
    if len(rows) > 0:
        weight[rows, cols] = ciCOHs[rows, cols]

    # visual and save
    filename = os.path.basename(onefile)
    datestr = re.search('[0-9]{8}', filename).group()
    cond = re.search('_[a-z]*_[0-9]{8}', filename).group()[1:-9]
    save_prefix = 'all'
    saveFCGraph = os.path.join(savefolder, cond + '_' + save_prefix + '_' + datestr + '.png')
    weight_visual_save(weight,
                       chnInf=assign_coord2chnArea(area_coord_file=area_coord_file, chnAreas=chnAreas),
                       savefile=saveFCGraph, texts=None, threds_edge=None)