import numpy as np
import scot.connectivity as cn


def get_PDC(var_coef):  # var_coef has a shape of 16 x (16 * lag order)
    '''
    Takes the concatenated 16x80 VAR coefficient array and applies the PDC
    measure from the SCoT library, using nfft=64 frequency bins spanning
    0 Hz to the Nyquist frequency (128/2 = 64 Hz).

    @return: 5x16x16 array, where each of the 5 arrays represents a frequency
             band: Delta, Theta, Alpha, Beta, Gamma (in that order)
    '''
    no_of_bands = 5

    # pre-allocate a 5 x 16 x 16 nested list
    patient_pdc = []
    for x in range(no_of_bands):
        PDC_output = [None] * 16
        for i in range(len(PDC_output)):
            PDC_output[i] = [None] * 16
        patient_pdc.append(PDC_output)

    c = cn.connectivity(['PDC'], var_coef, nfft=64)
    patient = c['PDC']

    # frequency-bin ranges of the Delta, Theta, Alpha, Beta and Gamma bands
    bandwidths = [(0, 4), (4, 8), (8, 14), (14, 31), (31, 64)]
    for row in range(16):
        for column in range(16):
            for b in range(len(bandwidths)):
                # average the PDC values over the bins of this band
                lower_band = bandwidths[b][0]
                upper_band = bandwidths[b][1]
                patient_pdc[b][row][column] = np.average(
                    patient[row][column][lower_band:upper_band])

    return patient_pdc  # returns 5x16x16
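# --- Usage sketch (illustrative, not part of the original code) --------------
# get_PDC() expects the coefficient array of a fitted VAR model. With 16
# channels and a (hypothetical) model order of 5, mvar.coef has shape
# (16, 16 * 5) = (16, 80). `trials` below is random placeholder data.
import numpy as np
from scot.var import VAR

trials = np.random.randn(20, 16, 256)   # hypothetical: 20 trials x 16 channels x 256 samples
mvar = VAR(5)                            # model order 5 -> coefficient array of shape (16, 80)
mvar.fit(trials)
band_pdc = get_PDC(mvar.coef)            # 5 x 16 x 16: one 16x16 PDC matrix per band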
def testFunction(self):
    # Three sources: a <- b <- c
    # check that the functional interface returns the same results
    # as the corresponding Connectivity class methods
    b0 = np.array([[0, 0.9, 0],
                   [0, 0, 0.9],
                   [0, 0, 0]])
    identity = np.eye(3)
    nfft = 5
    measures = ['A', 'H', 'COH', 'DTF', 'PDC']
    C = connectivity.Connectivity(b=b0, c=identity, nfft=nfft)
    c = connectivity.connectivity(measures, b=b0, c=identity, nfft=nfft)
    for m in measures:
        self.assertTrue(np.all(c[m] == getattr(C, m)()))
print('computing GPDC connectivity...')

mvar = scot.var.VAR(morder)
# result: array, shape (`repeats`, n_channels, n_channels, nfft)
surr = scs.surrogate_connectivity(gcmethod, label_ts, mvar,
                                  nfft=nfft, n_jobs=njobs,
                                  repeats=n_surr)

mvar.fit(label_ts)
# mvar coefficients: (n_channels, n_channels * model_order)
# mvar covariance matrix: (n_channels, n_channels)
# result: array, shape (n_channels, n_channels, `nfft`)
cau = connectivity(gcmethod, mvar.coef, mvar.rescov, nfft=nfft)

# get the band-averaged, thresholded connectivity matrix
caus, max_cons, max_surrs = prepare_causality_matrix(
    cau, surr, freqs, nfft=nfft, sfreq=epochs.info['sfreq'],
    surr_thresh=surr_thresh)

print('Shape of causality matrix: ', caus.shape)

# get label names used for plotting
labels_fname = get_jumeg_path() + '/examples/label_names.list'
yaml_fname = get_jumeg_path() + '/examples/aparc_cortex_based_grouping.yaml'
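# --- Frequency-bin bookkeeping (illustrative sketch, not from the original) --
# SCoT evaluates the connectivity measures on `nfft` bins spanning 0 Hz to
# sfreq/2, so one bin covers sfreq / (2 * nfft) Hz. This hypothetical helper
# shows how a band given in Hz maps to bin indices before band averaging,
# mirroring the delta_F computation inside causal_analysis() below.
def band_to_bins(fmin, fmax, sfreq, nfft):
    delta_f = sfreq / (2.0 * nfft)        # width of one frequency bin in Hz
    return int(fmin / delta_f), int(fmax / delta_f)

# e.g. with sfreq = 678 Hz and SCoT's default nfft = 512, the alpha band
# (8-12 Hz) maps to roughly bins 12..18: band_to_bins(8, 12, 678, 512)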
def causal_analysis(fn_norm, method='GPDC', morder=None, repeats=1000,
                    msave=True, per=99.99, sfreq=678,
                    freqs=[(4, 8), (8, 12), (12, 18), (18, 30), (30, 40)]):
    '''
    Calculate causality matrices of real data and surrogates, and compute
    the significant causal matrix for each frequency band.

    Parameters
    ----------
    fn_norm: string
        The file name of model order estimation.
    morder: int
        The optimized model order.
    method: string
        The causality measure to compute.
    repeats: int
        Number of shufflings for the surrogates.
    msave: bool
        Whether to save the causal matrix of the whole frequency domain.
    per: float or int
        Percentile of the surrogates.
    sfreq: float
        The sampling rate.
    freqs: list
        The list of frequency bands of interest.
    '''
    import scot.connectivity_statistics as scs
    from scot.connectivity import connectivity
    import scot

    path_list = get_files_from_list(fn_norm)

    # loop across all filenames
    for fnnorm in path_list:
        cau_path = os.path.split(fnnorm)[0]
        name = os.path.basename(fnnorm)
        condition = name.split('_')[0]
        sig_path = cau_path + '/sig_cau_%d/' % morder
        set_directory(sig_path)
        fncau = fnnorm[:fnnorm.rfind('.npy')] + ',morder%d,cau.npy' % morder
        fnsurr = fnnorm[:fnnorm.rfind('.npy')] + ',morder%d,surrcau.npy' % morder

        X = np.load(fnnorm)
        X = X.transpose(2, 0, 1)
        mvar = scot.var.VAR(morder)
        surr = scs.surrogate_connectivity(method, X, mvar, repeats=repeats)
        mvar.fit(X)
        cau = connectivity(method, mvar.coef, mvar.rescov)
        if msave:
            np.save(fncau, cau)
            np.save(fnsurr, surr)

        nfft = cau.shape[-1]
        delta_F = sfreq / float(2 * nfft)
        sig_freqs = []
        nfreq = len(freqs)
        for ifreq in range(nfreq):
            print('Frequency index used..', ifreq)
            fmin = int(freqs[ifreq][0] / delta_F)
            fmax = int(freqs[ifreq][1] / delta_F)

            # average the causality and surrogates over the band's bins
            con_band = np.mean(cau[:, :, fmin:fmax + 1], axis=-1)
            np.fill_diagonal(con_band, 0)
            surr_band = np.mean(surr[:, :, :, fmin:fmax + 1], axis=-1)
            r, s, _ = surr_band.shape
            for i in range(r):
                ts = surr_band[i]
                np.fill_diagonal(ts, 0)

            con_b = con_band.flatten()
            con_b = con_b[con_b > 0]
            surr_b = surr_band.reshape(r, s * s)
            surr_b = surr_b[surr_b > 0]

            # threshold the real causality at the given surrogate percentile
            thr = np.percentile(surr_band, per)
            print('max surrogates %.4f' % thr)
            con_band[con_band < thr] = 0
            con_band[con_band >= thr] = 1

            histout = sig_path + '%s,%d-%d,distribution.png' \
                      % (condition, freqs[ifreq][0], freqs[ifreq][1])
            throut = sig_path + '%s,%d-%d,threshold.png' \
                     % (condition, freqs[ifreq][0], freqs[ifreq][1])
            _plot_hist(con_b, surr_b, histout)
            _plot_thr(con_b, thr, surr_band.max(), per, throut)

            sig_freqs.append(con_band)

        sig_freqs = np.array(sig_freqs)
        print('Saving computed arrays..')
        np.save(sig_path + '%s_sig_con_band.npy' % condition, sig_freqs)

    return
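# --- Usage sketch (illustrative, hypothetical file name and parameters) ------
# causal_analysis() loops over one or more normalised .npy files (presumably
# stored as channels x samples x trials, given the transpose to
# trials x channels x samples), fits a VAR model of order `morder`, computes
# the chosen causality measure plus surrogates, and saves the per-band
# significant connectivity matrices next to the input file.
causal_analysis('subject01_labels_norm.npy',   # hypothetical input file
                method='GPDC',
                morder=5,                       # hypothetical model order
                repeats=1000,
                per=99.99,
                sfreq=678,
                freqs=[(4, 8), (8, 12), (12, 18), (18, 30), (30, 40)])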