def test_electrodes_selection(self):
    dd = cp.Data(np.random.randn(5, 100, 4), fs=10)
    with self.assertRaises(ValueError):
        dd.select_channels([0, 2, 5])
    dd.select_channels([0, 1, 3])
    self.assertEqual(dd.channelsnr, 3)
    self.assertEqual(dd.data.shape[0], 3)
def significance(self, max_order, method="DTF", order=None, signf_threshold=0.05, Nrep=200, alpha=0.05):
    """Compute and plot the binary matrix whose positive entries mark the
    p-values of the significance matrix that fall below the threshold.

    Args:
    -----------
    method : string
        "PDC" or "DTF". Estimation method for connectivity.
    order : int
        Order of the autoregressive multivariate model. If None, the best
        order is selected with the Akaike criterion.
    max_order : int
        Max order searched by the Akaike criterion.
    signf_threshold : float
        Threshold for the p-values of the significance matrix.
    Nrep : int
        Number of resamples used to compute the significance matrix.
    alpha : float
        Type I error rate for the significance level.
    """
    if not order:
        best, crit = cp.Mvar.order_akaike(self.values, max_order)
        plt.plot(1 + np.arange(len(crit)), crit, marker='o',
                 linestyle='dashed', markersize=8, markerfacecolor='yellow')
        plt.grid()
        plt.show()
        p = best
    else:
        p = order
    print('Best model order p: {}'.format(p))

    data = cp.Data(self.values, chan_names=self.channels)
    data.fit_mvar(p, 'yw')
    if method == 'DTF':
        matrix_values = data.conn('dtf')
    else:
        matrix_values = data.conn('pdc')
    significance_matrix = data.significance(Nrep=Nrep, alpha=alpha, verbose=False)
    # binarize: 1 where the p-value is below the threshold, 0 elsewhere
    significance_matrix[significance_matrix < signf_threshold] = 1
    significance_matrix[significance_matrix != 1] = 0
    self.significance_matrix = significance_matrix
    plt.imshow(significance_matrix, cmap='Greys', interpolation='nearest')
    plt.show()
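# Hedged standalone sketch of the same pipeline using connectivipy directly,
# independent of the enclosing class. The random 5 x 1000 array, the channel
# names, and the fixed model order 5 are placeholders, not values from the
# original code.
import numpy as np
import connectivipy as cp

values = np.random.randn(5, 1000)                 # placeholder signal, channels x samples
channels = ["Fp1", "Fp2", "Cz", "O1", "O2"]

data = cp.Data(values, chan_names=channels)
data.fit_mvar(5, 'yw')                            # fixed order for brevity
data.conn('pdc')
sig = data.significance(Nrep=200, alpha=0.05, verbose=False)
binary = (sig < 0.05).astype(int)                 # 1 where below the threshold, as in the method above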
def fit_model(data, fs, resolution, method, freq=None, boot=False):
    '''Fit an MVAR model and compute a connectivity estimate via PDC or DTF.'''
    if boot:
        # bootstrap branch: resample to obtain a significance matrix
        data = connectivipy.Data(data=data, fs=fs, chan_names=get_labels_nodes(19))
        data.fit_mvar(method="yw")
        data.conn("pdc")
        res = data.significance(Nrep=200, alpha=0.05, verbose=False)
        np.fill_diagonal(res, 0)
        return res

    model = connectivipy.Mvar().fit(data, method="yw")
    if method == "dtf":
        res = connectivipy.conn.dtf_fun(model[0], model[1], fs=fs, resolution=resolution)
    elif method == "pdc":
        res = connectivipy.conn.pdc_fun(model[0], model[1], fs=fs, resolution=resolution)
    else:
        raise ValueError('Wrong method. Use "pdc" or "dtf"')
    if freq is not None:
        # keep only the requested frequency slice and drop self-connections
        res = res[freq, :, :]
        np.fill_diagonal(res, 0)
    return res
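# A small usage sketch for fit_model() above (non-bootstrap branch). The random
# 19 x 1000 array stands in for real EEG data; fs=160, resolution=100 and the
# frequency index 10 are example values, and `freq` indexes a row of the
# estimated spectrum exactly as in the function body.
import numpy as np

eeg = np.random.randn(19, 1000)                                    # channels x samples
pdc_slice = fit_model(eeg, fs=160, resolution=100, method="pdc", freq=10)
dtf_full = fit_model(eeg, fs=160, resolution=100, method="dtf")    # full spectrum, shape (100, 19, 19)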
def s(self):
    table = pd.read_csv(self.txt_name, delimiter=r'\s+')
    labels = list(table.label)
    # clean label names
    for i in range(len(labels)):
        labels[i] = labels[i].replace('..', '')
        labels[i] = labels[i].replace('.', '')
    sub_data, idx_19 = self.sub_data()
    labels_19 = [labels[i] for i in idx_19]
    data = cp.Data(sub_data, fs=self.fs, chan_names=labels_19)
    data.fit_mvar()
    pdc_values = data.conn('pdc')
    s_matrix = data.significance(Nrep=200, alpha=0.05)
    return s_matrix
def significance(self, signf_threshold=None, channels=[], order=None, Nrep=200,
                 alpha=0.05, visual=False, path=None, freq=10, name='significance'):
    """Compute the significance (p-values) of the connections between the nodes
    of the created graph, using a resampling method.

    Inputs:
    - signf_threshold: threshold on the significance matrix; connections exceeding it are excluded
    - channels: list of channels to keep for the rest of the procedure; if empty, all channels are used
    - order: order of the MVAR model
    - Nrep: number of resamples (repetitions of the resampling algorithm)
    - alpha: type I error rate (significance level), default 0.05
    - visual: if True, plot the new network
    - path: path of the .edf file to import the data from when visual is True
    - freq: sample frequency used when visual is True
    - name: name of the output plot of the network when visual is True
    """
    df = self.df
    if channels:
        df = df[channels]
    self.values = df.T.values
    self.channels = df.columns.values
    self.num_of_channels, self.num_of_samples = self.values.shape

    data = cp.Data(self.values, chan_names=self.channels)
    if order:
        self.p = order
    data.fit_mvar(self.p, self.conn_algorithm)
    if self.method == 'DTF':
        matrix_values = data.conn('dtf')
    else:
        matrix_values = data.conn('pdc')
    self.significance_matrix = data.significance(Nrep=Nrep, alpha=alpha, verbose=False)

    if signf_threshold:
        elim_indices = np.argwhere(self.significance_matrix > signf_threshold)
        try:
            self.import_data(path, channels=df.columns.values)
            self.connectivity(freq=freq, significance=elim_indices, order=self.p,
                              threshold=round(self.density, 1))
            if visual:
                self.show_graph(name)
        except Exception:
            print("Oops! That was not a valid significance threshold number. Try again...")
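# Hedged usage note for significance() above: it assumes connectivity() has
# already been run on the same instance (so that self.p, self.conn_algorithm and
# self.density exist) and that self.method was set elsewhere, e.g. at
# construction. With `g` a hypothetical instance of the enclosing graph class
# and an example .edf path, a call might look like:
#
#   g.connectivity(freq=10, order=5, threshold=0.3)
#   g.significance(signf_threshold=0.05, Nrep=200, alpha=0.05,
#                  visual=True, path="path/to/recording.edf", name="significance")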
def compute_connectivity(self, freq, method="PDC", order=None, max_order=10,
                         plot=False, resolution=100, threshold=None):
    """Compute the connectivity matrix and the binary adjacency matrix of the
    EEG data, using the PDC or DTF estimator.

    Args:
    -----------
    freq : int
        Frequency at which the connectivity matrix is evaluated.
    method : string
        "PDC" or "DTF". Estimation method for connectivity.
    order : int
        Order of the autoregressive multivariate model. If None, the best
        order is selected with the Akaike criterion.
    max_order : int
        Max order searched by the Akaike criterion.
    plot : boolean
        Whether to plot the Akaike criterion results.
    threshold : float
        Density threshold for the computation of the connectivity matrix,
        between 0 and 1.
    resolution : int
        Number of spectrum datapoints.
    """
    if not order:
        best, crit = cp.Mvar.order_akaike(self.values, max_order)
        p = best
        if plot:
            plt.plot(1 + np.arange(len(crit)), crit, marker='o',
                     linestyle='dashed', markersize=8, markerfacecolor='yellow')
            plt.grid()
            plt.show()
            print('Best model order p: {}'.format(best))
    else:
        p = order

    data = cp.Data(self.values, chan_names=self.channels)
    data.fit_mvar(p, "yw")
    # multivariate model coefficients and residual matrix
    ar, vr = data.mvar_coefficients
    if method == 'DTF':
        Adj = cp.conn.dtf_fun(ar, vr, fs=self.sample_freq, resolution=resolution)[freq, :, :]
    else:
        Adj = cp.conn.pdc_fun(ar, vr, fs=self.sample_freq, resolution=resolution)[freq, :, :]
    np.fill_diagonal(Adj, 0)

    # create graph from the adjacency matrix
    G = nx.from_numpy_matrix(np.array(Adj), create_using=nx.DiGraph)
    A = nx.adjacency_matrix(G).toarray()
    # zero the diagonal to avoid self-loops
    np.fill_diagonal(A, 0)

    # reduce graph density by iteratively removing the weakest edge
    if threshold is not None:
        while nx.density(G) > threshold:
            # find the minimum non-zero weight
            arg_min = np.argwhere(A == np.min(A[np.nonzero(A)]))
            i, j = arg_min[0][0], arg_min[0][1]
            # remove the (i, j) edge from the graph
            G.remove_edge(i, j)
            # recompute the adjacency matrix
            A = nx.adjacency_matrix(G).toarray()
            np.fill_diagonal(A, 0)

    density = nx.density(G)
    connectivity_matrix = A.copy()
    A[A > 0] = 1
    binary_adjacency_matrix = A
    self.connectivity_matrix = connectivity_matrix
    self.binary_adjacency_matrix = binary_adjacency_matrix

    # load coordinates
    self.load_channel_locations()

    # create directed binary graph with channel names as node labels
    G = nx.DiGraph(binary_adjacency_matrix)
    new_labels = {}
    for i, node in enumerate(G.nodes):
        new_labels[node] = self.channels[i]
    self.G = nx.relabel.relabel_nodes(G, new_labels, copy=True)

    # create directed weighted graph
    Gw = nx.DiGraph(connectivity_matrix)
    self.Gw = nx.relabel.relabel_nodes(Gw, new_labels, copy=True)
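# Hedged standalone sketch of the core steps of compute_connectivity() outside
# the class: estimate PDC at one frequency index, build a directed graph, and
# prune the weakest edges until a target density is reached. The random signal,
# fs=160, frequency index 10 and the 0.2 density target are placeholders.
# nx.from_numpy_matrix mirrors the method above; newer networkx versions use
# from_numpy_array instead.
import numpy as np
import networkx as nx
import connectivipy as cp

values = np.random.randn(5, 1000)
data = cp.Data(values, chan_names=["Fp1", "Fp2", "Cz", "O1", "O2"])
data.fit_mvar(5, "yw")
ar, vr = data.mvar_coefficients
adj = cp.conn.pdc_fun(ar, vr, fs=160, resolution=100)[10, :, :]   # slice at one frequency index
np.fill_diagonal(adj, 0)

G = nx.from_numpy_matrix(adj, create_using=nx.DiGraph)
A = nx.adjacency_matrix(G).toarray()
np.fill_diagonal(A, 0)
while nx.density(G) > 0.2:
    # drop the edge carrying the smallest non-zero weight
    i, j = np.argwhere(A == np.min(A[np.nonzero(A)]))[0]
    G.remove_edge(i, j)
    A = nx.adjacency_matrix(G).toarray()
    np.fill_diagonal(A, 0)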
def connectivity(self, freq, algorithm='yw', order=None, max_order=10, plot=False,
                 resolution=100, threshold=None, mode=0, significance=[]):
    """Compute the connectivity matrix of a graph using a specific
    connectivity method (DTF or PDC) after fitting an MVAR model.

    Inputs:
    - freq: sample frequency
    - algorithm: MVAR fitting algorithm (default: Yule-Walker)
    - order: MVAR model order
    - max_order: maximum order searched when selecting the best model order
    - plot: (default: False) if True, plot the criterion value against the model order
    - resolution: frequency resolution
    - threshold: (float between 0 and 1) density threshold
    - mode: (default: 0 for a directed graph, else 1 for an undirected graph)
    - significance: list of node pairs to exclude (filter out) from the analysis
      based on their significance value
    """
    self.conn_algorithm = algorithm
    if not order:
        # best, crit = cp.Mvar.order_schwartz(self.values, max_order)  # BIC
        best, crit = cp.Mvar.order_akaike(self.values, max_order)  # AIC
        if plot:
            plt.plot(1 + np.arange(len(crit)), crit, marker='o',
                     linestyle='dashed', markersize=8, markerfacecolor='yellow')
            plt.grid()
            plt.show()
        self.p = best
        print()
        print('best model order p: {}'.format(best))
        print()
    else:
        self.p = order

    data = cp.Data(self.values, chan_names=self.channels)
    data.fit_mvar(self.p, self.conn_algorithm)
    ar, vr = data.mvar_coefficients
    if self.method == 'DTF':
        conn_matrix = cp.conn.DTF()
    else:
        conn_matrix = cp.conn.PDC()
    Adj = conn_matrix.calculate(ar, vr, self.freq_sample, resolution)

    # vector of the frequencies corresponding to the spectrum resolution
    self.res = np.linspace(0, self.freq_sample / 2, resolution)
    # keep the spectrum slice closest to the requested frequency
    Adj = Adj[np.where(self.res == self.find_closest_freq(freq)), :, :].reshape(
        self.num_of_channels, self.num_of_channels)

    np.fill_diagonal(Adj, 0)
    if len(significance) > 0:
        for a, b in significance:
            Adj[a, b] = 0

    self.G = Graph.Weighted_Adjacency(Adj.tolist(), mode=mode)
    self.G.vs["label"] = list(map(lambda x: re.sub(r'\.', '', x), self.channels))
    locations = pd.read_csv("./data/channel_locations.csv")
    coords = {k[0]: (k[1], k[2]) for k in locations.values}
    self.G.vs["coords"] = [coords[k["label"]] for k in self.G.vs]

    A = np.array(self.G.get_adjacency(attribute="weight").data)
    diag = np.diag(A)
    # zero the diagonal to avoid self-loops
    np.fill_diagonal(A, 0)
    if threshold:
        # reduce graph density by iteratively removing the weakest edge
        while self.G.density() > threshold:
            arg_min = np.argwhere(A == np.min(A[np.nonzero(A)]))
            i, j = arg_min[0][0], arg_min[0][1]
            self.G.delete_edges([(i, j)])
            A = np.array(self.G.get_adjacency(attribute="weight").data)
            np.fill_diagonal(A, 0)
    np.fill_diagonal(A, diag)
    self.density = self.G.density()
    self.connectivity_matrix = A.copy()
    A[A > 0] = 1
    self.binary_adjacency_matrix = A
    self.G.vs['degree'] = self.G.degree()
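# Small sketch of the frequency selection used above: the estimator returns
# `resolution` spectral slices spanning 0..fs/2, and the slice whose frequency
# is closest to the requested one is kept. fs=160, resolution=100 and freq=10
# are example values.
import numpy as np

fs, resolution, freq = 160, 100, 10
res = np.linspace(0, fs / 2, resolution)      # frequencies associated with the slices
idx = int(np.argmin(np.abs(res - freq)))      # index of the closest frequency
closest_freq = res[idx]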
A[0, 3, 4] = 0.25 * 2**0.5
A[0, 4, 3] = -0.25 * 2**0.5
A[0, 4, 4] = 0.25 * 2**0.5

# multitrial signal generation from the matrix above:
# generate a 5-channel signal with 1000 data points
# and 5 trials using the function mvar_gen
ysig = np.zeros((5, 10**3, 5))
ysig[:, :, 0] = mvar_gen(A, 10**3)
ysig[:, :, 1] = mvar_gen(A, 10**3)
ysig[:, :, 2] = mvar_gen(A, 10**3)
ysig[:, :, 3] = mvar_gen(A, 10**3)
ysig[:, :, 4] = mvar_gen(A, 10**3)

#### connectivity analysis
data = cp.Data(ysig, 128, ["Fp1", "Fp2", "Cz", "O1", "O2"])

# you may want to plot the data (in the multitrial case only one trial is shown)
data.plot_data()

# fit the MVAR model using the Yule-Walker algorithm and order 2
data.fit_mvar(2, 'yw')

# you can capture the fitted parameters and the residual matrix
ar, vr = data.mvar_coefficients

# now we investigate connectivity using gDTF
gdtf_values = data.conn('gdtf')
gdtf_significance = data.significance(Nrep=200, alpha=0.05)
data.plot_conn('gDTF')
def test_mvar_calc(self):
    data = cp.Data(ys, 128, ["Fp1", "Fp2", "Cz", "O1", "O2"])
    data.fit_mvar(2, 'vm')
    acoef, vcoef = data.mvar_coefficients
    self.assertEqual(acoef.shape, (2, 5, 5))
    self.assertEqual(vcoef.shape, (5, 5))
def test_conn2(self):
    dat = cp.Data(ys)
    dat.fit_mvar(2, 'vm')
    estm = dat.short_time_conn('dtf', nfft=100, no=10)
    stst = dat.short_time_significance(Nrep=100, alpha=0.5, verbose=False)
    self.assertTrue(np.all(stst <= 1))
def test_conn(self):
    dat = cp.Data(ys)
    with self.assertRaises(AttributeError):
        dat.conn('dtf')
def test_resample(self):
    dd = cp.Data(np.random.randn(3, 100, 4), fs=10)
    dd.resample(5)
    self.assertEqual(dd.fs, 5)