Exemple #1
0
def compute_and_save_spectral_connectivity(data,con_method,sfreq,fmin,fmax,index = 0,mode = 'multitaper',export_to_matlab = False):
    """Compute a spectral connectivity matrix with MNE and save it to a .npy file.

    Parameters
    ----------
    data : ndarray
        Time series, shape (n_epochs, n_nodes, n_times). A 2D array
        (n_nodes, n_times) is accepted for the coherence-family methods and
        is promoted to a single epoch.
    con_method : str
        Connectivity measure passed to ``spectral_connectivity``
        (e.g. 'coh', 'imcoh', 'pli', 'wpli', ...).
    sfreq : float
        Sampling frequency in Hz.
    fmin, fmax : float
        Frequency band of interest in Hz.
    index : int
        Tag used in the output file name.
    mode : str
        Spectral estimation mode: 'multitaper' or 'cwt_morlet'.
    export_to_matlab : bool
        If True, also save a symmetrized copy of the matrix to a .mat file.

    Returns
    -------
    str | list
        Absolute path of the saved .npy file, or [] when ``mode`` is unknown.
    """
    import sys,os
    from mne.connectivity import spectral_connectivity

    import numpy as np
    from scipy.io import savemat

    print(data.shape)

    if len(data.shape) < 3:
        if con_method in ['coh','cohy','imcoh']:
            # promote a single (n_nodes, n_times) trial to one epoch
            data = data.reshape(1,data.shape[0],data.shape[1])

        # fix: original list contained 'pli' twice
        elif con_method in ['pli','plv','ppc','pli2_unbiased','wpli','wpli2_debiased']:
            # phase-based measures are only meaningful across several epochs
            print("warning, only work with epoched time series")
            sys.exit()

    if mode == 'multitaper':

        con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            data, method=con_method, sfreq=sfreq, fmin=fmin, fmax=fmax,
            faverage=True, tmin=None, mode='multitaper',
            mt_adaptive=False, n_jobs=1)

        # keep the single averaged frequency band -> (n_nodes, n_nodes)
        con_matrix = np.array(con_matrix[:,:,0])

    elif mode == 'cwt_morlet':

        frequencies = np.arange(fmin, fmax, 1)
        n_cycles = frequencies / 7.

        con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            data, method=con_method, sfreq=sfreq, faverage=True, tmin=None,
            mode='cwt_morlet', cwt_frequencies=frequencies,
            cwt_n_cycles=n_cycles, n_jobs=1)

        # also average over the time axis -> (n_nodes, n_nodes)
        con_matrix = np.mean(np.array(con_matrix[:,:,0,:]),axis = 2)

    else:

        print("Error, mode = %s not implemented" % (mode))

        return []

    print(con_matrix.shape)
    print(np.min(con_matrix),np.max(con_matrix))

    conmat_file = os.path.abspath("conmat_" + str(index) + "_" + con_method + ".npy")

    np.save(conmat_file,con_matrix)

    if export_to_matlab:

        conmat_matfile = os.path.abspath("conmat_" + str(index) + "_" + con_method + ".mat")

        # the MNE result is lower-triangular; symmetrize it for MATLAB users
        savemat(conmat_matfile,{"conmat":con_matrix + np.transpose(con_matrix)})

    return conmat_file
Exemple #2
0
def fc_mnespectral(data, sfreq, freq_bands, metrics=None, verbose=None):
    """Compute multitaper spectral connectivity for each frequency band.

    Parameters
    ----------
    data : array-like
        Epoched time series, forwarded unchanged to ``spectral_connectivity``.
    sfreq : float
        Sampling frequency in Hz.
    freq_bands : dict
        Mapping of band name -> (lower, upper) frequency tuple.
    metrics : list of str | None
        Connectivity methods to compute; defaults to the full set below.
    verbose : forwarded to ``spectral_connectivity``.

    Returns
    -------
    dict
        Mapping of band name -> connectivity result for that band.
    """
    if metrics is None:
        metrics = [
            'coh', 'cohy', 'imcoh', 'plv', 'ppc', 'pli', 'pli2_unbiased',
            'wpli', 'wpli2_debiased'
        ]

    band_connectivity = {}
    for band_name, (low, high) in freq_bands.items():
        # band-averaged multitaper estimate for every requested metric
        con = spectral_connectivity(data,
                                    method=metrics,
                                    mode='multitaper',
                                    sfreq=sfreq,
                                    fmin=low,
                                    fmax=high,
                                    faverage=True,
                                    n_jobs=1,
                                    verbose=verbose)[0]
        band_connectivity[band_name] = con

    return band_connectivity
Exemple #3
0
 def transform(self, X):
     """Compute one symmetric positive-definite connectivity matrix per epoch.

     Parameters
     ----------
     X : array-like, shape (n_epochs, n_channels, n_times)
         Epoched data; assumed to carry an MNE-style ``info`` dict when
         ``self.sfreq`` is unset — TODO confirm with callers.

     Returns
     -------
     ndarray, shape (n_epochs, n_channels, n_channels)
         One (squared, symmetrized) connectivity matrix per epoch,
         projected to the nearest positive-definite matrix if needed.
     """
     if self.sfreq is None:
         # lazily pick up the sampling rate from the data's MNE info
         self.sfreq = X.info['sfreq']
     n_epochs, n_channels = X.shape[0], X.shape[1]
     epc = np.empty(shape=(n_epochs, n_channels, n_channels))
     for i in range(n_epochs):
         # spectral_connectivity expects (n_epochs, n_channels, n_times),
         # so each epoch is processed as a batch of one
         c = spectral_connectivity(X[i,:,:].reshape(1, n_channels, -1),
                                   method=self.method, sfreq=self.sfreq, 
                                   faverage=True, fmin=self.fmin, fmax=self.fmax,
                                   mt_bandwidth=self.mt_bandwidth, 
                                   mt_adaptive=self.mt_adaptive,
                                   mt_low_bias=self.mt_low_bias, 
                                   cwt_freqs=self.cwt_freqs,
                                   cwt_n_cycles=self.cwt_n_cycles, 
                                   block_size=self.block_size, 
                                   n_jobs=self.n_jobs, 
                                   verbose=self.verbose)
         # for coherence, the diagonal is zero. Add the identity?
         # c = np.squeeze(c[0]).T + np.squeeze(c[0])
         # c = np.power(np.squeeze(c[0]).T + np.squeeze(c[0]) + np.eye(n_channels), 2)
         # symmetrize the lower-triangular estimate, fill the empty diagonal
         # with the identity, then square the matrix to push it towards SPD
         c = np.linalg.matrix_power(np.squeeze(c[0]).T + np.squeeze(c[0]) + np.eye(n_channels), 2)
         if not isPD2(c):
             # project onto the nearest positive-definite matrix
             c = nearestPD(c)
         epc[i,:,:] = c
     return epc
Exemple #4
0
def calc_electrodes_coh(subject, conditions, mat_fname, t_max, from_t_ind, to_t_ind, sfreq=1000, fmin=55, fmax=110, bw=15,
                        dt=0.1, window_len=0.1, n_jobs=6):
    """Compute sliding-window multitaper coherence between electrodes per condition.

    Parameters
    ----------
    subject : str
        Subject identifier used to locate the electrode .mat file.
    conditions : sequence of str
        Keys of the conditions inside the .mat file.
    mat_fname : str
        File name of the electrode time series .mat file.
    t_max : float
        Total duration (s) covered by the sliding windows.
    from_t_ind, to_t_ind : int
        Sample range kept after downsampling.
    sfreq, fmin, fmax, bw : numeric
        Sampling frequency, band limits and multitaper bandwidth.
    dt, window_len : float
        Step and length (s) of each sliding window.
    n_jobs : int
        Parallel jobs for ``spectral_connectivity``.

    Returns
    -------
    ndarray, shape (n_elecs, n_elecs, n_windows, 2)
        Coherence per window and condition; also saved to disk each condition.
    """
    from mne.connectivity import spectral_connectivity
    import time

    input_file = op.join(SUBJECTS_DIR, subject, 'electrodes', mat_fname)
    d = sio.loadmat(input_file)
    output_file = op.join(MMVT_DIR, subject, 'electrodes_coh.npy')
    # fix: np.linspace requires an integer sample count; t_max / dt is a float
    windows = np.linspace(0, t_max - dt, int(round(t_max / dt)))
    for cond, data in enumerate([d[cond_name] for cond_name in conditions]):
        if cond == 0:
            # n_elecs x n_elecs x n_windows x n_conditions (2 conditions)
            coh_mat = np.zeros((data.shape[1], data.shape[1], len(windows), 2))
            # coh_mat = np.load(output_file)
            # continue
        ds_data = downsample_data(data)
        ds_data = ds_data[:, :, from_t_ind:to_t_ind]
        now = time.time()
        for win, tmin in enumerate(windows):
            print('cond {}, tmin {}'.format(cond, tmin))
            utils.time_to_go(now, win + 1, len(windows))
            con_cnd, _, _, _, _ = spectral_connectivity(
                ds_data, method='coh', mode='multitaper', sfreq=sfreq,
                fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
                tmin=tmin, tmax=tmin + window_len)
            # average over the frequency bins inside the band
            con_cnd = np.mean(con_cnd, axis=2)
            coh_mat[:, :, win, cond] = con_cnd
            # plt.matshow(con_cnd)
            # plt.show()
        np.save(output_file[:-4], coh_mat)
    return coh_mat
def calc_coh(subject, conditions, task, meg_electordes_names, meg_electrodes_data, tmin=0, tmax=2.5, sfreq=1000, fmin=55, fmax=110, bw=15, n_jobs=6):
    """Compute electrode coherence restricted to the MEG-matched electrodes.

    The electrodes in the .mat file are pruned and reordered to match
    ``meg_electordes_names`` so the resulting coherence matrix is directly
    comparable with the MEG-based one.

    Returns
    -------
    ndarray
        Coherence matrix of the LAST condition; the full
        (n_elecs, n_elecs, 2) array is saved to ``electrodes_coh.npy``.
    """
    input_file = op.join(ELECTRODES_DIR, subject, task, 'electrodes_data_trials.mat')
    output_file = op.join(ELECTRODES_DIR, subject, task, 'electrodes_coh.npy')
    d = sio.loadmat(input_file)
    # Remove and sort the electrodes according to the meg_electordes_names
    electrodes = get_electrodes_names(subject, task)
    electrodes_to_remove = set(electrodes) - set(meg_electordes_names)
    indices_to_remove = [electrodes.index(e) for e in electrodes_to_remove]
    # fix: scipy.delete was a deprecated alias removed from modern SciPy
    electrodes = np.delete(electrodes, indices_to_remove).tolist()
    electrodes_indices = np.array([electrodes.index(e) for e in meg_electordes_names])
    electrodes = np.array(electrodes)[electrodes_indices].tolist()
    assert(np.all(electrodes==meg_electordes_names))

    for cond, data in enumerate([d[conditions[0]], d[conditions[1]]]):
        data = np.delete(data, indices_to_remove, 1)
        data = data[:, electrodes_indices, :]
        data = downsample_data(data)
        # trim to the same number of time points as the MEG data
        data = data[:, :, :meg_electrodes_data.shape[2]]
        if cond == 0:
            coh_mat = np.zeros((data.shape[1], data.shape[1], 2))

        con_cnd, _, _, _, _ = spectral_connectivity(
            data, method='coh', mode='multitaper', sfreq=sfreq,
            fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
            tmin=tmin, tmax=tmax)
        # average over the frequency bins inside the band
        con_cnd = np.mean(con_cnd, axis=2)
        coh_mat[:, :, cond] = con_cnd
    np.save(output_file[:-4], coh_mat)
    return con_cnd
def h(raw_EEG_data, method, fmin, fmax):
    """Band-averaged connectivity between the first 14 channels of a raw recording.

    Fixed-length pseudo-epochs are cut from the continuous data, multitaper
    connectivity is computed with ``method`` in the [fmin, fmax] band, and the
    band-averaged 14 x 14 connectivity matrix is returned.

    Parameters
    ----------
    raw_EEG_data : mne.io.Raw
        Continuous recording; its ``info['sfreq']`` is used as sampling rate.
    method : str
        Connectivity measure passed to ``spectral_connectivity``.
    fmin, fmax : float
        Frequency band of interest in Hz.

    Returns
    -------
    ndarray, shape (14, 14)
    """
    sfreq = raw_EEG_data.info['sfreq']  # the sampling frequency

    window = 30 * sfreq
    epoch_size = 1000

    last_samp = int(raw_EEG_data.last_samp - window / 3)

    # evenly spaced artificial event onsets
    t_events = np.arange(window, min(50000 + window, last_samp), epoch_size)

    # fix: np.int was removed in NumPy 1.24; the builtin int is the right dtype
    events = np.zeros((len(t_events), 3), dtype=int)
    events[:, 0] = t_events
    events[:, 2] = 1  # ID of the event

    event_id, tmin, tmax = 1, -0.2, 0.5

    epochs = mne.Epochs(raw_EEG_data, events, event_id, tmin, tmax, proj=False,
                        baseline=(None, 0), preload=True)

    tmin = 0.0  # exclude the baseline period

    con, freqs, times, n_epochs, n_tapers = spectral_connectivity(
        epochs, method=method, mode='multitaper', sfreq=sfreq, fmin=fmin, fmax=fmax,
        faverage=True, tmin=tmin, mt_adaptive=False, n_jobs=1)

    # keep the first 14 channels and the single averaged frequency band;
    # equivalent to the original nested-loop extraction
    return np.array(con[:14, :14, 0])
def envelope_coherence(se_data, seed_l, seed_r, fmin, fmax):

        '''
        Real part of the coherency between the log power envelopes of two sources.

        se_data == used adaptive linear spatial filtering (beamforming)
        to estimate the spectral amplitude and phase of neuronal signals at the source
        level
        Example:
        seed_l = Index of Left somatosensory cortex source estimate data
        seed_r = Index of Right somatosensory cortex source estimate data
        fmin, fmax are not used directly here; each co-variation frequency is
        analysed in its own narrow [freq, freq + 0.5] band.
        Returns the stacked per-frequency correlation array (see note at the end).
        '''
        # keep only the two seed time courses
        se_data = se_data.data[[seed_l,seed_r]].copy()

        # logarithm of the squared amplitude envelopes (power envelopes)
        data_squared = np.abs(se_data) * np.abs(se_data)
        data_mag = np.log(data_squared)
    
        log_range = np.arange(-1.5,1.1,0.1)
        covar_freqs = [math.pow(10,val) for val in log_range]
        '''
        We chose a spectral bandwidth of (σf = f * 3.15) and spaced the center frequencies log-
        arithmically according to the exponentiation of the base 10 with exponents ranging from −1.5 in steps of 0.1
        We derived spectral estimates in successive half-overlapping temporal windows that cov-
        ered ±3 σ t . From these complex numbers, we derived the coherency between power envelopes and 
        took the real part of coherency as the frequency-specific measure of correlation
        '''
        covar_freq_list = []
        # NOTE(review): sampling frequency is hard-coded — confirm it matches se_data
        sfreq = 1000
        for freq in covar_freqs:
            sigma = 3.15 * freq
            wvlt = morlet(sfreq, [freq], sigma=sigma, n_cycles=7)
            spectral_estimate = cwt(data_mag, wvlt)
            # single wavelet -> drop the frequency axis
            spectral_estimate = spectral_estimate[:,0,:]

            spectral_estimate_squared = np.abs(spectral_estimate) * np.abs(spectral_estimate)
            power_envelope = np.log(spectral_estimate_squared)
            # add an epoch axis: spectral_connectivity expects 3D input
            power_envelope = power_envelope[np.newaxis,:,:]

            coherency, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                power_envelope, fmin=freq, fmax=freq+0.5, method='cohy',faverage=True, sfreq=sfreq, n_jobs=4)
            
            # real part of coherency = frequency-specific correlation measure
            coherence_corr = np.real(coherency)
            # pick the (seed_r, seed_l) entry of the lower-triangular result
            coherence_corr = coherence_corr[1,:,:][0]
            covar_freq_list.append(coherence_corr)

        coherence_correlation = np.vstack(covar_freq_list)
        '''
        coherence_correlation.shape = (26,21)
        
        26 is the co-variation frequency (x-axis) [0.032 - 10]
        log_range = np.arange(-1.5,1.1,0.1)
        covar_freqs = [math.pow(10,val) for val in log_range]
        
        21 is the carrier freqeuncy (y-axis) [4 - 128]
        log_range = np.arange(2,7.25,0.25)
        carrier_freqs = [math.pow(2,val) for val in log_range]
        '''
        return coherence_correlation
Exemple #8
0
def multiple_windowed_spectral_proc(ts_file,sfreq,freq_band,con_method):
    """Compute a connectivity matrix for every trial and window of a 4D series.

    Parameters
    ----------
    ts_file : str
        Path to a .npy file with shape
        (nb_trials, nb_windows, nb_nodes, nb_timepoints).
    sfreq : float
        Sampling frequency in Hz.
    freq_band : (float, float)
        Lower and upper frequency of the band of interest.
    con_method : str
        Connectivity measure passed to ``spectral_connectivity``.

    Returns
    -------
    str | list
        Path of the saved 4D connectivity array, or [] when the input does
        not have 4 dimensions.
    """
    import numpy as np
    import os

    from mne.connectivity import spectral_connectivity

    all_data = np.load(ts_file)

    print(all_data.shape)

    print(freq_band)

    if len(all_data.shape) != 4:

        print("Warning, all_data should have 4 dimensions: nb_trials * nb_wondows * nb_nodes * nb_timepoints")

        return []

    all_con_matrices = []

    for i in range(all_data.shape[0]):

        trial_con_matrices = []

        for j in range(all_data.shape[1]):

            cur_data = all_data[i,j,:,:]

            print(cur_data.shape)

            # spectral_connectivity expects (n_epochs, n_nodes, n_times)
            data = cur_data.reshape(1,cur_data.shape[0],cur_data.shape[1])

            con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
                data, method=con_method, mode='multitaper', sfreq=sfreq,
                fmin=freq_band[0], fmax=freq_band[1], faverage=True, tmin=None,
                mt_adaptive=False, n_jobs=1)

            print(con_matrix.shape)

            # keep the single averaged band -> (n_nodes, n_nodes)
            con_matrix = np.array(con_matrix[:,:,0])
            print(con_matrix.shape)

            print(np.min(con_matrix),np.max(con_matrix))

            trial_con_matrices.append(con_matrix)

        all_con_matrices.append(trial_con_matrices)

    np_all_con_matrices = np.array(all_con_matrices)

    print(np_all_con_matrices.shape)

    conmat_file = os.path.abspath("multiple_windowed_conmat_"+ con_method + ".npy")

    np.save(conmat_file,np_all_con_matrices)

    return conmat_file
def calc_wpli_over_time(epochs):
    """Time-resolved debiased WPLI (8-30 Hz, Morlet wavelets), saved as .npz."""
    wavelet_freqs = np.arange(8, 30, 1)
    wavelet_cycles = wavelet_freqs / 4.
    epochs.crop(-0.7, 0.7)
    con, freqs, times, _, _ = spectral_connectivity(
        epochs, method='wpli2_debiased', mode='cwt_morlet',
        sfreq=epochs.info['sfreq'], cwt_frequencies=wavelet_freqs,
        cwt_n_cycles=wavelet_cycles, n_jobs=n_jobs)

    out_name = '{}_{}_wpli' .format(epochs.info['subject_info'], epochs.info['cond'])
    np.savez(op.join(study_path, 'results', 'wpli', out_name),
             con=con, times=epochs.times, freqs=wavelet_freqs, nave=len(epochs),
             info=epochs.info, chans=epochs.info['ch_names'])
def calc_wpli_over_epochs(epochs):
    """Band-averaged debiased WPLI over epochs for five canonical bands, saved as .npz."""
    band_lows = (1, 4, 8, 13, 30)
    band_highs = (4, 7, 12, 30, 40)

    epochs.crop(-0.7, 0.7)
    con, freqs, times, _, _ = spectral_connectivity(
        epochs, method='wpli_debiased', mode='multitaper',
        fmin=band_lows, fmax=band_highs, faverage=True,
        mt_adaptive=False, n_jobs=n_jobs, verbose=False)

    out_name = '{}_{}_dwpli_epochs' .format(epochs.info['subject_info'], epochs.info['cond'])
    np.savez(op.join(study_path, 'results', 'wpli', out_name),
             con=con, times=epochs.times, freqs=(band_lows, band_highs),
             nave=len(epochs), info=epochs.info, chans=epochs.info['ch_names'])
Exemple #11
0
def calc_meg_electrodes_coh(subject, tmin=0, tmax=2.5, sfreq=1000, fmin=55, fmax=110, bw=15, n_jobs=6):
    """Per-condition multitaper coherence of MEG-derived electrode time series.

    NOTE(review): ``subject`` is unused — the paths rely on the module-level
    ``mri_subject`` and ``task``; confirm whether ``subject`` should be used.

    Returns
    -------
    ndarray
        Coherence matrix of the LAST condition; the full
        (n_elecs, n_elecs, n_conditions) array is saved next to the input.
    """
    input_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts.npy')
    output_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts_coh.npy')
    data = np.load(input_file)
    for cond in range(data.shape[3]):
        data_cond = data[:, :, :, cond]
        if cond == 0:
            # one slice per condition instead of the previous hard-coded 2
            coh_mat = np.zeros((data_cond.shape[1], data_cond.shape[1], data.shape[3]))
        con_cnd, _, _, _, _ = spectral_connectivity(
            data_cond, method='coh', mode='multitaper', sfreq=sfreq,
            fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
            tmin=tmin, tmax=tmax)
        # average over the frequency bins inside the band
        con_cnd = np.mean(con_cnd, axis=2)
        coh_mat[:, :, cond] = con_cnd
    np.save(output_file[:-4], coh_mat)
    return con_cnd
Exemple #12
0
def multiple_spectral_proc(ts_file,sfreq,freq_band,con_method):
    """Compute and save one connectivity matrix per sample of a 3D time series.

    Parameters
    ----------
    ts_file : str
        Path to a .npy file with shape (nb_samples, nb_nodes, nb_timepoints).
    sfreq : float
        Sampling frequency in Hz.
    freq_band : (float, float)
        Lower and upper frequency of the band of interest.
    con_method : str
        Connectivity measure passed to ``spectral_connectivity``.

    Returns
    -------
    list of str
        Paths of the saved per-sample .npy files, or [] when the input does
        not have 3 dimensions.
    """
    import numpy as np
    import os

    from mne.connectivity import spectral_connectivity

    all_data = np.load(ts_file)

    print(all_data.shape)

    print(freq_band)

    if len(all_data.shape) != 3:
        print("Warning, all_data should have several samples")

        return []

    conmat_files = []

    for i in range(all_data.shape[0]):

        cur_data = all_data[i,:,:]

        # spectral_connectivity expects (n_epochs, n_nodes, n_times)
        data = cur_data.reshape(1,cur_data.shape[0],cur_data.shape[1])

        print(data.shape)

        con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(
            data, method=con_method, mode='multitaper', sfreq=sfreq,
            fmin=freq_band[0], fmax=freq_band[1], faverage=True, tmin=None,
            mt_adaptive=False, n_jobs=1)

        # keep the single averaged band -> (n_nodes, n_nodes)
        con_matrix = np.array(con_matrix[:,:,0])

        print(con_matrix.shape)
        print(np.min(con_matrix),np.max(con_matrix))

        conmat_file = os.path.abspath("conmat_"+ con_method + "_" + str(i) + ".npy")

        np.save(conmat_file,con_matrix)

        conmat_files.append(conmat_file)

    return conmat_files
def compute_channel_connectivity(epochs: Epochs, spectrum_mode: str,
                                 method: str,
                                 n_jobs: int) -> np.ndarray:
    """
    Compute channel level connectivity matrix from Epochs instance.
    Returns the computed connectivity matrix (n_freqs, n_signals, n_signals).

    Args:
        str spectrum_mode: Valid estimation mode 'fourier' or 'multitaper'
        Epochs epochs: Epochs extracted from a Raw instance
        str method: connectivity estimation method
        int n_jobs: number of epochs to process in parallel
    :return: np.ndarray con: The computed connectivity matrix with a shape of
    (n_freqs, n_signals, n_signals).

    See Also
    --------
    For frequency-decomposition and frequency bin reference:
    https://docs.scipy.org/doc/numpy/reference/generated/numpy.fft.rfftfreq.html
    """
    # fix: the return annotation claimed Tuple[np.ndarray, np.ndarray] but a
    # single array is returned (matching the docstring above).
    # spacing between frequency bins
    spacing = epochs.info['sfreq'] / epochs.get_data().shape[-1]
    frequencies = frequency_bands[spectrum_mode].values()
    low_cutoff = tuple(band[0] for band in frequencies)
    # stop each band one bin below its upper edge so bands don't overlap
    high_cutoff = tuple(band[1] - spacing for band in frequencies)

    con, _, _, _, _ = spectral_connectivity(data=epochs,
                                            method=method,
                                            sfreq=epochs.info['sfreq'],
                                            mode=spectrum_mode,
                                            fmin=low_cutoff,
                                            fmax=high_cutoff,
                                            faverage=True,
                                            n_jobs=n_jobs,
                                            verbose=True)

    # from shape of (n_signals, n_signals, n_freqs) to
    # (n_freqs, n_signals, n_signals)
    con = np.transpose(con, (2, 0, 1))
    # magnitude only: some methods (e.g. 'cohy', 'imcoh') are complex/signed
    con = np.abs(con)
    return con
Exemple #14
0
def calc_meg_electrodes_coh_windows(subject, tmin=0, tmax=2.5, sfreq=1000,
        freqs = ((8, 12), (12, 25), (25,55), (55,110)), bw=15, dt=0.1, window_len=0.2, n_jobs=6):
    """Sliding-window, multi-band coherence of MEG-derived electrode time series.

    NOTE(review): ``subject`` is unused — the paths rely on the module-level
    ``mri_subject`` and ``task``; confirm whether ``subject`` should be used.

    Returns
    -------
    ndarray
        The LAST computed window/band matrix; the full array of shape
        (n_elecs, n_elecs, n_windows, n_bands, n_conditions) is saved to disk.
    """
    input_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts.npy')
    output_file = op.join(ELECTRODES_DIR, mri_subject, task, 'meg_electrodes_ts_coh_windows_{}.npy'.format(window_len))
    data = np.load(input_file)
    # fix: np.linspace requires an integer sample count; tmax / dt is a float
    windows = np.linspace(tmin, tmax - dt, int(round(tmax / dt)))
    for cond in range(data.shape[3]):
        data_cond = data[:, :, :, cond]
        if cond == 0:
            # one slice per condition instead of the previous hard-coded 2
            coh_mat = np.zeros((data_cond.shape[1], data_cond.shape[1], len(windows), len(freqs), data.shape[3]))

        for freq_ind, (fmin, fmax) in enumerate(freqs):
            # NOTE: `tmin` is reused as the window loop variable below;
            # `windows` was already built from the original argument, so this
            # is safe, if confusing.
            for win, tmin in enumerate(windows):
                con_cnd, _, _, _, _ = spectral_connectivity(
                    data[:, :, :, cond], method='coh', mode='multitaper', sfreq=sfreq,
                    fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
                    tmin=tmin, tmax=tmin + window_len)
                # average over the frequency bins inside the band
                con_cnd = np.mean(con_cnd, axis=2)
                coh_mat[:, :, win, freq_ind, cond] = con_cnd
    np.save(output_file[:-4], coh_mat)
    return con_cnd
Exemple #15
0
def calc_electrodes_coh_windows(subject, input_fname, conditions, bipolar, meg_electordes_names, meg_electrodes_data, tmin=0, tmax=2.5, sfreq=1000,
                freqs=((8, 12), (12, 25), (25,55), (55,110)), bw=15, dt=0.1, window_len=0.2, n_jobs=6):
    """Sliding-window, multi-band electrode coherence matched to MEG channels.

    Accepts either a .mat input (conditions given by ``conditions``) or a
    .npz input (conditions and electrode names embedded in the file). The
    result array (n_elecs, n_elecs, n_windows, n_bands, n_conditions) is
    saved to ``output_file``; nothing is returned.
    """
    output_file = op.join(ELECTRODES_DIR, subject, task, 'electrodes_coh_{}windows_{}.npy'.format('bipolar_' if bipolar else '', window_len))
    if input_fname[-3:] == 'mat':
        # fix: the original referenced an undefined name `matlab_input_file`
        d = sio.loadmat(input_fname)
        conds_data = [d[conditions[0]], d[conditions[1]]]
        electrodes = get_electrodes_names(subject, task)
    elif input_fname[-3:] == 'npz':
        d = np.load(input_fname)
        conds_data = d['data']
        conditions = d['conditions']
        electrodes = d['names'].tolist()

    indices_to_remove, electrodes_indices = electrodes_tp_remove(electrodes, meg_electordes_names)
    # fix: np.linspace requires an integer sample count; tmax / dt is a float
    windows = np.linspace(tmin, tmax - dt, int(round(tmax / dt)))
    for cond, data in enumerate(conds_data):
        # fix: scipy.delete is a removed deprecated alias of np.delete
        data = np.delete(data, indices_to_remove, 1)
        data = data[:, electrodes_indices, :]
        data = downsample_data(data)
        # trim to the same number of time points as the MEG data
        data = data[:, :, :meg_electrodes_data.shape[2]]
        if cond == 0:
            coh_mat = np.zeros((data.shape[1], data.shape[1], len(windows), len(freqs), 2))
            # coh_mat = np.load(output_file)
            # continue
        now = time.time()
        for freq_ind, (fmin, fmax) in enumerate(freqs):
            for win, tmin in enumerate(windows):
                try:
                    print('cond {}, tmin {}'.format(cond, tmin))
                    utils.time_to_go(now, win + 1, len(windows))
                    con_cnd, _, _, _, _ = spectral_connectivity(
                        data, method='coh', mode='multitaper', sfreq=sfreq,
                        fmin=fmin, fmax=fmax, mt_adaptive=True, n_jobs=n_jobs, mt_bandwidth=bw, mt_low_bias=True,
                        tmin=tmin, tmax=tmin + window_len)
                    con_cnd = np.mean(con_cnd, axis=2)
                    coh_mat[:, :, win, freq_ind, cond] = con_cnd
                # fix: bare except also swallowed KeyboardInterrupt/SystemExit
                except Exception:
                    print('Error with freq {} and win {}'.format(freq_ind, win))
    np.save(output_file[:-4], coh_mat)
def word_connectivity(wordEpochs,indices,step=2):
    """Time-resolved coherence for successive groups of `step` word epochs.

    Relies on the module-level names cwt_frequencies, cwt_n_cycles,
    epochLength, samplingRate and NJOBS.

    Returns
    -------
    ndarray, shape (n_words // step, n_chans, n_chans, n_freqs, n_times)
    """
    wordconnectivity = numpy.empty((int(wordEpochs.shape[0]/step), wordEpochs.shape[1], wordEpochs.shape[1],
                                    len(cwt_frequencies), epochLength*samplingRate))
    # this array is wordEpochs x chans x chans x freqs x timeSamples
    print('wordconnectivity', wordconnectivity.shape)
    # drop trailing epochs that do not fill a complete group of `step`
    total = wordEpochs.shape[0] - wordEpochs.shape[0] % step
    # fix: use floor division — this was Python 2 integer `/`
    for i in range(0, total // step):
        word = wordEpochs[step*i:step*(i+1)]
        if i == 0:
            print('word', word.shape)
        if step == 1:
            # spectral_connectivity needs a 3D (epochs, chans, times) array
            word = word.reshape((1, word.shape[0], word.shape[1]))
        if i == 0:
            if step == 1:
                print('reshaped word', word.shape)
            print('cwt_frequencies', cwt_frequencies.shape)
            print('cwt_n_cycles', cwt_n_cycles.shape)
        if i % 200 == 0:
            print('Epoch %d/%d (%d)' % (i, total // step, total))
        wordconnectivity[i], freqs, times, _, _ = spectral_connectivity(word, indices=indices,
                                                                        method='coh', mode='cwt_morlet', sfreq=samplingRate,
                                                                        cwt_frequencies=cwt_frequencies, cwt_n_cycles=cwt_n_cycles, n_jobs=NJOBS, verbose='WARNING')
    return(wordconnectivity)
# Now we are ready to compute the connectivity in the alpha band. Notice
# from the status messages, how mne-python: 1) reads an epoch from the raw
# file, 2) applies SSP and baseline correction, 3) computes the inverse to
# obtain a source estimate, 4) averages the source estimate to obtain a
# time series for each label, 5) includes the label time series in the
# connectivity computation, and then moves to the next epoch. This
# behaviour is because we are using generators and allows us to
# compute connectivity in computationally efficient manner where the amount
# of memory (RAM) needed is independent from the number of epochs.
fmin = 8.
fmax = 13.
sfreq = raw.info['sfreq']  # the sampling frequency

# debiased squared WPLI, averaged into a single 8-13 Hz band (faverage=True)
con, freqs, times, n_epochs, n_tapers = spectral_connectivity(label_ts,
        method='wpli2_debiased', mode='multitaper', sfreq=sfreq, fmin=fmin,
        fmax=fmax, faverage=True, mt_adaptive=True, n_jobs=2)

# con is a 3D array, get the connectivity for the first (and only) freq. band
con = con[:, :, 0]

# Now, we visualize the connectivity using a circular graph layout

# First, we reorder the labels based on their location in the left hemi
label_names = [label.name for label in labels]

lh_labels = [name for name in label_names if name.endswith('lh')]

# Get the y-location of the label
label_ypos = list()
for name in lh_labels:
# Use 'MEG 2343' as seed
seed_ch = 'MEG 2343'
picks_ch_names = [raw.ch_names[i] for i in picks]

# Create seed-target indices for connectivity computation
seed = picks_ch_names.index(seed_ch)
targets = np.arange(len(picks))
indices = seed_target_indices(seed, targets)

# Define wavelet frequencies and number of cycles
cwt_freqs = np.arange(7, 30, 2)
cwt_n_cycles = cwt_freqs / 7.

# Run the connectivity analysis using 2 parallel jobs
sfreq = raw.info['sfreq']  # the sampling frequency
con, freqs, times, _, _ = spectral_connectivity(
    epochs, indices=indices,
    method='wpli2_debiased', mode='cwt_morlet', sfreq=sfreq,
    cwt_freqs=cwt_freqs, cwt_n_cycles=cwt_n_cycles, n_jobs=1)

# Mark the seed channel with a value of 1.0, so we can see it in the plot
con[np.where(indices[1] == seed)] = 1.0

# Show topography of connectivity from seed
# NOTE(review): `title` is never passed to plot_topo below — confirm intent
title = 'WPLI2 - Visual - Seed %s' % seed_ch

layout = mne.find_layout(epochs.info, 'meg')  # use full layout

# wrap the seed connectivity in an AverageTFR so the TFR topo plot can show it
tfr = AverageTFR(epochs.info, con, times, freqs, len(epochs))
tfr.plot_topo(fig_facecolor='w', font_color='k', border='k')
Exemple #19
0
def test_spectral_connectivity():
    """Test frequency-domain connectivity methods"""
    # Use a case known to have no spurious correlations (it would bad if
    # nosetests could randomly fail):
    np.random.seed(0)

    sfreq = 50.
    n_signals = 3
    n_epochs = 10
    n_times = 500

    tmin = 0.
    tmax = (n_times - 1) / sfreq
    data = np.random.randn(n_epochs, n_signals, n_times)
    times_data = np.linspace(tmin, tmax, n_times)
    # simulate connectivity from 5Hz..15Hz
    fstart, fend = 5.0, 15.0
    for i in range(n_epochs):
        data[i, 1, :] = band_pass_filter(data[i, 0, :], sfreq, fstart, fend)
        # add some noise, so the spectrum is not exactly zero
        data[i, 1, :] += 1e-2 * np.random.randn(n_times)

    # First we test some invalid parameters:
    assert_raises(ValueError, spectral_connectivity, data, method='notamethod')
    assert_raises(ValueError, spectral_connectivity, data,
                  mode='notamode')

    # test invalid fmin fmax settings
    assert_raises(ValueError, spectral_connectivity, data, fmin=10,
                  fmax=10 + 0.5 * (sfreq / float(n_times)))
    assert_raises(ValueError, spectral_connectivity, data, fmin=10, fmax=5)
    assert_raises(ValueError, spectral_connectivity, data, fmin=(0, 11),
                  fmax=(5, 10))
    assert_raises(ValueError, spectral_connectivity, data, fmin=(11,),
                  fmax=(12, 15))

    methods = ['coh', 'imcoh', 'cohy', 'plv', 'ppc', 'pli', 'pli2_unbiased',
               'wpli', 'wpli2_debiased', 'coh']

    modes = ['multitaper', 'fourier', 'cwt_morlet']

    # define some frequencies for cwt
    cwt_frequencies = np.arange(3, 24.5, 1)

    for mode in modes:
        for method in methods:
            if method == 'coh' and mode == 'multitaper':
                # only check adaptive estimation for coh to reduce test time
                check_adaptive = [False, True]
            else:
                check_adaptive = [False]

            if method == 'coh' and mode == 'cwt_morlet':
                # so we also test using an array for num cycles
                cwt_n_cycles = 7. * np.ones(len(cwt_frequencies))
            else:
                cwt_n_cycles = 7.

            for adaptive in check_adaptive:

                if adaptive:
                    mt_bandwidth = 1.
                else:
                    mt_bandwidth = None

                con, freqs, times, n, _ = spectral_connectivity(data,
                        method=method, mode=mode,
                        indices=None, sfreq=sfreq, mt_adaptive=adaptive,
                        mt_low_bias=True, mt_bandwidth=mt_bandwidth,
                        cwt_frequencies=cwt_frequencies,
                        cwt_n_cycles=cwt_n_cycles)

                assert_true(n == n_epochs)
                assert_array_almost_equal(times_data, times)

                if mode == 'multitaper':
                    upper_t = 0.95
                    lower_t = 0.5
                else:
                    # other estimates have higher variance
                    upper_t = 0.8
                    lower_t = 0.75

                # test the simulated signal
                if method == 'coh':
                    idx = np.searchsorted(freqs, (fstart + 1, fend - 1))
                    # we see something for zero-lag
                    assert_true(np.all(con[1, 0, idx[0]:idx[1]] > upper_t))

                    if mode != 'cwt_morlet':
                        idx = np.searchsorted(freqs, (fstart - 1, fend + 1))
                        assert_true(np.all(con[1, 0, :idx[0]] < lower_t))
                        assert_true(np.all(con[1, 0, idx[1]:] < lower_t))
                elif method == 'cohy':
                    idx = np.searchsorted(freqs, (fstart + 1, fend - 1))
                    # imaginary coh will be zero
                    assert_true(np.all(np.imag(con[1, 0, idx[0]:idx[1]])
                                < lower_t))
                    # we see something for zero-lag
                    assert_true(np.all(np.abs(con[1, 0, idx[0]:idx[1]])
                                > upper_t))

                    idx = np.searchsorted(freqs, (fstart - 1, fend + 1))
                    if mode != 'cwt_morlet':
                        assert_true(np.all(np.abs(con[1, 0, :idx[0]])
                                    < lower_t))
                        assert_true(np.all(np.abs(con[1, 0, idx[1]:])
                                    < lower_t))
                elif method == 'imcoh':
                    idx = np.searchsorted(freqs, (fstart + 1, fend - 1))
                    # imaginary coh will be zero
                    assert_true(np.all(con[1, 0, idx[0]:idx[1]] < lower_t))
                    idx = np.searchsorted(freqs, (fstart - 1, fend + 1))
                    assert_true(np.all(con[1, 0, :idx[0]] < lower_t))
                    assert_true(np.all(con[1, 0, idx[1]:] < lower_t))

                # compute same connections using indices and 2 jobs,
                # also add a second method
                indices = tril_indices(n_signals, -1)

                test_methods = (method, _CohEst)
                combo = True if method == 'coh' else False
                stc_data = _stc_gen(data, sfreq, tmin)
                con2, freqs2, times2, n2, _ = spectral_connectivity(stc_data,
                        method=test_methods, mode=mode, indices=indices,
                        sfreq=sfreq, mt_adaptive=adaptive, mt_low_bias=True,
                        mt_bandwidth=mt_bandwidth, tmin=tmin, tmax=tmax,
                        cwt_frequencies=cwt_frequencies,
                        cwt_n_cycles=cwt_n_cycles, n_jobs=2)

                assert_true(isinstance(con2, list))
                assert_true(len(con2) == 2)

                if method == 'coh':
                    assert_array_almost_equal(con2[0], con2[1])

                con2 = con2[0]  # only keep the first method

                # we get the same result for the probed connections
                assert_array_almost_equal(freqs, freqs2)
                assert_array_almost_equal(con[indices], con2)
                assert_true(n == n2)
                assert_array_almost_equal(times_data, times2)

                # compute same connections for two bands, fskip=1, and f. avg.
                fmin = (5., 15.)
                fmax = (15., 30.)
                con3, freqs3, times3, n3, _ = spectral_connectivity(data,
                        method=method, mode=mode,
                        indices=indices, sfreq=sfreq, fmin=fmin, fmax=fmax,
                        fskip=1, faverage=True, mt_adaptive=adaptive,
                        mt_low_bias=True, mt_bandwidth=mt_bandwidth,
                        cwt_frequencies=cwt_frequencies,
                        cwt_n_cycles=cwt_n_cycles)

                assert_true(isinstance(freqs3, list))
                assert_true(len(freqs3) == len(fmin))
                for i in range(len(freqs3)):
                    assert_true(np.all((freqs3[i] >= fmin[i])
                                       & (freqs3[i] <= fmax[i])))

                # average con2 "manually" and we get the same result
                for i in range(len(freqs3)):
                    freq_idx = np.searchsorted(freqs2, freqs3[i])
                    con2_avg = np.mean(con2[:, freq_idx], axis=1)
                    assert_array_almost_equal(con2_avg, con3[:, i])
                            pick_ori="normal", return_generator=True)

# Now we are ready to compute the coherence in the alpha and beta band.
# fmin and fmax specify the lower and upper freq. for each band, resp.
fmin = (8., 13.)
fmax = (13., 30.)
sfreq = raw.info['sfreq']  # the sampling frequency

# Now we compute connectivity. To speed things up, we use 2 parallel jobs
# and use mode='fourier', which uses a FFT with a Hanning window
# to compute the spectra (instead of multitaper estimation, which has a
# lower variance but is slower). By using faverage=True, we directly
# average the coherence in the alpha and beta band, i.e., we will only
# get 2 frequency bins
coh, freqs, times, n_epochs, n_tapers = spectral_connectivity(stcs,
    method='coh', mode='fourier', indices=indices,
    sfreq=sfreq, fmin=fmin, fmax=fmax, faverage=True, n_jobs=2)

# fix: converted from Python 2 print statements
print('Frequencies in Hz over which coherence was averaged for alpha: ')
print(freqs[0])
print('Frequencies in Hz over which coherence was averaged for beta: ')
print(freqs[1])

# Generate a SourceEstimate with the coherence. This is simple since we
# used a single seed. For more than one seeds we would have to split coh.
# Note: We use a hack to save the frequency axis as time
tmin = np.mean(freqs[0])
tstep = np.mean(freqs[1]) - tmin
coh_stc = mne.SourceEstimate(coh, vertices=stc.vertno, tmin=1e-3 * tmin,
                             tstep=1e-3 * tstep, subject='sample')
# Pick MEG gradiometers
picks = mne.pick_types(raw.info, meg='grad', eeg=False, stim=False, eog=True,
                        exclude='bads')

# Create epochs for the visual condition
event_id, tmin, tmax = 3, -0.2, 0.5
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=dict(grad=4000e-13, eog=150e-6))

# Compute connectivity for band containing the evoked response.
# We exclude the baseline period
fmin, fmax = 3., 9.
sfreq = raw.info['sfreq']  # the sampling frequency
tmin = 0.0  # exclude the baseline period
# band-averaged PLI (faverage=True -> a single 3-9 Hz frequency bin)
con, freqs, times, n_epochs, n_tapers = spectral_connectivity(epochs,
    method='pli', mode='multitaper', sfreq=sfreq,
    fmin=fmin, fmax=fmax, faverage=True, tmin=tmin,
    mt_adaptive=False, n_jobs=2)

# the epochs contain an EOG channel, which we remove now
ch_names = epochs.ch_names
idx = [ch_names.index(name) for name in ch_names if name.startswith('MEG')]
# keep only the MEG rows and columns of the connectivity matrix
con = con[idx][:, idx]

# con is a 3D array where the last dimension is size one since we averaged
# over frequencies in a single band. Here we make it 2D
con = con[:, :, 0]

# Now, visualize the connectivity in 3D
try:
    from enthought.mayavi import mlab
except:
Exemple #22
0
def snr_epochs(epochs, n_perm=10, fmin=1, fmax=300, tmin=None, tmax=None,
               kind='coh', normalize_coherence=False):
    '''
    Compute the coherence between the means of random subsets of epochs.

    This can be used to assess signal stability in response to a stimulus
    (repeated or otherwise).

    Parameters
    ----------
    epochs : instance of Epochs
        The data on which to calculate coherence. Coherence will be
        calculated between the mean of random splits of trials.
    n_perm : int
        The number of permutations to run.
    fmin : float
        The minimum coherence frequency.
    fmax : float
        The maximum coherence frequency.
    tmin : float
        Start time for coherence estimation.
    tmax : float
        Stop time for coherence estimation.
    kind : 'coh' | 'corr'
        Specifies the similarity statistic.
        If 'corr', calculate correlation between the mean of subsets of
        epochs. If 'coh', then calculate the coherence.
    normalize_coherence : bool
        If True, subtract the grand mean coherence across permutations and
        channels from the output matrix. This is a way to "baseline" your
        coherence values to show deviations from the global means.

    Returns
    -------
    permutations : np.array, shape (n_perms, n_signals, n_freqs)
        A collection of similarity values for each permutation.
    coh_freqs : np.array, shape (n_freqs,)
        The frequency values in the coherence analysis. Only returned when
        ``kind == 'coh'``.

    Raises
    ------
    ValueError
        If `kind` is neither 'coh' nor 'corr'.
    '''
    sfreq = epochs.info['sfreq']
    # Crop a copy so the caller's epochs object is left untouched.
    # NOTE(review): the `copy` kwarg was removed in newer MNE releases;
    # there, use `epochs.copy().crop(tmin, tmax)` instead.
    epochs = epochs.crop(tmin, tmax, copy=True)
    nep, n_chan, ntime = epochs._data.shape

    # Run permutations
    permutations = []
    for iperm in tqdm(range(n_perm)):
        # Split our epochs into two random halves and take the mean of each.
        # FIX: the split point must be an int -- the original used `nep/2.`,
        # a float, which modern numpy rejects with a TypeError (and `range`
        # replaces the Python-2-only `xrange`).
        t1, t2 = np.split(np.random.permutation(np.arange(nep)), [nep // 2])
        mn1, mn2 = [epochs[this_ixs]._data.mean(0)
                    for this_ixs in [t1, t2]]

        # Now compute similarity between the two means, channel by channel
        this_similarity = []
        for ch, this_mean1, this_mean2 in zip(epochs.ch_names, mn1, mn2):
            this_means = np.vstack([this_mean1, this_mean2])
            if kind == 'coh':
                # spectral_connectivity wants (n_epochs, n_signals, n_times)
                this_means = this_means[np.newaxis, :, :]
                ixs = ([0], [1])  # coherence of signal 0 with signal 1
                sim, coh_freqs, _, _, _ = spectral_connectivity(
                    this_means, sfreq=sfreq, method='coh', fmin=fmin,
                    fmax=fmax, tmin=tmin, tmax=tmax, indices=ixs, verbose=0)
                sim = sim.squeeze()
            elif kind == 'corr':
                sim, _ = pearsonr(*this_means)
            else:
                raise ValueError('Unknown similarity type: {0}'.format(kind))
            this_similarity.append(sim)
        permutations.append(this_similarity)
    permutations = np.array(permutations)

    if normalize_coherence is True:
        # Normalize coherence values to their grand average
        permutations -= permutations.mean((0, 1))

    if kind == 'coh':
        return permutations, coh_freqs
    elif kind == 'corr':
        return permutations
Exemple #23
0
def epochs_snr(epochs, n_perm=10, fmin=0, fmax=300, tmin=None, tmax=None,
               kind='coh', normalize_coherence=True):
    '''
    Compute the coherence between the means of random subsets of trials.

    This can be used to assess signal stability in response to a stimulus
    (repeated or otherwise).

    Parameters
    ----------
    epochs : instance of Epochs
        The data on which to calculate coherence. Coherence will be
        calculated between the mean of random splits of trials.
    n_perm : int
        The number of permutations to run.
    fmin : float
        The minimum coherence frequency.
    fmax : float
        The maximum coherence frequency.
    tmin : float
        Start time for coherence estimation.
    tmax : float
        Stop time for coherence estimation.
    kind : 'coh' | 'corr'
        Whether to use coherence or correlation as the similarity statistic.
    normalize_coherence : bool
        If True, subtract the grand mean coherence across permutations and
        channels from the output matrix. This is a way to "baseline" your
        coherence values to show deviations from the global means.

    Returns
    -------
    permutations : np.array, shape (n_perms, n_signals, n_freqs)
        A collection of similarity values for each permutation.
    coh_freqs : np.array, shape (n_freqs,)
        The frequency values in the coherence analysis. Only returned when
        ``kind == 'coh'``.

    Raises
    ------
    ValueError
        If `kind` is neither 'coh' nor 'corr'.
    '''
    sfreq = epochs.info['sfreq']
    nep, n_chan, ntime = epochs._data.shape
    permutations = []
    for iperm in tqdm(range(n_perm)):
        # Split our epochs into two random halves and take the mean of each.
        # FIX: integer split point (`nep // 2`); the original's `nep/2.` is a
        # float, which modern numpy rejects. `range` replaces `xrange`.
        t1, t2 = np.split(np.random.permutation(np.arange(nep)), [nep // 2])
        mn1, mn2 = [epochs[this_ixs]._data.mean(0)
                    for this_ixs in [t1, t2]]

        # Now compute coherence between the two
        this_similarity = []
        for ch, this_mean1, this_mean2 in zip(epochs.ch_names, mn1, mn2):
            this_means = np.vstack([this_mean1, this_mean2])
            if kind == 'coh':
                sim, coh_freqs, _, _, _ = spectral_connectivity(
                    this_means[None, :, :], sfreq=sfreq, fmin=fmin, fmax=fmax,
                    tmin=tmin, tmax=tmax, mt_adaptive=True, verbose=0)
                sim = sim[1, 0, :].squeeze()
            elif kind == 'corr':
                # FIX: the original referenced undefined names `vals1`/`vals2`
                # (a NameError); correlate the two split means instead.
                sim, _ = sp.stats.pearsonr(this_mean1, this_mean2)
            else:
                # FIX: reject unknown kinds explicitly (previously `sim` from
                # the last iteration would silently be reused or a NameError
                # raised), matching the sibling `snr_epochs`.
                raise ValueError('Unknown similarity type: {0}'.format(kind))
            this_similarity.append(sim)
        permutations.append(this_similarity)
    permutations = np.array(permutations)

    if normalize_coherence is True:
        # Normalize coherence values by their grand average
        permutations -= permutations.mean((0, 1))

    if kind == 'coh':
        return permutations, coh_freqs
    elif kind == 'corr':
        return permutations
Exemple #24
0
def test_spectral_connectivity():
    """Test frequency-domain connectivity methods.

    Simulates 5-15 Hz coupling from signal 0 into signal 1 (of three random
    signals), then exercises every method/mode combination, the `indices`
    path, and the multi-band `fskip`/`faverage` path, checking the estimates
    against the simulated band.
    """
    # Use a case known to have no spurious correlations (it would be bad if
    # nosetests could randomly fail):
    np.random.seed(0)

    sfreq = 50.0
    n_signals = 3
    n_epochs = 8
    n_times = 256

    tmin = 0.0
    tmax = (n_times - 1) / sfreq
    data = np.random.randn(n_epochs, n_signals, n_times)
    times_data = np.linspace(tmin, tmax, n_times)
    # simulate connectivity from 5Hz..15Hz
    fstart, fend = 5.0, 15.0
    for i in range(n_epochs):
        data[i, 1, :] = band_pass_filter(data[i, 0, :], sfreq, fstart, fend, **filt_kwargs)
        # add some noise, so the spectrum is not exactly zero
        data[i, 1, :] += 1e-2 * np.random.randn(n_times)

    # First we test some invalid parameters:
    assert_raises(ValueError, spectral_connectivity, data, method="notamethod")
    assert_raises(ValueError, spectral_connectivity, data, mode="notamode")

    # test invalid fmin fmax settings
    assert_raises(ValueError, spectral_connectivity, data, fmin=10, fmax=10 + 0.5 * (sfreq / float(n_times)))
    assert_raises(ValueError, spectral_connectivity, data, fmin=10, fmax=5)
    assert_raises(ValueError, spectral_connectivity, data, fmin=(0, 11), fmax=(5, 10))
    assert_raises(ValueError, spectral_connectivity, data, fmin=(11,), fmax=(12, 15))

    # A list entry exercises computing several methods in one pass.
    methods = ["coh", "cohy", "imcoh", ["plv", "ppc", "pli", "pli2_unbiased", "wpli", "wpli2_debiased", "coh"]]

    modes = ["multitaper", "fourier", "cwt_morlet"]

    # define some frequencies for cwt
    cwt_frequencies = np.arange(3, 24.5, 1)

    for mode in modes:
        for method in methods:
            if method == "coh" and mode == "multitaper":
                # only check adaptive estimation for coh to reduce test time
                check_adaptive = [False, True]
            else:
                check_adaptive = [False]

            if method == "coh" and mode == "cwt_morlet":
                # so we also test using an array for num cycles
                cwt_n_cycles = 7.0 * np.ones(len(cwt_frequencies))
            else:
                cwt_n_cycles = 7.0

            for adaptive in check_adaptive:

                if adaptive:
                    mt_bandwidth = 1.0
                else:
                    mt_bandwidth = None

                con, freqs, times, n, _ = spectral_connectivity(
                    data,
                    method=method,
                    mode=mode,
                    indices=None,
                    sfreq=sfreq,
                    mt_adaptive=adaptive,
                    mt_low_bias=True,
                    mt_bandwidth=mt_bandwidth,
                    cwt_frequencies=cwt_frequencies,
                    cwt_n_cycles=cwt_n_cycles,
                )

                assert_true(n == n_epochs)
                assert_array_almost_equal(times_data, times)

                # Detection thresholds depend on the estimator's variance.
                if mode == "multitaper":
                    upper_t = 0.95
                    lower_t = 0.5
                elif mode == "fourier":
                    # other estimates have higher variance
                    upper_t = 0.8
                    lower_t = 0.75
                else:  # cwt_morlet
                    upper_t = 0.64
                    lower_t = 0.63

                # test the simulated signal
                if method == "coh":
                    idx = np.searchsorted(freqs, (fstart + trans_bandwidth, fend - trans_bandwidth))
                    # we see something for zero-lag
                    assert_true(np.all(con[1, 0, idx[0] : idx[1]] > upper_t), con[1, 0, idx[0] : idx[1]].min())

                    if mode != "cwt_morlet":
                        idx = np.searchsorted(freqs, (fstart - trans_bandwidth * 2, fend + trans_bandwidth * 2))
                        assert_true(np.all(con[1, 0, : idx[0]] < lower_t))
                        # NOTE(review): `idx[1:]` in the message expression is
                        # likely a typo for the slice `idx[1]:`; kept as-is, it
                        # only affects the failure message.
                        assert_true(np.all(con[1, 0, idx[1] :] < lower_t), con[1, 0, idx[1:]].max())
                elif method == "cohy":
                    idx = np.searchsorted(freqs, (fstart + 1, fend - 1))
                    # imaginary coh will be zero
                    check = np.imag(con[1, 0, idx[0] : idx[1]])
                    assert_true(np.all(check < lower_t), check.max())
                    # we see something for zero-lag
                    assert_true(np.all(np.abs(con[1, 0, idx[0] : idx[1]]) > upper_t))

                    idx = np.searchsorted(freqs, (fstart - trans_bandwidth * 2, fend + trans_bandwidth * 2))
                    if mode != "cwt_morlet":
                        assert_true(np.all(np.abs(con[1, 0, : idx[0]]) < lower_t))
                        assert_true(np.all(np.abs(con[1, 0, idx[1] :]) < lower_t))
                elif method == "imcoh":
                    idx = np.searchsorted(freqs, (fstart + 1, fend - 1))
                    # imaginary coh will be zero
                    assert_true(np.all(con[1, 0, idx[0] : idx[1]] < lower_t))
                    idx = np.searchsorted(freqs, (fstart - 1, fend + 1))
                    assert_true(np.all(con[1, 0, : idx[0]] < lower_t))
                    assert_true(np.all(con[1, 0, idx[1] :] < lower_t), con[1, 0, idx[1] :].max())

                # compute same connections using indices and 2 jobs
                indices = np.tril_indices(n_signals, -1)

                if not isinstance(method, list):
                    test_methods = (method, _CohEst)
                else:
                    test_methods = method

                # Feed the data as a generator of SourceEstimates to also
                # cover that input path.
                stc_data = _stc_gen(data, sfreq, tmin)
                con2, freqs2, times2, n2, _ = spectral_connectivity(
                    stc_data,
                    method=test_methods,
                    mode=mode,
                    indices=indices,
                    sfreq=sfreq,
                    mt_adaptive=adaptive,
                    mt_low_bias=True,
                    mt_bandwidth=mt_bandwidth,
                    tmin=tmin,
                    tmax=tmax,
                    cwt_frequencies=cwt_frequencies,
                    cwt_n_cycles=cwt_n_cycles,
                    n_jobs=2,
                )

                assert_true(isinstance(con2, list))
                assert_true(len(con2) == len(test_methods))

                if method == "coh":
                    assert_array_almost_equal(con2[0], con2[1])

                if not isinstance(method, list):
                    con2 = con2[0]  # only keep the first method

                    # we get the same result for the probed connections
                    assert_array_almost_equal(freqs, freqs2)
                    assert_array_almost_equal(con[indices], con2)
                    assert_true(n == n2)
                    assert_array_almost_equal(times_data, times2)
                else:
                    # we get the same result for the probed connections
                    assert_true(len(con) == len(con2))
                    for c, c2 in zip(con, con2):
                        assert_array_almost_equal(freqs, freqs2)
                        assert_array_almost_equal(c[indices], c2)
                        assert_true(n == n2)
                        assert_array_almost_equal(times_data, times2)

                # compute same connections for two bands, fskip=1, and f. avg.
                fmin = (5.0, 15.0)
                fmax = (15.0, 30.0)
                con3, freqs3, times3, n3, _ = spectral_connectivity(
                    data,
                    method=method,
                    mode=mode,
                    indices=indices,
                    sfreq=sfreq,
                    fmin=fmin,
                    fmax=fmax,
                    fskip=1,
                    faverage=True,
                    mt_adaptive=adaptive,
                    mt_low_bias=True,
                    mt_bandwidth=mt_bandwidth,
                    cwt_frequencies=cwt_frequencies,
                    cwt_n_cycles=cwt_n_cycles,
                )

                assert_true(isinstance(freqs3, list))
                assert_true(len(freqs3) == len(fmin))
                for i in range(len(freqs3)):
                    assert_true(np.all((freqs3[i] >= fmin[i]) & (freqs3[i] <= fmax[i])))

                # average con2 "manually" and we get the same result
                if not isinstance(method, list):
                    for i in range(len(freqs3)):
                        freq_idx = np.searchsorted(freqs2, freqs3[i])
                        con2_avg = np.mean(con2[:, freq_idx], axis=1)
                        assert_array_almost_equal(con2_avg, con3[:, i])
                else:
                    for j in range(len(con2)):
                        for i in range(len(freqs3)):
                            freq_idx = np.searchsorted(freqs2, freqs3[i])
                            con2_avg = np.mean(con2[j][:, freq_idx], axis=1)
                            assert_array_almost_equal(con2_avg, con3[j][:, i])
Exemple #25
0
def test_spectral_connectivity(method, mode):
    """Test frequency-domain connectivity methods.

    Parametrized over `method`/`mode`: simulates 5-15 Hz coupling from
    signal 0 into signal 1, then checks the full-matrix, `indices`, and
    multi-band `fskip`/`faverage` code paths, plus `_get_n_epochs`.
    """
    # Use a case known to have no spurious correlations (it would be bad if
    # tests could randomly fail):
    rng = np.random.RandomState(0)
    trans_bandwidth = 2.

    sfreq = 50.
    n_signals = 3
    n_epochs = 8
    n_times = 256

    tmin = 0.
    tmax = (n_times - 1) / sfreq
    # Filter one long continuous signal, then reshape into epochs, so the
    # filter transient is not repeated in every epoch.
    data = rng.randn(n_signals, n_epochs * n_times)
    times_data = np.linspace(tmin, tmax, n_times)
    # simulate connectivity from 5Hz..15Hz
    fstart, fend = 5.0, 15.0
    data[1, :] = filter_data(data[0, :], sfreq, fstart, fend,
                             filter_length='auto', fir_design='firwin2',
                             l_trans_bandwidth=trans_bandwidth,
                             h_trans_bandwidth=trans_bandwidth)
    # add some noise, so the spectrum is not exactly zero
    data[1, :] += 1e-2 * rng.randn(n_times * n_epochs)
    data = data.reshape(n_signals, n_epochs, n_times)
    data = np.transpose(data, [1, 0, 2])

    # First we test some invalid parameters:
    pytest.raises(ValueError, spectral_connectivity, data, method='notamethod')
    pytest.raises(ValueError, spectral_connectivity, data,
                  mode='notamode')

    # test invalid fmin fmax settings
    pytest.raises(ValueError, spectral_connectivity, data, fmin=10,
                  fmax=10 + 0.5 * (sfreq / float(n_times)))
    pytest.raises(ValueError, spectral_connectivity, data, fmin=10, fmax=5)
    pytest.raises(ValueError, spectral_connectivity, data, fmin=(0, 11),
                  fmax=(5, 10))
    pytest.raises(ValueError, spectral_connectivity, data, fmin=(11,),
                  fmax=(12, 15))

    # define some frequencies for cwt
    cwt_freqs = np.arange(3, 24.5, 1)

    if method == 'coh' and mode == 'multitaper':
        # only check adaptive estimation for coh to reduce test time
        check_adaptive = [False, True]
    else:
        check_adaptive = [False]

    if method == 'coh' and mode == 'cwt_morlet':
        # so we also test using an array for num cycles
        cwt_n_cycles = 7. * np.ones(len(cwt_freqs))
    else:
        cwt_n_cycles = 7.

    for adaptive in check_adaptive:

        if adaptive:
            mt_bandwidth = 1.
        else:
            mt_bandwidth = None

        con, freqs, times, n, _ = spectral_connectivity(
            data, method=method, mode=mode, indices=None, sfreq=sfreq,
            mt_adaptive=adaptive, mt_low_bias=True,
            mt_bandwidth=mt_bandwidth, cwt_freqs=cwt_freqs,
            cwt_n_cycles=cwt_n_cycles)

        assert (n == n_epochs)
        assert_array_almost_equal(times_data, times)

        # Detection thresholds depend on the estimator's variance.
        if mode == 'multitaper':
            upper_t = 0.95
            lower_t = 0.5
        else:  # mode == 'fourier' or mode == 'cwt_morlet'
            # other estimates have higher variance
            upper_t = 0.8
            lower_t = 0.75

        # test the simulated signal
        gidx = np.searchsorted(freqs, (fstart, fend))
        bidx = np.searchsorted(freqs,
                               (fstart - trans_bandwidth * 2,
                                fend + trans_bandwidth * 2))
        if method == 'coh':
            assert np.all(con[1, 0, gidx[0]:gidx[1]] > upper_t), \
                con[1, 0, gidx[0]:gidx[1]].min()
            # we see something for zero-lag
            assert (np.all(con[1, 0, :bidx[0]] < lower_t))
            # NOTE(review): `bidx[1:]` in the message expression is likely a
            # typo for the slice `bidx[1]:`; kept as-is, it only affects the
            # failure message.
            assert np.all(con[1, 0, bidx[1]:] < lower_t), \
                con[1, 0, bidx[1:]].max()
        elif method == 'cohy':
            # imaginary coh will be zero
            check = np.imag(con[1, 0, gidx[0]:gidx[1]])
            assert np.all(check < lower_t), check.max()
            # we see something for zero-lag
            assert np.all(np.abs(con[1, 0, gidx[0]:gidx[1]]) > upper_t)
            assert np.all(np.abs(con[1, 0, :bidx[0]]) < lower_t)
            assert np.all(np.abs(con[1, 0, bidx[1]:]) < lower_t)
        elif method == 'imcoh':
            # imaginary coh will be zero
            assert np.all(con[1, 0, gidx[0]:gidx[1]] < lower_t)
            assert np.all(con[1, 0, :bidx[0]] < lower_t)
            assert np.all(con[1, 0, bidx[1]:] < lower_t), \
                con[1, 0, bidx[1]:].max()

        # compute a subset of connections using indices and 2 jobs
        indices = (np.array([2, 1]), np.array([0, 0]))

        if not isinstance(method, list):
            test_methods = (method, _CohEst)
        else:
            test_methods = method

        # Feed the data as a generator of SourceEstimates to also cover
        # that input path.
        stc_data = _stc_gen(data, sfreq, tmin)
        con2, freqs2, times2, n2, _ = spectral_connectivity(
            stc_data, method=test_methods, mode=mode, indices=indices,
            sfreq=sfreq, mt_adaptive=adaptive, mt_low_bias=True,
            mt_bandwidth=mt_bandwidth, tmin=tmin, tmax=tmax,
            cwt_freqs=cwt_freqs,
            cwt_n_cycles=cwt_n_cycles, n_jobs=2)

        assert isinstance(con2, list)
        assert len(con2) == len(test_methods)

        if method == 'coh':
            assert_array_almost_equal(con2[0], con2[1])

        if not isinstance(method, list):
            con2 = con2[0]  # only keep the first method

            # we get the same result for the probed connections
            assert_array_almost_equal(freqs, freqs2)
            assert_array_almost_equal(con[indices], con2)
            assert (n == n2)
            assert_array_almost_equal(times_data, times2)
        else:
            # we get the same result for the probed connections
            assert (len(con) == len(con2))
            for c, c2 in zip(con, con2):
                assert_array_almost_equal(freqs, freqs2)
                assert_array_almost_equal(c[indices], c2)
                assert (n == n2)
                assert_array_almost_equal(times_data, times2)

        # compute same connections for two bands, fskip=1, and f. avg.
        fmin = (5., 15.)
        fmax = (15., 30.)
        con3, freqs3, times3, n3, _ = spectral_connectivity(
            data, method=method, mode=mode, indices=indices,
            sfreq=sfreq, fmin=fmin, fmax=fmax, fskip=1, faverage=True,
            mt_adaptive=adaptive, mt_low_bias=True,
            mt_bandwidth=mt_bandwidth, cwt_freqs=cwt_freqs,
            cwt_n_cycles=cwt_n_cycles)

        assert (isinstance(freqs3, list))
        assert (len(freqs3) == len(fmin))
        for i in range(len(freqs3)):
            assert np.all((freqs3[i] >= fmin[i]) &
                          (freqs3[i] <= fmax[i]))

        # average con2 "manually" and we get the same result
        if not isinstance(method, list):
            for i in range(len(freqs3)):
                freq_idx = np.searchsorted(freqs2, freqs3[i])
                con2_avg = np.mean(con2[:, freq_idx], axis=1)
                assert_array_almost_equal(con2_avg, con3[:, i])
        else:
            for j in range(len(con2)):
                for i in range(len(freqs3)):
                    freq_idx = np.searchsorted(freqs2, freqs3[i])
                    con2_avg = np.mean(con2[j][:, freq_idx], axis=1)
                    assert_array_almost_equal(con2_avg, con3[j][:, i])
    # test _get_n_epochs
    full_list = list(range(10))
    out_lens = np.array([len(x) for x in _get_n_epochs(full_list, 4)])
    assert ((out_lens == np.array([4, 4, 2])).all())
    out_lens = np.array([len(x) for x in _get_n_epochs(full_list, 11)])
    assert (len(out_lens) > 0)
    assert (out_lens[0] == 10)
# Pre-allocate containers for the permutation test results.
permutations_results = np.empty(number_of_permutations)
# Alpha-band window: 7-12 Hz over the first second post-stimulus.
fmin, fmax = 7, 12
tmin, tmax = 0, 1
con_method = "plv"

# NOTE(review): "permuatation" is a typo in the variable name; kept as-is
# because later code (outside this chunk) may reference it.
diff_permuatation = np.empty([6, 6, number_of_permutations])


# diff
# PLV between the control-condition label time series, multitaper mode,
# averaged over the 7-12 Hz band (faverage=True -> a single value per pair).
con_ctl, freqs_ctl, times_ctl, n_epochs_ctl, n_tapers_ctl =\
        spectral_connectivity(
            ts_ctl_left,
            method=con_method,
            mode='multitaper',
            sfreq=250,
            fmin=fmin, fmax=fmax,
            faverage=True,
            tmin=tmin, tmax=tmax,
            mt_adaptive=False,
            n_jobs=1,
            verbose=None)

con_ent, freqs_ent, times_ent, n_epochs_ent, n_tapers_ent =\
        spectral_connectivity(
            ts_ent_left,
            method=con_method,
            mode='multitaper',
            sfreq=250,
            fmin=fmin, fmax=fmax,
            faverage=True,
            tmin=tmin, tmax=tmax,
Exemple #27
0
def test_spectral_connectivity(main_path = "/mnt/Data/Projet-Karim", con_method = 'coh',  freq_bands = [[15.,40.]], freq_band_names = ['beta']):
    """Compute sensor-space connectivity for every .fif file in <main_path>/balai.

    For each recording: save the MEG channel coordinates and names as text
    files, then compute one multitaper connectivity matrix per frequency
    band and save it as a .npy file next to the recording.

    NOTE(review): the mutable default arguments (`freq_bands`,
    `freq_band_names`) are shared across calls -- safe only while they are
    never mutated in place.
    """

    import os 
    from mne.connectivity import spectral_connectivity

    from mne.io import RawFIF
    
    subj_path = os.path.join(main_path ,'balai')

    print subj_path

    fif_files = [f for f in os.listdir(subj_path) if f.endswith("fif")]

    print fif_files

    for fif_f in fif_files:

        basename = os.path.splitext(fif_f)[0]

        raw = RawFIF(os.path.join(subj_path,fif_f),preload = True)

        print raw

        print len(raw.ch_names)

        sfreq = raw.info['sfreq']

        # keep only channels whose name starts with 'M' (MEG sensors)
        select_sensors, = np.where(np.array([ch_name[0] == 'M' for ch_name in raw.ch_names],dtype = 'bool') == True)

        ### save electrode locations (first 3 loc entries = x, y, z)
        sens_loc = [raw.info['chs'][i]['loc'][:3] for i in select_sensors]
        sens_loc = np.array(sens_loc)

        loc_filename = os.path.join(subj_path,basename +"_correct_channel_coords.txt")
        np.savetxt(loc_filename,sens_loc , fmt = '%s')

        print sens_loc

        ### save electrode names

        sens_names = np.array([raw.ch_names[pos] for pos in select_sensors],dtype = "str")
        names_filename = os.path.join(subj_path,basename +"_correct_channel_names.txt")
        np.savetxt(names_filename,sens_names , fmt = '%s')

        #start, stop = raw.time_as_index([0, 100])

        data,times = raw[select_sensors,:]
        print np.max(data,axis = 0)

        for i,freq_band in enumerate(freq_band_names):
            
            # treat the whole recording as a single epoch:
            # reshape to (1, n_channels, n_times)
            con_matrix, freqs, times, n_epochs, n_tapers = spectral_connectivity(data.reshape(1,data.shape[0],data.shape[1]), method=con_method, mode='multitaper', sfreq=sfreq, fmin= freq_bands[i][0], fmax=freq_bands[i][1], faverage=True, tmin=None,    mt_adaptive=False, n_jobs=1)

            #print con

            # keep the single averaged frequency band -> (n_chan, n_chan)
            con_matrix = np.array(con_matrix[:,:,0])
            print con_matrix.shape
            print np.min(con_matrix),np.max(con_matrix)

            #0/0

            #print data_filtered.shape

            #print data-data
            #print np.max(data-data_filtered,axis = 0)
            #0/0
            np_filename = os.path.join(subj_path,basename+ "_" + con_method +"_" + freq_band +".npy")

            np.save(np_filename,con_matrix)
# Compute inverse solution for each epoch. By using "return_generator=True"
# stcs will be a generator object instead of a list. This allows us to
# compute the coherence without having to keep all source estimates in memory.
snr = 1.0  # use lower SNR for single epochs
lambda2 = 1.0 / snr ** 2
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)

# Now we are ready to compute the coherence in the gamma band.
# fmin and fmax specify the lower and upper freq. for each band, resp.
sfreq = raw.info['sfreq']  # the sampling frequency

# single wavelet frequency at 40 Hz (gamma)
cwt_frequencies = np.array([40])
coh, freqs, times, n_epochs, n_tapers = spectral_connectivity(stcs,
    method='plv', mode='cwt_morlet', indices=indices,
    sfreq=sfreq, cwt_frequencies=cwt_frequencies, cwt_n_cycles=7, faverage=True, n_jobs=2)

#tmin = np.mean(freqs[0])
#tstep = np.mean(freqs[1]) - tmin
# drop the singleton frequency axis before building the SourceEstimate
coh_new=coh.squeeze()
# NOTE(review): `stc.vertno` is the old attribute name; newer MNE uses
# `stc.vertices`.
coh_stc = mne.SourceEstimate(coh_new, vertices=stc.vertno, tmin=tmin,#tmin=1e-3 * tmin,
                             tstep=1e-3*1, subject=subject)
                             #tstep=1e-3 * tstep, subject=subject)
coh_stc.save(subject_path+'/ROI_'+subject+'_' +trigger, ftype='stc')
# Now we can visualize the coherence using the plot method
#brain = coh_stc.plot(subject, 'inflated', 'lh', fmin=0.25, fmid=0.4,
 #                    fmax=0.65, time_label='Coherence %0.1f ms', time_viewer=True,
  #                   subjects_dir=subjects_dir)

#brain.show_view('lateral')       
Exemple #29
0
def epoched_multiple_spectral_proc(ts_file,sfreq,freq_band_name,freq_band,con_method,epoch_window_length):
    """Compute one connectivity matrix per sample of a saved time-series array.

    Parameters (as used below):
    - ts_file: path to a .npy file, expected shape
      (n_samples, n_channels, n_times)
    - sfreq: sampling frequency in Hz
    - freq_band_name / freq_band: band label and [fmin, fmax] pair
    - con_method: connectivity measure name for spectral_connectivity
    - epoch_window_length: window length in seconds used to split each
      sample into epochs, or None to treat the whole sample as one epoch

    Returns the list of paths of the saved "conmat_<method>_<i>.npy" files,
    or an empty list if the input array is not 3D.
    """

    import numpy as np
    import os

    from mne.connectivity import spectral_connectivity

    all_data = np.load(ts_file)

    print all_data.shape

    #print sfreq
                
    print freq_band
    print freq_band_name

    if len(all_data.shape) != 3:
        
        print "Warning, all_data should have several samples"
        
        return []

    conmat_files = []

    for i in range(all_data.shape[0]):

        cur_data = all_data[i,:,:]

        print cur_data.shape
            
        if epoch_window_length == None :
            
            # single pseudo-epoch: (1, n_channels, n_times)
            data = cur_data.reshape(1,cur_data.shape[0],cur_data.shape[1])

        else: 
                
            nb_splits = cur_data.shape[1] // (epoch_window_length * sfreq)
            
            print "epoching data with {}s by window, resulting in {} epochs".format(epoch_window_length,nb_splits)
            
            list_epoched_data = np.array_split(cur_data,nb_splits,axis = 1)
            
            print len(list_epoched_data)
            
            data = np.array(list_epoched_data)
            
            print data.shape

        con_matrix, freqs, times, n_epochs, n_tapers  = spectral_connectivity(data, method=con_method, 
                                                                              mode='multitaper', sfreq=sfreq, 
                                                                              fmin= freq_band[0], fmax=freq_band[1], 
                                                                              faverage=True, tmin=None,    
                                                                              mt_adaptive=False, n_jobs=1)

        print con_matrix.shape
        # keep the single averaged frequency band -> (n_chan, n_chan)
        con_matrix = np.array(con_matrix[:,:,0])

        print con_matrix.shape
        print np.min(con_matrix),np.max(con_matrix)
        
        conmat_file = os.path.abspath("conmat_"+ con_method + "_" + str(i) + ".npy")

        np.save(conmat_file,con_matrix)

        conmat_files.append(conmat_file)
            
    return conmat_files
##################################################3333
# Now we are ready to compute the connectivity in the alpha band. Notice
# from the status messages, how mne-python: 1) reads an epoch from the raw
# file, 2) applies SSP and baseline correction, 3) computes the inverse to
# obtain a source estimate, 4) averages the source estimate to obtain a
# time series for each label, 5) includes the label time series in the
# connectivity computation, and then moves to the next epoch. This
# behaviour is because we are using generators and allows us to
# compute connectivity in computationally efficient manner where the amount
# of memory (RAM) needed is independent from the number of epochs.
# #fmin = 4.
# #fmax = 8.
sfreq = raw.info['sfreq']  # the sampling frequency
# Two measures in one pass: spectral_connectivity then returns a list with
# one result per method, in the same order.
con_methods = ['plv', 'pli']
con, freqs, times, n_epochs, n_tapers = spectral_connectivity(label_ts,
        method=con_methods, mode='fourier', sfreq=sfreq, fmin=fmin,
        fmax=fmax, faverage=True)      
print con
# con is a 3D array, get the connectivity for the first (and only) freq. band for each method
con_res = dict()
for method, c in zip(con_methods, con):
    con_res[method] = c[:, :, 0]
print con_res

####Save ConnectivityMatrix as text file for further averaging 
#np.savetxt(coh_fname, con_res['coh'], delimiter = ',')
np.savetxt(plv_fname, con_res['plv'], delimiter = ',')
np.savetxt(pli_fname, con_res['pli'], delimiter = ',')
##################################################################################################
###################### OR #######################
#######Read from text - coherence matrix file and plot the connectivity circle. :)