Example #1
                    n_fft=1000,
                    n_per_seg=1000,
                    epochs_average=True)
data_psd = np.array([psd1.psd, psd2.psd])

# Connectivity

# initializing data and storage
data_inter = np.array([preproc_S1, preproc_S2])
result_intra = []
# computing analytic signal per frequency band
complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                             freq_bands)
# computing frequency- and time-frequency-domain connectivity,
# 'ccorr' for example
result = analyses.compute_sync(complex_signal, mode="ccorr")

# slicing results to get the Inter-brain part of the matrix
n_ch = len(epo1.info["ch_names"])
theta, alpha_low, alpha_high, beta, gamma = result[:, 0:n_ch, n_ch:2 * n_ch]
# choosing Alpha_Low for further analyses for example
values = alpha_low
values -= np.diag(np.diag(values))
# computing Cohen's D for further analyses for example
C = (values - np.mean(values[:])) / np.std(values[:])

# slicing results to get the Intra-brain part of the matrix
for i in [0, 1]:
    theta, alpha_low, alpha_high, beta, gamma = result[:, i * n_ch:(i + 1) * n_ch,
                                                        i * n_ch:(i + 1) * n_ch]
    # choosing Alpha_Low for further analyses for example
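
# Added note (a sketch of the sync-matrix layout, assuming the five default
# frequency bands and one (2*n_ch x 2*n_ch) matrix per band as above):
#   result[:, 0:n_ch, 0:n_ch]            -> intra-brain, participant 1
#   result[:, n_ch:2*n_ch, n_ch:2*n_ch]  -> intra-brain, participant 2
#   result[:, 0:n_ch, n_ch:2*n_ch]       -> inter-brain (participant 1 x 2)
print('sync matrix shape:', result.shape)  # expected (n_freq_bands, 2*n_ch, 2*n_ch)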
Example #2
freq_bands = OrderedDict(freq_bands)

sampling_rate = epochs_a.info['sfreq']

#Connectivity

#Data and storage
data_inter = np.array([epochs_a['Control'], epochs_b['Control']])

#Analytic signal per frequency band
complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                             freq_bands)

result, angle, _, phase = analyses.compute_sync(complex_signal,
                                                mode='plv',
                                                epochs_average=True)

#%% Loading a pair from short
epochs_a_s = mne.read_epochs('epochs_a_short_10.fif')
epochs_b_s = mne.read_epochs('epochs_b_short_10.fif')

#%% ccorr
#drop_list_10 = [342, 351, 352, 353, 534, 603, 624, 625, 626, 832, 988, 1014, 1131, 1144, 1196, 1222, 1228, 1456, 1612, 1613, 1614]
theta, alpha, beta, angle_s, complex_signal_s = ccorr(epochs_a_s,
                                                      epochs_b_s,
                                                      'pair0010',
                                                      'short',
                                                      drop_list=[])
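
#%% (added sketch) quick look at the inter-brain theta matrix returned above;
# assumes `theta` is an n_ch x n_ch array, as in the ccorr() function body.
import matplotlib.pyplot as plt

plt.figure()
plt.imshow(theta, cmap=plt.cm.hot)
plt.colorbar()
plt.show()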

#%% coh
Example #3
def test_simple_corr(epochs):
    """
    Test simple_corr timing
    """
    import time

    # taking random freq-of-interest to test CSD measures
    frequencies = [11, 12, 13]
    # Note: fmin and fmax excluded, here n_freq = 1
    # (for MNE and Phoebe functions)

    # intra-ind CSD
    # data = np.array([epo1, epo1])
    # data_mne = epo1
    # sensors = None

    # inter-ind CSD
    data = np.array([epochs.epo1, epochs.epo2])
    data_mne = epochs.epoch_merge

    # build (participant-1, participant-2) channel index pairs for the inter-brain CSD
    l = list(range(0, int(len(epochs.epoch_merge.info['ch_names']) / 2)))
    L = []
    for i in range(0, len(l)):
        for _ in range(0, len(l)):
            L.append(l[i])
    M = len(l) * list(range(len(l), len(l) * 2))
    sensors = (np.array(L), np.array(M))
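    # Illustration (added, hypothetical 3-channels-per-participant case):
    # l = [0, 1, 2], so the index pairs become
    #   L = [0, 0, 0, 1, 1, 1, 2, 2, 2]   (each participant-1 channel repeated)
    #   M = [3, 4, 5, 3, 4, 5, 3, 4, 5]   (participant-2 channels tiled)
    # i.e. every participant-1 channel is paired with every participant-2 channel.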

    # trace running time
    now = time.time()
    # mode to transform signal to analytic signal on which
    # synchrony is computed
    # mode = 'fourier'
    mode = 'multitaper'

    # Phoebe's multitaper step uses mne.time_frequency.tfr_array_multitaper,
    # BUT that step alone takes ~1 s, while MNE's coherence (including its
    # multitaper step) runs in < 1 s, since it is optimized inside MNE.
    # How could the multitaper step in the Phoebe script be optimized?
    # The same question applies to the second step.

    coh_mne, _, _, _, _ = mne.connectivity.spectral_connectivity(
        data=data_mne,
        method='plv',
        mode=mode,
        indices=sensors,
        sfreq=500,
        fmin=11,
        fmax=13,
        faverage=True)
    now2 = time.time()
    # coh = analyses.simple_corr(data, frequencies, mode='plv',
    #                            epoch_wise=True,
    #                            time_resolved=True)
    # split into substeps: is the multitaper step the one taking too long?
    values = analyses.compute_single_freq(data, frequencies)
    now3 = time.time()
    result = analyses.compute_sync(values,
                                   mode='plv',
                                   epoch_wise=True,
                                   time_resolved=True)
    now4 = time.time()
    # convert timestamps to struct_time so only the seconds (GMT reference) are compared
    now = time.localtime(now)
    now2 = time.localtime(now2)
    now3 = time.localtime(now3)
    now4 = time.localtime(now4)

    # assess time running equivalence for each script
    # assert (int(now2.tm_sec) - int(now.tm_sec)) == (int(now3.tm_sec) - int(now2.tm_sec))
    # takes 2 seconds versus 0 with the MNE function
    # (and here n_channels = 31, not many epochs, n_freq = 1)
    # same holds for the inter-individual case

    # test substeps
    assert (int(now2.tm_sec) -
            int(now.tm_sec)) == ((int(now4.tm_sec) - int(now3.tm_sec)) +
                                 (int(now3.tm_sec) - int(now2.tm_sec)))
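

# A minimal alternative sketch (added; reuses the `analyses` module and the
# `data`/`frequencies` objects built in test_simple_corr): time.perf_counter()
# gives monotonic, sub-second durations and avoids the minute wrap-around that
# the tm_sec arithmetic above can hit.
def time_substeps(data, frequencies):
    import time
    t0 = time.perf_counter()
    values = analyses.compute_single_freq(data, frequencies)
    t1 = time.perf_counter()
    analyses.compute_sync(values, mode='plv', epoch_wise=True, time_resolved=True)
    t2 = time.perf_counter()
    print('compute_single_freq: %.3f s, compute_sync: %.3f s' % (t1 - t0, t2 - t1))

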
def ccorr(epochs_a, epochs_b, pair_name, length, drop_list):

    event_dict = {
        'Resting': 101,
        'Uncoupled': 102,
        'Coupled': 103,
        'Leader': 105,
        'Follower': 107,
        'Control': 108
    }

    conditions = ['Resting', 'Coupled', 'Uncoupled', 'Leader', 'Control']

    if length == 'long':
        epochs_a.crop(tmin=2, tmax=23)
        epochs_a.plot(n_epochs=1, n_channels=10)
        epochs_b.crop(tmin=2, tmax=23)

        for i in conditions:

            # Merging the leader and follower
            if i == 'Leader':
                epo_a_l = epochs_a['Leader']
                epo_b_l = epochs_b['Leader']
                epo_a_f = epochs_a['Follower']
                epo_b_f = epochs_b['Follower']
                epo_a = mne.concatenate_epochs([epo_a_l, epo_b_f])
                epo_b = mne.concatenate_epochs([epo_b_l, epo_a_f])
                i = 'Leader-Follower'
            else:
                print(i)
                epo_a = epochs_a[i]
                epo_b = epochs_b[i]

            #Defining frequency bands
            freq_bands = {'Theta': [4, 7], 'Alpha': [8, 13], 'Beta': [15, 25]}

            freq_bands = OrderedDict(freq_bands)

            sampling_rate = epo_a.info['sfreq']

            #Connectivity

            #Data and storage
            data_inter = np.array([epo_a, epo_b])
            #result_intra = []

            #Analytic signal per frequency band
            complex_signal = analyses.compute_freq_bands(
                data_inter, sampling_rate, freq_bands)

            result = analyses.compute_sync(complex_signal,
                                           mode='ccorr',
                                           epochs_average=True)

            #Get inter brain part of the matrix
            n_ch = len(epochs_a.info['ch_names'])
            theta, alpha, beta = result[:, 0:n_ch, n_ch:2 * n_ch]

            plt.figure()
            plt.imshow(theta, cmap=plt.cm.hot)
            plt.clim(0, 0.8)
            plt.colorbar()
            plt.show()

            plt.figure()
            plt.imshow(alpha, cmap=plt.cm.hot)
            plt.clim(0, 0.8)
            plt.colorbar()
            plt.show()

            plt.figure()
            plt.imshow(beta, cmap=plt.cm.hot)
            plt.clim(0, 0.8)
            plt.colorbar()
            plt.show()

            # absolute standardized (z-scored) connectivity values
            theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
            alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
            beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))

            print('Range of the connectivities:')
            print('Theta max:' + str(np.max(theta)))
            print('Theta min:' + str(np.min(theta)))
            print('Alpha max:' + str(np.max(alpha)))
            print('Alpha min:' + str(np.min(alpha)))
            print('Beta max:' + str(np.max(beta)))
            print('Beta min:' + str(np.min(beta)))

            np.save(
                'con matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' + i +
                '_' + length, theta)
            np.save(
                'con matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' + i +
                '_' + length, alpha)
            np.save(
                'con matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' + i +
                '_' + length, beta)

    if length == 'short':

        #conditions = ['Coupled', 'Uncoupled', 'Leader', 'Follower', 'Control']

        epo_drop = []
        epo_drop.append(0)
        epo_drop.append(1)
        epo_drop.append(2)
        epo_drop.append(24)
        epo_drop.append(25)
        for i in range(64 * 5):
            epo_drop.append(epo_drop[i] + 26)
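        # Illustration (added): with 26 epochs per trial this drops the first
        # three and last two epochs of every trial, i.e. epo_drop starts as
        # [0, 1, 2, 24, 25, 26, 27, 28, 50, 51, 52, 53, 54, 76, 77, ...]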

        # Ensuring that already removed epochs are not in list
        for i in epo_drop:
            Epoch_no = drop_list
            if i in Epoch_no:
                #print(i)
                epo_drop.remove(i)

        # Ensuring list is no longer than the number of epochs
        while epo_drop[-1] > (len(epochs_b) - 1):
            epo_drop.pop(-1)

        # Dropping the beginning and end of a trial
        epo_a = epochs_a.drop(epo_drop)
        epo_b = epochs_b.drop(epo_drop)

        # Getting the number of epochs of specific condition in a row

        a = epo_a.events[:, 2]
        d = dict()

        for k, v in groupby(a):
            d.setdefault(k, []).append(len(list(v)))
        #print(d)
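        # Illustration (added, hypothetical event stream): consecutive codes
        # [103, 103, 103, 102, 102, 103] would give d == {103: [3, 1], 102: [2]},
        # i.e. run lengths of epochs per condition code.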

        #equalize number of epochs used to calculate connectivity values
        #mne.epochs.equalize_epoch_counts([epo_a, epo_b])

        for c in conditions:
            # Merging the leader and follower
            if c == 'Leader':
                epo_a_l = epochs_a['Leader']
                epo_b_l = epochs_b['Leader']
                epo_a_f = epochs_a['Follower']
                epo_b_f = epochs_b['Follower']
                epo_a_c = mne.concatenate_epochs([epo_a_l, epo_b_f])
                epo_b_c = mne.concatenate_epochs([epo_b_l, epo_a_f])
                c = 'Leader-Follower'

                freq_bands = {
                    'Theta': [4, 7],
                    'Alpha': [8, 13],
                    'Beta': [15, 25]
                }

                freq_bands = OrderedDict(freq_bands)

                sampling_rate = epo_a_c.info['sfreq']

                #Connectivity

                #Data and storage
                data_inter = np.array([epo_a_c, epo_b_c])

                #Analytic signal per frequency band
                complex_signal = analyses.compute_freq_bands(
                    data_inter, sampling_rate, freq_bands)

                result = analyses.compute_sync(complex_signal,
                                               mode='ccorr',
                                               epochs_average=False)

                #Defining the number of channels
                n_ch = len(epochs_a.info['ch_names'])

                #Averaging over the epochs specific to the given trial
                trials = []

                for j in range(3):
                    for i in d[event_dict['Leader']] + d[event_dict['Follower']]:
                        trials.append(sum(result[j, 0:i, :, :]) / i)
                '''
                if c == 'Leader' or c == 'Follower':
                    print('LF')
                    print(len(trials))
                    theta = sum(trials[::3])/8
                    alpha = sum(trials[1::3])/8
                    beta = sum(trials[2::3])/8
                
                else:
                    theta = sum(trials[::3])/16
                    alpha = sum(trials[1::3])/16
                    beta = sum(trials[2::3])/16
                '''
                theta = sum(trials[::3]) / 16
                alpha = sum(trials[1::3]) / 16
                beta = sum(trials[2::3]) / 16

                theta = theta[0:n_ch, n_ch:2 * n_ch]
                alpha = alpha[0:n_ch, n_ch:2 * n_ch]
                beta = beta[0:n_ch, n_ch:2 * n_ch]

                theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
                alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
                beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))
                print(c)
                print('Range of the connectivities:')
                print('Theta max:' + str(np.max(theta)))
                print('Theta min:' + str(np.min(theta)))
                print('Alpha max:' + str(np.max(alpha)))
                print('Alpha min:' + str(np.min(alpha)))
                print('Beta max:' + str(np.max(beta)))
                print('Beta min:' + str(np.min(beta)))

                np.save(
                    'con matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' +
                    c + '_' + length, theta)
                np.save(
                    'con matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' +
                    c + '_' + length, alpha)
                np.save(
                    'con matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' +
                    c + '_' + length, beta)

            else:
                epo_a_c = epo_a[c]
                epo_b_c = epo_b[c]

                #Defining frequency bands
                freq_bands = {
                    'Theta': [4, 7],
                    'Alpha': [8, 13],
                    'Beta': [15, 25]
                }

                freq_bands = OrderedDict(freq_bands)

                sampling_rate = epo_a_c.info['sfreq']

                #Connectivity

                #Data and storage
                data_inter = np.array([epo_a_c, epo_b_c])

                #Analytic signal per frequency band
                complex_signal = analyses.compute_freq_bands(
                    data_inter, sampling_rate, freq_bands)

                result = analyses.compute_sync(complex_signal,
                                               mode='ccorr',
                                               epochs_average=False)

                #Defining the number of channels
                n_ch = len(epochs_a.info['ch_names'])

                #Averaging over the epochs specific to the given trial
                trials = []

                for j in range(3):
                    for i in d[event_dict[c]]:
                        trials.append(sum(result[j, 0:i, :, :]) / i)
                '''
                if c == 'Leader' or c == 'Follower':
                    print('LF')
                    print(len(trials))
                    theta = sum(trials[::3])/8
                    alpha = sum(trials[1::3])/8
                    beta = sum(trials[2::3])/8
                
                else:
                    theta = sum(trials[::3])/16
                    alpha = sum(trials[1::3])/16
                    beta = sum(trials[2::3])/16
                '''
                theta = sum(trials[::3]) / 16
                alpha = sum(trials[1::3]) / 16
                beta = sum(trials[2::3]) / 16

                theta = theta[0:n_ch, n_ch:2 * n_ch]
                alpha = alpha[0:n_ch, n_ch:2 * n_ch]
                beta = beta[0:n_ch, n_ch:2 * n_ch]

                theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
                alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
                beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))
                print(c)
                print('Range of the connectivities:')
                print('Theta max:' + str(np.max(theta)))
                print('Theta min:' + str(np.min(theta)))
                print('Alpha max:' + str(np.max(alpha)))
                print('Alpha min:' + str(np.min(alpha)))
                print('Beta max:' + str(np.max(beta)))
                print('Beta min:' + str(np.min(beta)))

                np.save(
                    'con matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' +
                    c + '_' + length, theta)
                np.save(
                    'con matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' +
                    c + '_' + length, alpha)
                np.save(
                    'con matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' +
                    c + '_' + length, beta)

    return theta, alpha, beta
Example #5
                    n_fft=1000, n_per_seg=1000, epochs_average=True)
data_psd = np.array([psd1.psd, psd2.psd])

#Connectivity

#Data and storage
data_inter = np.array([preproc_S1, preproc_S2])
result_intra = []

#Analytic signal per frequency band
complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                             freq_bands)



result = analyses.compute_sync(complex_signal, mode='coh')


#Get interbrain part of the matrix
n_ch = len(epo1.info['ch_names'])
theta, alpha_low, alpha_high, beta, gamma = result[:, 0:n_ch, n_ch:2*n_ch]

# Alpha low for example
values = alpha_low
values -= np.diag(np.diag(values))

C = (values - np.mean(values[:])) / np.std(values[:])

#Slicing results to get the intra-brain part of matrix
for i in [0, 1]:
    theta, alpha_low, alpha_high, beta, gamma = result[:, i*n_ch:(i+1)*n_ch, i*n_ch:(i+1)*n_ch]
def ccorr(epochs_a, epochs_b, pair_name, length, drop_list):
    
    event_dict = {'Uncoupled': 102, 'Coupled': 103, 'Leader': 105,
                  'Follower': 107, 'Control': 108}
    
    conditions = ['Coupled', 'Uncoupled', 'Leader', 'Control']
    
    if length == 'long':
        epochs_a.crop(tmin = 2, tmax = 23)
        epochs_a.plot(n_epochs = 1, n_channels = 10)
        epochs_b.crop(tmin = 2, tmax = 23)
    
        for i in conditions:
            
            # Merging the leader and follower
            if i == 'Leader':
                epo_a_l = epochs_a['Leader']
                epo_b_l = epochs_b['Leader']
                epo_a_f = epochs_a['Follower']
                epo_b_f = epochs_b['Follower']
                epo_a = mne.concatenate_epochs([epo_a_l, epo_b_f])
                epo_b = mne.concatenate_epochs([epo_b_l, epo_a_f])
                i = 'Leader-Follower'
            else: 
                print(i)
                epo_a = epochs_a[i]
                epo_b = epochs_b[i]
                
            #Defining frequency bands
            freq_bands = {'Theta': [4, 7],
                          'Alpha' :[8, 13],
                          'Beta': [15, 25]}
            
            freq_bands = OrderedDict(freq_bands)
            
            sampling_rate = epo_a.info['sfreq']
            
            #Connectivity
            
            #Data and storage
            data_inter = np.array([epo_a, epo_b])
            #result_intra = []
            
            #Analytic signal per frequency band
            complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                                         freq_bands)
            
            result, angle, _, _ = analyses.compute_sync(complex_signal,
                                                        mode='ccorr',
                                                        epochs_average=True)
            
            #Get inter brain part of the matrix
            n_ch = len(epochs_a.info['ch_names'])
            #result = result[0]
            theta, alpha, beta = result[:, 0:n_ch, n_ch:2*n_ch]
            '''
            plt.figure()
            plt.imshow(theta,cmap=plt.cm.hot)
            plt.clim(0,0.8)
            plt.colorbar()
            plt.show()
            
            plt.figure()
            plt.imshow(alpha,cmap=plt.cm.hot)
            plt.clim(0,0.8)
            plt.colorbar()
            plt.show()
            
            plt.figure()
            plt.imshow(beta,cmap=plt.cm.hot)
            plt.clim(0,0.8)
            plt.colorbar()
            plt.show()
            '''
            # absolute standardized (z-scored) connectivity values
            theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
            alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
            beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))
            
            print('Range of the connectivities:')
            print('Theta max:' + str(np.max(theta)))
            print('Theta min:' + str(np.min(theta)))
            print('Alpha max:' + str(np.max(alpha)))
            print('Alpha min:' + str(np.min(alpha)))
            print('Beta max:' + str(np.max(beta)))
            print('Beta min:' + str(np.min(beta)))
            
            np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' + i + '_' + length, theta)
            np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' + i + '_' + length, alpha)
            np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' + i + '_' + length, beta)
            epo_a = []
            epo_a_cleaned = []
            
    if length == 'short':
        
        #conditions = ['Coupled', 'Uncoupled', 'Leader', 'Follower', 'Control']
        epo_drop = []
        epo_drop.append(0)
        epo_drop.append(1)
        epo_drop.append(2)
        epo_drop.append(24)
        epo_drop.append(25)
        for i in range(63*5):  # 63 rather than 64: the first trial's drops are already appended
            epo_drop.append(epo_drop[i]+26)
        print(len(epo_drop))
        '''
        # Ensuring that already removed epochs are not in list
        for i in epo_drop:
            Epoch_no = drop_list
            if i in Epoch_no:
                #print(i)
                epo_drop.remove(i)
        
        # Ensuring list is no longer than the number of epochs     
        while epo_drop[-1]>(len(epochs_b)-1):
            epo_drop.pop(-1)
        '''  
        # Dropping the beginning and end of a trial      
        epo_a = epochs_a.drop(epo_drop)        
        epo_b = epochs_b.drop(epo_drop)
        
        epo_a_copy = epo_a.copy()
        epo_b_copy = epo_b.copy()

        
        # Running autoreject function
        cleaned_epochs_AR, dic_AR = prep.AR_local([epo_a_copy, epo_b_copy],
                                        strategy="union",
                                        threshold=50.0,
                                        verbose=True)
        
        epo_a_cleaned = cleaned_epochs_AR[0]
        epo_b_cleaned = cleaned_epochs_AR[1]

        # Getting the number of epochs of specific condition in a row
        
        a = epo_a_cleaned.events[:,2]
        d = dict()
        
        for k, v in groupby(a):
            d.setdefault(k, []).append(len(list(v)))
        #print(d)
        
        #equalize number of epochs used to calculate connectivity values
        #mne.epochs.equalize_epoch_counts([epo_a, epo_b])
        
        for c in conditions:
               # Merging the leader and follower
            if c == 'Leader':
                epo_a_l = epo_a_cleaned['Leader']
                epo_b_l = epo_b_cleaned['Leader']
                epo_a_f = epo_a_cleaned['Follower']
                epo_b_f = epo_b_cleaned['Follower']
                epo_a_c = mne.concatenate_epochs([epo_a_l, epo_b_f])
                epo_b_c = mne.concatenate_epochs([epo_b_l, epo_a_f])
                c = 'Leader-Follower'
                
                freq_bands = {'Theta': [4, 7],
                          'Alpha' :[8, 13],
                          'Beta': [15, 25]}
            
                freq_bands = OrderedDict(freq_bands)
                
                sampling_rate = epo_a_c.info['sfreq']
                
                #Connectivity
                
                #Data and storage
                data_inter = np.array([epo_a_c, epo_b_c])
                
                #Analytic signal per frequency band
                complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                                             freq_bands)
                
                result, angle, _, _ = analyses.compute_sync(complex_signal,
                                                            mode='ccorr',
                                                            epochs_average=False)
                
                #Defining the number of channels
                n_ch = len(epochs_a.info['ch_names'])
                
                #Averaging over the epochs specific to the given trial
                trials = []
                
                for j in range(3):
                    for i in d[event_dict['Leader']] + d[event_dict['Follower']]:
                        trials.append(sum(result[j,0:i,:,:])/i)
                
                
                theta = sum(trials[::3])/len(trials[::3])
                alpha = sum(trials[1::3])/len(trials[::3])
                beta = sum(trials[2::3])/len(trials[::3])
                    
                theta = theta[0:n_ch, n_ch:2*n_ch]
                alpha = alpha[0:n_ch, n_ch:2*n_ch]
                beta = beta[0:n_ch, n_ch:2*n_ch]
                
                theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
                alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
                beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))
                
                print(c)
                print('Range of the connectivities:')
                print('Theta max:' + str(np.max(theta)))
                print('Theta min:' + str(np.min(theta)))
                print('Alpha max:' + str(np.max(alpha)))
                print('Alpha min:' + str(np.min(alpha)))
                print('Beta max:' + str(np.max(beta)))
                print('Beta min:' + str(np.min(beta)))
                
                np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' + c + '_' + length, theta)
                np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' + c + '_' + length, alpha)
                np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' + c + '_' + length, beta)
                
            else: 
                epo_a_c = epo_a_cleaned[c]
                epo_b_c = epo_b_cleaned[c]
                
                print('no. of epochs:', len(epo_a_c))
                #Defining frequency bands
                freq_bands = {'Theta': [4, 7],
                              'Alpha' :[8, 13],
                              'Beta': [15, 25]}
                
                freq_bands = OrderedDict(freq_bands)
                
                sampling_rate = epo_a_c.info['sfreq']
                
                #Connectivity
                
                #Data and storage
                data_inter = np.array([epo_a_c, epo_b_c])
                
                #Analytic signal per frequency band
                complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                                             freq_bands)
                
                result, angle, _, _ = analyses.compute_sync(complex_signal,
                                                            mode='ccorr',
                                                            epochs_average=False)
                
                #Defining the number of channels
                n_ch = len(epochs_a.info['ch_names'])
                
                #Averaging over the epochs specific to the given trial
                trials = []
                
                for j in range(3):
                    for i in d[event_dict[c]]:
                        trials.append(sum(result[j,0:i,:,:])/i)
             
                print(c)
                print(len(trials[::3]))
                theta = sum(trials[::3])/len(trials[::3])
                alpha = sum(trials[1::3])/len(trials[::3])
                beta = sum(trials[2::3])/len(trials[::3])
                    
                theta = theta[0:n_ch, n_ch:2*n_ch]  # should this be here? isn't it already sliced in the for-loop?
                alpha = alpha[0:n_ch, n_ch:2*n_ch]
                beta = beta[0:n_ch, n_ch:2*n_ch]
                
                theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
                alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
                beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))
                
                print(c)
                print('Range of the connectivities:')
                print('Theta max:' + str(np.max(theta)))
                print('Theta min:' + str(np.min(theta)))
                print('Alpha max:' + str(np.max(alpha)))
                print('Alpha min:' + str(np.min(alpha)))
                print('Beta max:' + str(np.max(beta)))
                print('Beta min:' + str(np.min(beta)))
                
                np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' + c + '_' + length, theta)
                np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' + c + '_' + length, alpha)
                np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' + c + '_' + length, beta)

    if length == '3sec':
            
            #conditions = ['Coupled', 'Uncoupled', 'Leader', 'Follower', 'Control']
            epo_drop = []
            epo_drop.append(0)
            epo_drop.append(8)
            for i in range(63*2):  # 63 rather than 64: the first trial's drops are already appended
                epo_drop.append(epo_drop[i]+9)
            print(len(epo_drop))
            '''
            # Ensuring that already removed epochs are not in list
            for i in epo_drop:
                Epoch_no = drop_list
                if i in Epoch_no:
                    #print(i)
                    epo_drop.remove(i)
            
            # Ensuring list is no longer than the number of epochs     
            while epo_drop[-1]>(len(epochs_b)-1):
                epo_drop.pop(-1)
            '''  
            # Dropping the beginning and end of a trial      
            epo_a = epochs_a.drop(epo_drop)        
            epo_b = epochs_b.drop(epo_drop)
            
            epo_a_copy = epo_a.copy()
            epo_b_copy = epo_b.copy()
    
            
            # Running autoreject function
            cleaned_epochs_AR, dic_AR = prep.AR_local([epo_a_copy, epo_b_copy],
                                            strategy="union",
                                            threshold=50.0,
                                            verbose=True)
            
            epo_a_cleaned = cleaned_epochs_AR[0]
            epo_b_cleaned = cleaned_epochs_AR[1]
    
            # Getting the number of epochs of specific condition in a row
            
            a = epo_a_cleaned.events[:,2]
            d = dict()
            
            for k, v in groupby(a):
                d.setdefault(k, []).append(len(list(v)))
            print(d)
            
            #equalize number of epochs used to calculate connectivity values
            #mne.epochs.equalize_epoch_counts([epo_a, epo_b])
            
            for c in conditions:
                   # Merging the leader and follower
                if c == 'Leader':
                    epo_a_l = epo_a_cleaned['Leader']
                    epo_b_l = epo_b_cleaned['Leader']
                    epo_a_f = epo_a_cleaned['Follower']
                    epo_b_f = epo_b_cleaned['Follower']
                    epo_a_c = mne.concatenate_epochs([epo_a_l, epo_b_f])
                    epo_b_c = mne.concatenate_epochs([epo_b_l, epo_a_f])
                    c = 'Leader-Follower'
                    
                    freq_bands = {'Theta': [4, 7],
                              'Alpha' :[8, 13],
                              'Beta': [15, 25]}
                
                    freq_bands = OrderedDict(freq_bands)
                    
                    sampling_rate = epo_a_c.info['sfreq']
                    
                    #Connectivity
                    
                    #Data and storage
                    data_inter = np.array([epo_a_c, epo_b_c])
                    
                    #Analytic signal per frequency band
                    complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                                                 freq_bands)
                    
                    result, angle, _, _ = analyses.compute_sync(complex_signal,
                                                                mode='ccorr',
                                                                epochs_average=False)
                    
                    #Defining the number of channels
                    n_ch = len(epochs_a.info['ch_names'])
                    
                    #Averaging over the epochs specific to the given trial
                    trials = []
                    
                    for j in range(3):
                        for i in d[event_dict['Leader']] + d[event_dict['Follower']]:
                            trials.append(sum(result[j,0:i,:,:])/i)
                    
                    
                    theta = sum(trials[::3])/len(trials[::3])
                    alpha = sum(trials[1::3])/len(trials[::3])
                    beta = sum(trials[2::3])/len(trials[::3])
                        
                    theta = theta[0:n_ch, n_ch:2*n_ch]
                    alpha = alpha[0:n_ch, n_ch:2*n_ch]
                    beta = beta[0:n_ch, n_ch:2*n_ch]
                    
                    theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
                    alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
                    beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))
                    
                    print(c)
                    print('Range of the connectivities:')
                    print('Theta max:' + str(np.max(theta)))
                    print('Theta min:' + str(np.min(theta)))
                    print('Alpha max:' + str(np.max(alpha)))
                    print('Alpha min:' + str(np.min(alpha)))
                    print('Beta max:' + str(np.max(beta)))
                    print('Beta min:' + str(np.min(beta)))
                    
                    np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' + c + '_' + length, theta)
                    np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' + c + '_' + length, alpha)
                    np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' + c + '_' + length, beta)
                    
                else: 
                    epo_a_c = epo_a_cleaned[c]
                    epo_b_c = epo_b_cleaned[c]
                    
                    print('no. of epochs:', len(epo_a_c))
                    #Defining frequency bands
                    freq_bands = {'Theta': [4, 7],
                                  'Alpha' :[8, 13],
                                  'Beta': [15, 25]}
                    
                    freq_bands = OrderedDict(freq_bands)
                    
                    sampling_rate = epo_a_c.info['sfreq']
                    
                    #Connectivity
                    
                    #Data and storage
                    data_inter = np.array([epo_a_c, epo_b_c])
                    
                    #Analytic signal per frequency band
                    complex_signal = analyses.compute_freq_bands(data_inter, sampling_rate,
                                                                 freq_bands)
                    
                    result, angle, _, _ = analyses.compute_sync(complex_signal,
                                                                mode='ccorr',
                                                                epochs_average=False)
                    
                    #Defining the number of channels
                    n_ch = len(epochs_a.info['ch_names'])
                    
                    #Averaging over the epochs specific to the given trial
                    trials = []
                    
                    for j in range(3):
                        for i in d[event_dict[c]]:
                            trials.append(sum(result[j,0:i,:,:])/i)
                 
                    print(c)
                    print(len(trials[::3]))
                    theta = sum(trials[::3])/len(trials[::3])
                    alpha = sum(trials[1::3])/len(trials[::3])
                    beta = sum(trials[2::3])/len(trials[::3])
                        
                    theta = theta[0:n_ch, n_ch:2*n_ch]  # should this be here? isn't it already sliced in the for-loop?
                    alpha = alpha[0:n_ch, n_ch:2*n_ch]
                    beta = beta[0:n_ch, n_ch:2*n_ch]
                    
                    theta = abs((theta - np.mean(theta[:])) / np.std(theta[:]))
                    alpha = abs((alpha - np.mean(alpha[:])) / np.std(alpha[:]))
                    beta = abs((beta - np.mean(beta[:])) / np.std(beta[:]))
                    
                    print(c)
                    print('Range of the connectivities:')
                    print('Theta max:' + str(np.max(theta)))
                    print('Theta min:' + str(np.min(theta)))
                    print('Alpha max:' + str(np.max(alpha)))
                    print('Alpha min:' + str(np.min(alpha)))
                    print('Beta max:' + str(np.max(beta)))
                    print('Beta min:' + str(np.min(beta)))
                    
                    np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_theta_' + c + '_' + length, theta)
                    np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_alpha_' + c + '_' + length, alpha)
                    np.save('Connectivity matrices/ccorr/' + 'ccorr_' + pair_name + '_beta_' + c + '_' + length, beta)        
          
    return theta, alpha, beta, angle, complex_signal, epo_a_cleaned, epo_a
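
# (added sketch) example call for this extended return signature, mirroring the
# pair loaded in Example #2; the file/pair names there are placeholders:
# theta, alpha, beta, angle, complex_signal, epo_a_cleaned, epo_a = ccorr(
#     epochs_a_s, epochs_b_s, 'pair0010', 'short', drop_list=[])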
Example #7
cleaned_epochs_AR = AR_local(cleaned_epochs_ICA, verbose=True)
input("Press ENTER to continue")
plt.close('all')

preproc_S1 = cleaned_epochs_AR[0]
preproc_S2 = cleaned_epochs_AR[1]

# Connectivity
# Create array
data = np.array([preproc_S1, preproc_S2])

# Compute analytic signal per frequency band
complex_signal = compute_freq_bands(data, freq_bands)

# Compute frequency- and time-frequency-domain connectivity measures.
result = compute_sync(complex_signal,
                      mode='ccorr')

# slicing to get the inter-brain part of the matrix
theta, alpha_low, alpha_high, beta, gamma = result[:, 0:n_ch, n_ch:2*n_ch]

values = alpha_low
values -= np.diag(np.diag(values))

C = (values - np.mean(values[:])) / np.std(values[:])

# Define bad channels for the viz test
epo1.info['bads'] = ['F8', 'Fp2', 'Cz', 'O2']
epo2.info['bads'] = ['F7', 'O1']

# Visualization of inter-brain connectivity in 2D
fig, ax = plt.subplots(1,1)
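
# A possible continuation (added sketch, mirroring Example #8): plot_sensors_2d,
# plot_links_2d and the loc1/loc2/lab1/lab2 arrays are assumed to come from the
# same visualization helpers used there.
# plot_sensors_2d(loc1, loc2, lab1, lab2)
# plot_links_2d(loc1, loc2, C=C, threshold=2, steps=10)
# plt.tight_layout()
# plt.show()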
Example #8
input("Press ENTER to continue")
plt.close('all')

preproc_S1 = cleaned_epochs_AR[0]
preproc_S2 = cleaned_epochs_AR[1]

# Connectivity
# Create array
data = np.array([preproc_S1, preproc_S2])

# Compute analytic signal per frequency band
complex_signal = compute_freq_bands(data, freq_bands)

# Compute frequency- and time-frequency-domain connectivity measures.
result = compute_sync(complex_signal,
                      mode='plv',
                      epoch_wise=True,
                      time_resolved=True)

theta, alpha_low, alpha_high, beta, gamma = result

C = (alpha_low - np.mean(alpha_low[:])) / np.std(alpha_low[:])

# Visualization of inter-brain connectivity in 2D
plt.figure(figsize=(10, 20))
plt.gca().set_aspect('equal', 'box')
plt.axis('off')
plot_sensors_2d(loc1, loc2, lab1, lab2)
plot_links_2d(loc1, loc2, C=C, threshold=2, steps=10)
plt.tight_layout()
plt.show()