Example #1
def test_read_ch_connectivity():
    "Test reading channel connectivity templates"
    a = partial(np.array, dtype='<U7')
    # no pep8
    nbh = np.array([[(['MEG0111'], [[a(['MEG0131'])]]),
                     (['MEG0121'], [[a(['MEG0111'])],
                                    [a(['MEG0131'])]]),
                     (['MEG0131'], [[a(['MEG0111'])],
                                    [a(['MEG0121'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_mat.mat')
    savemat(mat_fname, mat)

    ch_connectivity = read_ch_connectivity(mat_fname)
    x = ch_connectivity
    assert_equal(x.shape, (3, 3))
    assert_equal(x[0, 1], False)
    assert_equal(x[0, 2], True)
    assert_true(np.all(x.diagonal()))
    assert_raises(ValueError, read_ch_connectivity, mat_fname, [0, 3])
    ch_connectivity = read_ch_connectivity(mat_fname, picks=[0, 2])
    assert_equal(ch_connectivity.shape[0], 2)

    ch_names = ['EEG01', 'EEG02', 'EEG03']
    neighbors = [['EEG02'], ['EEG04'], ['EEG02']]
    assert_raises(ValueError, ch_neighbor_connectivity, ch_names, neighbors)
    neighbors = [['EEG02'], ['EEG01', 'EEG03'], ['EEG 02']]
    assert_raises(ValueError, ch_neighbor_connectivity, ch_names[:2],
                  neighbors)
    neighbors = [['EEG02'], 'EEG01', ['EEG 02']]
    assert_raises(ValueError, ch_neighbor_connectivity, ch_names, neighbors)
Example #2
def test_read_ch_connectivity():
    "Test reading channel connectivity templates"
    tempdir = _TempDir()
    a = partial(np.array, dtype='<U7')
    # no pep8
    nbh = np.array([[(['MEG0111'], [[a(['MEG0131'])]]),
                     (['MEG0121'], [[a(['MEG0111'])], [a(['MEG0131'])]]),
                     (['MEG0131'], [[a(['MEG0111'])], [a(['MEG0121'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_mat.mat')
    savemat(mat_fname, mat)

    ch_connectivity = read_ch_connectivity(mat_fname)
    x = ch_connectivity
    assert_equal(x.shape, (3, 3))
    assert_equal(x[0, 1], False)
    assert_equal(x[0, 2], True)
    assert_true(np.all(x.diagonal()))
    assert_raises(ValueError, read_ch_connectivity, mat_fname, [0, 3])
    ch_connectivity = read_ch_connectivity(mat_fname, picks=[0, 2])
    assert_equal(ch_connectivity.shape[0], 2)

    ch_names = ['EEG01', 'EEG02', 'EEG03']
    neighbors = [['EEG02'], ['EEG04'], ['EEG02']]
    assert_raises(ValueError, ch_neighbor_connectivity, ch_names, neighbors)
    neighbors = [['EEG02'], ['EEG01', 'EEG03'], ['EEG 02']]
    assert_raises(ValueError, ch_neighbor_connectivity, ch_names[:2],
                  neighbors)
    neighbors = [['EEG02'], 'EEG01', ['EEG 02']]
    assert_raises(ValueError, ch_neighbor_connectivity, ch_names, neighbors)
Example #3
    def __init__(self, sensor_type='mag'):
        # sensor_type: 'mag' or 'grad'
        self.sensor_type = sensor_type
        code_dir = os.path.dirname(os.path.realpath(__file__))
        info_path = os.path.join(code_dir, 'neuromag_info')
        if sensor_type == 'mag':
            neighboring_filename = os.path.join(info_path,
                                                'neuromag306mag_neighb.mat')
        elif sensor_type == 'grad':
            neighboring_filename = os.path.join(
                info_path, 'neuromag306planar_neighb.mat')
        neuromag = read_raw_fif(sample.data_path() +
                                '/MEG/sample/sample_audvis_raw.fif')
        self.topography_2D = find_layout(neuromag.info,
                                         ch_type=sensor_type).pos
        topography_3D = np.array([
            ch['loc'][:3] for ch in neuromag.info['chs']
            if (ch['ch_name'][-1] == '1') & (ch['ch_name'][0:3] == 'MEG')
        ])

        neighboring, ch_names = read_ch_connectivity(
            neighboring_filename,
            picks=None)  # ch. names written in 'MEG1111' format
        neighboring = neighboring.toarray()
        num_channels = len(ch_names)
        Convolutions.__init__(self, neighboring, topography_3D, ch_names,
                              num_channels)
Example #4
    def __init__(self, sensor_type='mag'):
        # sensor_type - mag or grad. Grad not yet implemented

        code_dir = os.path.dirname(os.path.realpath(__file__))
        info_path = os.path.join(code_dir, 'neuromag_info')
        neuromag = read_raw_fif(sample.data_path() +
                                '/MEG/sample/sample_audvis_raw.fif')
        topography_2D = find_layout(neuromag.info,
                                    ch_type=sensor_type).pos[:, :2]
        if sensor_type == 'mag':
            # numbers of mag sensors end with '1'
            topography_3D = np.array([ch['loc'][:3]
                                      for ch in neuromag.info['chs']
                                      if (ch['ch_name'][-1] == '1') &
                                      (ch['ch_name'][0:3] == 'MEG')])
            neighboring_filename = os.path.join(info_path,
                                                'neuromag306mag_neighb.mat')

        elif sensor_type == 'grad':
            # numbers of grad sensors end with '2' or '3'
            topography_3D = np.array([ch['loc'][:3]
                                      for ch in neuromag.info['chs']
                                      if ((ch['ch_name'][-1] == '2') |
                                          (ch['ch_name'][-1] == '3')) &
                                      (ch['ch_name'][0:3] == 'MEG')])
            neighboring_filename = os.path.join(
                info_path, 'neuromag306planar_neighb.mat')

        neighboring, ch_names = read_ch_connectivity(
            neighboring_filename, picks=None)  # ch. names in 'MEG1111' format
        neighboring = neighboring.toarray()
        num_channels = len(ch_names)
        self.sensor_type = sensor_type
        Device.__init__(self, ch_names, topography_3D, topography_2D,
                        neighboring, num_channels)
Example #5
    def __init__(self):
        code_dir = os.path.dirname(os.path.realpath(__file__))
        info_path = os.path.join(code_dir, 'gsn128_info')
        neighboring_filename = os.path.join(info_path, 'gsn128_neighb.mat')
        topography_2D = read_layout('GSN-128.lay', info_path).pos[:, :2]
        num_channels, ch_names, topography_3D = self._parse_mat(
            os.path.join(info_path, 'bs_topography.mat'))

        neighboring, self.ch_names = read_ch_connectivity(
            neighboring_filename, picks=None)
        neighboring = neighboring.toarray()
        Device.__init__(self, ch_names, topography_3D, topography_2D,
                        neighboring, num_channels)
Example #6
def meg_to_gradmag(chan_types):
    """force separation of magnetometers and gradiometers"""
    from mne.channels import read_ch_connectivity
    if 'meg' in [chan['name'] for chan in chan_types]:
        mag_connectivity, _ = read_ch_connectivity('neuromag306mag')
        # FIXME grad connectivity? Need virtual sensor?
        # grad_connectivity, _ = read_ch_connectivity('neuromag306grad')
        chan_types = [dict(name='mag', connectivity=mag_connectivity),
                      dict(name='grad', connectivity='missing')] + \
                     [chan for chan in chan_types if chan['name'] != 'meg']
    return chan_types
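A minimal usage sketch for meg_to_gradmag (the input list below is hypothetical, and it assumes the 'neuromag306mag' FieldTrip template is available to read_ch_connectivity):

chan_types = [dict(name='meg'), dict(name='eeg')]   # hypothetical input
chan_types = meg_to_gradmag(chan_types)
# 'meg' is replaced by a 'mag' entry carrying its adjacency matrix plus a
# 'grad' placeholder; other entries such as 'eeg' pass through unchanged.
print([ct['name'] for ct in chan_types])  # ['mag', 'grad', 'eeg']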
Example #7
def test_read_ch_connectivity():
    """Test reading channel connectivity templates"""
    tempdir = _TempDir()
    a = partial(np.array, dtype='<U7')
    # no pep8
    nbh = np.array([[(['MEG0111'], [[a(['MEG0131'])]]),
                     (['MEG0121'], [[a(['MEG0111'])],
                                    [a(['MEG0131'])]]),
                     (['MEG0131'], [[a(['MEG0111'])],
                                    [a(['MEG0121'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_mat.mat')
    savemat(mat_fname, mat, oned_as='row')

    ch_connectivity, ch_names = read_ch_connectivity(mat_fname)
    x = ch_connectivity
    assert_equal(x.shape[0], len(ch_names))
    assert_equal(x.shape, (3, 3))
    assert_equal(x[0, 1], False)
    assert_equal(x[0, 2], True)
    assert_true(np.all(x.diagonal()))
    assert_raises(ValueError, read_ch_connectivity, mat_fname, [0, 3])
    ch_connectivity, ch_names = read_ch_connectivity(mat_fname, picks=[0, 2])
    assert_equal(ch_connectivity.shape[0], 2)
    assert_equal(len(ch_names), 2)

    ch_names = ['EEG01', 'EEG02', 'EEG03']
    neighbors = [['EEG02'], ['EEG04'], ['EEG02']]
    assert_raises(ValueError, _ch_neighbor_connectivity, ch_names, neighbors)
    neighbors = [['EEG02'], ['EEG01', 'EEG03'], ['EEG 02']]
    assert_raises(ValueError, _ch_neighbor_connectivity, ch_names[:2],
                  neighbors)
    neighbors = [['EEG02'], 'EEG01', ['EEG 02']]
    assert_raises(ValueError, _ch_neighbor_connectivity, ch_names, neighbors)
    connectivity, ch_names = read_ch_connectivity('neuromag306mag')
    assert_equal(connectivity.shape, (102, 102))
    assert_equal(len(ch_names), 102)
    assert_raises(ValueError, read_ch_connectivity, 'bananas!')

    # In EGI 256, E31 sensor has no neighbour
    a = partial(np.array)
    nbh = np.array([[(['E31'], []),
                     (['E1'], [[a(['E2'])],
                               [a(['E3'])]]),
                     (['E2'], [[a(['E1'])],
                               [a(['E3'])]]),
                     (['E3'], [[a(['E1'])],
                               [a(['E2'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_isolated_mat.mat')
    savemat(mat_fname, mat, oned_as='row')
    ch_connectivity, ch_names = read_ch_connectivity(mat_fname)
    x = ch_connectivity.todense()
    assert_equal(x.shape[0], len(ch_names))
    assert_equal(x.shape, (4, 4))
    assert_true(np.all(x.diagonal()))
    assert_false(np.any(x[0, 1:]))
    assert_false(np.any(x[1:, 0]))

    # Check for neighbours consistency. If a sensor is marked as a neighbour,
    # then it should also have its neighbours defined.
    a = partial(np.array)
    nbh = np.array([[(['E31'], []),
                     (['E1'], [[a(['E8'])],
                               [a(['E3'])]]),
                     (['E2'], [[a(['E1'])],
                               [a(['E3'])]]),
                     (['E3'], [[a(['E1'])],
                               [a(['E2'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_error_mat.mat')
    savemat(mat_fname, mat, oned_as='row')
    assert_raises(ValueError, read_ch_connectivity, mat_fname)
Example #8
epochs = mne.Epochs(raw, events, event_id, tmin, tmax,
                    picks=picks,
                    baseline=None,
                    reject=reject,
                    preload=True)

epochs.drop_channels(['EOG 061'])
epochs.equalize_event_counts(event_id, copy=False)

condition_names = 'Aud_L', 'Aud_R', 'Vis_L', 'Vis_R'
X = [epochs[k].get_data() for k in condition_names]  # as 3D matrix
X = [np.transpose(x, (0, 2, 1)) for x in X]  # transpose for clustering

###############################################################################
# Load FieldTrip neighbor definition to setup sensor connectivity
# ---------------------------------------------------------------
connectivity, ch_names = read_ch_connectivity('neuromag306mag')

print(type(connectivity))  # it's a sparse matrix!

plt.imshow(connectivity.toarray(),
           cmap='gray',
           origin='lower',
           interpolation='nearest')
plt.xlabel('{} Magnetometers'.format(len(ch_names)))
plt.ylabel('{} Magnetometers'.format(len(ch_names)))
plt.title('Between-sensor adjacency')

###############################################################################
# Compute permutation statistic
# -----------------------------
#
Example #9
def group_stats(subjects,
                path,
                exp,
                filt,
                analysis,
                c_names,
                seed=42,
                threshold=1.96,
                p_accept=0.05,
                chance=.5,
                n_perm=10000,
                reg_type='rerf'):
    # Load the subject gats
    group_gat = list()
    group_td = list()
    group_reg = list()
    group_dict = dict()
    group_dev = list()
    for subject in subjects:
        subject_template = op.join(path, subject, 'mne', subject + '_%s%s.%s')
        fname_gat = subject_template % (exp, '_calm_' + filt + '_filt_' +
                                        analysis + '_gat', 'npy')
        fname_reg = subject_template % (exp, '_calm_' + filt + '_filt_' +
                                        analysis + '_%s-ave' % reg_type, 'fif')
        gat = np.load(fname_gat)
        group_gat.append(gat)
        group_td.append(np.diag(gat))

        reg = mne.read_evokeds(fname_reg)
        if isinstance(c_names, list):
            evokeds = list()
            for r in reg:
                if r.comment == c_names[0]:
                    evokeds.insert(0, r)
                elif r.comment == c_names[1]:
                    evokeds.append(r)
            assert len(evokeds) == 2
            reg = mne.evoked.combine_evoked([evokeds[0], evokeds[1]],
                                            weights=[1, -1])
        elif isinstance(c_names, str):
            reg = reg[0]
        # transpose for the stats func
        group_reg.append(reg.data.T)

    n_subjects = len(subjects)
    connectivity, ch_names = read_ch_connectivity('KIT-208')
    ##################
    # Auxiliary info #
    ##################
    # define a layout
    layout = mne.find_layout(reg.info)
    group_dict['layout'] = layout
    group_dict['times'] = reg.times
    group_dict['sfreq'] = reg.info['sfreq']
    group_dict['subjects'] = subjects

    #############################
    # run a spatio-temporal REG #
    #############################
    group_reg = np.array(group_reg)
    group_dict['reg_stats'] = stc_1samp_test(group_reg,
                                             n_permutations=n_perm,
                                             threshold=threshold,
                                             tail=0,
                                             connectivity=connectivity,
                                             seed=seed,
                                             n_jobs=-1)

    #########################
    # run a GAT clustering  #
    #########################
    # remove chance from the gats
    group_gat = np.array(group_gat) - chance
    _, clusters, p_values, _ = stc_1samp_test(group_gat,
                                              n_permutations=n_perm,
                                              threshold=threshold,
                                              tail=0,
                                              seed=seed,
                                              out_type='mask',
                                              n_jobs=-1)
    p_values_ = np.ones_like(group_gat[0]).T
    for cluster, pval in zip(clusters, p_values):
        p_values_[cluster.T] = pval
    group_dict['gat_sig'] = p_values_ < p_accept

    ########################
    # run a TD clustering  #
    ########################
    # remove chance from the diagonal (TD) scores
    group_td = np.array(group_td) - chance
    group_dict['td_stats'] = pc_1samp_test(group_td,
                                           n_permutations=n_perm,
                                           threshold=threshold,
                                           tail=0,
                                           seed=seed,
                                           n_jobs=-1)

    ##################################
    # run a GAT deviation clustering #
    ##################################
    # deviation of the GAT matrix from its diagonal
    group_diag = np.array([np.diag(gat)[:, np.newaxis] for gat in group_gat])
    group_dev = group_gat - group_diag

    _, clusters, p_values, _ = stc_1samp_test(group_dev,
                                              n_permutations=n_perm,
                                              threshold=threshold,
                                              tail=0,
                                              seed=seed,
                                              out_type='mask',
                                              n_jobs=-1)
    p_values_ = np.ones_like(group_dev[0]).T
    for cluster, pval in zip(clusters, p_values):
        p_values_[cluster.T] = pval
    group_dict['gat_dev_sig'] = p_values_ < p_accept

    return group_dict
Example #10
    # combine grad with RMS
    data[:, grad[::2], :] = np.sqrt((data[:, grad[::2], :] ** 2 +
                                    data[:, grad[1::2], :] ** 2) / 2.)
    data[:, grad[1::2], :] = data[:, grad[::2], :]
    data[:, grad, :] += analysis['chance']

    # keep robust averaging for plotting
    epochs = EpochsArray(data, evoked.info,
                         events=np.zeros((len(data), 3), dtype=int),
                         tmin=evoked.times[0])
    evoked = epochs.average()
    evoked.data = robust_mean(data, axis=0)

    # Run stats
    p_values_chans = list()
    epochs.pick_types('mag')
    connectivity, _ = read_ch_connectivity('neuromag306mag')
    X = np.transpose(epochs._data, [0, 2, 1])
    _, clusters, cl_p_val, _ = stats(
        X, out_type='mask', n_permutations=2 ** 11,
        connectivity=connectivity, n_jobs=-1)
    p_values = np.ones_like(X[0]).T
    for cluster, pval in zip(clusters, cl_p_val):
        p_values[cluster.T] = pval
    sig = p_values < .05

    # Save contrast
    save([evoked, data, p_values, sig, analysis],
         'evoked', analysis=('stats_' + analysis['name']),
         overwrite=True)
Example #11
def SensorStatsPlot(condcomb, ListSubj, colors):

    #ListSubj = ('sd130343','cb130477' , 'rb130313', 'jm100109',
    #             'sb120316', 'tk130502','lm130479' , 'ms130534', 'ma100253', 'sl130503',
    #             'mb140004','mp140019' , 'dm130250', 'hr130504', 'wl130316', 'rl130571')

    #ListSubj = ('sd130343','cb130477' , 'rb130313', 'jm100109',
    #             'tk130502','lm130479' , 'ms130534', 'ma100253', 'sl130503',
    #            'mb140004','mp140019' , 'dm130250', 'hr130504', 'rl130571')

    #condcomb = ('QtPast' ,'QtPre','QtFut' )
    #condcomb = ('QsWest' ,'QsPar','QsEast')

    #ipython --pylab
    import mne
    import numpy as np
    import matplotlib.pyplot as plt
    from mpl_toolkits.axes_grid1 import make_axes_locatable
    from mne.viz import plot_topomap
    from mne.stats import spatio_temporal_cluster_test
    from mne.datasets import sample
    from mne.channels import read_ch_connectivity
    from scipy import stats as stats
    from mne.viz import plot_topo
    import os
    os.chdir('/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/SCRIPTS/MNE_PYTHON')
    os.environ['MNE_ROOT'] = '/neurospin/local/mne'
    wdir = "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/"

    # load FieldTrip neighbor definition to setup sensor connectivity
    neighbor_file_mag = '/neurospin/local/fieldtrip/template/neighbours/neuromag306mag_neighb.mat'  # mag
    neighbor_file_grad = '/neurospin/local/fieldtrip/template/neighbours/neuromag306planar_neighb.mat'  # grad
    neighbor_file_eeg = '/neurospin/local/fieldtrip/template/neighbours/easycap64ch-avg_neighb.mat'  # eeg
    connectivity, ch_names = mne.channels.read_ch_connectivity(
        neighbor_file_eeg, picks=range(60))
    connectivity_mag, ch_names_mag = read_ch_connectivity(neighbor_file_mag)
    connectivity_grad, ch_names_grad = read_ch_connectivity(neighbor_file_grad)
    connectivity_eeg, ch_names_eeg = read_ch_connectivity(neighbor_file_eeg)

    # evoked 0 to get the size of the matrix
    fname0 = (wdir + ListSubj[0] + "/mne_python/MEEG_" + condcomb[0] + "_" +
              ListSubj[0] + "-ave.fif")
    evoked0 = mne.read_evokeds(fname0, condition=0, baseline=(-0.2, 0))
    sensordatamat_meg_mag = np.empty(
        [len(condcomb),
         len(ListSubj), 102, evoked0.data.shape[1]])
    sensordatamat_meg_grad = np.empty(
        [len(condcomb),
         len(ListSubj), 204, evoked0.data.shape[1]])
    sensordatamat_meg_eeg = np.empty(
        [len(condcomb),
         len(ListSubj), 60, evoked0.data.shape[1]])

    # define statistical threshold
    p_threshold = 0.05
    t_threshold = -stats.distributions.t.ppf(p_threshold / 2.,
                                             len(ListSubj) - 1)

    # compute grand averages
    GDAVGmag, GDAVGgrad, GDAVGeeg = [], [], []
    sensordatamat_meg_mag = np.empty(
        (len(condcomb), len(ListSubj), 102, len(evoked0.times)))
    sensordatamat_meg_grad = np.empty(
        (len(condcomb), len(ListSubj), 204, len(evoked0.times)))
    #sensordatamat_eeg       = np.empty((len(condcomb),len(ListSubj),60 ,len(evoked0.times)))

    for c in range(len(condcomb)):

        evoked2plotmag, evoked2plotgrad, evoked2ploteeg = [], [], []
        for i in range(len(ListSubj)):

            fname_ave_meg = (wdir + ListSubj[i] + "/mne_python/MEEG_" +
                             condcomb[c] + "_" + ListSubj[i] + "-ave.fif")

            tmp_evoked_meg = mne.read_evokeds(fname_ave_meg,
                                              condition=0,
                                              baseline=(-0.2, 0))
            evoked2plotmag.append(tmp_evoked_meg.pick_types('mag'))
            sensordatamat_meg_mag[c, i, ::, ::] = tmp_evoked_meg.data

            tmp_evoked_meg = mne.read_evokeds(fname_ave_meg,
                                              condition=0,
                                              baseline=(-0.2, 0))
            evoked2plotgrad.append(tmp_evoked_meg.pick_types('grad'))
            sensordatamat_meg_grad[c, i, ::, ::] = tmp_evoked_meg.data

            #tmp_evoked_meg  = mne.read_evokeds(fname_ave_meg,   condition=0, baseline=(-0.2, 0))
            #evoked2ploteeg.append(tmp_evoked_meg.pick_types('eeg'))
            #sensordatamat_eeg[c,i,::,::]  = tmp_evoked_meg.data

        GDAVGmag.append(mne.grand_average(evoked2plotmag))
        GDAVGgrad.append(mne.grand_average(evoked2plotgrad))
        #GDAVGeeg.append(mne.grand_average(evoked2ploteeg))

    # plot topomaps of grand_averages
    plot_topo(GDAVGmag, color=colors)
    plt.savefig(
        "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/"
        + "_".join([str(cond) for cond in condcomb]) + "_GDAVG_mags")

    plot_topo(GDAVGgrad, color=colors)
    plt.savefig(
        "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/"
        + "_".join([str(cond) for cond in condcomb]) + "_GDAVG_grads")

    times = np.arange(-0.1, 0.9, 0.05)
    for c in range(len(condcomb)):

        GDAVGmag[c].plot_topomap(times,
                                 ch_type='mag',
                                 vmin=-40,
                                 vmax=40,
                                 average=0.05)
        plt.savefig(
            "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/"
            + str(condcomb[c]) + "_GDAVG_mags")

        GDAVGgrad[c].plot_topomap(times,
                                  ch_type='grad',
                                  vmin=-10,
                                  vmax=10,
                                  average=0.05)
        plt.savefig(
            "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/"
            + str(condcomb[c]) + "_GDAVG_grads")

    allcond_meg_mag = [
        np.transpose(x, (0, 2, 1)) for x in sensordatamat_meg_mag
    ]
    allcond_meg_grad = [
        np.transpose(x, (0, 2, 1)) for x in sensordatamat_meg_grad
    ]

    ###############################################################################

    t_threshold = -stats.distributions.t.ppf(p_threshold / 2.,
                                             len(ListSubj) - 1)
    T_obs, clusters, cluster_p_values, HO = spatio_temporal_cluster_test(
        allcond_meg_mag,
        n_permutations=1024,
        threshold=t_threshold,
        tail=0,
        n_jobs=4,
        connectivity=connectivity_mag)

    t_threshold = -stats.distributions.t.ppf(p_threshold / 2.,
                                             len(ListSubj) - 1)
    T_obs, clusters, cluster_p_values, HO = spatio_temporal_cluster_test(
        allcond_meg_grad,
        n_permutations=1024,
        threshold=t_threshold,
        tail=0,
        n_jobs=4,
        connectivity=connectivity_grad)
Example #12
from main import get_data
from mne.channels import read_ch_connectivity
from mne.stats import permutation_cluster_test
import numpy as np
from mne.io import read_raw_fif
from mne.datasets import sample
from mne.channels import find_layout
from mne.viz import plot_topomap
import os
import matplotlib.pyplot as plt
from scipy.signal import butter, lfilter


info = read_raw_fif(sample.data_path() + '/MEG/sample/sample_audvis_raw.fif',
                    verbose=False).info
connectivity = read_ch_connectivity('neuromag306planar_neighb.mat',
                                    picks=None)



def butter_lowpass(cutoff, fs, order=5):
    nyq = 0.5 * fs
    normal_cutoff = cutoff / nyq
    b, a = butter(order, normal_cutoff, btype='low', analog=False)
    return b, a

def butter_lowpass_filter(data, cutoff, fs, order=5):
    b, a = butter_lowpass(cutoff, fs, order=order)
    y = np.zeros(data.shape)
    for tr in range(data.shape[0]):
        for ch in range(data.shape[2]):
            y[tr, :, ch] = lfilter(b, a, data[tr, :, ch])
    return y
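A small usage sketch for the two filter helpers above, run on synthetic data; the array shape (trials x samples x channels), sampling rate and cutoff are assumptions for illustration, not values from the original script:

rng = np.random.RandomState(0)
fake_data = rng.randn(10, 600, 204)   # assumed: 10 trials, 600 samples, 204 channels
fs = 600.0                            # assumed sampling rate in Hz
smoothed = butter_lowpass_filter(fake_data, cutoff=30.0, fs=fs, order=5)
print(smoothed.shape)                 # (10, 600, 204), filtered along the time axis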
Example #13
    def make_scales(s):
        ''' populate attributes describing channels and units '''

        # channels
        use_ext = '.sfp'
        path, file = os.path.split(s.params['Coordinates file'])
        fn, ext = file.split('.')
        s.montage = mne.channels.read_montage(os.path.join(path, fn) + use_ext)

        # channel "connectivity" (adjacency)
        target_path = '/active_projects/matlab_common/hbnl_neighbs.mat'
        s.ch_connectivity, ch_names = read_ch_connectivity(target_path)

        # units and suggested limits
        s.db_lims, s.db_units = [-3, 3], 'decibels (dB)'
        s.itc_lims, s.itc_units = [-.06, 0.3], 'ITC'
        s.coh_lims, s.coh_units = [-.06, 0.3], 'ISPS'
        s.phi_lims, s.phi_units = [-np.pi, np.pi], 'Radians'
        s.z_lims, s.z_units = [-1, 9], 'Z-score'

        # ERP times
        s.srate = s.params['Sampling rate'][0][0]

        if s.source_pipeline == 'matlab':
            s.make_tfscales_matlab()
        elif s.source_pipeline == 'erostack':
            s.make_tfscales_erostack()

        s.zero_tf = convert_ms(s.time_tf, 0)
        freq_tick_space = freq_tick_heuristic(s.freq)
        s.freq_ticks_pt = range(0, len(s.freq), freq_tick_space)
        s.freq_ticks_hz = ['{0:.1f}'.format(f) for f in s.freq[::freq_tick_space]]
        s.time_ticks()  # additional helper function for ticks

        # time plot limits
        s.time_plotlims = [convert_ms(s.time, ms) for ms in time_plotlims_ms]
        s.time_tf_plotlims = [convert_ms(s.time_tf, ms)
                              for ms in time_plotlims_ms]

        # freq plot limits
        s.freq_tf_plotlims = [convert_ms(s.freq, hz)
                              for hz in freq_plotlims_hz]

        # only CSD beyond this if statement
        if not s.params['CSD matrix']:
            s.pot_lims, s.pot_units = [-10, 16], r'$\mu$V'
            return
        if s.params['CSD matrix'].shape[0] <= 2:
            s.pot_lims, s.pot_units = [-10, 16], r'$\mu$V'
            return

        s.pot_lims, s.pot_units = [-.2, .2], r'$\mu$V / $cm^2$'
        s.cohpair_inds = [[int(chan_num) - 1 for chan_num in pair]  # MATLAB index
                          for pair in s.params['Coherence pairs'].T]
        if len(s.params['Coherence pairs'].shape) > 1:
            s.cohpair_lbls = ['~'.join([s.montage.ch_names[chan_ind]
                                        for chan_ind in pair])
                              for pair in s.cohpair_inds]
            s.cohpair_sets = OrderedDict()
            s.cohchan_sets = OrderedDict()
            for pind, pset in enumerate(s.params['Coherence pair subsets']):
                tmp_pairs = np.where(
                    s.params['Coherence pair subset index'] == pind + 1)[1]
                setpairs = [s.cohpair_lbls[pair] for pair in tmp_pairs]
                s.cohpair_sets[pset] = setpairs
                setchans = np.unique(np.array([s.cohpair_inds[p]
                                               for p in tmp_pairs]))
                s.cohchan_sets[pset] = [s.montage.ch_names[i] for i in setchans]
Example #14
def SensorStatsPlot(condcomb, ListSubj, colors):

    #ListSubj = ('sd130343','cb130477' , 'rb130313', 'jm100109',
    #             'sb120316', 'tk130502','lm130479' , 'ms130534', 'ma100253', 'sl130503',
    #             'mb140004','mp140019' , 'dm130250', 'hr130504', 'wl130316', 'rl130571')

    ListSubj = ('sd130343', 'cb130477', 'rb130313', 'jm100109', 'tk130502',
                'lm130479', 'ms130534', 'ma100253', 'sl130503', 'mb140004',
                'mp140019', 'dm130250', 'hr130504', 'rl130571')

    condcomb = ('EtPast', 'EtPre', 'EtFut')

    colors = ((1, 0, 0), (1, 0.37, 0.15), (1, 0.75, 0.3))

    #ipython --pylab
    import mne
    import numpy as np
    import matplotlib.pyplot as plt
    import itertools
    from mne.stats import spatio_temporal_cluster_test
    from mne.channels import read_ch_connectivity
    from scipy import stats as stats
    from mne.viz import plot_evoked_topo, iter_topography

    import os
    os.chdir('/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/SCRIPTS/MNE_PYTHON')
    os.environ['MNE_ROOT'] = '/neurospin/local/mne'
    wdir = "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/"

    # load FieldTrip neighbor definition to setup sensor connectivity
    neighbor_file_mag = '/neurospin/local/fieldtrip/template/neighbours/neuromag306mag_neighb.mat'  # mag
    neighbor_file_grad = '/neurospin/local/fieldtrip/template/neighbours/neuromag306planar_neighb.mat'  # grad
    neighbor_file_eeg = '/neurospin/local/fieldtrip/template/neighbours/easycap64ch-avg_neighb.mat'  # eeg
    connectivity, ch_names = mne.channels.read_ch_connectivity(
        neighbor_file_eeg, picks=range(60))
    connectivity_mag, ch_names_mag = read_ch_connectivity(neighbor_file_mag)
    connectivity_grad, ch_names_grad = read_ch_connectivity(neighbor_file_grad)
    connectivity_eeg, ch_names_eeg = read_ch_connectivity(neighbor_file_eeg)

    # evoked 0 to get the size of the matrix
    fname0 = (wdir + ListSubj[0] + "/mne_python/MEEG_" + condcomb[0] + "_" +
              ListSubj[0] + "-ave.fif")
    evoked0 = mne.read_evokeds(fname0, condition=0, baseline=(-0.2, 0))
    sensordatamat_meg_mag = np.empty(
        [len(condcomb),
         len(ListSubj), 102, evoked0.data.shape[1]])
    sensordatamat_meg_grad = np.empty(
        [len(condcomb),
         len(ListSubj), 204, evoked0.data.shape[1]])
    sensordatamat_meg_eeg = np.empty(
        [len(condcomb),
         len(ListSubj), 60, evoked0.data.shape[1]])

    # define statistical threshold
    p_threshold = 0.05
    t_threshold = -stats.distributions.t.ppf(p_threshold / 2.,
                                             len(ListSubj) - 1)

    # compute grand averages
    GDAVGmag, GDAVGgrad, GDAVGeeg = [], [], []
    sensordatamat_meg_mag = np.empty(
        (len(condcomb), len(ListSubj), 102, len(evoked0.times)))
    sensordatamat_meg_grad = np.empty(
        (len(condcomb), len(ListSubj), 204, len(evoked0.times)))
    #sensordatamat_eeg       = np.empty((len(condcomb),len(ListSubj),60 ,len(evoked0.times)))

    for c in range(len(condcomb)):

        evoked2plotmag, evoked2plotgrad, evoked2ploteeg = [], [], []
        for i in range(len(ListSubj)):

            fname_ave_meg = (wdir + ListSubj[i] + "/mne_python/MEEG_" +
                             condcomb[c] + "_" + ListSubj[i] + "-ave.fif")

            tmp_evoked_meg = mne.read_evokeds(fname_ave_meg,
                                              condition=0,
                                              baseline=(-0.2, 0))
            evoked2plotmag.append(tmp_evoked_meg.pick_types('mag'))
            sensordatamat_meg_mag[c, i, ::, ::] = tmp_evoked_meg.data

            tmp_evoked_meg = mne.read_evokeds(fname_ave_meg,
                                              condition=0,
                                              baseline=(-0.2, 0))
            evoked2plotgrad.append(tmp_evoked_meg.pick_types('grad'))
            sensordatamat_meg_grad[c, i, ::, ::] = tmp_evoked_meg.data

            #tmp_evoked_meg  = mne.read_evokeds(fname_ave_meg,   condition=0, baseline=(-0.2, 0))
            #evoked2ploteeg.append(tmp_evoked_meg.pick_types('eeg'))
            #sensordatamat_eeg[c,i,::,::]  = tmp_evoked_meg.data

        GDAVGmag.append(mne.grand_average(evoked2plotmag))
        GDAVGgrad.append(mne.grand_average(evoked2plotgrad))
        #GDAVGeeg.append(mne.grand_average(evoked2ploteeg))

    # plot topomaps of grand_averages
    for ax, idx in iter_topography(GDAVGmag[0].info,
                                   fig_facecolor='black',
                                   axis_facecolor='black',
                                   axis_spinecolor='k'):
        for c, cond in enumerate(condcomb):
            ax.plot(GDAVGmag[c].data[idx], color=colors[c])
    figManager = plt.get_current_fig_manager()
    figManager.window.showMaximized()
    plt.savefig(
        "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
        + "_".join([str(cond) for cond in condcomb]) + "_GDAVG_mags.eps",
        format='eps',
        dpi=1500)
    plt.close()

    mne.viz.plot_topo(GDAVGmag, color=['r', 'orange', 'y'])
    figManager = plt.get_current_fig_manager()
    figManager.window.showMaximized()
    plt.savefig(
        "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
        + "_".join([str(cond) for cond in condcomb]) + "_bis_GDAVG_mags.eps",
        format='eps',
        dpi=1500)
    plt.close()

    for ax, idx in iter_topography(GDAVGgrad[0].info,
                                   fig_facecolor='black',
                                   axis_facecolor='black',
                                   axis_spinecolor='k'):
        for c, cond in enumerate(condcomb):
            ax.plot(GDAVGgrad[c].data[idx], color=colors[c])
    figManager = plt.get_current_fig_manager()
    figManager.window.showMaximized()
    plt.savefig(
        "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
        + "_".join([str(cond) for cond in condcomb]) + "_GDAVG_grads.eps",
        format='eps',
        dpi=1500)
    plt.close()

    mne.viz.plot_topo(GDAVGgrad, color=['r', 'orange', 'y'])
    figManager = plt.get_current_fig_manager()
    figManager.window.showMaximized()
    plt.savefig(
        "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
        + "_".join([str(cond) for cond in condcomb]) + "_bis_GDAVG_grads.eps",
        format='eps',
        dpi=1500)
    plt.close()

    times = np.arange(-0.1, 0.9, 0.05)
    for c in range(len(condcomb)):

        GDAVGmag[c].plot_topomap(times,
                                 ch_type='mag',
                                 vmin=-40,
                                 vmax=40,
                                 average=0.05)
        plt.savefig(
            "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
            + str(condcomb[c]) + "_GDAVG_mags.eps",
            format='eps',
            dpi=1500)
        plt.close()

        GDAVGgrad[c].plot_topomap(times,
                                  ch_type='grad',
                                  vmin=-10,
                                  vmax=10,
                                  average=0.05)
        plt.savefig(
            "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
            + str(condcomb[c]) + "_GDAVG_grads.eps",
            format='eps',
            dpi=1500)
        plt.close()

    for combination in itertools.combinations(range(3), 2):
        tmp = []
        tmp = GDAVGmag[combination[0]] - GDAVGmag[combination[1]]
        tmp.plot_topomap(times, ch_type='mag', vmin=-15, vmax=15, average=0.05)
        plt.savefig(
            "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
            + str(condcomb[combination[0]]) + '_minus_' +
            str(condcomb[combination[1]]) + "_GDAVG_mags.eps",
            format='eps',
            dpi=1000)
        plt.close()

        tmp = []
        tmp = GDAVGgrad[combination[0]] - GDAVGgrad[combination[1]]
        tmp.plot_topomap(times, ch_type='grad', vmin=0, vmax=2, average=0.05)
        plt.savefig(
            "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/GROUP/decoding_context_yousra/TOPOPLOT_ERF/"
            + str(condcomb[combination[0]]) + '_minus_' +
            str(condcomb[combination[1]]) + "_GDAVG_grads.eps",
            format='eps',
            dpi=1000)
        plt.close()

    allcond_meg_mag = [
        np.transpose(x, (0, 2, 1)) for x in sensordatamat_meg_mag
    ]
    allcond_meg_grad = [
        np.transpose(x, (0, 2, 1)) for x in sensordatamat_meg_grad
    ]

    ###############################################################################

    t_threshold = -stats.distributions.t.ppf(p_threshold / 2.,
                                             len(ListSubj) - 1)
    T_obs, clusters, cluster_p_values, HO = spatio_temporal_cluster_test(
        allcond_meg_mag,
        n_permutations=1024,
        threshold=t_threshold,
        tail=0,
        n_jobs=4,
        connectivity=connectivity_mag)

    t_threshold = -stats.distributions.t.ppf(p_threshold / 2.,
                                             len(ListSubj) - 1)
    T_obs, clusters, cluster_p_values, HO = spatio_temporal_cluster_test(
        allcond_meg_grad,
        n_permutations=1024,
        threshold=t_threshold,
        tail=0,
        n_jobs=4,
        connectivity=connectivity_grad)
Example #15
def test_read_ch_connectivity():
    """Test reading channel connectivity templates."""
    tempdir = _TempDir()
    a = partial(np.array, dtype='<U7')
    # no pep8
    nbh = np.array([[(['MEG0111'], [[a(['MEG0131'])]]),
                     (['MEG0121'], [[a(['MEG0111'])],
                                    [a(['MEG0131'])]]),
                     (['MEG0131'], [[a(['MEG0111'])],
                                    [a(['MEG0121'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_mat.mat')
    savemat(mat_fname, mat, oned_as='row')

    ch_connectivity, ch_names = read_ch_connectivity(mat_fname)
    x = ch_connectivity
    assert_equal(x.shape[0], len(ch_names))
    assert_equal(x.shape, (3, 3))
    assert_equal(x[0, 1], False)
    assert_equal(x[0, 2], True)
    assert np.all(x.diagonal())
    pytest.raises(ValueError, read_ch_connectivity, mat_fname, [0, 3])
    ch_connectivity, ch_names = read_ch_connectivity(mat_fname, picks=[0, 2])
    assert_equal(ch_connectivity.shape[0], 2)
    assert_equal(len(ch_names), 2)

    ch_names = ['EEG01', 'EEG02', 'EEG03']
    neighbors = [['EEG02'], ['EEG04'], ['EEG02']]
    pytest.raises(ValueError, _ch_neighbor_connectivity, ch_names, neighbors)
    neighbors = [['EEG02'], ['EEG01', 'EEG03'], ['EEG 02']]
    pytest.raises(ValueError, _ch_neighbor_connectivity, ch_names[:2],
                  neighbors)
    neighbors = [['EEG02'], 'EEG01', ['EEG 02']]
    pytest.raises(ValueError, _ch_neighbor_connectivity, ch_names, neighbors)
    connectivity, ch_names = read_ch_connectivity('neuromag306mag')
    assert_equal(connectivity.shape, (102, 102))
    assert_equal(len(ch_names), 102)
    pytest.raises(ValueError, read_ch_connectivity, 'bananas!')

    # In EGI 256, E31 sensor has no neighbour
    a = partial(np.array)
    nbh = np.array([[(['E31'], []),
                     (['E1'], [[a(['E2'])],
                               [a(['E3'])]]),
                     (['E2'], [[a(['E1'])],
                               [a(['E3'])]]),
                     (['E3'], [[a(['E1'])],
                               [a(['E2'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_isolated_mat.mat')
    savemat(mat_fname, mat, oned_as='row')
    ch_connectivity, ch_names = read_ch_connectivity(mat_fname)
    x = ch_connectivity.todense()
    assert_equal(x.shape[0], len(ch_names))
    assert_equal(x.shape, (4, 4))
    assert np.all(x.diagonal())
    assert not np.any(x[0, 1:])
    assert not np.any(x[1:, 0])

    # Check for neighbours consistency. If a sensor is marked as a neighbour,
    # then it should also have its neighbours defined.
    a = partial(np.array)
    nbh = np.array([[(['E31'], []),
                     (['E1'], [[a(['E8'])],
                               [a(['E3'])]]),
                     (['E2'], [[a(['E1'])],
                               [a(['E3'])]]),
                     (['E3'], [[a(['E1'])],
                               [a(['E2'])]])]],
                   dtype=[('label', 'O'), ('neighblabel', 'O')])
    mat = dict(neighbours=nbh)
    mat_fname = op.join(tempdir, 'test_error_mat.mat')
    savemat(mat_fname, mat, oned_as='row')
    pytest.raises(ValueError, read_ch_connectivity, mat_fname)
Example #16
    epochs = mne.read_epochs(epochs_folder +
                             '%s_trial_start-epo.fif' % subject)

    epochs.pick_types(meg="grad")

    X_ctl_left[j, :, :] = epochs["ctl/left"].average().data.T
    X_ctl_right[j, :, :] = epochs["ctl/right"].average().data.T
    X_ent_left[j, :, :] = epochs["ent/left"].average().data.T
    X_ent_right[j, :, :] = epochs["ent/right"].average().data.T


X = [X_ctl_left, X_ctl_right, X_ent_left, X_ent_right]

###############################################################################
# load FieldTrip neighbor definition to setup sensor connectivity
connectivity, ch_names = read_ch_connectivity('neuromag306planar')

# set cluster threshold
# set family-wise p-value
p_accept = 0.05

cluster_stats = spatio_temporal_cluster_test(X, n_permutations=5000,
                                             tail=0,
                                             n_jobs=4,
                                             connectivity=connectivity)

T_obs, clusters, p_values, _ = cluster_stats
good_cluster_inds = np.where(p_values < p_accept)[0]

pickle.dump(cluster_stats, open(result_dir +
                                "/cluster_stats_pow_grad.p", "wb"))
Example #17
from __future__ import print_function
from __future__ import division

import numpy as np
import mne
import matplotlib.pyplot as plt
import pycnbi_utils as pu
from mne import Epochs, pick_types
import q_common as qc
from mne.viz import topomap
from mne.stats import spatio_temporal_cluster_test
from mne.channels import read_ch_connectivity
from mne.viz import plot_topomap
from mpl_toolkits.axes_grid1 import make_axes_locatable

connectivity, ch_names = read_ch_connectivity('biosemi16')

# load parameters
import imp
cfg_module = "config.py"
if cfg_module[-3:] == '.py':
    cfg_module = cfg_module[:-3]
cfg = imp.load_source(cfg_module, "./config.py")

spfilter = cfg.SP_FILTER
tpfilter = cfg.TP_FILTER
triggers = {cfg.tdef.by_value[c]: c for c in set(cfg.TRIGGER_DEF)}
print(triggers)

# ftrain =r'D:\data\Records\fif\20170309-195357-raw.fif'
ftrain = []
Example #18
reject = dict(mag=4e-12, eog=150e-6)
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=None, reject=reject, preload=True)

epochs.drop_channels(["EOG 061"])
epochs.equalize_event_counts(event_id, copy=False)

condition_names = "Aud_L", "Aud_R", "Vis_L", "Vis_R"
X = [epochs[k].get_data() for k in condition_names]  # as 3D matrix
X = [np.transpose(x, (0, 2, 1)) for x in X]  # transpose for clustering


###############################################################################
# Load FieldTrip neighbor definition to setup sensor connectivity
# ---------------------------------------------------------------
connectivity, ch_names = read_ch_connectivity("neuromag306mag")

print(type(connectivity))  # it's a sparse matrix!

plt.imshow(connectivity.toarray(), cmap="gray", origin="lower",
           interpolation="nearest")
plt.xlabel("{} Magnetometers".format(len(ch_names)))
plt.ylabel("{} Magnetometers".format(len(ch_names)))
plt.title("Between-sensor adjacency")

###############################################################################
# Compute permutation statistic
# -----------------------------
#
# How does it work? We use clustering to `bind` together features which are
# similar. Our features are the magnetic fields measured over our sensor
# array at different times. This reduces the multiple comparison problem.
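A sketch of the clustering step this comment introduces, following the usual MNE workflow for this kind of data; the cluster-forming threshold and permutation count below are illustrative choices, not values from the original:

from mne.stats import spatio_temporal_cluster_test

threshold = 6.0  # illustrative cluster-forming F threshold
cluster_stats = spatio_temporal_cluster_test(X, n_permutations=1000,
                                             threshold=threshold, tail=1,
                                             n_jobs=1,
                                             connectivity=connectivity)
T_obs, clusters, cluster_p_values, H0 = cluster_stats
good_cluster_inds = np.where(cluster_p_values < 0.05)[0]  # family-wise p < .05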
Example #19
def erp_analysis(subjects):
    from mne.stats import spatio_temporal_cluster_test
    from mne.channels import read_ch_connectivity
    from mpl_toolkits.axes_grid1 import make_axes_locatable
    from scipy import stats as stats

    all_evo = {'off_sup_lon': list(), 'off_sup_sho': list()}
    all_log = list()
    condition_names = ['S2 Longer', 'S2 Shorter']
    n_subjects = len(subjects)

    # Load
    for subj in subjects:
        epochs, log = load_subj(subj)
        epochs = epochs[['off_sup_lon', 'off_sup_sho']]
        log = log.loc[(log.Condition == 2.0) & (log.Ratio != 1.0)]

        corr_log = list()
        for k in all_evo.keys():
            c = marks_off[k]
            sub_log = log[log.condition == c]
            sub_log['epo_ix'] = np.arange(len(sub_log))
            # corr_ix = sub_log['epo_ix'].loc[(sub_log.condition == c) & (sub_log.Accuracy == 1.0)].values
            # sub_log = sub_log.loc[(sub_log.condition == c) & (sub_log.Accuracy == 1.0)]
            corr_ix = sub_log['epo_ix']
            all_evo[k].append(epochs[k][corr_ix].average())
            corr_log.append(sub_log)
            print(k, c, len(corr_ix))

        all_log.append(pd.concat(corr_log))

    all_log = pd.concat(all_log)
    all_log.groupby('condition')[['condition']].agg(
        np.count_nonzero).plot(kind='bar')

    # Plot
    evoked = {
        k: mne.combine_evoked(all_evo[k], weights='nave')
        for k in all_evo.keys()
    }
    mne.viz.plot_evoked_topo([evoked[ev] for ev in evoked.keys()])

    # Stats
    connectivity, ch_names = read_ch_connectivity(
        '/Users/lpen/Documents/MATLAB/Toolbox/fieldtrip-20170628/template/neighbours/biosemi128_neighb.mat'
    )

    #threshold = {'start': 5, 'step': 0.5}
    threshold = None
    p_threshold = 0.001
    t_threshold = -stats.distributions.t.ppf(p_threshold / 2., n_subjects - 1)

    x = {
        k: np.array([all_evo[k][ix_s].data for ix_s in range(len(subjects))])
        for k in sorted(all_evo.keys())
    }
    x = [np.transpose(x[k], (0, 2, 1)) for k in sorted(x.keys())]

    t_obs, clusters, p_values, _ = spatio_temporal_cluster_test(
        x,
        n_permutations=1000,
        threshold=t_threshold,
        tail=0,
        n_jobs=2,
        connectivity=connectivity,
    )

    p_val = 0.01
    good_cluster_inds = np.where(p_values < p_val)[0]
    print(good_cluster_inds)
    print(len(good_cluster_inds))

    # configure variables for visualization
    times = evoked['off_sup_lon'].times * 1e3
    colors = 'r', 'b',
    linestyles = '-', '-',

    # grand average as numpy array
    grand_ave = np.array(x).mean(axis=1)

    # get sensor positions via layout
    pos = mne.find_layout(evoked['off_sup_lon'].info).pos

    # loop over significant clusters
    for i_clu, clu_idx in enumerate(good_cluster_inds):
        # unpack cluster information, get unique indices
        time_inds, space_inds = np.squeeze(clusters[clu_idx])
        ch_inds = np.unique(space_inds)
        time_inds = np.unique(time_inds)

        # get topography for F stat
        f_map = t_obs[time_inds, ...].mean(axis=0)

        # get signals at significant sensors
        signals = grand_ave[..., ch_inds].mean(axis=-1)
        sig_times = times[time_inds]

        # create spatial mask
        mask = np.zeros((f_map.shape[0], 1), dtype=bool)
        mask[ch_inds, :] = True

        # initialize figure
        fig, ax_topo = plt.subplots(1, 1, figsize=(10, 3))
        title = 'Cluster #{0}'.format(i_clu + 1)
        fig.suptitle(title, fontsize=14)

        # plot average test statistic and mark significant sensors
        image, _ = mne.viz.plot_topomap(f_map,
                                        pos,
                                        mask=mask,
                                        axes=ax_topo,
                                        cmap='magma',
                                        vmin=np.min,
                                        vmax=np.max)

        # advanced matplotlib for showing image with figure and colorbar
        # in one plot
        divider = make_axes_locatable(ax_topo)

        # add axes for colorbar
        ax_colorbar = divider.append_axes('right', size='5%', pad=0.05)
        plt.colorbar(image, cax=ax_colorbar)
        ax_topo.set_xlabel('Averaged F-map ({:0.1f} - {:0.1f} ms)'.format(
            *sig_times[[0, -1]]))

        # add new axis for time courses and plot time courses
        ax_signals = divider.append_axes('right', size='300%', pad=1.5)
        for signal, name, col, ls in zip(signals, condition_names, colors,
                                         linestyles):
            ax_signals.plot(times, signal, color=col, linestyle=ls, label=name)

        # add information
        ax_signals.axvline(0, color='k', linestyle=':', label='stimulus onset')
        ax_signals.set_xlim([times[0], times[-1]])
        ax_signals.set_ylim([-10e-7, 20e-7])
        ax_signals.set_xlabel('time [ms]')
        ax_signals.set_ylabel('Amplitude')
        ax_signals.hlines(0, xmin=times[0], xmax=times[-1], linestyles='--')

        # plot significant time range
        ymin, ymax = ax_signals.get_ylim()
        ax_signals.fill_betweenx((ymin, ymax),
                                 sig_times[0],
                                 sig_times[-1],
                                 color='orange',
                                 alpha=0.3)
        ax_signals.legend(loc='lower right')
        ax_signals.set_ylim(ymin, ymax)

        # clean up viz
        mne.viz.tight_layout(fig=fig)
        fig.subplots_adjust(bottom=.05)
        plt.show()
        fig.savefig(op.join(study_path, 'figures',
                            'ERP_off_ckust_{}.eps'.format(i_clu)),
                    format='eps',
                    dpi=300)

    # Cluster Amplitude
    # t_mask = np.arange(len(times))[(times > 300) & (times < 400)]
    # sig_amp = {k: np.array([x[ix_c][ix_s, t_mask, :][:, ch_inds].mean() for ix_s, s in enumerate(subjects)]) for ix_c, k in enumerate(['lon', 'sho'])}
    sig_amp = {
        k: np.array([
            x[ix_c][ix_s, time_inds, :][:, ch_inds].mean()
            for ix_s, s in enumerate(subjects)
        ])
        for ix_c, k in enumerate(['lon', 'sho'])
    }

    subj_cond = all_log.groupby('subject')[['RT', 'Accuracy']].agg(np.mean)
    subj_cond['acc_lon'] = all_log[all_log.condition == 90].groupby(
        'subject')[['Accuracy']].agg(np.mean)
    subj_cond['acc_sho'] = all_log[all_log.condition == 70].groupby(
        'subject')[['Accuracy']].agg(np.mean)
    subj_cond['amp_lon'] = sig_amp['lon']
    subj_cond['amp_sho'] = sig_amp['sho']
    subj_cond['amp_dif'] = subj_cond['amp_sho'] - subj_cond['amp_lon']

    subj_cond.corr(method='pearson')

    from seaborn import regplot
    from eeg_etg_fxs import permutation_pearson

    r_sho, p_sho = permutation_pearson(subj_cond['amp_dif'].values,
                                       subj_cond['acc_sho'].values, 10000)
    r_lon, p_lon = permutation_pearson(subj_cond['amp_dif'].values,
                                       subj_cond['acc_lon'].values, 10000)

    plt.style.use('ggplot')
    fig, axes = plt.subplots(1, 2, sharey=True, sharex=True)
    for ix, (r, p, c) in enumerate(
            zip([r_lon, r_sho], [p_lon, p_sho], ['acc_lon', 'acc_sho'])):
        regplot(subj_cond['amp_dif'], subj_cond[c], ci=None, ax=axes[ix])
        axes[ix].set_title('r = %0.3f   p = %0.3f' % (r, p))
    fig.savefig(op.join(study_path, 'figures', 'ERP_diff_acc.eps'),
                format='eps',
                dpi=300)

    mne.viz.plot_compare_evokeds([evoked[ev] for ev in evoked.keys()], picks=2)
    plt.savefig(op.join(study_path, 'figures', 'ERP_diff_A4.eps'),
                format='eps',
                dpi=300)