def run_evoked(subject_id):
    subject = "sub%03d" % subject_id
    print("processing subject: %s" % subject)

    data_path = op.join(meg_dir, subject)
    epochs = mne.read_epochs(op.join(data_path, '%s-epo.fif' % subject),
                             preload=False)

    evoked_famous = epochs['face/famous'].average()
    evoked_scrambled = epochs['scrambled'].average()
    evoked_unfamiliar = epochs['face/unfamiliar'].average()

    # Simplify comment
    evoked_famous.comment = 'famous'
    evoked_scrambled.comment = 'scrambled'
    evoked_unfamiliar.comment = 'unfamiliar'

    contrast = mne.combine_evoked([evoked_famous, evoked_unfamiliar,
                                   evoked_scrambled], weights=[0.5, 0.5, -1.])
    contrast.comment = 'contrast'
    faces = mne.combine_evoked([evoked_famous, evoked_unfamiliar], 'nave')
    faces.comment = 'faces'

    mne.evoked.write_evokeds(op.join(data_path, '%s-ave.fif' % subject),
                             [evoked_famous, evoked_scrambled,
                              evoked_unfamiliar, contrast, faces])

    # take care of noise cov
    cov = mne.compute_covariance(epochs, tmax=0, method='shrunk')
    cov.save(op.join(data_path, '%s-cov.fif' % subject))
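
# Hedged usage sketch (hypothetical subject_id; assumes run_evoked() above has
# been run, so the -ave.fif file exists): each saved Evoked can be read back
# by the `comment` string set on it.
def read_contrast(subject_id):
    subject = "sub%03d" % subject_id
    fname = op.join(meg_dir, subject, '%s-ave.fif' % subject)
    return mne.read_evokeds(fname, condition='contrast')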
Example #2
def apply_STC_epo(fnepo, event, method='MNE', snr=1.0, min_subject='fsaverage',
                  subjects_dir=None):

    from mne import morph_data
    from mne.minimum_norm import read_inverse_operator, apply_inverse_epochs

    fnlist = get_files_from_list(fnepo)
    # loop across all filenames
    for fname in fnlist:
        fn_path = os.path.split(fname)[0]
        name = os.path.basename(fname)
        subject = name.split('_')[0]
        min_dir = subjects_dir + '/%s' % min_subject
        lambda2 = 1.0 / snr ** 2
        stcs_path = min_dir + '/stcs/%s/%s/' % (subject, event)
        reset_directory(stcs_path)
        # fn_inv = fname[:fname.rfind('-ave.fif')] + ',ave-inv.fif'
        fn_inv = fn_path + '/%s_epo-inv.fif' % subject

        # noise_cov = mne.read_cov(fn_cov)
        epo = mne.read_epochs(fname)
        epo.pick_types(meg=True, ref_meg=False)
        inv = read_inverse_operator(fn_inv)
        stcs = apply_inverse_epochs(epo, inv, lambda2, method,
                                    pick_ori='normal')
        for s, stc in enumerate(stcs):
            stc_morph = morph_data(subject, min_subject, stc)
            stc_morph.save(stcs_path + '/trial%s_fsaverage' % str(s),
                           ftype='stc')
Example #3
def compute_and_save_psd(epochs_fname, fmin=0, fmax=120,
                         method='welch', n_fft=256, n_overlap=0, 
                         picks=None, proj=False, n_jobs=1, verbose=None):
    """
    Load epochs from file,
    compute psd and save the result in numpy arrays
    """
    import numpy as np
    import os
    from mne import read_epochs
    epochs = read_epochs(epochs_fname)
    epochs_meg = epochs.pick_types(meg=True, eeg=False, eog=False, ecg=False)
    if method == 'welch':
        from mne.time_frequency import psd_welch
        psds, freqs = psd_welch(epochs_meg)
    elif method == 'multitaper':
        from mne.time_frequency import psd_multitaper
        psds, freqs = psd_multitaper(epochs_meg)
    else:
        raise ValueError("unknown method '%s' for PSD computation" % method)
    path, name = os.path.split(epochs_fname)
    base, ext = os.path.splitext(name)
    psds_fname = base + '-psds.npz'
    # freqs_fname = base + '-freqs.npy'
    psds_fname = os.path.abspath(psds_fname)
    # print(psds.shape)
    np.savez(psds_fname, psds=psds, freqs=freqs)
    # np.save(freqs_file, freqs)
    return psds_fname
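
# Hedged usage sketch (hypothetical epochs file name): compute and save the
# PSDs, then load the arrays back with numpy.
import numpy as np
psds_fname = compute_and_save_psd('sub01-epo.fif', fmin=1, fmax=40,
                                  method='welch')
loaded = np.load(psds_fname)
psds, freqs = loaded['psds'], loaded['freqs']
print(psds.shape, freqs.shape)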
Example #4
def test_access_by_name():
    """Test accessing epochs by event name and on_missing for rare events
    """
    assert_raises(ValueError, Epochs, raw, events, {1: 42, 2: 42}, tmin,
                  tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {'a': 'spam', 2: 'eggs'},
                  tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {'a': 'spam', 2: 'eggs'},
                  tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, 'foo', tmin, tmax,
                  picks=picks)
    # Test accessing non-existent events (assumes 12345678 does not exist)
    event_id_illegal = dict(aud_l=1, does_not_exist=12345678)
    assert_raises(ValueError, Epochs, raw, events, event_id_illegal,
                  tmin, tmax)
    # Test on_missing
    assert_raises(ValueError, Epochs, raw, events, 1, tmin, tmax,
                  on_missing='foo')
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        Epochs(raw, events, event_id_illegal, tmin, tmax, on_missing='warning')
        nw = len(w)
        assert_true(1 <= nw <= 2)
        Epochs(raw, events, event_id_illegal, tmin, tmax, on_missing='ignore')
        assert_equal(len(w), nw)
    epochs = Epochs(raw, events, {'a': 1, 'b': 2}, tmin, tmax, picks=picks)
    assert_raises(KeyError, epochs.__getitem__, 'bar')

    data = epochs['a'].get_data()
    event_a = events[events[:, 2] == 1]
    assert_true(len(data) == len(event_a))

    epochs = Epochs(raw, events, {'a': 1, 'b': 2}, tmin, tmax, picks=picks,
                    preload=True)
    assert_raises(KeyError, epochs.__getitem__, 'bar')
    epochs.save(op.join(tempdir, 'test-epo.fif'))
    epochs2 = read_epochs(op.join(tempdir, 'test-epo.fif'))

    for ep in [epochs, epochs2]:
        data = ep['a'].get_data()
        event_a = events[events[:, 2] == 1]
        assert_true(len(data) == len(event_a))

    assert_array_equal(epochs2['a'].events, epochs['a'].events)

    epochs3 = Epochs(raw, events, {'a': 1, 'b': 2, 'c': 3, 'd': 4},
                     tmin, tmax, picks=picks, preload=True)
    assert_equal(list(sorted(epochs3[['a', 'b']].event_id.values())),
                 [1, 2])
    epochs4 = epochs['a']
    epochs5 = epochs3['a']
    assert_array_equal(epochs4.events, epochs5.events)
    # 20 is our tolerance because epochs are written out as floats
    assert_array_almost_equal(epochs4.get_data(), epochs5.get_data(), 20)
    epochs6 = epochs3[['a', 'b']]
    assert_true(all(np.logical_or(epochs6.events[:, 2] == 1,
                                  epochs6.events[:, 2] == 2)))
    assert_array_equal(epochs.events, epochs6.events)
    assert_array_almost_equal(epochs.get_data(), epochs6.get_data(), 20)
def calc_inverse_operator(events_id, epochs_fn, fwd_sub_fn, inv_fn, min_crop_t=None, max_crop_t=0):
    for cond in events_id.keys():
        epochs = mne.read_epochs(epochs_fn.format(cond=cond))
        noise_cov = mne.compute_covariance(epochs.crop(min_crop_t, max_crop_t, copy=True))
        forward_sub = mne.read_forward_solution(fwd_sub_fn.format(cond=cond))
        inverse_operator_sub = make_inverse_operator(epochs.info, forward_sub, noise_cov,
            loose=None, depth=None)
        write_inverse_operator(inv_fn.format(cond=cond), inverse_operator_sub)
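
# Hedged usage sketch (hypothetical file-name templates; the function fills
# the `{cond}` placeholder once per condition in events_id):
events_id = dict(left=1, right=2)
calc_inverse_operator(events_id,
                      epochs_fn='sub01_{cond}-epo.fif',
                      fwd_sub_fn='sub01_{cond}-fwd.fif',
                      inv_fn='sub01_{cond}-inv.fif')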
Example #6
def _calc_epoches(params):
    subject, events_id, tmin, tmax = params
    out_file = op.join(LOCAL_ROOT_DIR, 'epo', '{}_ecr_nTSSS_conflict-epo.fif'.format(subject))
    if not op.isfile(out_file):
        events = mne.read_events(op.join(REMOTE_ROOT_DIR, 'events', '{}_ecr_nTSSS_conflict-eve.fif'.format(subject)))
        raw = mne.io.Raw(op.join(REMOTE_ROOT_DIR, 'raw', '{}_ecr_nTSSS_raw.fif'.format(subject)), preload=False)
        picks = mne.pick_types(raw.info, meg=True)
        epochs = find_epoches(raw, picks, events, events_id, tmin=tmin, tmax=tmax)
        epochs.save(out_file)
    else:
        epochs = mne.read_epochs(out_file)
    return epochs
Example #7
def _call_base_epochs_public_api(epochs, tmpdir):
    """Call all public API methods of an (non-empty) epochs object."""
    # make sure saving and loading returns the same data
    orig_data = epochs.get_data()
    export_file = tmpdir.join('test_rt-epo.fif')
    epochs.save(str(export_file))
    loaded_epochs = read_epochs(str(export_file))
    loaded_data = loaded_epochs.get_data()
    assert orig_data.shape == loaded_data.shape
    assert_allclose(loaded_data, orig_data)

    # decimation
    epochs_copy = epochs.copy()
    epochs_copy.decimate(1)
    assert epochs_copy.get_data().shape == orig_data.shape
    epochs_copy.info['lowpass'] = 10  # avoid warning
    epochs_copy.decimate(10)
    assert np.abs(10.0 - orig_data.shape[2] /
                  epochs_copy.get_data().shape[2]) <= 1

    # check that methods that require preloaded data fail
    with pytest.raises(RuntimeError):
        epochs.crop(tmin=epochs.tmin,
                    tmax=(epochs.tmin + (epochs.tmax - epochs.tmin) / 2))
    with pytest.raises(RuntimeError):
        epochs.drop_channels(epochs.ch_names[0:1])
    with pytest.raises(RuntimeError):
        epochs.resample(epochs.info['sfreq'] / 10)

    # smoke test
    epochs.standard_error()
    avg_evoked = epochs.average()
    epochs.subtract_evoked(avg_evoked)
    epochs.metadata
    epochs.events
    epochs.ch_names
    epochs.tmin
    epochs.tmax
    epochs.filename
    repr(epochs)
    epochs.plot(show=False)
    # save time by not calling all plot functions
    # epochs.plot_psd(show=False)
    # epochs.plot_drop_log(show=False)
    # epochs.plot_topo_image()
    # epochs.plot_psd_topomap()
    # epochs.plot_image()
    epochs.drop_bad()
    epochs_copy.apply_baseline()
    # do not call since we don't want to make assumptions about events
    # epochs_copy.equalize_event_counts(epochs.event_id.keys())
    epochs_copy.drop([0])
Example #8
def test_array_epochs():
    """Test creating epochs from array
    """

    # creating
    rng = np.random.RandomState(42)
    data = rng.random_sample((10, 20, 300))
    sfreq = 1e3
    ch_names = ["EEG %03d" % (i + 1) for i in range(20)]
    types = ["eeg"] * 20
    info = create_info(ch_names, sfreq, types)
    events = np.c_[np.arange(1, 600, 60), np.zeros(10), [1, 2] * 5]
    event_id = {"a": 1, "b": 2}
    epochs = EpochsArray(data, info, events=events, event_id=event_id, tmin=-0.2)

    # saving
    temp_fname = op.join(tempdir, "test-epo.fif")
    epochs.save(temp_fname)
    epochs2 = read_epochs(temp_fname)
    data2 = epochs2.get_data()
    assert_allclose(data, data2)
    assert_allclose(epochs.times, epochs2.times)
    assert_equal(epochs.event_id, epochs2.event_id)
    assert_array_equal(epochs.events, epochs2.events)

    # plotting
    import matplotlib

    matplotlib.use("Agg")  # for testing don't use X server
    epochs[0].plot()

    # indexing
    assert_array_equal(np.unique(epochs["a"].events[:, 2]), np.array([1]))
    assert_equal(len(epochs[:2]), 2)
    data[0, 5, 150] = 3000
    data[1, :, :] = 0
    data[2, 5, 210] = 3000
    data[3, 5, 260] = 0
    epochs = EpochsArray(
        data,
        info,
        events=events,
        event_id=event_id,
        tmin=0,
        reject=dict(eeg=1000),
        flat=dict(eeg=1e-1),
        reject_tmin=0.1,
        reject_tmax=0.2,
    )
    assert_equal(len(epochs), len(events) - 2)
    assert_equal(epochs.drop_log[0], ["EEG 006"])
    assert_equal(len(events), len(epochs.selection))
def evok_epochs(sub_id, session):
    """ load a epoched file and average it and save the evoked file
    """
    fname = "sub_%d_%s" % (sub_id, session)
    f_load = fname + "_tsss_mc_epochs.fif"
    f_save = fname + "_tsss_mc_evk.fif"
    epochs = mne.read_epochs(f_load)

    evoked = epochs.average()
    evoked.comment = session
    evoked = mne.fiff.pick_types_evoked(evoked, meg='grad', exclude='bads')
    evoked.resample(sfreq=250)
    evoked.save(f_save)
Example #10
def calc_evoked(indices, epochs_fname, overwrite_epochs=False, overwrite_evoked=False):
    epochs = mne.read_epochs(epochs_fname, preload=False)
    print(epochs.events.shape)
    for event_name, event_indices in indices.items():
        evoked_event_fname = meg.get_cond_fname(meg.EVO, event_name)
        epochs_event_fname = meg.get_cond_fname(meg.EPO, event_name)
        # select the event's epochs up front so they are also defined for the
        # evoked computation below
        event_epochs = epochs[event_indices]
        if not op.isfile(epochs_event_fname) or overwrite_epochs:
            print('Saving {} epochs to {}, events num: {}'.format(event_name, epochs_event_fname, len(event_indices)))
            event_epochs.save(epochs_event_fname)
        if not op.isfile(evoked_event_fname) or overwrite_evoked:
            print('Saving {} evoked to {}'.format(event_name, evoked_event_fname))
            mne.write_evokeds(evoked_event_fname, event_epochs.average())
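
# Hedged usage sketch: `indices` maps an event name to the epoch indices that
# belong to it (names, indices and the file name here are hypothetical).
indices = {'faces': [0, 2, 4], 'scrambled': [1, 3, 5]}
calc_evoked(indices, 'all-epo.fif', overwrite_epochs=True,
            overwrite_evoked=True)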
Example #11
def load(subject, event_type):
    with open(op.join(path_data, '%s/behavior_%s.pkl') % (subject, event_type), 'rb') as f:
        events = pickle.load(f)
        # add explicit conditions
        events = complete_behavior(events)
    epochs = mne.read_epochs(op.join(path_data, '%s/epochs_%s.fif') % (subject, event_type))
    if event_type == 'Target':
        epochs.crop(0, .600)
    elif event_type == 'Cue':
        epochs.crop(0, .900)
    elif event_type == 'Probe':
        epochs.crop(0, .600)
    return epochs, events
def load(subject, event_type):
    fname = op.join(path_data, '%s/behavior_%s.hdf5') % (subject, event_type)
    events = read_hdf5(fname)
    # add explicit conditions
    events = complete_behavior(events)
    fname = op.join(path_data, '%s/epochs_%s.fif') % (subject, event_type)
    epochs = mne.read_epochs(fname)
    if event_type == 'Target':
        epochs.crop(0, .600)
    elif event_type == 'Cue':
        epochs.crop(0, .900)
    elif event_type == 'Probe':
        epochs.crop(0, .600)
    return epochs, events
Example #13
def test_access_by_name():
    """Test accessing epochs by event name and on_missing for rare events
    """
    assert_raises(ValueError, Epochs, raw, events, {1: 42, 2: 42}, tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {"a": "spam", 2: "eggs"}, tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {"a": "spam", 2: "eggs"}, tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, "foo", tmin, tmax, picks=picks)
    # Test accessing non-existent events (assumes 12345678 does not exist)
    event_id_illegal = dict(aud_l=1, does_not_exist=12345678)
    assert_raises(ValueError, Epochs, raw, events, event_id_illegal, tmin, tmax)
    # Test on_missing
    assert_raises(ValueError, Epochs, raw, events, 1, tmin, tmax, on_missing="foo")
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        Epochs(raw, events, event_id_illegal, tmin, tmax, on_missing="warning")
        nw = len(w)
        assert_true(1 <= nw <= 2)
        Epochs(raw, events, event_id_illegal, tmin, tmax, on_missing="ignore")
        assert_equal(len(w), nw)
    epochs = Epochs(raw, events, {"a": 1, "b": 2}, tmin, tmax, picks=picks)
    assert_raises(KeyError, epochs.__getitem__, "bar")

    data = epochs["a"].get_data()
    event_a = events[events[:, 2] == 1]
    assert_true(len(data) == len(event_a))

    epochs = Epochs(raw, events, {"a": 1, "b": 2}, tmin, tmax, picks=picks, preload=True)
    assert_raises(KeyError, epochs.__getitem__, "bar")
    epochs.save(op.join(tempdir, "test-epo.fif"))
    epochs2 = read_epochs(op.join(tempdir, "test-epo.fif"))

    for ep in [epochs, epochs2]:
        data = ep["a"].get_data()
        event_a = events[events[:, 2] == 1]
        assert_true(len(data) == len(event_a))

    assert_array_equal(epochs2["a"].events, epochs["a"].events)

    epochs3 = Epochs(raw, events, {"a": 1, "b": 2, "c": 3, "d": 4}, tmin, tmax, picks=picks, preload=True)
    assert_equal(list(sorted(epochs3[["a", "b"]].event_id.values())), [1, 2])
    epochs4 = epochs["a"]
    epochs5 = epochs3["a"]
    assert_array_equal(epochs4.events, epochs5.events)
    # 20 is our tolerance because epochs are written out as floats
    assert_array_almost_equal(epochs4.get_data(), epochs5.get_data(), 20)
    epochs6 = epochs3[["a", "b"]]
    assert_true(all(np.logical_or(epochs6.events[:, 2] == 1, epochs6.events[:, 2] == 2)))
    assert_array_equal(epochs.events, epochs6.events)
    assert_array_almost_equal(epochs.get_data(), epochs6.get_data(), 20)
Example #14
def test_read_epochs():
    event_id = 1
    tmin = -0.2
    tmax = 0.5

    #   Setup for reading the raw data
    raw = fiff.setup_read_raw(raw_fname)
    events = mne.read_events(event_name)

    # Set up pick list: MEG + STI 014 - bad channels (modify to your needs)
    include = ["STI 014"]
    want_meg = True
    want_eeg = False
    want_stim = False
    picks = fiff.pick_types(raw["info"], want_meg, want_eeg, want_stim, include, raw["info"]["bads"])

    data, times, channel_names = mne.read_epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0))
Example #15
def test_access_by_name():
    """Test accessing epochs by event name
    """
    assert_raises(ValueError, Epochs, raw, events, {1: 42, 2: 42}, tmin,
                  tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {'a': 'spam', 2: 'eggs'},
                  tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {'a': 'spam', 2: 'eggs'},
                  tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, 'foo', tmin, tmax,
                  picks=picks)
    epochs = Epochs(raw, events, {'a': 1, 'b': 2}, tmin, tmax, picks=picks)
    assert_raises(KeyError, epochs.__getitem__, 'bar')

    data = epochs['a'].get_data()
    event_a = events[events[:, 2] == 1]
    assert_true(len(data) == len(event_a))

    epochs = Epochs(raw, events, {'a': 1, 'b': 2}, tmin, tmax, picks=picks,
                    preload=True)
    assert_raises(KeyError, epochs.__getitem__, 'bar')
    epochs.save(op.join(tempdir, 'test-epo.fif'))
    epochs2 = read_epochs(op.join(tempdir, 'test-epo.fif'))

    for ep in [epochs, epochs2]:
        data = ep['a'].get_data()
        event_a = events[events[:, 2] == 1]
        assert_true(len(data) == len(event_a))

    assert_array_equal(epochs2['a'].events, epochs['a'].events)

    epochs3 = Epochs(raw, events, {'a': 1, 'b': 2, 'c': 3, 'd': 4},
                     tmin, tmax, picks=picks, preload=True)
    assert_equal(list(sorted(epochs3[['a', 'b']].event_id.values())),
                 [1, 2])
    epochs4 = epochs['a']
    epochs5 = epochs3['a']
    assert_array_equal(epochs4.events, epochs5.events)
    # 20 is our tolerance because epochs are written out as floats
    assert_array_almost_equal(epochs4.get_data(), epochs5.get_data(), 20)
    epochs6 = epochs3[['a', 'b']]
    assert_true(all(np.logical_or(epochs6.events[:, 2] == 1,
                                  epochs6.events[:, 2] == 2)))
    assert_array_equal(epochs.events, epochs6.events)
    assert_array_almost_equal(epochs.get_data(), epochs6.get_data(), 20)
Example #16
def ep2ts(fif_file):
    """Read fif file with raw data or epochs and save
    timeseries to .npy
    """
    from mne import read_epochs
    from mne import pick_types

    from numpy import save
    import os.path as op

    with nostdout():
        epochs = read_epochs(fif_file)

    epochs_meg = epochs.pick_types(meg=True, eeg=False, eog=False, ecg=False)

    data = epochs_meg.get_data()
    save_path = op.abspath('ts_epochs.npy')
    save(save_path, data)
    return save_path
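
# Hedged usage sketch (hypothetical file name): dump the epochs to a .npy
# array and read it back.
import numpy as np
ts_file = ep2ts('sub01-epo.fif')
data = np.load(ts_file)  # shape: (n_epochs, n_channels, n_times)
print(data.shape)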
def inverse_function(sub_id, session):
    """ Will calculate the inverse model based dSPM
    """
    data_path = "/media/mje/My_Book/Data/MEG/MEG_libet/sub_2_tests"
    fname = "sub_%d_%s_tsss_mc" % (sub_id, session)
    fname_epochs = data_path + fname + "_epochs.fif"
    fname_fwd_meg = data_path + fname + "_fwd.fif"
    fname_cov = data_path + fname + "_cov.fif"
    fname_inv = data_path + fname + "_inv.fif"
    fname_stcs = fname + "_mne_dSPM_inverse"

    epochs = mne.read_epochs(fname_epochs)
    evoked = epochs.average()

    snr = 3.0
    lambda2 = 1.0 / snr ** 2

    # Load data
    forward_meg = mne.read_forward_solution(fname_fwd_meg, surf_ori=True)
    noise_cov = mne.read_cov(fname_cov)

    # regularize noise covariance
    noise_cov = mne.cov.regularize(noise_cov, evoked.info,
                                   mag=0.05, grad=0.05, eeg=0.1, proj=True)

    # Restrict forward solution as necessary for MEG
    forward_meg = mne.fiff.pick_types_forward(forward_meg, meg=True, eeg=False)

    # make an MEG-only inverse operator
    info = evoked.info
    inverse_operator_meg = make_inverse_operator(info, forward_meg, noise_cov,
                                                 loose=0.2, depth=0.8)

    write_inverse_operator(fname_inv, inverse_operator_meg)

    # Compute inverse solution
    stc = apply_inverse(evoked, inverse_operator_meg, lambda2, "dSPM",
                        pick_normal=False)

    # Save result in stc files
    stc.save(fname_stcs)
def load(typ, subject='fsaverage', analysis='analysis', block=999,
         download=True, preload=False):
    """Auxiliary saving function."""
    # get file name
    fname = paths(typ, subject=subject, analysis=analysis, block=block)

    # check if file exists
    if not op.exists(fname) and download:
        client.download(fname)

    # different data format depending file type
    if typ == 'behavior':
        from base import read_events
        out = read_events(fname)
    elif typ == 'sss':
        out = Raw(fname, preload=preload)
    elif typ in ['epo_block', 'epochs', 'epochs_decim', 'epochs_vhp']:
        out = read_epochs(fname, preload=preload)
    elif typ in ['cov']:
        from mne.cov import read_cov
        out = read_cov(fname)
    elif typ in ['fwd']:
        from mne import read_forward_solution
        out = read_forward_solution(fname, surf_ori=True)
    elif typ in ['inv']:
        from mne.minimum_norm import read_inverse_operator
        out = read_inverse_operator(fname)
    elif typ in ['evoked', 'decod', 'decod_tfr', 'score', 'score_tfr',
                 'evoked_source']:
        with open(fname, 'rb') as f:
            out = pickle.load(f)
    elif typ == 'morph':
        from scipy.sparse import csr_matrix
        loader = np.load(fname)
        out = csr_matrix((loader['data'], loader['indices'], loader['indptr']),
                         shape=loader['shape'])
    elif typ in ['score_source', 'score_pval']:
        out = np.load(fname)
    else:
        raise NotImplementedError()
    return out
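
# Hedged usage sketch (the `typ` values come from the branches above; subject
# and analysis names are hypothetical):
epochs = load('epochs', subject='sub01', analysis='target_decoding',
              block=1, preload=True)
noise_cov = load('cov', subject='sub01', analysis='target_decoding')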
Example #19
def _calc_inverse(params):
    subject, epochs, overwrite = params
    epo = op.join(REMOTE_ROOT_DIR, 'ave', '{}_ecr_nTSSS_conflict-epo.fif'.format(subject))
    fwd = op.join(REMOTE_ROOT_DIR, 'fwd', '{}_ecr-fwd.fif'.format(subject))
    local_inv_file_name = op.join(LOCAL_ROOT_DIR, 'inv', '{}_ecr_nTSSS_conflict-inv.fif'.format(subject))

    if os.path.isfile(local_inv_file_name) and not overwrite:
        inverse_operator = read_inverse_operator(local_inv_file_name)
        print('inv already calculated for {}'.format(subject))
    else:
        if epochs is None:
            epochs = mne.read_epochs(epo)
        noise_cov = mne.compute_covariance(epochs.crop(None, 0, copy=True))
        inverse_operator = None
        if not os.path.isfile(fwd):
            print('no fwd for {}'.format(subject))
        else:
            forward = mne.read_forward_solution(fwd)
            inverse_operator = make_inverse_operator(epochs.info, forward, noise_cov,
                loose=None, depth=None)
            write_inverse_operator(local_inv_file_name, inverse_operator)
    return inverse_operator
def run_time_frequency(subject_id):
    print("processing subject: %s" % subject_id)
    subject = "sub%03d" % subject_id
    data_path = op.join(meg_dir, subject)
    epochs = mne.read_epochs(op.join(data_path, '%s-epo.fif' % subject))

    faces = epochs['face']
    idx = [faces.ch_names.index('EEG070')]
    power_faces, itc_faces = mne.time_frequency.tfr_morlet(
        faces, freqs=freqs, return_itc=True, n_cycles=n_cycles, picks=idx)
    power_scrambled, itc_scrambled = mne.time_frequency.tfr_morlet(
        epochs['scrambled'], freqs=freqs, return_itc=True, n_cycles=n_cycles,
        picks=idx)

    power_faces.save(op.join(data_path, '%s-faces-tfr.h5' % subject),
                     overwrite=True)
    itc_faces.save(op.join(data_path, '%s-itc_faces-tfr.h5' % subject),
                   overwrite=True)

    power_scrambled.save(op.join(data_path, '%s-scrambled-tfr.h5' % subject),
                         overwrite=True)
    itc_scrambled.save(op.join(data_path, '%s-itc_scrambled-tfr.h5' % subject),
                       overwrite=True)
Example #21
def apply_inverse_oper(fnepo, tmin=-0.2, tmax=0.8, subjects_dir=None):
    '''
    Apply inverse operator.

    Parameters
    ----------
    fnepo : string or list
        The epochs file(s), cleaned of ECG, EOG and environmental noise.
    tmin, tmax : float
        The time window (in seconds) of each epoch.
    '''
    # Get the default subjects_dir
    from mne import make_forward_solution
    from mne.minimum_norm import make_inverse_operator, write_inverse_operator

    fnlist = get_files_from_list(fnepo)
    # loop across all filenames
    for fname in fnlist:
        fn_path = os.path.split(fname)[0]
        name = os.path.basename(fname)
        subject = name.split('_')[0]
        subject_path = subjects_dir + '/%s' % subject
        fn_trans = fn_path + '/%s-trans.fif' % subject
        fn_cov = fn_path + '/%s_empty-cov.fif' % subject
        fn_src = subject_path + '/bem/%s-oct-6-src.fif' % subject
        fn_bem = subject_path + '/bem/%s-5120-5120-5120-bem-sol.fif' % subject
        fn_inv = fn_path + '/%s_epo-inv.fif' % subject

        epochs = mne.read_epochs(fname)
        epochs.crop(tmin, tmax)
        epochs.pick_types(meg=True, ref_meg=False)
        noise_cov = mne.read_cov(fn_cov)
        fwd = make_forward_solution(epochs.info, fn_trans, fn_src, fn_bem)
        fwd['surf_ori'] = True
        inv = make_inverse_operator(epochs.info, fwd, noise_cov, loose=0.2,
                                    depth=0.8, limit_depth_chs=False)
        write_inverse_operator(fn_inv, inv)
def global_RMS(sub, session, baseline=500, selection="Vertex"):
    """ make global RMS
        baseline is in indexes
    """

    f_load = "sub_%d_%s_tsss_mc_epochs.fif" % (sub, session)
    epochs = mne.read_epochs(f_load)

    if selection is not None:
        selection = mne.viz._clean_names(mne.read_selection(selection))
        data_picks = mne.epochs.pick_types(epochs.info, meg='grad',
                                           exclude='bads',
                                           selection=selection)
    else:
        data_picks = mne.epochs.pick_types(epochs.info, meg='grad',
                                           exclude='bads')

    data = epochs.get_data()[:, data_picks, :]
    data = np.sqrt(np.square(data.mean(axis=0)))
    data = data.mean(axis=0)
    baseline_std = data[:baseline].std().mean()

    grms = data/baseline_std

    return grms
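
# Hedged usage sketch (hypothetical subject/session): one value per time
# sample, scaled by the baseline standard deviation.
grms = global_RMS(2, "classic", baseline=500, selection="Vertex")
print(grms.shape)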
def run_time_decoding(subject_id, condition1, condition2):
    subject = "sub%03d" % subject_id
    data_path = os.path.join(meg_dir, subject)
    epochs = mne.read_epochs(os.path.join(data_path, '%s-epo.fif' % subject))

    # We define the epochs and the labels
    n_cond1 = len(epochs[condition1])
    n_cond2 = len(epochs[condition2])
    y = np.r_[np.ones((n_cond1, )), np.zeros((n_cond2, ))]
    epochs = mne.concatenate_epochs([epochs[condition1],
                                    epochs[condition2]])
    epochs.apply_baseline()

    # Let us restrict ourselves to the occipital channels
    from mne.selection import read_selection
    ch_names = [ch_name.replace(' ', '') for ch_name
                in read_selection('occipital')]
    epochs.pick_types(meg='mag').pick_channels(ch_names)

    # Now we fit and plot the time decoder
    from mne.decoding import TimeDecoding

    times = dict(step=0.005)  # fit a classifier only every 5 ms
    # Use AUC because chance level is same regardless of the class balance
    td = TimeDecoding(predict_mode='cross-validation',
                      times=times, scorer='roc_auc')
    td.fit(epochs, y)

    # let's save the scores now
    a_vs_b = '%s_vs_%s' % (os.path.basename(condition1),
                           os.path.basename(condition2))
    fname_td = os.path.join(data_path, '%s-td-auc-%s.mat'
                            % (subject, a_vs_b))
    from scipy.io import savemat
    savemat(fname_td, {'scores': td.score(epochs),
                       'times': td.times_['times']})
Example #24
def test_array_epochs():
    """Test creating epochs from array
    """

    # creating
    rng = np.random.RandomState(42)
    data = rng.random_sample((10, 20, 50))
    sfreq = 1e3
    ch_names = ['EEG %03d' % (i + 1) for i in range(20)]
    types = ['eeg'] * 20
    info = create_info(ch_names, sfreq, types)
    events = np.c_[np.arange(1, 600, 60),
                   np.zeros(10),
                   [1, 2] * 5]
    event_id = {'a': 1, 'b': 2}
    epochs = EpochsArray(data, info, events=events, event_id=event_id,
                         tmin=-.2)

    # saving
    temp_fname = op.join(tempdir, 'epo.fif')
    epochs.save(temp_fname)
    epochs2 = read_epochs(temp_fname)
    data2 = epochs2.get_data()
    assert_allclose(data, data2)
    assert_allclose(epochs.times, epochs2.times)
    assert_equal(epochs.event_id, epochs2.event_id)
    assert_array_equal(epochs.events, epochs2.events)

    # plotting
    import matplotlib
    matplotlib.use('Agg')  # for testing don't use X server
    epochs[0].plot()

    # indexing
    assert_array_equal(np.unique(epochs['a'].events[:, 2]), np.array([1]))
    assert_equal(len(epochs[:2]), 2)
Example #25
def test_access_by_name():
    """Test accessing epochs by event name
    """
    assert_raises(ValueError, Epochs, raw, events, {1: 42, 2: 42}, tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {"a": "spam", 2: "eggs"}, tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, {"a": "spam", 2: "eggs"}, tmin, tmax, picks=picks)
    assert_raises(ValueError, Epochs, raw, events, "foo", tmin, tmax, picks=picks)
    epochs = Epochs(raw, events, {"a": 1, "b": 2}, tmin, tmax, picks=picks)
    assert_raises(KeyError, epochs.__getitem__, "bar")

    data = epochs["a"].get_data()
    event_a = events[events[:, 2] == 1]
    assert_true(len(data) == len(event_a))

    epochs = Epochs(raw, events, {"a": 1, "b": 2}, tmin, tmax, picks=picks, preload=True)
    assert_raises(KeyError, epochs.__getitem__, "bar")
    epochs.save(op.join(tempdir, "test-epo.fif"))
    epochs2 = read_epochs(op.join(tempdir, "test-epo.fif"))

    for ep in [epochs, epochs2]:
        data = ep["a"].get_data()
        event_a = events[events[:, 2] == 1]
        assert_true(len(data) == len(event_a))

    assert_array_equal(epochs2["a"].events, epochs["a"].events)

    epochs3 = Epochs(raw, events, {"a": 1, "b": 2, "c": 3, "d": 4}, tmin, tmax, picks=picks, preload=True)
    epochs4 = epochs["a"]
    epochs5 = epochs3["a"]
    assert_array_equal(epochs4.events, epochs5.events)
    # 20 is our tolerance because epochs are written out as floats
    assert_array_almost_equal(epochs4.get_data(), epochs5.get_data(), 20)
    epochs6 = epochs3[["a", "b"]]
    assert_true(all(np.logical_or(epochs6.events[:, 2] == 1, epochs6.events[:, 2] == 2)))
    assert_array_equal(epochs.events, epochs6.events)
    assert_array_almost_equal(epochs.get_data(), epochs6.get_data(), 20)
def apply_ica(subject, run, session):
    print("Processing subject: %s" % subject)
    # Construct the search path for the data file. `sub` is mandatory
    subject_path = op.join('sub-{}'.format(subject))
    # `session` is optional
    if session is not None:
        subject_path = op.join(subject_path, 'ses-{}'.format(session))

    subject_path = op.join(subject_path, config.kind)

    bids_basename = make_bids_basename(subject=subject,
                                       session=session,
                                       task=config.task,
                                       acquisition=config.acq,
                                       run=None,
                                       processing=config.proc,
                                       recording=config.rec,
                                       space=config.space
                                       )

    fpath_deriv = op.join(config.bids_root, 'derivatives',
                          config.PIPELINE_NAME, subject_path)
    fname_in = \
        op.join(fpath_deriv, bids_basename + '-epo.fif')

    fname_out = \
        op.join(fpath_deriv, bids_basename + '_cleaned-epo.fif')

    # load epochs to reject ICA components
    epochs = mne.read_epochs(fname_in, preload=True)

    print("Input: ", fname_in)
    print("Output: ", fname_out)

    # load first run of raw data for ecg /eog epochs
    print("  Loading one run from raw data")

    bids_basename = make_bids_basename(subject=subject,
                                       session=session,
                                       task=config.task,
                                       acquisition=config.acq,
                                       run=config.runs[0],
                                       processing=config.proc,
                                       recording=config.rec,
                                       space=config.space
                                       )

    if config.use_maxwell_filter:
        raw_fname_in = \
            op.join(fpath_deriv, bids_basename + '_sss_raw.fif')
    else:
        raw_fname_in = \
            op.join(fpath_deriv, bids_basename + '_filt_raw.fif')

    raw = mne.io.read_raw_fif(raw_fname_in, preload=True)

    # run ICA on MEG and EEG
    picks_meg = mne.pick_types(raw.info, meg=True, eeg=False,
                               eog=False, stim=False, exclude='bads')
    picks_eeg = mne.pick_types(raw.info, meg=False, eeg=True,
                               eog=False, stim=False, exclude='bads')
    all_picks = {'meg': picks_meg, 'eeg': picks_eeg}

    for ch_type in config.ch_types:
        report = None
        print(ch_type)
        picks = all_picks[ch_type]

        # Load ICA
        fname_ica = \
            op.join(fpath_deriv, bids_basename + '_%s-ica.fif' % ch_type)

        print('Reading ICA: ' + fname_ica)
        ica = read_ica(fname=fname_ica)

        pick_ecg = mne.pick_types(raw.info, meg=False, eeg=False,
                                  ecg=True, eog=False)

        # ECG
        # either needs an ecg channel, or avg of the mags (i.e. MEG data)
        ecg_inds = list()
        if pick_ecg or ch_type == 'meg':

            picks_ecg = np.concatenate([picks, pick_ecg])

            # Create ecg epochs
            if ch_type == 'meg':
                reject = {'mag': config.reject['mag'],
                          'grad': config.reject['grad']}
            elif ch_type == 'eeg':
                reject = {'eeg': config.reject['eeg']}

            ecg_epochs = create_ecg_epochs(raw, picks=picks_ecg, reject=reject,
                                           baseline=(None, 0), tmin=-0.5,
                                           tmax=0.5)

            ecg_average = ecg_epochs.average()

            ecg_inds, scores = \
                ica.find_bads_ecg(ecg_epochs, method='ctps',
                                  threshold=config.ica_ctps_ecg_threshold)
            del ecg_epochs

            report_fname = \
                op.join(fpath_deriv,
                        bids_basename + '_%s-reject_ica.html' % ch_type)

            report = Report(report_fname, verbose=False)

            # Plot r score
            report.add_figs_to_section(ica.plot_scores(scores,
                                                       exclude=ecg_inds,
                                                       show=config.plot),
                                       captions=ch_type.upper() + ' - ECG - ' +
                                       'R scores')

            # Plot source time course
            report.add_figs_to_section(ica.plot_sources(ecg_average,
                                                        exclude=ecg_inds,
                                                        show=config.plot),
                                       captions=ch_type.upper() + ' - ECG - ' +
                                       'Sources time course')

            # Plot source time course
            report.add_figs_to_section(ica.plot_overlay(ecg_average,
                                                        exclude=ecg_inds,
                                                        show=config.plot),
                                       captions=ch_type.upper() + ' - ECG - ' +
                                       'Corrections')

        else:
            # XXX : to check when EEG only is processed
            print('no ECG channel is present. Cannot automate ICA component '
                  'detection for ECG!')

        # EOG
        pick_eog = mne.pick_types(raw.info, meg=False, eeg=False,
                                  ecg=False, eog=True)
        eog_inds = list()
        if pick_eog.any():
            print('using EOG channel')
            picks_eog = np.concatenate([picks, pick_eog])
            # Create eog epochs
            eog_epochs = create_eog_epochs(raw, picks=picks_eog, reject=None,
                                           baseline=(None, 0), tmin=-0.5,
                                           tmax=0.5)

            eog_average = eog_epochs.average()
            eog_inds, scores = ica.find_bads_eog(eog_epochs, threshold=config.ica_ctps_eog_threshold)
            del eog_epochs

            params = dict(exclude=eog_inds, show=config.plot)

            # Plot r score
            report.add_figs_to_section(ica.plot_scores(scores, **params),
                                       captions=ch_type.upper() + ' - EOG - ' +
                                       'R scores')

            # Plot source time course
            report.add_figs_to_section(ica.plot_sources(eog_average, **params),
                                       captions=ch_type.upper() + ' - EOG - ' +
                                       'Sources time course')

            # Plot source time course
            report.add_figs_to_section(ica.plot_overlay(eog_average, **params),
                                       captions=ch_type.upper() + ' - EOG - ' +
                                       'Corrections')

            report.save(report_fname, overwrite=True, open_browser=False)

        else:
            print('no EOG channel is present. Cannot automate ICA component '
                  'detection for EOG!')

        ica_reject = (list(ecg_inds) + list(eog_inds) +
                      list(config.rejcomps_man[subject][ch_type]))

        # now reject the components
        print('Rejecting from %s: %s' % (ch_type, ica_reject))
        epochs = ica.apply(epochs, exclude=ica_reject)

        print('Saving cleaned epochs')
        epochs.save(fname_out)

        if report is not None:
            fig = ica.plot_overlay(raw, exclude=ica_reject, show=config.plot)
            report.add_figs_to_section(fig, captions=ch_type.upper() +
                                       ' - ALL(epochs) - Corrections')

        if config.plot:
            epochs.plot_image(combine='gfp', group_by='type', sigma=2.,
                              cmap="YlGnBu_r", show=config.plot)
Example #27
# 999 Heartbeats
# 998 Blinks

# Randomize subjects - this will help when submitting multiple jobs in Beluga

# Select channel type to create the topographies on
ch_type = 'eeg'

n_skipped_subjects = 0
iSubject = 1
for subject in subjects:

    frontalEOGChannel = 'E22'
    # Load preprocessed mne data from Guillaume
    fname = os.path.join(data_path, subject, 'RestingState_Blinks_epo.fif')
    epochs = mne.read_epochs(fname, proj=True, preload=True, verbose=None)

    # Only take into account the cases when the algorithm "PROBABLY" detects
    # blinks correctly
    if len(epochs) < 50:
        subject = subject.lower() + "01"  # BIDS compliance

        #eo.plot_image(picks=[frontalEOGChannel])

        ## Do some preprocessing on the epochs - jitter the center so the model doesn't learn the position
        epochs_preprocessed = epochs.crop(tmin=-0.4 + np.random.random() * 0.1,
                                          tmax=0.40 + np.random.random() * 0.1,
                                          include_tmax=True)

        # Check if resampling is needed
        epochs_preprocessed = epochs_preprocessed.resample(100)
Example #28
import mne

base_dir = "C:/Users/muellena/Desktop/Desktop/Experiment Somatosensorik/SOMA Daten/"
proc_dir = base_dir + "proc/"

subjs = ["SOM_20"]
#runs = ["1","2","3"]
runs = ["2"]

for sub in subjs:
    for run in runs:
        mepo = mne.read_epochs(proc_dir + sub + "_" + run + "_m-epo.fif")
        ica = mne.preprocessing.ICA(n_components=0.95,
                                    max_iter=500,
                                    method="picard")
        ica.fit(mepo)
        ica.save(proc_dir + sub + "_" + run + "_prepro-ica.fif")
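
# A hedged follow-up sketch: the ICA solution saved above can later be read
# back and applied to the same epochs (file names as constructed in the loop).
fname_ica = proc_dir + sub + "_" + run + "_prepro-ica.fif"
ica = mne.preprocessing.read_ica(fname_ica)
mepo_clean = ica.apply(mepo.copy())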
Example #29
                                                                 sub=sub,
                                                                 sp=spacing))
    freq = freq_table[sub]
    freqs = [freq-1, freq, freq+1]
    print(freqs)
    all_bads = []
    epos = []
    epo_names = []
    epo_conds = []
    epo_cond_names = []
    for run_idx,run in enumerate(runs):
        wav_epos = []
        for wav_idx, wav_name in enumerate(wavs):
            epo_name = "{dir}nc_{sub}_{run}_{wav}_hand-epo.fif".format(
              dir=proc_dir, sub=sub, run=run, wav=wav_name)
            epo = mne.read_epochs(epo_name)
            all_bads += epo.info["bads"]
            epos.append(epo)
            wav_epos.append(epo)
            epo_names.append("{}_{}".format(run,wav_name))
        epo_conds.append(mne.concatenate_epochs(wav_epos))
        epo_cond_names.append(run)

    for x in epos:
        x.info["bads"] = all_bads
        x.info["dev_head_t"] = epos[0].info["dev_head_t"]
    epo = mne.concatenate_epochs(epos)
    csd = csd_morlet(epo, frequencies=freqs, n_jobs=n_jobs, n_cycles=cycles, decim=3)
    csd = csd.mean()
    fwd_name = "{dir}nc_{sub}_{sp}-fwd.fif".format(dir=proc_dir, sub=sub, sp=spacing)
    fwd = mne.read_forward_solution(fwd_name)
Example #30
ch = range(1, 307)  # Channels of interest
mags = range(2, 306, 3)
grads = list(range(0, 306, 3)) + list(range(1, 306, 3))

for subj in subjlist:
    fpath = froot + subj + '/'

    print('Running Subject', subj)

    save_raw_name = subj + '_ABR-epo.fif'

    if os.path.isfile(fpath + save_raw_name):
        preEpoched = True
        print('Epoched data is already available on disk!')
        print('Loading data from:', fpath + save_raw_name)
        epochs = mne.read_epochs(fpath + save_raw_name, verbose='DEBUG')
        Fs = epochs.info['sfreq']
        x = epochs.get_data()
        times = epochs.times
    else:
        preEpoched = False
        fifs = fnmatch.filter(os.listdir(fpath), subj + '_CABR_raw.fif')
        print('No pre-epoched data found, looking for raw files')
        print('Voila!', len(fifs), 'files found!')
        for k, fif in enumerate(fifs):
            fifs[k] = fpath + fif
        # Load data and read event channel
        raw = mne.io.Raw(fifs, preload=True, add_eeg_ref=False)
        raw.set_channel_types({'EMG061': 'eeg'})
        raw.info['bads'] += ['MEG0223', 'MEG1623']
Example #31
# Scipy supports read and write of matlab files. You can save your single
# trials with:

from scipy import io  # noqa
io.savemat('epochs_data.mat', dict(epochs_data=epochs_data), oned_as='row')

##############################################################################
# or if you want to keep all the information about the data you can save your
# epochs in a fif file:

epochs.save('sample-epo.fif')

##############################################################################
# and read them later with:

saved_epochs = mne.read_epochs('sample-epo.fif')

##############################################################################
# Compute evoked responses for auditory responses by averaging and plot it:

evoked = epochs['aud_l'].average()
print(evoked)
evoked.plot(time_unit='s')

##############################################################################
# .. topic:: Exercise
#
#   1. Extract the max value of each epoch

max_in_each_epoch = [e.max() for e in epochs['aud_l']]  # doctest:+ELLIPSIS
print(max_in_each_epoch[:4])  # doctest:+ELLIPSIS
Example #32
def run_evoked(subject):
    print("Processing subject: %s" % subject)
    meg_subject_dir = op.join(config.meg_dir, subject)

    # load epochs to reject ICA components
    extension = '-epo'
    fname_in = op.join(meg_subject_dir, config.base_fname.format(**locals()))
    epochs = mne.read_epochs(fname_in, preload=True)

    extension = 'cleaned-epo'
    fname_out = op.join(meg_subject_dir, config.base_fname.format(**locals()))

    print("Input: ", fname_in)
    print("Output: ", fname_out)

    # load first run of raw data for ecg /eog epochs
    raw_list = list()
    print("  Loading one run from raw data")
    extension = config.runs[0] + '_sss_raw'
    raw_fname_in = op.join(meg_subject_dir,
                           config.base_fname.format(**locals()))
    raw = mne.io.read_raw_fif(raw_fname_in, preload=True)

    # run ICA on MEG and EEG
    picks_meg = mne.pick_types(raw.info,
                               meg=True,
                               eeg=False,
                               eog=False,
                               stim=False,
                               exclude='bads')
    picks_eeg = mne.pick_types(raw.info,
                               meg=False,
                               eeg=True,
                               eog=False,
                               stim=False,
                               exclude='bads')
    all_picks = {'meg': picks_meg, 'eeg': picks_eeg}

    if config.eeg:
        ch_types = ['meg', 'eeg']
    else:
        ch_types = ['meg']

    for ch_type in ch_types:
        report = None
        print(ch_type)
        picks = all_picks[ch_type]

        # Load ICA
        fname_ica = op.join(
            meg_subject_dir,
            '{0}_{1}_{2}-ica.fif'.format(subject, config.study_name, ch_type))
        print('Reading ICA: ' + fname_ica)
        ica = read_ica(fname=fname_ica)

        pick_ecg = mne.pick_types(raw.info,
                                  meg=False,
                                  eeg=False,
                                  ecg=True,
                                  eog=False)

        # ECG
        # either needs an ecg channel, or avg of the mags (i.e. MEG data)
        ecg_inds = list()
        if pick_ecg or ch_type == 'meg':

            picks_ecg = np.concatenate([picks, pick_ecg])

            # Create ecg epochs
            if ch_type == 'meg':
                reject = {
                    'mag': config.reject['mag'],
                    'grad': config.reject['grad']
                }
            elif ch_type == 'eeg':
                reject = {'eeg': config.reject['eeg']}

            ecg_epochs = create_ecg_epochs(raw,
                                           picks=picks_ecg,
                                           reject=reject,
                                           baseline=(None, 0),
                                           tmin=-0.5,
                                           tmax=0.5)

            ecg_average = ecg_epochs.average()

            # XXX I had to lower the threshold for ctps (default 0.25), otherwise it does not
            # find any components
            # check how this behaves on other data
            ecg_inds, scores = ica.find_bads_ecg(ecg_epochs,
                                                 method='ctps',
                                                 threshold=0.1)
            del ecg_epochs

            report_name = op.join(
                meg_subject_dir,
                '{0}_{1}_{2}-reject_ica.html'.format(subject,
                                                     config.study_name,
                                                     ch_type))
            report = Report(report_name, verbose=False)

            # Plot r score
            report.add_figs_to_section(ica.plot_scores(scores,
                                                       exclude=ecg_inds),
                                       captions=ch_type.upper() + ' - ECG - ' +
                                       'R scores')

            # Plot source time course
            report.add_figs_to_section(ica.plot_sources(ecg_average,
                                                        exclude=ecg_inds),
                                       captions=ch_type.upper() + ' - ECG - ' +
                                       'Sources time course')

            # Plot source time course
            report.add_figs_to_section(ica.plot_overlay(ecg_average,
                                                        exclude=ecg_inds),
                                       captions=ch_type.upper() + ' - ECG - ' +
                                       'Corrections')

        else:
            print('no ECG channel!')

        # EOG
        pick_eog = mne.pick_types(raw.info,
                                  meg=False,
                                  eeg=False,
                                  ecg=False,
                                  eog=True)

        eog_inds = list()
        if pick_eog.any():
            print('using EOG channel')
            picks_eog = np.concatenate([picks, pick_eog])
            # Create eog epochs
            eog_epochs = create_eog_epochs(raw,
                                           picks=picks_eog,
                                           reject=None,
                                           baseline=(None, 0),
                                           tmin=-0.5,
                                           tmax=0.5)

            eog_average = eog_epochs.average()
            eog_inds, scores = ica.find_bads_eog(eog_epochs, threshold=3.0)
            del eog_epochs

            # Plot r score
            report.add_figs_to_section(ica.plot_scores(scores,
                                                       exclude=eog_inds),
                                       captions=ch_type.upper() + ' - EOG - ' +
                                       'R scores')

            # Plot source time course
            report.add_figs_to_section(ica.plot_sources(eog_average,
                                                        exclude=eog_inds),
                                       captions=ch_type.upper() + ' - EOG - ' +
                                       'Sources time course')

            # Plot source time course
            report.add_figs_to_section(ica.plot_overlay(eog_average,
                                                        exclude=eog_inds),
                                       captions=ch_type.upper() + ' - EOG - ' +
                                       'Corrections')

            report.save(report_name, overwrite=True, open_browser=False)

        else:
            print('no EOG channel!')

        ica_reject = (list(ecg_inds) + list(eog_inds) +
                      list(config.rejcomps_man[subject][ch_type]))

        # now reject the components
        print('Rejecting from ' + ch_type + ': ' + str(ica_reject))
        epochs = ica.apply(epochs, exclude=ica_reject)

        print('Saving epochs')
        epochs.save(fname_out)

        if report is not None:
            report.add_figs_to_section(ica.plot_overlay(raw.copy(),
                                                        exclude=ica_reject),
                                       captions=ch_type.upper() +
                                       ' - ALL(epochs) - ' + 'Corrections')

        if config.plot:
            epochs.plot_image(combine='gfp',
                              group_by='type',
                              sigma=2.,
                              cmap="YlGnBu_r")
Example #33
epochs.resample(300)

# let's save a version without dropping based on SSPs
red_epochs = epochs.copy()
no_interest = np.nonzero(red_epochs.events[:, 2] < 13)[0]
red_epochs.drop_epochs(no_interest)
print('Saving epochs and evoked data...')
evokeds = [red_epochs[name].average() for name in ['STI-correct', 'STI-incorrect']]
mne.write_evokeds(dir_out + subj + '_stop_parsed_matched_BP1-100_DS300-ave.fif', evokeds)
new_fname = dir_out + subj + '_stop_parsed_matched_BP1-100_DS300-epo.fif.gz'
red_epochs.save(new_fname)

# now we save a version after cleaning with SSPs
# grab SSP vectors from previous cleanup sessions
epochs_fname = clean_dir + subj + '_stop_parsed_matched_clean_BP1-35_DS120-epo.fif.gz'
epochs35 = mne.read_epochs(epochs_fname, proj=True)
bad_epochs = [i for i, j in enumerate(epochs35.drop_log) if len(j) > 0]
epochs.drop_epochs(bad_epochs)
epochs.info['projs'] = epochs35.info['projs']

# removing the epochs we don't want, need to do it again because indices
# changed after removing bad epochs based on SSP
no_interest = np.nonzero(epochs.events[:, 2] < 13)[0]
epochs.drop_epochs(no_interest)

# make averaged file and save final result
print('Saving epochs and evoked data with optional SSP operators...')
evokeds = [epochs[name].average() for name in ['STI-correct', 'STI-incorrect']]
mne.write_evokeds(dir_out + subj + '_stop_parsed_matched_clean_BP1-100_DS300-ave.fif', evokeds)
new_fname = dir_out + subj + '_stop_parsed_matched_clean_BP1-100_DS300-epo.fif.gz'
epochs.save(new_fname)
Example #34

## change data files dir
os.chdir('/projects/MINDLAB2011_24-MEG-readiness/scratch')

### SETUP DATA ####
sessions  = ["plan", "classic"]
subs = [2]
for sub in subs:
    for session in sessions:
        
        f_load = "sub_%d_%s_tsss_mc_epochs.fif" %(sub, session)
        f_save = "sub_%d_%s" % (sub, session)
        print f_load
        print f_save
        epochs = mne.read_epochs(f_load)
        exec("%s=%s" % (f_save, "epochs"))

sub_2_classic.resample(sfreq=500, n_jobs=n_jobs)
sub_2_plan.resample(sfreq=500, n_jobs=n_jobs)
#sub_2_interupt.resample(sfreq=500, n_jobs=n_jobs)

cmb_A = sub_2_classic[:, :, 250:-250]
cmb_B = sub_2_plan[:, :, 250:-250]
#cmb_C = sub_2_interupt[:, :, 250:-250]

###############################################################################
# Decoding in sensor space using a linear SVM
n_times = len(epochs.times)
# Take only the data channels (here the gradiometers)
data_picks = mne.fiff.pick_types(sub_2_plan.info, meg='grad', exclude='bads')
Example #35
import mne
import pandas as pd
import os.path as op
import matplotlib.pyplot as plt
from scipy.signal import hilbert, savgol_filter

subjects = ['s1', 's2', 's3']

study_path = '/Users/lpen/Documents/wake_sleep/study/'
ref = 'avg'  # reference: 'avg' (average) or 'bip' (bipolar)
subject = 's1'

# Prepare data --------
# load
file_name = '%s_wake_%s_16s-epo.fif' % (subject, ref)
file_path = op.join(study_path, subject, 'data', 'epochs', file_name)
epochs = mne.read_epochs(file_path)

# apply filter
bandpass = [90, 95]
epochs.filter(bandpass[0], bandpass[1], fir_design='firwin')

# get data
data_mat = epochs.get_data()  # 3D matrix: epochs x channels x samples
sfreq = int(epochs.info['sfreq'])

pd.set_option('display.expand_frame_repr',
              False)  # to show channel info in wide format
ch_info = pd.read_csv(op.join(study_path, 'varios', 'chan_info_all.csv'))
subj_ch_info = ch_info.loc[(ch_info.Subject == subject)
                           & (ch_info.Condition == 'wake')]
print(subj_ch_info)
Example #36
def main(ecog_lp, savefolder, averagePower, stat_method):
    '''
    Use MNE to compute spectrograms from ECoG trials (with false positive trials removed, if desired).
    '''
    
    #Load parameters
    alpha = config.constants_compute_tfr['alpha']
    n_perms = config.constants_compute_tfr['n_perms']
    pad_val = config.constants_compute_tfr['pad_val'] #(sec) used for padding each side of epochs and cropped off later
    freqs = config.constants_compute_tfr['freqs']
    decim = config.constants_compute_tfr['decim']
    # Baseline times to subtract off (in sec, centered around events)
    baselineT = config.constants_compute_tfr['baseline_vals']
    
    #Find all epoch files
    fnames_all = natsort.natsorted(glob.glob(ecog_lp+'*_epo.fif'))
    
    for fname in fnames_all:
        #Load epoch data
        ep_dat = mne.read_epochs(fname)
        epoch_times = [ep_dat.times.min()+pad_val, ep_dat.times.max()-pad_val]
        bad_chans = ep_dat.info['bads'].copy() #store for later
        ep_dat.info['bads'] = []
        
        #Remove false positive events
        if 'false_pos' in ep_dat.metadata.columns:
            bad_ev_inds = np.nonzero(ep_dat.metadata['false_pos'].values)[0]
            ep_dat.drop(bad_ev_inds)
        
        #Compute TFR
        power = compute_tfr(ep_dat,epoch_times,freqs = freqs,crop_val=pad_val,decim=decim)
        power._metadata = ep_dat.metadata.copy() #add in metadata
        
        #Parameter updates
        power_ave_masked=power.copy()
        if averagePower:
            power_ave_masked = power_ave_masked.average() #remove epoch dimension for output
        else:
            alpha = np.nan #no stats if not averaging 

        #Calculate and subtract baseline (for each channel), computing stats if desired
        baseidx = np.nonzero(np.logical_and(power.times>=baselineT[0],power.times<=baselineT[1]))[0]
        for chan in range(power.data.shape[1]):
            curr_power = tfr_subtract_baseline(power,chan,baseidx,compute_mean=True)
            curr_masked_power_ave = tfr_boot_sig_mask(curr_power,baseidx,n_perms,alpha,stat_method,averagePower)

            #Return masked data to original variable
            if averagePower:
                power_ave_masked.data[chan,...]=curr_masked_power_ave
            else:
                curr_masked_power_ave = np.moveaxis(curr_masked_power_ave,-1,0)
                power_ave_masked.data[:,chan,...]=curr_masked_power_ave
            del curr_masked_power_ave, curr_power

        #Save result
        file_prefix = fname.split('/')[-1][:-8]
        if not averagePower:
            saveName = file_prefix+'_allEpochs_tfr.h5'
        else:
            saveName = file_prefix+'_ave_tfr.h5'
        power_ave_masked.info['bads'] = bad_chans.copy() #add bad channel list back in
        power_ave_masked.save(savefolder+saveName, overwrite=True)
        del power
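
# Hedged usage sketch (paths and the stat_method value are hypothetical;
# config must provide constants_compute_tfr as assumed in the function):
# main('/data/ecog_epochs/', '/data/tfr_out/', averagePower=True,
#      stat_method='fdr')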
Example #37
###############################################################################
# See the :ref:`tut-epochs-dataframe` tutorial for many more examples of the
# :meth:`~mne.Epochs.to_data_frame` method.
#
#
# Loading and saving ``Epochs`` objects to disk
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# :class:`~mne.Epochs` objects can be loaded and saved in the ``.fif`` format
# just like :class:`~mne.io.Raw` objects, using the :func:`mne.read_epochs`
# function and the :meth:`~mne.Epochs.save` method. Functions are also
# available for loading data that was epoched outside of MNE-Python, such as
# :func:`mne.read_epochs_eeglab` and :func:`mne.read_epochs_kit`.

epochs.save('saved-audiovisual-epo.fif', overwrite=True)
epochs_from_file = mne.read_epochs('saved-audiovisual-epo.fif', preload=False)

###############################################################################
# The MNE-Python naming convention for epochs files is that the file basename
# (the part before the ``.fif`` or ``.fif.gz`` extension) should end with
# ``-epo`` or ``_epo``, and a warning will be issued if the filename you
# provide does not adhere to that convention.
#
# As a final note, be aware that the class of the epochs object is different
# when epochs are loaded from disk rather than generated from a
# :class:`~mne.io.Raw` object:

print(type(epochs))
print(type(epochs_from_file))
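
###############################################################################
# As a quick illustration of the naming convention above (a sketch; it reuses
# the ``epochs`` object from earlier), saving to a basename that does not end
# in ``-epo`` or ``_epo`` emits a warning:

import warnings

with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter('always')
    epochs.save('saved-audiovisual.fif', overwrite=True)  # non-conforming name
print([str(warn.message) for warn in w])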

###############################################################################
import glob
import os

import mne
import numpy as np

elec = 65


def nextpow2(n):
    """Smallest integer exponent m such that 2 ** m >= n."""
    return int(np.ceil(np.log2(n)))
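
# e.g. nextpow2(500) == 9, since 2 ** 9 = 512 is the smallest power of two >= 500
assert 2 ** nextpow2(500) == 512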


output_dir = 'your output directory for time-frequency results'
data_path = 'your path to all your epoched files'

for epochs_file in glob.glob(os.path.join(data_path, '*.fif')):

    EEG = mne.read_epochs(epochs_file)

    for E in range(1, elec):

        tmin = min(EEG.times)
        tmax = max(EEG.times)

        min_frex = 1
        max_frex = 30
        num_frex = 30

        time = np.arange(-1, 1.0001, 1 / EEG.info['sfreq'])
        pnts = int(EEG.info['sfreq'] * (tmax + (tmin * -1)) + 1)
        frex = np.logspace(np.log10(min_frex), np.log10(max_frex), num_frex)
        cycles = np.logspace(np.log10(3), np.log10(10),
                             num_frex) / (2 * np.pi * frex)
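        # note: `cycles` here are the Gaussian widths (in seconds) of the Morlet
        # wavelets, s = n_cycles / (2 * pi * f), with n_cycles log-spaced from
        # 3 to 10 across frequencies (Cohen-style wavelet construction)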
Example #39
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 10 01:35:05 2020

@author: wexu
"""

import mne
from config_SYN_Human import study_path, EEG_data_path, exp
import os
import json
from mne.preprocessing import ICA

for ID in exp.index:

    fname = os.path.join(study_path, ID + '.edf')

    epochs = mne.read_epochs(fname.replace(".edf", "_cleaned-epo.fif"))

    ica = ICA(method='fastica', n_components=0.99, max_iter=1000)
    ica.fit(epochs)
    ica.save(fname.replace(".edf", "-ica.fif"))
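
    # Hedged follow-up sketch (not in the original): after visual inspection
    # one would typically mark artifact components and apply the ICA, e.g.:
    # ica.plot_components()
    # ica.exclude = [0]  # hypothetical component index
    # epochs_clean = ica.apply(epochs.copy())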
Example #40
    def pipeline(self, band_name, ext='_ica-raw.fif', stim_channel='UPPT001'):
        """Pipeline of standard operations

        Args:
            band_name ({str}): Band name of the filter applied to the raw data
            ext ({str}, optional): Filename suffix of the files of interest. Defaults to '_ica-raw.fif'.
            stim_channel ({str}, optional): The channel name of the stimuli. Defaults to 'UPPT001'.
        """
        # Prepare memory stuffs ------------------------------------------------------
        # Raw name
        memory_name = f'{self.running_name}-{band_name}-epo.fif'
        memory_path = os.path.join(MEMORY_DIR, memory_name)
        # Denoise name
        memory_denoise_name = f'{self.running_name}-{band_name}-denoise-epo.fif'
        memory_denoise_path = os.path.join(MEMORY_DIR, memory_denoise_name)
        # Clean name
        memory_clean_name = [
            f'{self.running_name}-{band_name}-clean-epo.fif',
            f'{self.running_name}-{band_name}-clean-lags.pkl'
        ]
        memory_clean_path = [
            os.path.join(MEMORY_DIR, memory_clean_name[0]),
            os.path.join(MEMORY_DIR, memory_clean_name[1])
        ]

        # Get raw -------------------------------------------------------------------
        raw_dir = os.path.join(RAW_DIR, self.running_name)
        self.raw = self._get_raw(raw_dir, ext)

        # Raw epochs ----------------------------------------------------------------
        try:
            assert (self.use_memory)

            # Recall epochs from memory
            self.epochs = mne.read_epochs(memory_path)
            prompt(f'Raw epochs are recalled from memory: {self.epochs}')
        except Exception:  # not in memory yet -> recompute
            # Filter raw
            l_freq, h_freq = self.parameters['bands'][band_name]
            self._filter_raw(l_freq=l_freq, h_freq=h_freq)

            # Get epochs
            self.epochs = self._get_epochs(stim_channel)

            # Remember if [use_memory]
            if self.use_memory:
                self.epochs.save(memory_path)

        # Denoise epochs ------------------------------------------------------------
        try:
            assert (self.use_memory)

            # Recall denoise epochs from memory
            self.denoise_epochs = mne.read_epochs(memory_denoise_path)
            prompt(
                f'Denoise epochs are recalled from memory: {self.denoise_epochs}'
            )
        except Exception:  # not in memory yet -> recompute
            # Denoise epoch
            self.denoise_epochs = self._denoise(self.epochs.copy())

            # Remember if [use_memory]
            if self.use_memory:
                self.denoise_epochs.save(memory_denoise_path)

        # Remove button effect ------------------------------------------------------
        try:
            assert (self.use_memory)

            # Recall clean epochs and lags from memory
            self.clean_epochs = mne.read_epochs(memory_clean_path[0])
            with open(memory_clean_path[1], 'rb') as f:
                self.paired_lags_timelines = pickle.load(f)
            prompt(
                f'Clean epochs are recalled from memory: {self.clean_epochs}')
        except Exception:  # not in memory yet -> recompute
            # Remove button effect
            clean_epochs, paired_lags_timelines = self._remove_button_effect()
            self.clean_epochs = clean_epochs
            self.paired_lags_timelines = paired_lags_timelines

            # Remember if [use_memory]
            if self.use_memory:
                self.clean_epochs.save(memory_clean_path[0])
                with open(memory_clean_path[1], 'wb') as f:
                    pickle.dump(self.paired_lags_timelines, f)
fname = data_dir + '/%s_event_order.txt' % subj
with open(fname, 'r') as fid:
    events = [line.rstrip() for line in fid]

if len(events) > 688:
    fname = behavior_dir + '/%s_m_triaOrder.txt' % subj
else:
    fname = behavior_dir + '/%s_triaOrder.txt' % subj
with open(fname, 'r') as fid:
    behavior = [line.rstrip() for line in fid]

epochs_fname = data_dir + subj + '_stop_parsed_BP1-100_DS300-epo.fif.gz'
epochs = mne.read_epochs(epochs_fname)

new_events = epochs.events.copy()
# check how many events match starting from the first one
cnt = 0
match = True
while cnt < min(len(behavior), len(events)) and match:
    match = behavior[cnt].find(events[cnt]) >= 0
    if match:
        new_events[cnt, 2] = new_event_ids[behavior[cnt]]
    cnt += 1
print('Last matched event for %s: %d (Beh: %d, Eve: %d)' % (
    subj, cnt, len(behavior), len(events)))

# we need to drop events that were not matched
if len(behavior) < len(events):
                        picks=EEG_chs,
                        average=False)

    return morlet


fmin = 2
fmax = 45
n_bins = 45
## Compute PSD
for subj in SUBJ_LIST:
    for task in TASK_LIST:
        for run in RUN_LIST[task]:
            epochs_name, epochs_path = get_pareidolia_bids(FOLDERPATH,
                                                           subj,
                                                           task,
                                                           run,
                                                           stage='epo_long_AR',
                                                           cond=None)
            epochs = mne.read_epochs(epochs_path)
            epochs = epochs.crop(-1.5, 8)
            #Downsampling
            epochs = epochs.copy().resample(500, npad='auto')

            morlet = compute_morlet(epochs, fmin, fmax, n_bins)
            morlet.apply_baseline(mode='ratio', baseline=(None, -0.1))

            psds_file, psds_path = get_pareidolia_bids(
                FOLDERPATH, subj, task, run, stage='Morlet_halfcycle_500Hz')
            morlet.save(psds_path, overwrite=True)
Example #43
from hypyp.ext.mpl3d.mesh import Mesh
from hypyp.ext.mpl3d.camera import Camera

#MNE
import mne

#HyPyP
from hypyp import prep # need pip install https://api.github.com/repos/autoreject/autoreject/zipball/master
from hypyp import analyses
from hypyp import stats
from hypyp import viz

path="C:\\Users\\kathr\\OneDrive\\Documents\\GitHub\\EEG---Special-course"
os.chdir(path)

epo1 = mne.read_epochs('13_epochs_224a_resampled-epo.fif', preload=True)
epo2 = mne.read_epochs('13_epochs_224b_resampled-epo.fif', preload=True)

epo1.set_channel_types({'1-EXG2':'eog', '1-EXG3':'eog', '1-EXG4':'eog', '1-EXG5':'eog', '1-EXG6':'eog', '1-EXG7':'eog', '1-EXG8':'eog'})
epo2.set_channel_types({'1-EXG2':'eog', '1-EXG3':'eog', '1-EXG4':'eog', '1-EXG5':'eog', '1-EXG6':'eog', '1-EXG7':'eog', '1-EXG8':'eog'})


epo1.set_montage('biosemi64')
epo2.set_montage('biosemi64')

#Equal number of epochs
mne.epochs.equalize_epoch_counts([epo1, epo2])

#Sampling rate
sampling_rate = epo1.info['sfreq']
Example #44
for [si, sbj] in enumerate(subjs):
    subject = 'Sub%02d' % sbj

    sub_dir = op.join(C.cov_path, subject)
    if not op.exists(sub_dir):
        os.mkdir(sub_dir)

    for st_duration in C.MF['st_duration']:

        for origin in C.MF['origin']:

            ## read epochs
            epo_fname = C.fname_epo(C, subject, st_duration, origin)

            epochs = mne.read_epochs(epo_fname, preload=True)

            for method in C.cov_methods:  # doesn't work with 'auto'

                # covariance matrix (filter with wildcard)
                cov_fname = C.fname_cov(C, subject, st_duration, origin,
                                        method, '*')

                # method may be underspecified, since it may be ranked differently for different subjects
                cov_fname = glob.glob(cov_fname)[
                    0]  # be careful if multiple options present

                cov = mne.read_cov(cov_fname)

                # hack to create filenames taking into account multiple '.' in paths/names
                fstem = cov_fname.split('.')[0:-1]  # remove suffix
Example #45
            print('running for condition', mycond)
            if filtered:
                fn_list = glob.glob(
                    op.join(
                        subjects_dir, subject,
                        'MEG/*bcc,nr,fibp1-45,ar,*,evt_%s_bc-epo.fif' %
                        mycond))
            else:
                fn_list = glob.glob(
                    op.join(subjects_dir, subject,
                            'MEG/*bcc,nr,ar,*,evt_%s_bc-epo.fif' % mycond))
            print(len(fn_list), 'files for this condition.')
            # loop across chops per condition per subject (done for better ICA)
            chop_epochs_list = []
            for fn_epo in fn_list:
                epochs = mne.read_epochs(fn_epo, preload=True)
                chop_epochs_list.append(epochs)
                os.remove(fn_epo)
            final_epochs = mne.concatenate_epochs(chop_epochs_list)
            final_epochs_name = fn_epo.rsplit(
                'ar')[0] + 'ar,evt_%s_bc-epo.fif' % mycond  # jumeg epocher
            print('saving combined epochs as', final_epochs_name)
            final_epochs.save(final_epochs_name)

if check_epochs:
    # for manual checking and identifying bad epochs
    if filtered:
        fn_list = glob.glob(
            subjects_dir +
            '*[0-9]/MEG/*rfDC,bcc,nr,fibp1-45,ar,evt_*bc-epo.fif')
    else:
Example #46
def test_anonymize(tmpdir):
    """Test that sensitive information can be anonymized."""
    pytest.raises(TypeError, anonymize_info, 'foo')

    # Fake some subject data
    raw = read_raw_fif(raw_fname)
    raw.set_annotations(
        Annotations(onset=[0, 1],
                    duration=[1, 1],
                    description='dummy',
                    orig_time=None))
    first_samp = raw.first_samp
    expected_onset = np.arange(2) + raw._first_time
    assert raw.first_samp == first_samp
    assert_allclose(raw.annotations.onset, expected_onset)

    # test mne.anonymize_info()
    events = read_events(event_name)
    epochs = Epochs(raw, events[:1], 2, 0., 0.1, baseline=None)
    _test_anonymize_info(raw.info.copy())
    _test_anonymize_info(epochs.info.copy())

    # test instance methods & I/O roundtrip
    for inst, keep_his in zip((raw, epochs), (True, False)):
        inst = inst.copy()

        subject_info = dict(his_id='Volunteer', sex=2, hand=1)
        inst.info['subject_info'] = subject_info
        inst.anonymize(keep_his=keep_his)

        si = inst.info['subject_info']
        if keep_his:
            assert si == subject_info
        else:
            assert si['his_id'] == '0'
            assert si['sex'] == 0
            assert 'hand' not in si

        # write to disk & read back
        inst_type = 'raw' if isinstance(inst, BaseRaw) else 'epo'
        fname = 'tmp_raw.fif' if inst_type == 'raw' else 'tmp_epo.fif'
        out_path = tmpdir.join(fname)
        inst.save(out_path, overwrite=True)
        if inst_type == 'raw':
            read_raw_fif(out_path)
        else:
            read_epochs(out_path)

    # test that annotations are correctly zeroed
    raw.anonymize()
    assert raw.first_samp == first_samp
    assert_allclose(raw.annotations.onset, expected_onset)
    assert raw.annotations.orig_time == raw.info['meas_date']
    stamp = _dt_to_stamp(raw.info['meas_date'])
    assert raw.annotations.orig_time == _stamp_to_dt(stamp)

    raw.info['meas_date'] = None
    raw.anonymize(daysback=None)
    with pytest.warns(RuntimeWarning, match='None'):
        raw.anonymize(daysback=123)
    assert raw.annotations.orig_time is None
    assert raw.first_samp == first_samp
    assert_allclose(raw.annotations.onset, expected_onset)
Example #47
if t_by_t:
    n_pnts = 50
else:
    n_pnts = 1

allscores = np.empty((n_pnts, 24))
perm_allscores = np.empty((n_pnts, 24, nperms))
max_perm_allscores = np.empty((24, nperms))
allscores_pval = np.empty((n_pnts, 24))

for subj in np.arange(0,24):

    dat_file = ('S%02d_EB-epo' % (subj+1))
    dat_file = op.join(filepath, dat_file + '.fif')
    epochs = mne.read_epochs(dat_file)
    epochs.crop(tmin=0, tmax=1)
    epochs.resample(50)
    X0 = epochs['motor'].get_data()

    # dat_file = ('S%02d_EB-epo' % (subj+1))
    # dat_file = op.join(filepath, dat_file + '.fif')
    # epochs = mne.read_epochs(dat_file)
    # epochs.crop(tmin=0, tmax=1)
    # epochs.resample(50)
    X1 = epochs['non-motor'].get_data()

    X = np.concatenate((X0,X1),axis=0)
    y = np.concatenate((np.zeros(X0.shape[0]),np.ones(X1.shape[0])),axis=0)

    if t_by_t:
Example #48
def test_channel_name_limit(tmpdir, monkeypatch, fname):
    """Test that our remapping works properly."""
    #
    # raw
    #
    if fname.endswith('fif'):
        raw = read_raw_fif(fname)
        raw.pick_channels(raw.ch_names[:3])
        ref_names = []
        data_names = raw.ch_names
    else:
        assert fname.endswith('.ds')
        raw = read_raw_ctf(fname)
        ref_names = [
            raw.ch_names[pick]
            for pick in pick_types(raw.info, meg=False, ref_meg=True)
        ]
        data_names = raw.ch_names[32:35]
    proj = dict(data=np.ones((1, len(data_names))),
                col_names=data_names[:2].copy(),
                row_names=None,
                nrow=1)
    proj = Projection(data=proj,
                      active=False,
                      desc='test',
                      kind=0,
                      explained_var=0.)
    raw.add_proj(proj, remove_existing=True)
    raw.info.normalize_proj()
    raw.pick_channels(data_names + ref_names).crop(0, 2)
    long_names = ['123456789abcdefg' + name for name in raw.ch_names]
    fname = tmpdir.join('test-raw.fif')
    with catch_logging() as log:
        raw.save(fname)
    log = log.getvalue()
    assert 'truncated' not in log
    rename = dict(zip(raw.ch_names, long_names))
    long_data_names = [rename[name] for name in data_names]
    long_proj_names = long_data_names[:2]
    raw.rename_channels(rename)
    for comp in raw.info['comps']:
        for key in ('row_names', 'col_names'):
            for name in comp['data'][key]:
                assert name in raw.ch_names
    if raw.info['comps']:
        assert raw.compensation_grade == 0
        raw.apply_gradient_compensation(3)
        assert raw.compensation_grade == 3
    assert len(raw.info['projs']) == 1
    assert raw.info['projs'][0]['data']['col_names'] == long_proj_names
    raw.info['bads'] = bads = long_data_names[2:3]
    good_long_data_names = [
        name for name in long_data_names if name not in bads
    ]
    with catch_logging() as log:
        raw.save(fname, overwrite=True, verbose=True)
    log = log.getvalue()
    assert 'truncated to 15' in log
    for name in raw.ch_names:
        assert len(name) > 15
    # first read the full way
    with catch_logging() as log:
        raw_read = read_raw_fif(fname, verbose=True)
    log = log.getvalue()
    assert 'Reading extended channel information' in log
    for ra in (raw, raw_read):
        assert ra.ch_names == long_names
    assert raw_read.info['projs'][0]['data']['col_names'] == long_proj_names
    del raw_read
    # next read as if no longer names could be read
    monkeypatch.setattr(meas_info, '_read_extended_ch_info',
                        lambda x, y, z: None)
    with catch_logging() as log:
        raw_read = read_raw_fif(fname, verbose=True)
    log = log.getvalue()
    assert 'extended' not in log
    if raw.info['comps']:
        assert raw_read.compensation_grade == 3
        raw_read.apply_gradient_compensation(0)
        assert raw_read.compensation_grade == 0
    monkeypatch.setattr(  # restore
        meas_info, '_read_extended_ch_info', _read_extended_ch_info)
    short_proj_names = [
        f'{name[:13 - bool(len(ref_names))]}-{len(ref_names) + ni}'
        for ni, name in enumerate(long_data_names[:2])
    ]
    assert raw_read.info['projs'][0]['data']['col_names'] == short_proj_names
    #
    # epochs
    #
    epochs = Epochs(raw, make_fixed_length_events(raw))
    fname = tmpdir.join('test-epo.fif')
    epochs.save(fname)
    epochs_read = read_epochs(fname)
    for ep in (epochs, epochs_read):
        assert ep.info['ch_names'] == long_names
        assert ep.ch_names == long_names
    del raw, epochs_read
    # cov
    epochs.info['bads'] = []
    cov = compute_covariance(epochs, verbose='error')
    fname = tmpdir.join('test-cov.fif')
    write_cov(fname, cov)
    cov_read = read_cov(fname)
    for co in (cov, cov_read):
        assert co['names'] == long_data_names
        assert co['bads'] == []
    del cov_read

    #
    # evoked
    #
    evoked = epochs.average()
    evoked.info['bads'] = bads
    assert evoked.nave == 1
    fname = tmpdir.join('test-ave.fif')
    evoked.save(fname)
    evoked_read = read_evokeds(fname)[0]
    for ev in (evoked, evoked_read):
        assert ev.ch_names == long_names
        assert ev.info['bads'] == bads
    del evoked_read, epochs

    #
    # forward
    #
    with pytest.warns(None):  # not enough points for CTF
        sphere = make_sphere_model('auto', 'auto', evoked.info)
    src = setup_volume_source_space(
        pos=dict(rr=[[0, 0, 0.04]], nn=[[0, 1., 0.]]))
    fwd = make_forward_solution(evoked.info, None, src, sphere)
    fname = tmpdir.join('temp-fwd.fif')
    write_forward_solution(fname, fwd)
    fwd_read = read_forward_solution(fname)
    for fw in (fwd, fwd_read):
        assert fw['sol']['row_names'] == long_data_names
        assert fw['info']['ch_names'] == long_data_names
        assert fw['info']['bads'] == bads
    del fwd_read

    #
    # inv
    #
    inv = make_inverse_operator(evoked.info, fwd, cov)
    fname = tmpdir.join('test-inv.fif')
    write_inverse_operator(fname, inv)
    inv_read = read_inverse_operator(fname)
    for iv in (inv, inv_read):
        assert iv['info']['ch_names'] == good_long_data_names
    apply_inverse(evoked, inv)  # smoke test
Example #49
def split_epochs(LOGS_DIR,
                 subj,
                 bloc,
                 lobound=None,
                 hibound=None,
                 save_epochs=False):
    '''
    Split the epochs in the epo.fif file using the logfile, by comparing the
    timestamps of IN and OUT events with the timestamps of the events stored
    in the epochs file. Ultimately we want to compute our features on all
    epochs and then split them as needed, which is why we return INidx and
    OUTidx. NOTE: only correct trials are kept here.
    '''
    epo_path, epo_filename = get_SAflow_bids(FOLDERPATH,
                                             subj,
                                             bloc,
                                             'epo',
                                             cond=None)
    ### Find logfile to extract VTC
    log_file = find_logfile(subj, bloc, os.listdir(LOGS_DIR))
    VTC, INbounds, OUTbounds, INzone, OUTzone = get_VTC_from_file(
        LOGS_DIR + log_file, lobound=lobound, hibound=hibound)
    ### Find events, split them by IN/OUT and start epoching
    preproc_path, preproc_filename = get_SAflow_bids(FOLDERPATH,
                                                     subj,
                                                     bloc,
                                                     stage='preproc_raw',
                                                     cond=None)
    raw = read_raw_fif(preproc_filename,
                       preload=False)  #, min_duration=2/epochs.info['sfreq'])
    events = mne.find_events(raw, min_duration=2 / raw.info['sfreq'])
    INevents, OUTevents = split_events_by_VTC(INzone, OUTzone, events)
    print('event length : {}'.format(len(events)))
    INidx = []
    OUTidx = []
    epo_events = mne.read_events(
        epo_filename)  # get events from the epochs file (so no resp event)
    # dropping epochs is a bit convoluted because we have to get indices from the current cleaned epochs file
    for idx, ev in enumerate(epo_events):
        if ev[0] in INevents[:, 0]:  #compare timestamps
            INidx.append(idx)
        if ev[0] in OUTevents[:, 0]:
            OUTidx.append(idx)
    INidx = np.array(INidx)
    OUTidx = np.array(OUTidx)
    if save_epochs:
        epo_idx = np.array(range(len(epo_events)))
        IN_todrop = np.delete(epo_idx, INidx)  # drop all epochs EXCEPT INidx
        OUT_todrop = np.delete(epo_idx, OUTidx)
        INepochs = mne.read_epochs(epo_filename, preload=False)
        INepochs = INepochs.drop(indices=IN_todrop)
        OUTepochs = mne.read_epochs(epo_filename, preload=False)
        OUTepochs = OUTepochs.drop(indices=OUT_todrop)
        if lobound is None:
            INpath, INfilename = get_SAflow_bids(FOLDERPATH,
                                                 subj,
                                                 bloc,
                                                 'epo',
                                                 cond='IN')
        else:
            INpath, INfilename = get_SAflow_bids(FOLDERPATH,
                                                 subj,
                                                 bloc,
                                                 'epo',
                                                 cond='IN{}'.format(lobound *
                                                                    100))
        if hibound is None:
            OUTpath, OUTfilename = get_SAflow_bids(FOLDERPATH,
                                                   subj,
                                                   bloc,
                                                   'epo',
                                                   cond='OUT')
        else:
            OUTpath, OUTfilename = get_SAflow_bids(FOLDERPATH,
                                                   subj,
                                                   bloc,
                                                   'epo',
                                                   cond='OUT{}'.format(
                                                       hibound * 100))
        INepochs.save(INfilename)
        OUTepochs.save(OUTfilename)

    return INidx, OUTidx
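
# Hedged usage sketch (subject/block values are hypothetical; LOGS_DIR,
# FOLDERPATH and the SAflow helper functions must be available as assumed above):
# INidx, OUTidx = split_epochs(LOGS_DIR, subj='04', bloc=2,
#                              lobound=0.25, hibound=0.75, save_epochs=True)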
    "NEM_32": "NAG83",
    "NEM_33": "FAO18_fa",
    "NEM_34": "KER27",
    "NEM_35": "MUN79",
    "NEM_36": "BRA52_fa",
    "NEM_37": "EAM67"
}
# # sub_dict = {"NEM_21":"WKI71_fa","NEM_22":"EAM11","NEM_23":"FOT12","NEM_24":"BII41","NEM_26":"ENR41",
#               "NEM_27":"HIU14","NEM_28":"WAL70","NEM_29":"KIL72","NEM_30":"DIU11","NEM_31":"BLE94",
#               "NEM_32":"NAG83","NEM_33":"FAO18_fa","NEM_34":"KER27","NEM_35":"MUN79","NEM_36":"BRA52_fa",
#               "NEM_37":"EAM67"} # these are waiting for the next batch
# # sub_dict = {"NEM_10":"GIZ04","NEM_11":"WOO07","NEM_12":"TGH11","NEM_14":"FIN23","NEM_15":"KIL13","NEM_16":"KIO12","NEM_17":"DEN59","NEM_18":"SAG13","NEM_19":"ALC81","NEM_20":"PAG48",} # these are done

for meg, mri in sub_dict.items():
    # load and prepare the MEG data
    rest = mne.read_epochs("{dir}nc_{sub}_1_ica-epo.fif".format(dir=meg_dir,
                                                                sub=meg))
    ton = mne.read_epochs("{dir}nc_{sub}_2_ica-epo.fif".format(dir=meg_dir,
                                                               sub=meg))
    epo_exp = mne.read_epochs("{dir}nc_{sub}_exp-epo.fif".format(dir=meg_dir,
                                                                 sub=meg))
    neg = epo_exp['negative']
    pos = epo_exp['positive']
    # override head_position data to append sensor data (just for calculating CSD !)
    rest.info['dev_head_t'] = ton.info['dev_head_t']
    epo_bas = mne.concatenate_epochs([rest, ton])
    # make and save CSDs for 1-90 Hz range
    frequencies = np.linspace(1, 90, num=90)
    csd_exp = csd_morlet(epo_exp,
                         frequencies=frequencies,
                         n_jobs=8,
                         n_cycles=7,
Example #51
    img_type = 'orig'
    fs_subj = subj + '_an' if img_type == 'anony' else subj

    epo_path = op.join(study_path, 'source_stim', subj, 'epochs', 'fif')
    conds = glob.glob(epo_path + '/*-epo.fif')

    fwd_fname = op.join(study_path, 'source_stim', subj, 'source_files',
                        img_type, '%s-fwd.fif' % fs_subj)

    if not op.isfile(fwd_fname):
        fwd = make_fwd_solution(conds[0], fs_subj, study_path)
    else:
        fwd = mne.read_forward_solution(fwd_fname)

    for cond_fname in conds:
        eeg_epo = mne.read_epochs(cond_fname)
        eeg_epo.interpolate_bads(reset_bads=True)
        # eeg_epo.filter(None, 40)
        cov = mne.compute_covariance(
            eeg_epo, method='shrunk', tmin=-0.5,
            tmax=-0.3)  # use method='auto' for final computation
        evoked = eeg_epo.average()

        evoked.plot(spatial_colors=True)
        mne.viz.plot_evoked_topo(evoked)
        evoked.plot_topomap(np.linspace(-0.005, 0.005, 11))

        dist_dis = source_loc(evoked,
                              cov,
                              fwd,
                              subj,
    i for i in os.listdir(filedir)
    if 'Subject' in i and 'Test' not in i and '.' not in i
]
rmvSubjList = []

SubjList = [
    'Subject%d' % (n + 1) for n in np.arange(len(SubjDir)) if 'Subject%d' %
    (n + 1) not in rmvSubjList and os.path.exists(filedir + '/Subject%d/SF' %
                                                  (n + 1))
]
SubjN = len(SubjList)
del SubjDir

#- setting some parameters -#
os.chdir(filedir + '/' + SubjList[0] + '/' + ExpID + '/Datafiles/EpochData')
Epochs = mne.read_epochs('ProperEpochData-epo.fif', preload=True)
epochs = Epochs.copy().pick_types(meg=True)

conditions = list(epochs.event_id.keys())
conditions2 = [i for i in conditions if i != 'target']
times = epochs.times
del Epochs, epochs

MRIsubject = 'fsaverage'
subjects_dir = ''

src = mne.read_source_spaces(subjects_dir + '/' + MRIsubject +
                             '/bem/%s-%s-src.fif' %
                             (MRIsubject, useFsaveModel))
nuseVerts = src[-1]['nuse']
Example #53
def test_read_write_epochs():
    """Test epochs from raw files with IO as fif file
    """
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0))
    evoked = epochs.average()
    data = epochs.get_data()

    epochs_no_id = Epochs(raw, pick_events(events, include=event_id), None, tmin, tmax, picks=picks, baseline=(None, 0))
    assert_array_equal(data, epochs_no_id.get_data())

    eog_picks = pick_types(raw.info, meg=False, eeg=False, stim=False, eog=True, exclude="bads")
    eog_ch_names = [raw.ch_names[k] for k in eog_picks]
    epochs.drop_channels(eog_ch_names)
    assert_true(len(epochs.info["chs"]) == len(epochs.ch_names) == epochs.get_data().shape[1])
    data_no_eog = epochs.get_data()
    assert_true(data.shape[1] == (data_no_eog.shape[1] + len(eog_picks)))

    # test decim kwarg
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        epochs_dec = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), decim=4)
        assert_equal(len(w), 1)

    data_dec = epochs_dec.get_data()
    assert_array_equal(data[:, :, epochs_dec._decim_idx], data_dec)

    evoked_dec = epochs_dec.average()
    assert_array_equal(evoked.data[:, epochs_dec._decim_idx], evoked_dec.data)

    n = evoked.data.shape[1]
    n_dec = evoked_dec.data.shape[1]
    n_dec_min = n // 4
    assert_true(n_dec_min <= n_dec <= n_dec_min + 1)
    assert_true(evoked_dec.info["sfreq"] == evoked.info["sfreq"] / 4)

    # test IO
    epochs.save(op.join(tempdir, "test-epo.fif"))
    epochs_read = read_epochs(op.join(tempdir, "test-epo.fif"))

    assert_array_almost_equal(epochs_read.get_data(), epochs.get_data())
    assert_array_equal(epochs_read.times, epochs.times)
    assert_array_almost_equal(epochs_read.average().data, evoked.data)
    assert_equal(epochs_read.proj, epochs.proj)
    bmin, bmax = epochs.baseline
    if bmin is None:
        bmin = epochs.times[0]
    if bmax is None:
        bmax = epochs.times[-1]
    baseline = (bmin, bmax)
    assert_array_almost_equal(epochs_read.baseline, baseline)
    assert_array_almost_equal(epochs_read.tmin, epochs.tmin, 2)
    assert_array_almost_equal(epochs_read.tmax, epochs.tmax, 2)
    assert_equal(epochs_read.event_id, epochs.event_id)

    epochs.event_id.pop("1")
    epochs.event_id.update({"a:a": 1})  # test allow for ':' in key
    epochs.save(op.join(tempdir, "foo-epo.fif"))
    epochs_read2 = read_epochs(op.join(tempdir, "foo-epo.fif"))
    assert_equal(epochs_read2.event_id, epochs.event_id)

    # add reject here so some of the epochs get dropped
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), reject=reject)
    epochs.save(op.join(tempdir, "test-epo.fif"))
    # ensure bad events are not saved
    epochs_read3 = read_epochs(op.join(tempdir, "test-epo.fif"))
    assert_array_equal(epochs_read3.events, epochs.events)
    data = epochs.get_data()
    assert_true(epochs_read3.events.shape[0] == data.shape[0])

    # test copying loaded one (raw property)
    epochs_read4 = epochs_read3.copy()
    assert_array_almost_equal(epochs_read4.get_data(), data)
    # test equalizing loaded one (drop_log property)
    epochs_read4.equalize_event_counts(epochs.event_id)

    epochs.drop_epochs([1, 2], reason="can we recover orig ID?")
    epochs.save("test-epo.fif")
    epochs_read5 = read_epochs("test-epo.fif")
    assert_array_equal(epochs_read5.selection, epochs.selection)
    assert_array_equal(epochs_read5.drop_log, epochs.drop_log)

    # Test that one can drop channels on read file
    epochs_read5.drop_channels(epochs_read5.ch_names[:1])

    # test warnings on bad filenames
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        epochs_badname = op.join(tempdir, "test-bad-name.fif.gz")
        epochs.save(epochs_badname)
        read_epochs(epochs_badname)
    assert_true(len(w) == 2)
Example #54
import mne
import numpy as np
import os
from sklearn.preprocessing import LabelEncoder
from braindecode.datautil.signal_target import SignalAndTarget

exclude = [7]
all_epochs = list()
# We start by loading and interpolating the epochs of each training subject.
for subject_id in range(1, 6):
    if subject_id in exclude:
        continue
    subject = 'S%02d' % subject_id
    data_path = os.path.join('/home/claire/DATA/Data_Face_House/' + subject +
                             '/EEG/Evoked_Lowpass')
    fname_in = os.path.join(data_path, '%s-epo.fif' % subject)
    epochs = mne.read_epochs(fname_in)
    epochs.interpolate_bads()
    all_epochs.append(epochs)

epochs_train = epochs = mne.concatenate_epochs(all_epochs)

mne.epochs.combine_event_ids(epochs_train, ['stim/face', 'stim/house'],
                             {'stim': 100},
                             copy=False)
mne.epochs.combine_event_ids(epochs_train, ['imag/face', 'imag/house'],
                             {'imag': 200},
                             copy=False)

# Load Test subject
all_epochs = list()
for subject_id in range(7, 11):
Example #55
def SourceReconstructionv2(condnames, ListSubj, modality, Method,
                           covmatsource):

    # condnames = (('Qt_all','Qs_all'))
    # ListSubj  = [('jm100109')]
    # modality  = 'MEG'
    # method    = 'MNE'

    import mne
    from mne.minimum_norm import apply_inverse, make_inverse_operator
    from mne.beamformer import lcmv
    import os

    os.chdir('/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/SCRIPTS/MNE_PYTHON')
    os.environ['SUBJECTS_DIR'] = '/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/mri'
    os.environ['MNE_ROOT'] = '/neurospin/local/mne'

    wdir = "/neurospin/meg/meg_tmp/MTT_MEG_Baptiste/MEG/"

    for c in range(len(condnames)):
        for i in range(len(ListSubj)):

            # which modality?
            if modality == 'MEG':
                megtag = True
                eegtag = False
                fname_fwd = (wdir + ListSubj[i] +
                             "/mne_python/run3_ico-5_megonly_-fwd.fif")
            elif modality == 'EEG':
                megtag = False
                eegtag = True
                fname_fwd = (wdir + ListSubj[i] +
                             "/mne_python/run3_ico-5_eegonly_-fwd.fif")
            elif modality == 'MEEG':
                megtag = True
                eegtag = True
                fname_fwd = (wdir + ListSubj[i] +
                             "/mne_python/run3_ico-5_meeg_-fwd.fif")

            # load noise covariance matrix
            fname_noisecov = (wdir + ListSubj[i] + "/mne_python/COVMATS/" +
                              modality + "_noisecov_" + covmatsource + "_" +
                              ListSubj[i] + "-cov.fif")
            NOISE_COV1 = mne.read_cov(fname_noisecov)

            # load MEEG epochs, then pick
            fname_epochs = (wdir + ListSubj[i] +
                            "/mne_python/EPOCHS/MEEG_epochs_" + condnames[c] +
                            '_' + ListSubj[i] + "-epo.fif")
            epochs = mne.read_epochs(fname_epochs)
            picks = mne.pick_types(epochs.info,
                                   meg=megtag,
                                   eeg=eegtag,
                                   stim=False,
                                   eog=False)

            # compute evoked

            evokedcond1 = epochs.average(picks=picks)
            forward = mne.read_forward_solution(fname_fwd, surf_ori=True)
            inverse_operator1 = make_inverse_operator(evokedcond1.info,
                                                      forward,
                                                      NOISE_COV1,
                                                      loose=0.2,
                                                      depth=0.8)

            if Method == 'LCMV':
                fname_datacov = (wdir + ListSubj[i] + "/mne_python/COVMATS/" +
                                 modality + "datacov_" + condnames[c] + "_" +
                                 ListSubj[i] + "-cov.fif")
                DATA_COV1 = mne.read_cov(fname_datacov)

    ###############################
    # dSPM/ MNE/ sLORETA solution #
    ###############################

        if Method in ('dSPM', 'MNE', 'sLORETA'):

            snr = 3.0
            lambda2 = 1.0 / snr**2

            # MEG source reconstruction
            stccond1 = apply_inverse(evokedcond1,
                                     inverse_operator1,
                                     lambda2,
                                     method=Method,
                                     pick_ori=None)
            stccond1.save(wdir + ListSubj[i] + "/mne_python/STCS/" + modality +
                          "_" + ListSubj[i] + "_" + condnames[c] +
                          "_pick_oriNone_" + Method + "_ico-5-fwd.fif")

            stccond1norm = apply_inverse(evokedcond1,
                                         inverse_operator1,
                                         lambda2,
                                         method=Method,
                                         pick_ori="normal")
            stccond1norm.save(wdir + ListSubj[i] + "/mne_python/STCS/" +
                              modality + "_" + ListSubj[i] + "_" +
                              condnames[c] + "_pick_orinormal_" + Method +
                              "_ico-5-fwd.fif")

            # morphing to fsaverage
            stc_fsaverage_cond1 = mne.morph_data(ListSubj[i],
                                                 'fsaverage',
                                                 stccond1,
                                                 smooth=20)
            stc_fsaverage_cond1.save(wdir + ListSubj[i] + "/mne_python/STCS/" +
                                     modality + "_" + ListSubj[i] + "_" +
                                     condnames[c] + "_pick_oriNone_" + Method +
                                     "_ico-5-fwd-fsaverage.fif")

            stc_fsaverage_cond1norm = mne.morph_data(ListSubj[i],
                                                     'fsaverage',
                                                     stccond1norm,
                                                     smooth=20)
            stc_fsaverage_cond1norm.save(wdir + ListSubj[i] +
                                         "/mne_python/STCS/" + modality + "_" +
                                         ListSubj[i] + "_" + condnames[c] +
                                         "_pick_orinormal_" + Method +
                                         "_ico-5-fwd-fsaverage.fif")

    ###################
    # LCMV Beamformer #
    ###################

        elif Method == 'LCMV':

            # MEG source reconstruction
            stccond1 = lcmv(evokedcond1,
                            forward,
                            NOISE_COV1,
                            DATA_COV1,
                            reg=0.01,
                            pick_ori=None)
            stccond1.save(wdir + ListSubj[i] + "/mne_python/STCS/" + modality +
                          "_" + ListSubj[i] + "_" + condnames[c] +
                          "_pick_oriNone_" + Method + "_ico-5-fwd.fif")

            stccond1norm = lcmv(evokedcond1,
                                forward,
                                NOISE_COV1,
                                DATA_COV1,
                                reg=0.01,
                                pick_ori="normal")
            stccond1norm.save(wdir + ListSubj[i] + "/mne_python/STCS/" +
                              modality + "_" + ListSubj[i] + "_" +
                              condnames[c] + "_pick_orinormal_" + Method +
                              "_ico-5-fwd.fif")

            # morphing to fsaverage
            stc_fsaverage_cond1 = mne.morph_data(ListSubj[i],
                                                 'fsaverage',
                                                 stccond1,
                                                 smooth=20)
            stc_fsaverage_cond1.save(wdir + ListSubj[i] + "/mne_python/STCS/" +
                                     modality + "_" + ListSubj[i] + "_" +
                                     condnames[c] + "_pick_oriNone_" + Method +
                                     "_ico-5-fwd-fsaverage.fif")

            stc_fsaverage_cond1norm = mne.morph_data(ListSubj[i],
                                                     'fsaverage',
                                                     stccond1norm,
                                                     smooth=20)
            stc_fsaverage_cond1norm.save(wdir + ListSubj[i] +
                                         "/mne_python/STCS/" + modality + "_" +
                                         ListSubj[i] + "_" + condnames[c] +
                                         "_pick_orinormal_" + Method +
                                         "_ico-5-fwd-fsaverage.fif")
Example #56
base_dir = "C:/Users/kimca/Documents/MEG_analyses/NEMO/"
proc_dir = base_dir + "proc/"
subjs = [
    "nc_NEM_10", "nc_NEM_11", "nc_NEM_12", "nc_NEM_14", "nc_NEM_15",
    "nc_NEM_16", "nc_NEM_17", "nc_NEM_18", "nc_NEM_19", "nc_NEM_20",
    "nc_NEM_21", "nc_NEM_22", "nc_NEM_23", "nc_NEM_24", "nc_NEM_26",
    "nc_NEM_27", "nc_NEM_28", "nc_NEM_29", "nc_NEM_30", "nc_NEM_31",
    "nc_NEM_32", "nc_NEM_33", "nc_NEM_34", "nc_NEM_35", "nc_NEM_36",
    "nc_NEM_37"
]
subjs = ["nc_NEM_23"]

for subj in subjs:
    #load resting state measurement of a subject
    rest = mne.read_epochs(proc_dir + subj + "_1_ica-epo.fif")
    # produce a layout that we use for sensor plotting; same for all conditions
    layout = mne.find_layout(rest.info)
    mag_names = [rest.ch_names[p] for p in mne.pick_types(rest.info, meg=True)]
    layout.names = mag_names
    #load tone baseline (run 2) of a subject
    tonbas = mne.read_epochs(proc_dir + subj + "_2_ica-epo.fif")
    #load run 3 and 4 of a subject
    epo_a = mne.read_epochs(proc_dir + subj + "_3_ica-epo.fif")
    epo_b = mne.read_epochs(proc_dir + subj + "_4_ica-epo.fif")
    #interpolate bad channels
    rest.interpolate_bads()
    tonbas.interpolate_bads()
    epo_a.interpolate_bads()
    epo_b.interpolate_bads()
    #override coil positions of block b with those of block a for concatenation (sensor level only!!)
Example #57
from my_settings import (epochs_folder, tf_folder, subjects_dir, mne_folder)
import mne
import sys
import numpy as np
import pandas as pd

from mne.minimum_norm import read_inverse_operator, source_induced_power

subject = sys.argv[1]

epochs = mne.read_epochs(epochs_folder + "%s_target-epo.fif" % subject)

inv = read_inverse_operator(mne_folder + "%s-inv.fif" % subject)

labels = mne.read_labels_from_annot(
    subject,
    parc='PALS_B12_Lobes',
    # regexp="Bro",
    subjects_dir=subjects_dir)
labels_selc = labels[9], labels[10]

frequencies = np.arange(8, 13, 1)  # define frequencies of interest
n_cycles = frequencies / 3.  # different number of cycle per frequency
method = "dSPM"

sides = ["left", "right"]
conditions = ["ctl", "ent"]
cor = ["correct", "incorrect"]
phase = ["in_phase", "out_phase"]
congrunet = ["cong", "incong"]
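
# Hedged sketch of the computation these objects set up (the excerpt stops
# here; running on the full epochs without condition selection is an
# assumption, as are the omitted baseline settings):
power, plv = source_induced_power(epochs, inv, frequencies,
                                  label=labels_selc[0], method=method,
                                  n_cycles=n_cycles)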
Example #58
import matplotlib.pyplot as plt
import numpy as np
import scipy
import os
import mne

path = "C:\\Users\\kathr\\OneDrive\\Documents\\GitHub\\Bachelor-Project"
os.chdir(path)

# %% Cluster permutation test - define neighbors for pairs of electrodes
# To use the MNE function I need each group data in the format 3D array (n_obs, n_freq, n_connectivity_combination)
# And the 2 groups should be as elements in a list
# I should test each connectivity measurement separately

# First I have to define the "neighbors" for the connectivity combinations
# Definition: For the connection ch1 - ch2
# All spatial neighbors to ch1 and all spatial neighbors to ch2 and their connections are also considered neighbors!

epochs_a_s = mne.read_epochs('epochs_a_short_10.fif')

connectivity, ch_names = mne.channels.find_ch_adjacency(epochs_a_s[0].info, ch_type="eeg")
n_channels = len(ch_names)
ch_names = np.array(ch_names)

ch_names_new = []
connectivity_names_new = []
for i in range(n_channels):
    for j in range(n_channels):
        # Avoid connections between same electrode
        # Connectivity is symmetric, thus I will also avoid repeats, e.g. Fp1-Fp2, Fp2-Fp1
        if i >= j:
            continue
        else:
            # Get the label for the connection
Example #59
# spacing between channels (DC offset)
scale = 1e-5

# loop through the participants
for p, pid in enumerate(pids):
    # loop through the blocks
    for b, block in enumerate(blocks):
        # loop through the conditions
        for c, condition in enumerate(conditions):

            # construct the filename
            dfile = "./EPODATA/%d_%s_%s-epo.fif" % (pid, block, condition)

            # read data into mne
            epo = mne.read_epochs(dfile, proj=False, add_eeg_ref=False)

            # get data
            epo_data = epo.get_data()
            t = epo.times

            # loop through the trials
            for m in range(n_trials):

                # get the trial data
                trial_data = epo_data[m, :72]

                for ch in range(72):
                    # compute range
                    rng = trial_data[ch].max() - trial_data[ch].min()
                    fout.write("%d %s %s %d %d %.4e\n" % (pid, block, condition, m, ch, rng))
Example #60
def Extract_data_multisubject(root_dir, N_S_list, datatype='EEG'):
    """
    Load all blocks for a list of subjects and stack the results in X (and Y).
    """
    import mne
    import numpy as np
    import gc
        
    N_B_arr = [1,2,3]
    tmp_list_X = []
    tmp_list_Y = []
    rows = []
    total_elem = len(N_S_list)*3 # assume 3 sessions per subject
    S = 0
    for N_S in N_S_list:
        print("Iteration ", S)
        print("Subject ", N_S)
        for N_B in N_B_arr:
    
            # zero-pad the subject label if the subject number is below 10
            Num_s = 'sub-%02d' % N_S

            base_file_name = root_dir + '/derivatives/' + Num_s + '/ses-0'+ str(N_B) + '/' +Num_s+'_ses-0'+str(N_B) 
            events_file_name = base_file_name+'_events.dat'
            data_tmp_Y = np.load(events_file_name,allow_pickle=True)
            tmp_list_Y.append(data_tmp_Y)
            print("Inner iteration " , N_B)
            if datatype=="EEG" or datatype=="eeg":
                # load data and events
                eeg_file_name = base_file_name+'_eeg-epo.fif'
                data_tmp_X = mne.read_epochs(eeg_file_name,verbose='WARNING')._data
                rows.append(data_tmp_X.shape[0])
                if S == 0 and N_B == 1: # assume same number of channels, time steps, and column labels in every subject and session
                  chann=data_tmp_X.shape[1]
                  steps=data_tmp_X.shape[2]
                  columns=data_tmp_Y.shape[1]
                tmp_list_X.append(data_tmp_X)

            elif datatype=="EXG" or datatype=="exg":
                exg_file_name = base_file_name+'_exg-epo.fif'
                data_tmp_X = mne.read_epochs(exg_file_name,verbose='WARNING')._data
                rows.append(data_tmp_X.shape[0])
                if S == 0 and N_B == 1:
                  chann=data_tmp_X.shape[1]
                  steps=data_tmp_X.shape[2]
                  columns=data_tmp_Y.shape[1]
                tmp_list_X.append(data_tmp_X)
            
            elif datatype=="Baseline" or datatype=="baseline":
                baseline_file_name = base_file_name+'_baseline-epo.fif'
                data_tmp_X = mne.read_epochs(baseline_file_name,verbose='WARNING')._data
                rows.append(data_tmp_X.shape[0])
                if S == 0 and N_B == 1:
                  chann=data_tmp_X.shape[1]
                  steps=data_tmp_X.shape[2]
                  columns=data_tmp_Y.shape[1]
                tmp_list_X.append(data_tmp_X)
    
            else:
                print("Invalid Datatype")
                return None, None
        
        S += 1

    X = np.empty((sum(rows), chann, steps))
    Y = np.empty((sum(rows), columns))
    offset = 0
    # put elements of list into numpy array
    for i in range(total_elem):
      print("Saving element {} into array ".format(i))
      X[offset:offset+rows[i],:,:] = tmp_list_X[0]
      if datatype=="EEG" or datatype=="eeg" or datatype=="EXG" or datatype=="exg":
        Y[offset:offset+rows[i],:] = tmp_list_Y[0] # only build Y for the datatypes that uses it
      offset+=rows[i]
      del tmp_list_X[0]
      del tmp_list_Y[0]
      gc.collect()
    print("X shape", X.shape)
    print("Y shape", Y.shape)

    if datatype=="EEG" or datatype=="eeg" or datatype=="EXG" or datatype=="exg":
      # for eeg and exg types, there is a predefined label that is returned
      return X,Y
    else:
      # for baseline datatypes, there's no such label (rest phase)
      return X
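
# Hedged usage sketch (root path and subject numbers are hypothetical):
# X, Y = Extract_data_multisubject('/data/bids_root', [1, 2, 3], datatype='EEG')
# print(X.shape)  # (total_epochs, n_channels, n_time_steps)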