Code Example #1
File: test_event.py  Project: The3DWizard/mne-python
def test_make_fixed_length_events():
    """Test making events of a fixed length
    """
    raw = io.Raw(raw_fname)
    events = make_fixed_length_events(raw, id=1)
    assert_equal(events.shape[1], 3)
    tmin, tmax = raw.times[[0, -1]]
    duration = tmax - tmin
    events = make_fixed_length_events(raw, 1, tmin, tmax, duration)
    assert_equal(events.shape[0], 1)
Code Example #2
File: test_event.py  Project: kambysese/mne-python
def test_make_fixed_length_events():
    """Test making events of a fixed length."""
    raw = read_raw_fif(raw_fname)
    events = make_fixed_length_events(raw, id=1)
    assert events.shape[1] == 3
    events_zero = make_fixed_length_events(raw, 1, first_samp=False)
    assert_equal(events_zero[0, 0], 0)
    assert_array_equal(events_zero[:, 0], events[:, 0] - raw.first_samp)
    # With limits
    tmin, tmax = raw.times[[0, -1]]
    duration = tmax - tmin
    events = make_fixed_length_events(raw, 1, tmin, tmax, duration)
    assert_equal(events.shape[0], 1)
    # With bad limits (no resulting events)
    pytest.raises(ValueError, make_fixed_length_events, raw, 1,
                  tmin, tmax - 1e-3, duration)
    # not raw, bad id or duration
    pytest.raises(TypeError, make_fixed_length_events, raw, 2.3)
    pytest.raises(TypeError, make_fixed_length_events, 'not raw', 2)
    pytest.raises(TypeError, make_fixed_length_events, raw, 23, tmin, tmax,
                  'abc')

    # Let's try some ugly sample rate/sample count combos
    data = np.random.RandomState(0).randn(1, 27768)

    # This breaks unless np.round() is used in make_fixed_length_events
    info = create_info(1, 155.4499969482422)
    raw = RawArray(data, info)
    events = make_fixed_length_events(raw, 1, duration=raw.times[-1])
    assert events[0, 0] == 0
    assert len(events) == 1

    # Without use_rounding=True this breaks
    raw = RawArray(data[:, :21216], info)
    events = make_fixed_length_events(raw, 1, duration=raw.times[-1])
    assert events[0, 0] == 0
    assert len(events) == 1

    # Make sure it gets used properly by compute_raw_covariance
    cov = compute_raw_covariance(raw, tstep=None)
    expected = np.cov(data[:, :21216])
    np.testing.assert_allclose(cov['data'], expected, atol=1e-12)

    # overlaps
    events = make_fixed_length_events(raw, 1, duration=1)
    assert len(events) == 136
    events_ol = make_fixed_length_events(raw, 1, duration=1, overlap=0.5)
    assert len(events_ol) == 271
    events_ol_2 = make_fixed_length_events(raw, 1, duration=1, overlap=0.9)
    assert len(events_ol_2) == 1355
    assert_array_equal(events_ol_2[:, 0], np.unique(events_ol_2[:, 0]))
    with pytest.raises(ValueError, match='overlap must be'):
        make_fixed_length_events(raw, 1, duration=1, overlap=1.1)
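
The overlap counts in the test above follow directly from the step size: successive events start every (duration - overlap) seconds, so a step of 0.5 s roughly doubles the count and a step of 0.1 s gives roughly ten times as many. A minimal standalone sketch (not from any of the projects listed here; it reuses the sampling rate and sample count from the test):

import numpy as np
import mne

sfreq = 155.4499969482422  # same awkward rate as the test above
raw = mne.io.RawArray(np.random.RandomState(0).randn(1, 21216),
                      mne.create_info(1, sfreq))
for overlap in (0.0, 0.5, 0.9):
    events = mne.make_fixed_length_events(raw, 1, duration=1.,
                                          overlap=overlap)
    print('overlap=%.1f: %d events, step %.1f s'
          % (overlap, len(events), 1. - overlap))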
Code Example #3
File: test_event.py  Project: YashAgarwal/mne-python
def test_make_fixed_length_events():
    """Test making events of a fixed length"""
    raw = io.read_raw_fif(raw_fname)
    events = make_fixed_length_events(raw, id=1)
    assert_true(events.shape[1], 3)
    events_zero = make_fixed_length_events(raw, 1, first_samp=False)
    assert_equal(events_zero[0, 0], 0)
    assert_array_equal(events_zero[:, 0], events[:, 0] - raw.first_samp)
    # With limits
    tmin, tmax = raw.times[[0, -1]]
    duration = tmax - tmin
    events = make_fixed_length_events(raw, 1, tmin, tmax, duration)
    assert_equal(events.shape[0], 1)
    # With bad limits (no resulting events)
    assert_raises(ValueError, make_fixed_length_events, raw, 1,
                  tmin, tmax - 1e-3, duration)
Code Example #4
File: test_cov.py  Project: jhouck/mne-python
def test_cov_ctf():
    """Test basic cov computation on ctf data with/without compensation."""
    raw = read_raw_ctf(ctf_fname).crop(0., 2.).load_data()
    events = make_fixed_length_events(raw, 99999)
    assert len(events) == 2
    ch_names = [raw.info['ch_names'][pick]
                for pick in pick_types(raw.info, meg=True, eeg=False,
                                       ref_meg=False)]

    for comp in [0, 1]:
        raw.apply_gradient_compensation(comp)
        epochs = Epochs(raw, events, None, -0.2, 0.2, preload=True)
        with pytest.warns(RuntimeWarning, match='Too few samples'):
            noise_cov = compute_covariance(epochs, tmax=0.,
                                           method=['empirical'])
        prepare_noise_cov(noise_cov, raw.info, ch_names)

    raw.apply_gradient_compensation(0)
    epochs = Epochs(raw, events, None, -0.2, 0.2, preload=True)
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        noise_cov = compute_covariance(epochs, tmax=0., method=['empirical'])
    raw.apply_gradient_compensation(1)

    # TODO This next call in principle should fail.
    prepare_noise_cov(noise_cov, raw.info, ch_names)

    # make sure comps matrices were not removed from raw
    assert raw.info['comps'], 'Comps matrices removed'
Code Example #5
File: test_cov.py  Project: Vincent-wq/mne-python
def raw_epochs_events():
    """Create raw, epochs, and events for tests."""
    raw = read_raw_fif(raw_fname).set_eeg_reference(projection=True).crop(0, 3)
    raw = maxwell_filter(raw, regularize=None)  # heavily reduce the rank
    assert raw.info['bads'] == []  # no bads
    events = make_fixed_length_events(raw)
    epochs = Epochs(raw, events, tmin=-0.2, tmax=0, preload=True)
    return (raw, epochs, events)
Code Example #6
def test_tfr_ctf():
    """Test that TFRs can be calculated on CTF data."""
    raw = read_raw_fif(raw_ctf_fname).crop(0, 1)
    raw.apply_gradient_compensation(3)
    events = mne.make_fixed_length_events(raw, duration=0.5)
    epochs = mne.Epochs(raw, events)
    for method in (tfr_multitaper, tfr_morlet):
        method(epochs, [10], 1)  # smoke test
Code Example #7
def test_stockwell_ctf():
    """Test that Stockwell can be calculated on CTF data."""
    raw = read_raw_fif(raw_ctf_fname)
    raw.apply_gradient_compensation(3)
    events = make_fixed_length_events(raw, duration=0.5)
    evoked = Epochs(raw, events, tmin=-0.2, tmax=0.3, decim=10,
                    preload=True, verbose='error').average()
    tfr_stockwell(evoked, verbose='error')  # smoke test
Code Example #8
File: test_cov.py  Project: Eric89GXL/mne-python
def raw_epochs_events():
    """Create raw, epochs, and events for tests."""
    raw = read_raw_fif(raw_fname).set_eeg_reference(projection=True).crop(0, 3)
    raw = maxwell_filter(raw, regularize=None)  # heavily reduce the rank
    assert raw.info['bads'] == []  # no bads
    events = make_fixed_length_events(raw)
    epochs = Epochs(raw, events, tmin=-0.2, tmax=0, preload=True)
    return (raw, epochs, events)
Code Example #9
File: test_tfr.py  Project: kambysese/mne-python
def test_tfr_ctf():
    """Test that TFRs can be calculated on CTF data."""
    raw = read_raw_fif(raw_ctf_fname).crop(0, 1)
    raw.apply_gradient_compensation(3)
    events = mne.make_fixed_length_events(raw, duration=0.5)
    epochs = mne.Epochs(raw, events)
    for method in (tfr_multitaper, tfr_morlet):
        method(epochs, [10], 1)  # smoke test
Code Example #10
def test_stockwell_ctf():
    """Test that Stockwell can be calculated on CTF data."""
    raw = read_raw_fif(raw_ctf_fname)
    raw.apply_gradient_compensation(3)
    events = make_fixed_length_events(raw, duration=0.5)
    evoked = Epochs(raw, events, tmin=-0.2, tmax=0.3, decim=10,
                    preload=True, verbose='error').average()
    tfr_stockwell(evoked, verbose='error')  # smoke test
Code Example #11
def import_ECoG_rps(
    datadir,
    filename,
    finger,
    duration,
    overlap,
    normalize_input=True,
    y_measure="mean",
):
    # TODO add finger choice dict
    path = "".join([datadir, filename])
    if os.path.exists(path):
        dataset = sio.loadmat(path)
        X = dataset["train_data"].astype(np.float).T
        assert (finger >= 0 and
                finger < 5), "Finger input not valid, range value from 0 to 4."
        y = dataset["train_dg"][:, finger]  #

        raw = create_raw(X, y, X.shape[0], sampling_rate=1000)

        # Generate fixed length events.
        events = mne.make_fixed_length_events(raw,
                                              duration=duration,
                                              overlap=overlap)
        # Notch filter out some specific noisy bands
        raw.notch_filter([50, 100])
        # Band pass the input data
        raw.filter(l_freq=1.0, h_freq=70)

        epochs = mne.Epochs(raw,
                            events,
                            tmin=0.0,
                            tmax=duration,
                            baseline=(0, 0),
                            decim=2)

        X = epochs.get_data()[:, :-1, :]
        y = epochs.get_data()[:, -1, :]

        bands = [(1, 4), (4, 8), (8, 10), (10, 13), (13, 30), (30, 70)]
        bp = bandpower_multi(X,
                             fs=epochs.info["sfreq"],
                             bands=bands,
                             relative=True)

        # Normalize data
        if normalize_input:
            X = standard_scaling(X, scalings="mean", log=False)

        # Pick the y values for each hand
        y = y_reshape(np.expand_dims(y, axis=1), measure=y_measure)

        print(
            "The input data are of shape: {}, the corresponding y shape (filtered to 1 finger) is: {}"
            .format(X.shape, y.shape))
        return X, y, bp
    else:
        print("No such file '{}'".format(path), file=sys.stderr)
Code Example #12
File: power_features.py  Project: apmellot/meegpowreg
def compute_features(raw,
                     duration=60.,
                     shift=10.,
                     n_fft=512,
                     n_overlap=256,
                     fs=63.0,
                     fmin=0,
                     fmax=30,
                     fbands={'alpha': (8.0, 12.0)},
                     clean_func=lambda x: x,
                     n_jobs=1):

    events = mne.make_fixed_length_events(raw,
                                          id=3000,
                                          start=0,
                                          duration=shift,
                                          stop=raw.times[-1] - duration)
    epochs = mne.Epochs(raw,
                        events,
                        event_id=3000,
                        tmin=0,
                        tmax=duration,
                        proj=True,
                        baseline=None,
                        reject=None,
                        preload=True,
                        decim=1)
    epochs_clean = clean_func(epochs)
    clean_events = events[epochs_clean.selection]

    psds_clean, freqs = mne.time_frequency.psd_welch(epochs_clean,
                                                     fmin=fmin,
                                                     fmax=fmax,
                                                     n_fft=n_fft,
                                                     n_overlap=n_overlap,
                                                     average='mean',
                                                     picks=None)
    psds = trim_mean(psds_clean, 0.25, axis=0)

    covs = _compute_covs(raw, clean_events, fbands, duration)
    xfreqcovs, xfreqcorrs = _compute_xfreq_covs(epochs_clean, fbands)
    cospcovs = _compute_cosp_covs(epochs_clean, n_fft, n_overlap, fmin, fmax,
                                  fs)

    features = {
        'psds': psds,
        'freqs': freqs,
        'covs': covs,
        'xfreqcovs': xfreqcovs,
        'xfreqcorrs': xfreqcorrs,
        'cospcovs': cospcovs
    }

    res = dict(n_epochs=len(events),
               n_good_epochs=len(epochs),
               n_clean_epochs=len(epochs_clean))
    return features, res
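
For orientation, a hypothetical call to the function above (the file name is illustrative; the private helpers _compute_covs, _compute_xfreq_covs and _compute_cosp_covs are defined elsewhere in power_features.py):

import mne

raw = mne.io.read_raw_fif('sub-01_rest_raw.fif', preload=True)  # illustrative
features, res = compute_features(raw, duration=60., shift=10.,
                                 fbands={'alpha': (8.0, 12.0),
                                         'beta': (15.0, 30.0)})
print(res)  # {'n_epochs': ..., 'n_good_epochs': ..., 'n_clean_epochs': ...}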
Code Example #13
File: test_topomap.py  Project: shrahimim/mne-python
def test_ctf_plotting():
    """Test CTF topomap plotting."""
    raw = read_raw_fif(ctf_fname, preload=True)
    events = make_fixed_length_events(raw, duration=0.01)
    assert len(events) > 10
    evoked = Epochs(raw, events, tmin=0, tmax=0.01, baseline=None).average()
    assert get_current_comp(evoked.info) == 3
    # smoke test that compensation does not matter
    evoked.plot_topomap(time_unit='s')
Code Example #14
def epoch_condition(raw, cat='Rest', tmin=-0.5, tmax=1.75):
    """
    Epoch a condition-specific raw object (raw can be hfb or lfp).
    """
    condition = VisualClassifier()
    if cat == 'Rest':
        events_1 = mne.make_fixed_length_events(raw,
                                                id=32,
                                                start=100,
                                                stop=156,
                                                duration=2,
                                                first_samp=False,
                                                overlap=0.0)
        events_2 = mne.make_fixed_length_events(raw,
                                                id=32,
                                                start=300,
                                                stop=356,
                                                duration=2,
                                                first_samp=False,
                                                overlap=0.0)

        events = np.concatenate((events_1, events_2))
        rest_id = {'Rest': 32}
        # epoch
        epochs = mne.Epochs(raw,
                            events,
                            event_id=rest_id,
                            tmin=tmin,
                            tmax=tmax,
                            baseline=None,
                            preload=True)
    else:
        stim_events, stim_events_id = mne.events_from_annotations(raw)
        condition_id = condition.extract_stim_id(stim_events_id, cat=cat)
        epochs = mne.Epochs(raw,
                            stim_events,
                            event_id=stim_events_id,
                            tmin=tmin,
                            tmax=tmax,
                            baseline=None,
                            preload=True)
        epochs = epochs[condition_id]
        events = epochs.events
    return epochs, events
Code Example #15
def create_epochs(raw_data, duration=1):
    """
    Chops the RawArray into Epochs given the time duration of every epoch
    :param raw_data: mne.io.RawArray instance
    :param duration: duration of each epoch in seconds
    :return: mne Epochs instance
    """
    events = mne.make_fixed_length_events(raw_data, duration=duration)
    epochs = mne.Epochs(raw_data, events, preload=True)
    return epochs
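
A minimal usage sketch with synthetic data (channel names and sampling rate are illustrative; note that mne.Epochs is called with its default tmin/tmax above, so the epoch window itself does not depend on duration):

import numpy as np
import mne

info = mne.create_info(['C3', 'Cz', 'C4'], sfreq=250., ch_types='eeg')
raw_data = mne.io.RawArray(np.random.randn(3, 2500), info)  # 10 s of noise
epochs = create_epochs(raw_data, duration=2)  # one event every 2 s
print(epochs)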
Code Example #16
def test_field_map_ctf():
    """Test that field mapping can be done with CTF data."""
    raw = read_raw_fif(raw_ctf_fname).crop(0, 1)
    raw.apply_gradient_compensation(3)
    events = make_fixed_length_events(raw, duration=0.5)
    evoked = Epochs(raw, events).average()
    evoked.pick_channels(evoked.ch_names[:50])  # crappy mapping but faster
    # smoke test
    make_field_map(evoked, trans=trans_fname, subject='sample',
                   subjects_dir=subjects_dir)
Code Example #17
File: test_event.py  Project: neurofractal/mne-python
def test_make_fixed_length_events():
    """Test making events of a fixed length."""
    raw = read_raw_fif(raw_fname)
    events = make_fixed_length_events(raw, id=1)
    assert_equal(events.shape[1], 3)
    events_zero = make_fixed_length_events(raw, 1, first_samp=False)
    assert_equal(events_zero[0, 0], 0)
    assert_array_equal(events_zero[:, 0], events[:, 0] - raw.first_samp)
    # With limits
    tmin, tmax = raw.times[[0, -1]]
    duration = tmax - tmin
    events = make_fixed_length_events(raw, 1, tmin, tmax, duration)
    assert_equal(events.shape[0], 1)
    # With bad limits (no resulting events)
    assert_raises(ValueError, make_fixed_length_events, raw, 1, tmin,
                  tmax - 1e-3, duration)
    # not raw, bad id or duration
    assert_raises(TypeError, make_fixed_length_events, raw, 2.3)
    assert_raises(TypeError, make_fixed_length_events, 'not raw', 2)
    assert_raises(TypeError, make_fixed_length_events, raw, 23, tmin, tmax,
                  'abc')

    # Let's try some ugly sample rate/sample count combos
    data = np.random.RandomState(0).randn(1, 27768)

    # This breaks unless np.round() is used in make_fixed_length_events
    info = create_info(1, 155.4499969482422)
    raw = RawArray(data, info)
    events = make_fixed_length_events(raw, 1, duration=raw.times[-1])
    assert events[0, 0] == 0
    assert len(events) == 1

    # Without use_rounding=True this breaks
    raw = RawArray(data[:, :21216], info)
    events = make_fixed_length_events(raw, 1, duration=raw.times[-1])
    assert events[0, 0] == 0
    assert len(events) == 1

    # Make sure it gets used properly by compute_raw_covariance
    cov = compute_raw_covariance(raw, tstep=None)
    expected = np.cov(data[:, :21216])
    np.testing.assert_allclose(cov['data'], expected, atol=1e-12)
Code Example #18
def create_cognitive_reduce_features(data_path):
    preprocess = 'raw'
    for f in data_path.rglob("*{:}.edf".format(preprocess)):
        print(f.name)
        task = re.findall(r'(?<=_S[0-9]{2}_T[0-9]{2}_).+(?=_raw\.edf)',
                          f.name)[0]
        uid = re.findall('.+(?=_S[0-9]+_T[0-9]+_)', f.name)[0]
        # Read eeg file
        raw = mne.io.read_raw_edf(f)

        # Rename Channel
        mne.rename_channels(raw.info, renameChannels)
        # Set montage (3d electrode location)
        raw = raw.set_montage('standard_1020')
        raw = raw.pick(EEG_channels)
        raw.crop(tmin=(raw.times[-1] - 120))

        # Create fixed-length events every 0.5 seconds
        reject_criteria = dict(eeg=160e-6)  # 160 µV
        events_array = mne.make_fixed_length_events(raw,
                                                    start=0.5,
                                                    stop=None,
                                                    duration=0.5)
        epochs = mne.Epochs(raw,
                            events_array,
                            tmin=-0.5,
                            tmax=0.5,
                            reject=reject_criteria,
                            preload=True)
        epochs.drop_bad()
        print(epochs.get_data().shape)
        # epochs.plot_drop_log(show=True, subject=uid)

        frontal_epochs = epochs.copy().pick(frontal_ch)
        parietal_epochs = epochs.copy().pick(parietal_ch)

        frontal_bandpower = calculate_features(frontal_epochs)
        frontal_bandpower['area'] = 'frontal'
        parietal_bandpower = calculate_features(parietal_epochs)
        parietal_bandpower['area'] = 'parietal'

        final_feat = pd.concat((frontal_bandpower, parietal_bandpower))
        final_feat['condition'] = task
        final_feat['user'] = uid
        final_feat = final_feat.reset_index(drop=True)

        final_feat.to_csv(f.parent / 'eeg_features.csv')

        ratio = pd.DataFrame(frontal_bandpower['Theta'] /
                             parietal_bandpower['Alpha'],
                             columns=['ratio'])
        ratio['condition'] = task
        ratio['user'] = uid
        ratio.to_csv(f.parent / 'thetaf-alphap.csv')
Code Example #19
def test_make_fixed_length_events():
    """Test making events of a fixed length."""
    raw = read_raw_fif(raw_fname, add_eeg_ref=False)
    events = make_fixed_length_events(raw, id=1)
    assert_equal(events.shape[1], 3)
    events_zero = make_fixed_length_events(raw, 1, first_samp=False)
    assert_equal(events_zero[0, 0], 0)
    assert_array_equal(events_zero[:, 0], events[:, 0] - raw.first_samp)
    # With limits
    tmin, tmax = raw.times[[0, -1]]
    duration = tmax - tmin
    events = make_fixed_length_events(raw, 1, tmin, tmax, duration)
    assert_equal(events.shape[0], 1)
    # With bad limits (no resulting events)
    assert_raises(ValueError, make_fixed_length_events, raw, 1, tmin,
                  tmax - 1e-3, duration)
    # not raw, bad id or duration
    assert_raises(ValueError, make_fixed_length_events, raw, 2.3)
    assert_raises(ValueError, make_fixed_length_events, 'not raw', 2)
    assert_raises(ValueError, make_fixed_length_events, raw, 23, tmin, tmax,
                  'abc')
Code Example #20
def transform_raw_data(subject='chb01', annotations_dict=annotations_dict):
    import mne
    import numpy as np

    for rec in records_loc_lst:
        # For all records matching the subject, read them into an MNE Raw
        # object and set the channel names.
        if rec[0:5] != subject:
            continue

        data_path = 'chb-mit-scalp-eeg-database-1.0.0/'
        fname = data_path + rec
        data = mne.io.read_raw_edf(fname, preload=True)
        data = data.filter(1., 40., fir_design='firwin', n_jobs=1)
        info = data.info
        info['ch_names'] = ['AF7', 'FT7', 'TP7', 'PO7', 'AF3', 'FC3', 'CP3',
                            'PO3', 'AF4', 'FC4', 'CP4', 'PO4', 'AF8', 'FT8',
                            'TP8', 'PO8', 'FCz', 'CPz', 'T7', 'FT9', 'FT10',
                            'T8', 'TP8']
        for i in range(len(info['chs'])):
            info['chs'][i]['ch_name'] = info['ch_names'][i]

        montage = mne.channels.read_montage("standard_1020")
        data.set_montage(montage)

        rec_name = rec.split('/')[1].split('.')[0]
        if rec_name in annotations_dict:
            # The record has manual annotations: set the seizure annotations
            # on the Raw object and derive events from them.
            anno = annotations_dict[rec_name]
            data.set_annotations(anno)

            event_id = dict(Seizure=1, Nonseizure=0)
            events_from_annot, event_dict = mne.events_from_annotations(
                data, chunk_duration=1)
        else:
            # No manual annotations: mark the whole record as non-seizure.
            events = mne.make_fixed_length_events(data, id=0, start=0,
                                                  stop=None, duration=1.0,
                                                  first_samp=True,
                                                  overlap=0.0)
            mapping = {0: 'Nonseizure', 1: 'Seizure'}
            onsets = events[:, 0] / data.info['sfreq']
            durations = np.ones_like(onsets)  # 1 s chunks, matching duration
            descriptions = [mapping[code] for code in events[:, 2]]
            annot_from_events = mne.Annotations(onset=onsets,
                                                duration=durations,
                                                description=descriptions)
            data.set_annotations(annot_from_events)

        # Save the Raw file, with annotations set, as a FIF file.
        file_nm = rec_name + '_raw.fif'
        data.save(file_nm, picks='all', fmt='single')
Code Example #21
File: test_dipole.py  Project: mvdoc/mne-python
def test_dipole_fitting_ctf():
    """Test dipole fitting with CTF data."""
    raw_ctf = read_raw_ctf(fname_ctf).set_eeg_reference()
    events = make_fixed_length_events(raw_ctf, 1)
    evoked = Epochs(raw_ctf, events, 1, 0, 0, baseline=None).average()
    cov = make_ad_hoc_cov(evoked.info)
    sphere = make_sphere_model((0., 0., 0.))
    # XXX Eventually we should do some better checks about accuracy, but
    # for now our CTF phantom fitting tutorials will have to do
    # (otherwise we need to add that to the testing dataset, which is
    # a bit too big)
    fit_dipole(evoked, cov, sphere)
Code Example #22
def test_field_map_ctf():
    """Test that field mapping can be done with CTF data."""
    raw = read_raw_fif(raw_ctf_fname).crop(0, 1)
    raw.apply_gradient_compensation(3)
    events = make_fixed_length_events(raw, duration=0.5)
    evoked = Epochs(raw, events).average()
    evoked.pick_channels(evoked.ch_names[:50])  # crappy mapping but faster
    # smoke test
    make_field_map(evoked,
                   trans=trans_fname,
                   subject='sample',
                   subjects_dir=subjects_dir)
Code Example #23
def get_emg_epoch(subject, raw_emg, time, config):
    """Create the epoch data from raw data.

    Parameters
    ----------
    subject : string
        String of subject ID e.g. 7707
    raw_emg : mne raw object
        data structure of raw_emg
    time : list
        A list with start and end time
    config : yaml
        The configuration file

    Returns
    ----------
    mne.Epochs
        Epoched EMG data for the given subject and time window

    """
    # Parameters
    epoch_length = config['epoch_length']
    overlap = config['overlap']

    if subject in config['subjects2']:
        raw_cropped = raw_emg.copy().resample(config['sfreq2'],
                                              npad='auto',
                                              verbose='error')
    else:
        raw_cropped = raw_emg.copy().crop(tmin=time[0], tmax=time[1])
        raw_cropped = raw_cropped.copy().resample(config['sfreq2'],
                                                  npad='auto',
                                                  verbose='error')

    if config['high_pass_filter']:
        raw_cropped = raw_cropped.filter(l_freq=config['filter_freq'][0],
                                         h_freq=None,
                                         picks=[
                                             'emg_1', 'emg_2', 'emg_3',
                                             'emg_4', 'emg_5', 'emg_6',
                                             'emg_7', 'emg_8'
                                         ])

    events = mne.make_fixed_length_events(raw_cropped,
                                          duration=epoch_length,
                                          overlap=epoch_length * overlap)
    epochs = mne.Epochs(raw_cropped,
                        events,
                        tmin=0,
                        tmax=config['epoch_length'],
                        baseline=None,
                        verbose=False)
    return epochs
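
The function reads several keys from a YAML-backed config; a hypothetical minimal config consistent with the accesses above (all names and values illustrative, not the project's actual settings):

config = {
    'epoch_length': 1.0,       # seconds per epoch
    'overlap': 0.5,            # fraction of epoch_length shared between epochs
    'sfreq2': 200,             # resampling frequency in Hz
    'subjects2': ['7707'],     # subjects whose recordings skip the crop step
    'high_pass_filter': True,
    'filter_freq': [15.0],     # high-pass cutoff applied to the EMG channels
}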
Code Example #24
def test_dipole_fitting_ctf():
    """Test dipole fitting with CTF data."""
    raw_ctf = read_raw_ctf(fname_ctf).set_eeg_reference(projection=True)
    events = make_fixed_length_events(raw_ctf, 1)
    evoked = Epochs(raw_ctf, events, 1, 0, 0, baseline=None).average()
    cov = make_ad_hoc_cov(evoked.info)
    sphere = make_sphere_model((0., 0., 0.))
    # XXX Eventually we should do some better checks about accuracy, but
    # for now our CTF phantom fitting tutorials will have to do
    # (otherwise we need to add that to the testing dataset, which is
    # a bit too big)
    fit_dipole(evoked, cov, sphere)
Code Example #25
def test_ctf_plotting():
    """Test CTF topomap plotting."""
    raw = read_raw_fif(ctf_fname, preload=True)
    assert raw.compensation_grade == 3
    events = make_fixed_length_events(raw, duration=0.01)
    assert len(events) > 10
    evoked = Epochs(raw, events, tmin=0, tmax=0.01, baseline=None).average()
    assert get_current_comp(evoked.info) == 3
    # smoke test that compensation does not matter
    evoked.plot_topomap(time_unit='s')
    # better test that topomaps can still be used without plotting ref
    evoked.pick_types(meg=True, ref_meg=False)
    evoked.plot_topomap()
Code Example #26
def _get_global_reject_epochs(raw):
    duration = 3.
    events = mne.make_fixed_length_events(
        raw, id=3000, start=0, duration=duration)

    epochs = mne.Epochs(
        raw, events, event_id=3000, tmin=0, tmax=duration, proj=False,
        baseline=None, reject=None)
    epochs.apply_proj()
    epochs.load_data()
    epochs.pick_types(meg=True)
    reject = get_rejection_threshold(epochs, decim=8)
    return reject
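
get_rejection_threshold here comes from the autoreject package; a hedged usage sketch (the file path is illustrative):

import mne
from autoreject import get_rejection_threshold  # pip install autoreject

raw = mne.io.read_raw_fif('sample_audvis_raw.fif', preload=True)  # illustrative
reject = _get_global_reject_epochs(raw)
print(reject)  # e.g. {'mag': ...} for MEG channels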
Code Example #27
File: test_topomap.py  Project: Eric89GXL/mne-python
def test_ctf_plotting():
    """Test CTF topomap plotting."""
    raw = read_raw_fif(ctf_fname, preload=True)
    assert raw.compensation_grade == 3
    events = make_fixed_length_events(raw, duration=0.01)
    assert len(events) > 10
    evoked = Epochs(raw, events, tmin=0, tmax=0.01, baseline=None).average()
    assert get_current_comp(evoked.info) == 3
    # smoke test that compensation does not matter
    evoked.plot_topomap(time_unit='s')
    # better test that topomaps can still be used without plotting ref
    evoked.pick_types(meg=True, ref_meg=False)
    evoked.plot_topomap()
Code Example #28
File: utils.py  Project: matteoanelli/MEG-prediction
def create_epoch(
    X,
    sampling_rate,
    duration=4.0,
    overlap=0.0,
    ds_factor=1.0,
    verbose=None,
    baseline=None,
):
    # Create Basic info data
    # X.shape to be channel, n_sample
    n_channels = X.shape[0]
    raw = create_raw(X, n_channels, sampling_rate)

    # events = mne.make_fixed_length_events(raw, 1, duration=duration)
    # delta = 1. / raw.info['sfreq'] # TODO understand this delta
    # epochs = mne.Epochs(raw, events, event_id=[1], tmin=tmin,
    #               tmax=tmax - delta,
    #               verbose=verbose, baseline=baseline)

    events = mne.make_fixed_length_events(raw,
                                          1,
                                          duration=duration,
                                          overlap=overlap)
    delta = 1.0 / raw.info["sfreq"]

    if float(ds_factor) != 1.0:
        epochs = mne.Epochs(
            raw,
            events,
            event_id=[1],
            tmin=0.0,
            tmax=duration - delta,
            verbose=verbose,
            baseline=baseline,
            preload=True,
        )
        epochs = epochs.copy().resample(sampling_rate / ds_factor, npad="auto")
    else:
        epochs = mne.Epochs(
            raw,
            events,
            event_id=[1],
            tmin=0.0,
            tmax=duration - delta,
            verbose=verbose,
            baseline=baseline,
            preload=False,
        )

    return epochs
Code Example #29
def test_compute_features_epochs():
    raw = mne.io.read_raw_fif(raw_fname, verbose=False)
    raw = raw.copy().crop(0, 200)
    raw = raw.pick([0, 1, 330, 331, 332])  # take some MEG and EEG
    raw.info.normalize_proj()
    events = mne.make_fixed_length_events(raw,
                                          id=3000,
                                          start=0,
                                          duration=10.,
                                          stop=raw.times[-1] - 60.)
    epochs = mne.Epochs(raw,
                        events,
                        event_id=3000,
                        tmin=0,
                        tmax=60.,
                        proj=True,
                        baseline=None,
                        reject=None,
                        preload=True,
                        decim=1)
    computed_features, res = compute_features(epochs,
                                              features=[
                                                  'psds', 'covs',
                                                  'cross_frequency_covs',
                                                  'cross_frequency_corrs',
                                                  'cospectral_covs'
                                              ],
                                              frequency_bands=frequency_bands)
    n_channels = len(raw.ch_names)
    n_freqs = len(res['freqs'])
    n_fb = len(frequency_bands)
    assert set(computed_features.keys()) == {
        'psds', 'covs', 'cross_frequency_covs', 'cross_frequency_corrs',
        'cospectral_covs'
    }

    assert computed_features['psds'].shape == (n_channels, n_freqs)
    assert computed_features['covs'].shape == (n_fb, n_channels, n_channels)
    assert (computed_features['cross_frequency_covs'].shape == (n_fb *
                                                                n_channels,
                                                                n_fb *
                                                                n_channels))
    assert (computed_features['cross_frequency_corrs'].shape == (n_fb *
                                                                 n_channels,
                                                                 n_fb *
                                                                 n_channels))
    assert (computed_features['cospectral_covs'].shape[1:] == (n_channels,
                                                               n_channels))
Code Example #30
def create_epochs(raw: Raw, picks=None) -> Epochs:
    '''
    Create epochs from Raw instance with overlap between epochs.

    Args:
        Raw raw: Raw object containing EEGLAB .set data
        list picks: Subset of channel names to include in Epochs data,
        if None use all channels from Raw instance
    :return: Epochs epochs:  Epochs extracted from Raw instance

    See Also
    --------
    mne.io.Raw, mne.Epochs : Documentation of attribute and methods.
    '''
    def _check_epochs_are_overlapping():
        """
        Check that created epochs are overlapping and raises error if not.
        For speed concerns only checks overlap for one channel.
        """
        n_epochs = epochs.get_data().shape[0]
        overlap_data_points = int(overlap_in_seconds * raw.info['sfreq'])
        for epoch_num in range(n_epochs - 1):
            try:
                assert np.array_equal(
                    epochs.get_data()[epoch_num, 1, overlap_data_points:],
                    epochs.get_data()[epoch_num + 1, 1, :overlap_data_points])
            except AssertionError:
                logging.error('Epochs are not overlapping!')
                raise

    overlap_in_seconds = EPOCH_OVERLAP_RATIO * EPOCH_LENGTH_IN_SECONDS
    events = make_fixed_length_events(raw,
                                      id=1,
                                      first_samp=True,
                                      duration=EPOCH_LENGTH_IN_SECONDS,
                                      overlap=overlap_in_seconds)
    epochs = Epochs(raw=raw,
                    events=events,
                    picks=picks,
                    event_id=1,
                    baseline=None,
                    tmin=0.,
                    tmax=EPOCH_LENGTH_IN_SECONDS - 0.001,
                    preload=True)

    _check_epochs_are_overlapping()

    return epochs
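
EPOCH_LENGTH_IN_SECONDS and EPOCH_OVERLAP_RATIO are module-level constants not shown in this snippet; plausible values, consistent with how they are used above (an assumption, not the project's actual settings):

EPOCH_LENGTH_IN_SECONDS = 4.0  # duration passed to make_fixed_length_events
EPOCH_OVERLAP_RATIO = 0.5      # fraction of each epoch shared with the next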
Code Example #31
def _compute_cov(file_raw, duration):
    subject = pp.get_subject(file_raw)
    raw = mne.io.read_raw_fif(file_raw)
    raw.crop(tmax=duration * 60)
    rawc, reject = pp.clean_raw(raw, subject)

    events = mne.make_fixed_length_events(rawc,
                                          id=3000,
                                          start=0,
                                          duration=pp.duration)
    epochs = mne.Epochs(rawc,
                        events,
                        event_id=3000,
                        tmin=0,
                        tmax=pp.duration,
                        proj=True,
                        baseline=None,
                        reject=reject,
                        preload=False,
                        decim=1)
    epochs.drop_bad()
    clean_events = events[epochs.selection]

    picks = mne.pick_types(rawc.info, meg=True)
    covs = []
    for fb in pp.fbands:
        rf = rawc.copy().load_data().filter(fb[0], fb[1])
        ec = mne.Epochs(rf,
                        clean_events,
                        event_id=3000,
                        tmin=0,
                        tmax=pp.duration,
                        proj=True,
                        baseline=None,
                        reject=None,
                        preload=False,
                        decim=1,
                        picks=picks)
        cov = mne.compute_covariance(ec, method='oas', rank=None)
        covs.append(cov.data)
    out = dict(subject=subject,
               kind='rest',
               n_events=len(events),
               n_events_good=len(clean_events),
               covs=np.array(covs))
    return out
Code Example #32
def test_lcmv_ctf_comp():
    """Test interpolation with compensated CTF data."""
    ctf_dir = op.join(testing.data_path(download=False), 'CTF')
    raw_fname = op.join(ctf_dir, 'somMDYO-18av.ds')
    raw = mne.io.read_raw_ctf(raw_fname, preload=True)

    events = mne.make_fixed_length_events(raw, duration=0.2)[:2]
    epochs = mne.Epochs(raw, events, tmin=0., tmax=0.2)
    evoked = epochs.average()

    with warnings.catch_warnings(record=True):
        data_cov = mne.compute_covariance(epochs)
    fwd = mne.make_forward_solution(evoked.info, None,
                                    mne.setup_volume_source_space(pos=15.0),
                                    mne.make_sphere_model())
    filters = mne.beamformer.make_lcmv(evoked.info, fwd, data_cov)
    assert 'weights' in filters
Code Example #33
def read_xdf_eeg_data(config, subject):
    read_path = config['raw_eeg_path'] + 'S_' + subject + '/eeg.xdf'
    raw = read_raw_xdf(read_path)
    raw = raw.drop_channels(
        ['ACC30', 'ACC31', 'ACC32', 'Packet Counter', 'TRIGGER'])
    raw.set_montage(montage="standard_1020", set_dig=True, verbose=False)
    ch_names = [
        'Fp1', 'Fp2', 'AF3', 'AF4', 'F7', 'F8', 'F3', 'Fz', 'F4', 'FC5', 'FC6',
        'T7', 'T8', 'C3', 'Cz', 'C4', 'CP5', 'CP6', 'P7', 'P8', 'P3', 'Pz',
        'P4', 'PO7', 'PO8', 'PO3', 'PO4', 'O1', 'O2', 'A2'
    ]
    epoch_length = config['epoch_length']
    events = mne.make_fixed_length_events(raw, duration=epoch_length)
    epochs = mne.Epochs(raw, events, picks=ch_names, verbose=False)
    animate(epochs, config)

    return raw
Code Example #34
def mne_epoch(raw,
              tmax=3.,
              tmin=None,
              events=None,
              event_id='rest',
              reject_by_annotation=True):
    if events is None:
        events = mne.make_fixed_length_events(raw, duration=tmax)
        event_id = {'rest': 1}
        tmin = 0
    epochs = mne.Epochs(raw,
                        events=events,
                        event_id=event_id,
                        tmin=tmin,
                        tmax=tmax,
                        baseline=None,
                        reject_by_annotation=reject_by_annotation)
    return epochs
Code Example #35
File: test_lcmv.py  Project: SherazKhan/mne-python
def test_lcmv_ctf_comp():
    """Test interpolation with compensated CTF data."""
    ctf_dir = op.join(testing.data_path(download=False), 'CTF')
    raw_fname = op.join(ctf_dir, 'somMDYO-18av.ds')
    raw = mne.io.read_raw_ctf(raw_fname, preload=True)

    events = mne.make_fixed_length_events(raw, duration=0.2)[:2]
    epochs = mne.Epochs(raw, events, tmin=0., tmax=0.2)
    evoked = epochs.average()

    with pytest.warns(RuntimeWarning,
                      match='Too few samples .* estimate may be unreliable'):
        data_cov = mne.compute_covariance(epochs)
    fwd = mne.make_forward_solution(evoked.info, None,
                                    mne.setup_volume_source_space(pos=15.0),
                                    mne.make_sphere_model())
    filters = mne.beamformer.make_lcmv(evoked.info, fwd, data_cov)
    assert 'weights' in filters
Code Example #36
def test_plot_evoked_cov():
    """Test plot_evoked with noise_cov."""
    evoked = _get_epochs().average()
    cov = read_cov(cov_fname)
    cov['projs'] = []  # avoid warnings
    evoked.plot(noise_cov=cov, time_unit='s')
    with pytest.raises(TypeError, match='Covariance'):
        evoked.plot(noise_cov=1., time_unit='s')
    with pytest.raises(IOError, match='No such file'):
        evoked.plot(noise_cov='nonexistent-cov.fif', time_unit='s')
    raw = read_raw_fif(raw_sss_fname)
    events = make_fixed_length_events(raw)
    epochs = Epochs(raw, events, picks=default_picks)
    cov = compute_covariance(epochs)
    evoked_sss = epochs.average()
    with pytest.warns(RuntimeWarning, match='relative scaling'):
        evoked_sss.plot(noise_cov=cov, time_unit='s')
    plt.close('all')
Code Example #37
File: test_evoked.py  Project: Eric89GXL/mne-python
def test_plot_evoked_cov():
    """Test plot_evoked with noise_cov."""
    evoked = _get_epochs().average()
    cov = read_cov(cov_fname)
    cov['projs'] = []  # avoid warnings
    evoked.plot(noise_cov=cov, time_unit='s')
    with pytest.raises(TypeError, match='Covariance'):
        evoked.plot(noise_cov=1., time_unit='s')
    with pytest.raises(IOError, match='No such file'):
        evoked.plot(noise_cov='nonexistent-cov.fif', time_unit='s')
    raw = read_raw_fif(raw_sss_fname)
    events = make_fixed_length_events(raw)
    epochs = Epochs(raw, events, picks=picks)
    cov = compute_covariance(epochs)
    evoked_sss = epochs.average()
    with pytest.warns(RuntimeWarning, match='relative scaling'):
        evoked_sss.plot(noise_cov=cov, time_unit='s')
    plt.close('all')
Code Example #38
def _compute_cov(subject):
    rawc, reject = preprocess_raw(subject)

    events = mne.make_fixed_length_events(rawc,
                                          id=3000,
                                          start=0,
                                          duration=pp.duration)
    epochs = mne.Epochs(rawc,
                        events,
                        event_id=3000,
                        tmin=0,
                        tmax=pp.duration,
                        proj=True,
                        baseline=None,
                        reject=reject,
                        preload=False,
                        decim=1)
    epochs.drop_bad()
    clean_events = events[epochs.selection]

    #  picks = mne.pick_types(rawc.info, meg=False, eeg=True)
    covs = []
    for fb in pp.fbands:
        rf = rawc.copy().load_data().filter(fb[0], fb[1])
        ec = mne.Epochs(rf,
                        clean_events,
                        event_id=3000,
                        tmin=0,
                        tmax=pp.duration,
                        proj=True,
                        baseline=None,
                        reject=None,
                        preload=False,
                        decim=1,
                        picks=None)
        cov = mne.compute_covariance(ec, method='oas', rank=None)
        covs.append(cov.data)
    out = dict(subject=subject,
               n_events=len(events),
               n_events_good=len(clean_events),
               covs=np.array(covs),
               age=age_of(subject))
    return out
Code Example #39
def spectral_epochs(label: str,
                    raw: mne.io.Raw,
                    epoch_size: int,
                    max_freq: int = 100,
                    n_fft: int = 48,
                    filter: bool = True) -> tuple:
    """Read raw data and split into epochs of a given size (s), compute features
    over each one
    label: subject identifier
    epoch_size: duration of epochs in seconds
    data_folder: location of source data
    max_freq: max frequency for FFT (default 100)
    n_fft: FFT size (default 48)

    Returns: labels, features
    labels: a list of labels with the format <subject>-<run_index>-<N>
    features: a list of np.arrays one per epoch containing the features
    """

    features = []
    labels = []

    events = mne.make_fixed_length_events(raw, id=1, duration=epoch_size)
    epochs = mne.Epochs(raw,
                        events,
                        tmin=0.,
                        tmax=epoch_size,
                        baseline=None,
                        detrend=1,
                        decim=8,
                        preload=True)

    for N in range(len(epochs)):
        features.append(
            spectral_features(epochs[N],
                              max_freq=max_freq,
                              n_fft=n_fft,
                              filter=filter))
        labels.append("{}-{}".format(label, N))
        print('.', end='', flush=True)
    print('|', end='', flush=True)
    return labels, features
Code Example #40
File: test_evoked.py  Project: jdammers/mne-python
def test_plot_evoked_cov():
    """Test plot_evoked with noise_cov."""
    import matplotlib.pyplot as plt
    evoked = _get_epochs().average()
    cov = read_cov(cov_fname)
    cov['projs'] = []  # avoid warnings
    evoked.plot(noise_cov=cov, time_unit='s')
    with pytest.raises(TypeError, match='Covariance'):
        evoked.plot(noise_cov=1., time_unit='s')
    with pytest.raises(IOError, match='No such file'):
        evoked.plot(noise_cov='nonexistent-cov.fif', time_unit='s')
    raw = read_raw_fif(raw_sss_fname)
    events = make_fixed_length_events(raw)
    epochs = Epochs(raw, events)
    cov = compute_covariance(epochs)
    evoked_sss = epochs.average()
    with warnings.catch_warnings(record=True) as w:
        evoked_sss.plot(noise_cov=cov, time_unit='s')
    plt.close('all')
    assert any('relative scal' in str(ww.message) for ww in w)
Code Example #41
def test_plot_evoked_cov():
    """Test plot_evoked with noise_cov."""
    import matplotlib.pyplot as plt
    evoked = _get_epochs().average()
    cov = read_cov(cov_fname)
    cov['projs'] = []  # avoid warnings
    evoked.plot(noise_cov=cov, time_unit='s')
    with pytest.raises(TypeError, match='Covariance'):
        evoked.plot(noise_cov=1., time_unit='s')
    with pytest.raises(IOError, match='No such file'):
        evoked.plot(noise_cov='nonexistent-cov.fif', time_unit='s')
    raw = read_raw_fif(raw_sss_fname)
    events = make_fixed_length_events(raw)
    epochs = Epochs(raw, events)
    cov = compute_covariance(epochs)
    evoked_sss = epochs.average()
    with warnings.catch_warnings(record=True) as w:
        evoked_sss.plot(noise_cov=cov, time_unit='s')
    plt.close('all')
    assert any('relative scal' in str(ww.message) for ww in w)
Code Example #42
def _compute_rest_psd(subject, kind):

    fname = op.join(
        cfg.camcan_meg_raw_path,
        subject, kind, '%s_raw.fif' % kind)

    raw = mne.io.read_raw_fif(fname)
    mne.channels.fix_mag_coil_types(raw.info)
    raw = _run_maxfilter(raw, subject, kind)
    _compute_add_ssp_exg(raw)

    reject = _get_global_reject_epochs(raw)

    duration = 30.
    overlap = 8.
    stop = raw.times[-1]
    events = mne.make_fixed_length_events(
        raw, id=3000, start=0, duration=overlap,
        stop=stop - duration)

    epochs = mne.Epochs(
        raw, events, event_id=3000, tmin=0, tmax=duration, proj=True,
        baseline=None, reject=reject, preload=True, decim=1)
    # make sure not to decimate: decimation can induce power-line artefacts!

    picks = mne.pick_types(raw.info, meg=True)
    psd, freqs = mne.time_frequency.psd_welch(
        epochs, fmin=0, fmax=150, n_fft=4096,  # ~12 seconds
        n_overlap=512,
        picks=picks)

    out_path = op.join(
        cfg.derivative_path, subject)

    out_fname = op.join(out_path, 'rest_sensors_psd_welch-epo.h5')

    mne.externals.h5io.write_hdf5(
        out_fname, {'psd': psd, 'freqs': freqs},
        overwrite=True)
    return {'n_events': len(events), 'n_events_good': psd.shape[0]}
Code Example #43
def create_cognitive_full_features(data_path):
    preprocess = 'raw'
    for f in data_path.rglob("*{:}.edf".format(preprocess)):
        print(f.name)
        task = re.findall(r'(?<=_S[0-9]{2}_T[0-9]{2}_).+(?=_raw\.edf)',
                          f.name)[0]
        uid = re.findall('.+(?=_S[0-9]+_T[0-9]+_)', f.name)[0]
        # Read eeg file
        raw = mne.io.read_raw_edf(f)

        # Rename Channel
        mne.rename_channels(raw.info, renameChannels)
        # Set montage (3d electrode location)
        raw = raw.set_montage('standard_1020')
        raw = raw.pick(EEG_channels)
        # raw.crop(tmin=(raw.times[-1] - 120))

        # Create fixed-length events every 0.5 seconds
        reject_criteria = dict(eeg=140e-6)  # 140 µV
        events_array = mne.make_fixed_length_events(raw,
                                                    start=0.5,
                                                    stop=None,
                                                    duration=0.5)
        epochs = mne.Epochs(raw,
                            events_array,
                            tmin=-0.5,
                            tmax=0.5,
                            reject=reject_criteria,
                            preload=True)
        epochs.drop_bad()
        print(epochs.get_data().shape)
        # epochs.plot_drop_log(show=True, subject=uid)

        epochs = epochs.copy().pick(EEG_channels)

        bandpower_df = calculate_features(epochs)
        bandpower_df.loc[:, ('info', 'condition')] = task
        bandpower_df.loc[:, ('info', 'user')] = uid

        bandpower_df.to_csv(f.parent / 'eeg_features_full_set.csv')
        bandpower_df.to_pickle(f.parent / 'eeg_features_full_set.pd')
Code Example #44
    def get_epochs(self, resample=None):
        from mne.time_frequency import psd_multitaper
        raw = self.raw
        validation_windowsize = self.validation_windowsize
        front = self.front
        back = self.back
#        l_freq = self.l_freq
#        h_freq = self.h_freq
        events = mne.make_fixed_length_events(raw,id=1,start=front,
                                             stop=raw.times[-1]-back,
                                             duration=validation_windowsize)
        epochs = mne.Epochs(raw,events,event_id=1,tmin=0,tmax=validation_windowsize,
                           preload=True)
        if resample is not None:
            epochs.resample(resample)
#        psds,freq = psd_multitaper(epochs,fmin=l_freq,
#                                        fmax=h_freq,
#                                        tmin=0,tmax=validation_windowsize,
#                                        low_bias=True,)
#        psds = 10 * np.log10(psds)
        self.epochs = epochs
Code Example #45
def get_Onest_Amplitude_Duration_of_spindles(raw,channelList,
                                        annotations=None,
                                        moving_window_size=200,
                                        lower_threshold=.9,
                                        syn_channels=3,
                                        l_bound=0.5,h_bound=2,
                                        tol=1,higher_threshold=3.5,
                                        front=300,back=100,
                                        sleep_stage=True,
                                        proba=True,
                                        validation_windowsize=3,
                                        l_freq=11,h_freq=16):
    """
    raw: data after preprocessing
    channelList: channel list of interest, and in this study we use       'F3','F4','C3','C4','O1','O2'
    annotations: pandas DataFrame object containing manual annotations, such as sleep stages, spindle locations.
    moving_window_size: size of the moving window for convolved root mean square computation. It should work better when it is the sampling frequency, which, in this case is 500 (we downsample subjects with 1000 Hz sampling rate). 
    lower_threshold: highpass threshold for spindle detection: decision making = trimmed_mean + lower_T * trimmed_std
    higher_threshold: lowpass threshold for spindle detection: decision making = trimmed_mean + higher_T * trimmed_std
    syn_channels: criteria for selecting spindles: at least # of channels have spindle instance and also in the mean channel
    l_bound: low boundary for duration of a spindle instance
    h_bound: high boundary for duration of a spindle instance
    tol : tolerance for determing spindles (criteria in time)
    front : First few seconds of recordings that we are not interested because there might be artifacts, or it is confirmed subjects could not fall asleep within such a short period
    back : last few seconds of recordings that we are not interested due to the recording procedures
    """
    # process the data without any other information
    time=np.linspace(0,raw.last_samp/raw.info['sfreq'],raw._data[0,:].shape[0])
    RMS = np.zeros((len(channelList),raw._data[0,:].shape[0]))
    peak_time={} #preallocate
    sfreq=raw.info['sfreq']
    mph,mpl = {},{}

    for ii, names in enumerate(channelList):

        peak_time[names]=[]
        segment,_ = raw[ii,:]
        RMS[ii,:] = window_rms(segment[0,:],moving_window_size) 
        mph[names] = trim_mean(RMS[ii,int(front*sfreq):-int(back*sfreq)],0.05) + lower_threshold * trimmed_std(RMS[ii,:],0.05) 
        mpl[names] = trim_mean(RMS[ii,int(front*sfreq):-int(back*sfreq)],0.05) + higher_threshold * trimmed_std(RMS[ii,:],0.05)
        pass_ = RMS[ii,:] > mph[names]  # should be greater than the mean, not the threshold, to compute duration

        up = np.where(np.diff(pass_.astype(int))>0)
        down = np.where(np.diff(pass_.astype(int))<0)
        up = up[0]
        down = down[0]
        ####### key to identify segments that go beyond the lower threshold ########
        #print(down[0],up[0])
        if down[0] < up[0]:
            down = down[1:]
        #print(down[0],up[0])
        #############################
        if (up.shape > down.shape) or (up.shape < down.shape):
            size = np.min([up.shape,down.shape])
            up = up[:size]
            down = down[:size]
        C = np.vstack((up,down))
        for pairs in C.T:
            if l_bound < (time[pairs[1]] - time[pairs[0]]) < h_bound:
                SegmentForPeakSearching = RMS[ii,pairs[0]:pairs[1]]
                if np.max(SegmentForPeakSearching) < mpl[names]:
                    temp_temp_time = time[pairs[0]:pairs[1]]
                    ints_temp = np.argmax(SegmentForPeakSearching)
                    peak_time[names].append(temp_temp_time[ints_temp])
    peak_time['mean']=[];peak_at=[];duration=[]
    RMS_mean=hmean(RMS)
    # apply the same algorithm to the mean of the RMSs
    mph['mean'] = trim_mean(RMS_mean[int(front*sfreq):-int(back*sfreq)],0.05) + lower_threshold * trimmed_std(RMS_mean,0.05)
    mpl['mean'] = trim_mean(RMS_mean[int(front*sfreq):-int(back*sfreq)],0.05) + higher_threshold * trimmed_std(RMS_mean,0.05)
    pass_ =RMS_mean > mph['mean']
    up = np.where(np.diff(pass_.astype(int))>0)
    down= np.where(np.diff(pass_.astype(int))<0)
    up = up[0]
    down = down[0]
    ###############################
    #print(down[0],up[0])
    if down[0] < up[0]:
        down = down[1:]
    #print(down[0],up[0])
    #############################
    if (up.shape > down.shape) or (up.shape < down.shape):
        size = np.min([up.shape,down.shape])
        up = up[:size]
        down = down[:size]
    C = np.vstack((up,down))
    for pairs in C.T:
        
        if l_bound < (time[pairs[1]] - time[pairs[0]]) < h_bound:
            SegmentForPeakSearching = RMS_mean[pairs[0]:pairs[1],]
            if np.max(SegmentForPeakSearching)< mpl['mean']:
                temp_time = time[pairs[0]:pairs[1]]
                ints_temp = np.argmax(SegmentForPeakSearching)
                peak_time['mean'].append(temp_time[ints_temp])
                peak_at.append(SegmentForPeakSearching[ints_temp])
                duration_temp = time[pairs[1]] - time[pairs[0]]
                duration.append(duration_temp) 
    time_find=[];mean_peak_power=[];Duration=[];
    for item,PEAK,duration_time in zip(peak_time['mean'],peak_at,duration):
        temp_timePoint=[]
        for ii, names in enumerate(channelList):
            try:
                temp_timePoint.append(min(enumerate(peak_time[names]), key=lambda x: abs(x[1]-item))[1])
            except:
                temp_timePoint.append(item + 2)
        try:
            if np.sum((abs(np.array(temp_timePoint) - item)<tol).astype(int))>=syn_channels:
                time_find.append(float(item))
                mean_peak_power.append(PEAK)
                Duration.append(duration_time)
        except:
            pass
    ############ the end of the processing in which no other inputs ##
    #### update the spindles we found if we want to add information of sleep stages ######
    if sleep_stage:
        
        temp_time_find=[];temp_mean_peak_power=[];temp_duration=[];
        # separate out stage 2
        stages = annotations[annotations.Annotation.apply(stage_check)]
        On = stages[::2];Off = stages[1::2]
        stage_on_off = list(zip(On.Onset.values, Off.Onset.values))
        if abs(np.diff(stage_on_off[0]) - 30) < 2:
            pass
        else:
            On = stages[1::2];Off = stages[::2]
            stage_on_off = list(zip(On.Onset.values[1:], Off.Onset.values[2:]))
        for single_time_find, single_mean_peak_power, single_duration in zip(time_find,mean_peak_power,Duration):
            for on_time,off_time in stage_on_off:
                if intervalCheck([on_time,off_time],single_time_find,tol=tol):
                    temp_time_find.append(single_time_find)
                    temp_mean_peak_power.append(single_mean_peak_power)
                    temp_duration.append(single_duration)
        time_find=temp_time_find;mean_peak_power=temp_mean_peak_power;Duration=temp_duration
    
    ####### decision function based on spindles we have just found ####
    """
    A single floating representation is computed based on the validation window size (say 3 seconds), and information like peak power densities and peak frequencies are added to the feature space.
    We fit the standandardized features with the labels (spindles found by the automated pipeline)
    A prediction probability is computed using scikit-learn::logisticregression
    """
    decision_features=None;auto_proba=None;auto_label=None
    if proba:
        result = pd.DataFrame({'Onset':time_find,'Duration':Duration,'Annotation':['spindle']*len(Duration)})     
        auto_label,_ = discritized_onset_label_auto(raw,result,validation_windowsize)
        events = mne.make_fixed_length_events(raw,id=1,start=front,stop=raw.times[-1]-back,duration=validation_windowsize)
        epochs = mne.Epochs(raw,events,event_id=1,tmin=0,tmax=validation_windowsize,preload=True)
        data = epochs.get_data()[:,:,:-1]
        full_prop=[]        
        for d in data:    
            temp_p=[]
            #fig,ax = plt.subplots(nrows=2,ncols=3,figsize=(8,8))
            for ii, name in enumerate(channelList):  # ,ax.flatten())):
                rms = window_rms(d[ii,:],500)
                l = trim_mean(rms,0.05) + lower_threshold * trimmed_std(rms,0.05)
                h = trim_mean(rms,0.05) + higher_threshold * trimmed_std(rms,0.05)
                prop = (sum(rms>l)+sum(rms<h))/(sum(rms<h) - sum(rms<l))
                if np.isinf(prop):
                    prop = (sum(rms>l)+sum(rms<h))
                temp_p.append(prop)
                
            
            full_prop.append(temp_p)
        psds,freq = mne.time_frequency.psd_multitaper(epochs,fmin=l_freq,fmax=h_freq,tmin=0,tmax=3,low_bias=True,)
        psds = 10* np.log10(psds)
        features = pd.DataFrame(np.concatenate((np.array(full_prop),psds.max(2),freq[np.argmax(psds,2)]),1))
        decision_features = StandardScaler().fit_transform(features.values,auto_label)
        clf = LogisticRegressionCV(Cs=np.logspace(-4,6,11),cv=5,tol=1e-7,max_iter=int(1e7))
        clf.fit(decision_features,auto_label)
        auto_proba=clf.predict_proba(decision_features)[:,-1]
    return time_find,mean_peak_power,Duration,mph,mpl,auto_proba,auto_label
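# Note: the pipeline above calls window_rms, trimmed_std and intervalCheck,
# which are defined elsewhere in the source module. A minimal sketch of what
# they plausibly do (assumptions, not the original implementations):
import numpy as np

def window_rms(a, window_size):
    # moving-window root-mean-square of a 1-D signal via convolution
    window = np.ones(window_size) / float(window_size)
    return np.sqrt(np.convolve(a ** 2, window, 'same'))

def trimmed_std(a, proportion):
    # standard deviation after trimming `proportion` from each tail
    a = np.sort(np.asarray(a).ravel())
    cut = int(len(a) * proportion)
    return np.std(a[cut:len(a) - cut])

def intervalCheck(interval, t, tol=0):
    # True if time point t falls within [onset, offset] padded by tol
    return interval[0] - tol <= t <= interval[1] + tol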
raw = mne.io.read_raw_ctf(raw_fname, verbose='error')
raw.crop(0, crop_to).load_data().pick_types(meg=True, eeg=False).resample(80)
raw.apply_gradient_compensation(3)
projs_ecg, _ = compute_proj_ecg(raw, n_grad=1, n_mag=2)
projs_eog, _ = compute_proj_eog(raw, n_grad=1, n_mag=2, ch_name='MLT31-4407')
raw.info['projs'] += projs_ecg
raw.info['projs'] += projs_eog
raw.apply_proj()
cov = mne.compute_raw_covariance(raw)  # compute before band-pass of interest

##############################################################################
# Now we band-pass filter our data and create epochs.

raw.filter(14, 30)
events = mne.make_fixed_length_events(raw, duration=5.)
epochs = mne.Epochs(raw, events=events, tmin=0, tmax=5.,
                    baseline=None, reject=dict(mag=8e-13), preload=True)
del raw

##############################################################################
# Compute the forward and inverse
# -------------------------------

# This source space is really far too coarse, but we do this for speed
# considerations here
pos = 15.  # 1.5 cm is very broad, done here for speed!
src = mne.setup_volume_source_space('bst_resting', pos, bem=bem,
                                    subjects_dir=subjects_dir, verbose=True)
fwd = mne.make_forward_solution(epochs.info, trans, src, bem)
data_cov = mne.compute_covariance(epochs)
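# The snippet ends after computing the data covariance; a plausible next step
# (not shown above) is to build and apply an LCMV beamformer using the noise
# covariance computed earlier:
from mne.beamformer import make_lcmv, apply_lcmv_cov

filters = make_lcmv(epochs.info, fwd, data_cov, reg=0.05, noise_cov=cov,
                    pick_ori='max-power')
stc = apply_lcmv_cov(data_cov, filters)  # source-space power estimate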
Code example #47
magFlat = 1e-14
gradFlat = 1000e-15
picks = []
event_id, tmin, tmax = 1, 0.0, 1.0  # for CRM and DFNAM use 1 s epochs; for others use 2 s

data_path = '/home/custine/MEG/data/epi_conn/' + subjID + '/'
raw_fname = data_path + runName + '_raw.fif'
event_fname = data_path + 'eve/' + runName + '.eve'
epoch_fname = data_path + 'eve/' + runName + '-epo.fif'
evoked_fname = data_path + 'ave_projon/' + runName + '-ave.fif'

#########
# # Read raw file
raw = mne.io.Raw(raw_fname)
print(raw.info)
print(raw.info['ch_names'])
picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=True, stim=True, exclude=[])
print(picks)

# # Make fixed-length events spaced every 2 s
events = mne.make_fixed_length_events(raw, 1, start=0, stop=None, duration=2.)
mne.write_events(event_fname, events)
print("Done " + subjID)

epochs = mne.Epochs(raw, events, event_id, tmin, tmax, baseline=(None, 0), picks=picks,
                    proj=True, name='2sec', preload=True,
                    flat=dict(mag=magFlat, grad=gradFlat), reject=None)  # dict(mag=magRej, grad=gradRej, eeg=eegRej)
print(epochs)
epochs.save(epoch_fname)

evoked = epochs.average(picks=None)
evoked.save(evoked_fname)
print(evoked)
Code example #48
                                                        auc_threshold='adapt')
r32 = eegPinelineDesign.detection_pipeline_crossvalidation(raw_32,raw_32.ch_names,
                                                        annotation,1000,
                                                        0.4,3.4,16,
                                                        11,16,f,cv=cv,
                                                        auc_threshold='adapt')
r61 = eegPinelineDesign.detection_pipeline_crossvalidation(raw_61,raw_61.ch_names,
                                                        annotation,1000,
                                                        0.4,3.4,int(61/2),
                                                        11,16,f,cv=cv,
                                                        auc_threshold='adapt')
print(np.mean(r6[3],0),np.mean(r32[3],0),np.mean(r61[3],0))

front=300;back=100
stop = raw_6.times[-1]-back
events = mne.make_fixed_length_events(raw_6,1,start=front,stop=stop,duration=3,)
epochs = mne.Epochs(raw_6,events,1,tmin=0,tmax=3,proj=False,preload=True)
epochs.resample(64)
gold_standard = eegPinelineDesign.read_annotation(raw_6,f)
manual_labels,_ = eegPinelineDesign.discritized_onset_label_manual(raw_6,gold_standard,3)
freqs = np.arange(11,17,1)
n_cycles = freqs / 2.
time_bandwidth = 2.0  # Least possible frequency-smoothing (1 taper)
power = tfr_multitaper(epochs,freqs,n_cycles=n_cycles,time_bandwidth=time_bandwidth,return_itc=False,average=False,)

clf = Pipeline([('vectorizer',Vectorizer()),
                ('scaler',StandardScaler()),
                ('est',exported_pipeline)])

data = power.data
labels = manual_labels
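# A hedged usage sketch (assuming the `cv` splitter defined earlier in this
# script): cross-validate the vectorized TFR features against the manual labels.
from sklearn.model_selection import cross_val_score

scores = cross_val_score(clf, data, labels, cv=cv)
print('CV accuracy: %.3f +/- %.3f' % (scores.mean(), scores.std()))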
Code example #49
def intra(subj):
    '''
    Performs initial computations within subject and returns average PSD and variance of all epochs.
    '''
    print(('Now beginning intra processing on ' + subj + '...\n') * 5)

    # Set function parameters
    fname_label = subjects_dir + '/' + subj + '/' + 'label/%s.label' % label_name
    fname_raw = data_path + subj + '/' + subj + '_rest_raw_sss.fif'
    if os.path.isfile(data_path + subj + '/' + subj + '_rest_raw_sss-ico-4-fwd.fif'):
        fname_fwd = data_path + subj + '/' + subj + '_rest_raw_sss-ico-4-fwd.fif'
    else:
        print('Subject ' + subj + ' does not have a ico-4-fwd.fif on file.')
        return subj, subj, subj  # abort early; fname_fwd would be undefined below

    if label_name.startswith('lh.'):
        hemi = 'left'
    elif label_name.startswith('rh.'):
        hemi = 'right'
    
    # Load data
    label = mne.read_label(fname_label)
    raw = fiff.Raw(fname_raw)
    forward_meg = mne.read_forward_solution(fname_fwd)
    
    # Estimate noise covariance from the raw data
    cov = mne.compute_raw_data_covariance(raw, reject=dict(eog=150e-6))
    write_cov(data_path + subj + '/' + subj + '-cov.fif', cov)
    
    # Make inverse operator
    info = raw.info
    inverse_operator = make_inverse_operator(info, forward_meg, cov, loose=None, depth=0.8)
    
    # Epoch data into 4s intervals
    events = mne.make_fixed_length_events(raw, 1, start=0, stop=None,
                                          duration=4.)
    
    # Set up pick list: (MEG minus bad channels)
    include = []
    exclude = raw.info['bads']
    picks = fiff.pick_types(raw.info, meg=True, eeg=False, stim=False, eog=True, include=include, exclude=exclude)
    
    # Read epochs and remove bad epochs
    epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True, picks=picks, baseline=(None, 0), preload=True, reject=dict(grad=4000e-13, mag=4e-12, eog=150e-6))
    
    # Pull data for averaging later
    epc_array = epochs.get_data()
    
    # Compute the inverse solution
    inv = apply_inverse_epochs(epochs, inverse_operator, lambda2, method, label=label)
    
    #Need to add a line here to automatically create stc directory within subj
    
    epoch_num = 1
    epoch_num_str = str(epoch_num)
    for i in inv:
        # i.save(data_path + subj + '/tmp/' + label_name[3:] + '_rest_raw_sss-oct-6-inv' + epoch_num_str)
        i.save(data_path + subj + '/tmp/' + label_name[3:] + '_rest_raw_sss-ico-4-inv' + epoch_num_str)
        epoch_num = epoch_num + 1
        epoch_num_str = str(epoch_num)
    
    # The following is used to remove the empty opposing hemisphere files
    # and then move the files to save into the appropriate directory
    
    if hemi == 'left':
        filelist = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-rh.stc")]
        for f in filelist:
            os.remove(data_path + subj + '/tmp/' + f)
        keepers = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-lh.stc")]
        for f in keepers:
            src = f
            os.rename(data_path + subj + '/tmp/' + src, data_path + subj + '/inv/' + src)

    elif hemi == 'right':
        filelist = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-lh.stc")]
        for f in filelist:
            os.remove(data_path + subj + '/tmp/' + f)
        keepers = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-rh.stc")]
        for f in keepers:
            src = f
            os.rename(data_path + subj + '/tmp/' + src, data_path + subj + '/inv/' + src)
    
    
    # define frequencies of interest
    bandwidth = 4.  # bandwidth of the windows in Hz
    
    # compute source space psd in label
    
    # Note: By using "return_generator=True" stcs will be a generator object
    # instead of a list. This allows us so to iterate without having to
    # keep everything in memory.
    
    psd = compute_source_psd_epochs(epochs, inverse_operator, lambda2=lambda2,
                                     method=method, fmin=fmin, fmax=fmax,
                                     bandwidth=bandwidth, label=label, return_generator=False)
    
    epoch_num = 1
    epoch_num_str = str(epoch_num)
    for i in psd:
        i.save(data_path + subj + '/' + 'tmp' + '/' + label_name[3:] + '_dspm_snr-1_PSD' + epoch_num_str)
        epoch_num = epoch_num + 1
        epoch_num_str = str(epoch_num)
    
    if hemi == 'left':
        filelist = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-rh.stc")]
        for f in filelist:
            os.remove(data_path + subj + '/tmp/' + f)
        keepers = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-lh.stc")]
        for f in keepers:
            src = f
            os.rename(data_path + subj + '/tmp/' + src, data_path + subj + '/psd/' + src)

    elif hemi == 'right':
        filelist = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-lh.stc")]
        for f in filelist:
            os.remove(data_path + subj + '/tmp/' + f)
        keepers = [f for f in os.listdir(data_path + subj + '/tmp') if f.endswith("-rh.stc")]
        for f in keepers:
            src = f
            os.rename(data_path + subj + '/tmp/' + src, data_path + subj + '/psd/' + src)
   
 
    # This block computes the average PSD across epochs. Each PSD file is an
    # array of shape n_vertices x n_frequencies; we first average the PSD
    # values across vertices within each epoch, then average those per-epoch
    # PSDs, yielding one average PSD value per frequency across epochs.
    
    n_epochs = len(epc_array)
    for i, stc in enumerate(psd):
        if i >= n_epochs:
            break
    
        if i == 0:
            psd_avg = np.mean(stc.data, axis=0)
        else:
            psd_avg += np.mean(stc.data, axis=0)
    
    print('Length of psd for subject ' + subj + ' is ' + str(len(psd)) + '.')
    print('Number of epochs for subject ' + subj + ' is ' + str(n_epochs) + '.')
   
    if len(psd) != 0:
        psd_avg /= n_epochs
    
    # Compute variance for each epoch and then variance across epochs 
    
    n_epochs = len(epc_array)
    for i, stc in enumerate(psd):
        if i >= n_epochs:
            break

        if i == 0:
            psd_var = np.var(stc.data, axis=0)
        else:
            psd_var = np.vstack((psd_var, np.var(stc.data, axis=0)))
    
    if len(psd) >= 2:
        tot_var = np.var(psd_var, axis=0)

    if len(psd) <= 1:
        failed_subj = subj
        print(failed_subj + ' failed. No PSD values calculated, likely because all epochs were rejected.')
        return failed_subj, failed_subj, failed_subj

    if len(psd) >= 2:
        return (psd_avg, tot_var, len(psd_avg))
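# Hypothetical usage (the subject ID below is illustrative; the function also
# relies on module-level parameters such as data_path, label_name, lambda2,
# method, fmin and fmax defined elsewhere in the script):
psd_avg, tot_var, n_freqs = intra('subj01')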
Code example #50
File: test_event.py Project: TalLinzen/mne-python
def test_make_fixed_length_events():
    """Test making events of a fixed length
    """
    raw = fiff.Raw(raw_fname)
    events = make_fixed_length_events(raw, id=1)
    assert_true(events.shape[1], 3)
def thresholding_filterbased_spindle_searching(raw,channelList,annotations,moving_window_size=200,lower_threshold=.9,
                                        syn_channels=3,l_bound=0.5,h_bound=2,tol=1,higher_threshold=3.5,
                                        front=300,back=100,sleep_stage=True,proba=False,validation_windowsize=3):
    
    
    time=np.linspace(0,raw.last_samp/raw.info['sfreq'],raw._data[0,:].shape[0])
    RMS = np.zeros((len(channelList),raw._data[0,:].shape[0]))
    peak_time={} #preallocate
    sfreq=raw.info['sfreq']
    mph,mpl = {},{}

    for ii, names in enumerate(channelList):

        peak_time[names]=[]
        segment,_ = raw[ii,:]
        RMS[ii,:] = window_rms(segment[0,:],moving_window_size) 
        mph[names] = trim_mean(RMS[ii,int(front*sfreq):-int(back*sfreq)],0.05) + lower_threshold * trimmed_std(RMS[ii,:],0.05) 
        mpl[names] = trim_mean(RMS[ii,int(front*sfreq):-int(back*sfreq)],0.05) + higher_threshold * trimmed_std(RMS[ii,:],0.05)
        pass_ = RMS[ii,:] > mph[names]  # should be greater than the mean, not the threshold, to compute duration

        up = np.where(np.diff(pass_.astype(int))>0)
        down = np.where(np.diff(pass_.astype(int))<0)
        up = up[0]
        down = down[0]
        ###############################
        #print(down[0],up[0])
        if down[0] < up[0]:
            down = down[1:]
        #print(down[0],up[0])
        #############################
        if up.shape != down.shape:  # trim to equal length
            size = np.min([up.shape,down.shape])
            up = up[:size]
            down = down[:size]
        C = np.vstack((up,down))
        for pairs in C.T:
            if l_bound < (time[pairs[1]] - time[pairs[0]]) < h_bound:
                SegmentForPeakSearching = RMS[ii,pairs[0]:pairs[1]]
                if np.max(SegmentForPeakSearching) < mpl[names]:
                    temp_temp_time = time[pairs[0]:pairs[1]]
                    ints_temp = np.argmax(SegmentForPeakSearching)
                    peak_time[names].append(temp_temp_time[ints_temp])
                    
        

    peak_time['mean']=[];peak_at=[];duration=[]
    RMS_mean=hmean(RMS)
    
    mph['mean'] = trim_mean(RMS_mean[int(front*sfreq):-int(back*sfreq)],0.05) + lower_threshold * trimmed_std(RMS_mean,0.05)
    mpl['mean'] = trim_mean(RMS_mean[int(front*sfreq):-int(back*sfreq)],0.05) + higher_threshold * trimmed_std(RMS_mean,0.05)
    pass_ =RMS_mean > mph['mean']
    up = np.where(np.diff(pass_.astype(int))>0)
    down= np.where(np.diff(pass_.astype(int))<0)
    up = up[0]
    down = down[0]
    ###############################
    #print(down[0],up[0])
    if down[0] < up[0]:
        down = down[1:]
    #print(down[0],up[0])
    #############################
    if up.shape != down.shape:  # trim to equal length
        size = np.min([up.shape,down.shape])
        up = up[:size]
        down = down[:size]
    C = np.vstack((up,down))
    for pairs in C.T:
        
        if l_bound < (time[pairs[1]] - time[pairs[0]]) < h_bound:
            SegmentForPeakSearching = RMS_mean[pairs[0]:pairs[1],]
            if np.max(SegmentForPeakSearching)< mpl['mean']:
                temp_time = time[pairs[0]:pairs[1]]
                ints_temp = np.argmax(SegmentForPeakSearching)
                peak_time['mean'].append(temp_time[ints_temp])
                peak_at.append(SegmentForPeakSearching[ints_temp])
                duration_temp = time[pairs[1]] - time[pairs[0]]
                duration.append(duration_temp) 
            
        
    time_find=[];mean_peak_power=[];Duration=[];
    for item,PEAK,duration_time in zip(peak_time['mean'],peak_at,duration):
        temp_timePoint=[]
        for ii, names in enumerate(channelList):
            try:
                temp_timePoint.append(min(enumerate(peak_time[names]), key=lambda x: abs(x[1]-item))[1])
            except:
                temp_timePoint.append(item + 2)
        try:
            if np.sum((abs(np.array(temp_timePoint) - item)<tol).astype(int))>=syn_channels:
                time_find.append(float(item))
                mean_peak_power.append(PEAK)
                Duration.append(duration_time)
        except:
            pass
    if sleep_stage:
        
        temp_time_find=[];temp_mean_peak_power=[];temp_duration=[];
        # separate out stage 2
        stages = annotations[annotations.Annotation.apply(stage_check)]
        On = stages[::2];Off = stages[1::2]
        stage_on_off = list(zip(On.Onset.values, Off.Onset.values))
        if abs(np.diff(stage_on_off[0]) - 30) < 2:
            pass
        else:
            On = stages[1::2];Off = stages[::2]
            stage_on_off = list(zip(On.Onset.values[1:], Off.Onset.values[2:]))
        for single_time_find, single_mean_peak_power, single_duration in zip(time_find,mean_peak_power,Duration):
            for on_time,off_time in stage_on_off:
                if intervalCheck([on_time,off_time],single_time_find,tol=tol):
                    temp_time_find.append(single_time_find)
                    temp_mean_peak_power.append(single_mean_peak_power)
                    temp_duration.append(single_duration)
        time_find=temp_time_find;mean_peak_power=temp_mean_peak_power;Duration=temp_duration
    
    result = pd.DataFrame({'Onset':time_find,'Duration':Duration,'Annotation':['spindle']*len(Duration)})     
    auto_label,_ = discritized_onset_label_auto(raw,result,validation_windowsize)
    decision_features=None;auto_proba=None  # initialize so the return works when proba=False
    if proba:
        events = mne.make_fixed_length_events(raw,id=1,start=0,duration=validation_windowsize)
        epochs = mne.Epochs(raw,events,event_id=1,tmin=0,tmax=validation_windowsize,preload=True)
        data = epochs.get_data()[:,:,:-1]
        full_prop=[]        
        for d in data:    
            temp_p=[]
            #fig,ax = plt.subplots(nrows=2,ncols=3,figsize=(8,8))
            for ii, name in enumerate(channelList):  # ,ax.flatten())):
                rms = window_rms(d[ii,:],500)
                l = trim_mean(rms,0.05) + lower_threshold * trimmed_std(rms,0.05)
                h = trim_mean(rms,0.05) + higher_threshold * trimmed_std(rms,0.05)
                prop = (sum(rms>l)+sum(rms<h))/(sum(rms<h) - sum(rms<l))
                if np.isinf(prop):  # guard against a zero denominator
                    prop = (sum(rms>l)+sum(rms<h))
                temp_p.append(prop)
                
            
            full_prop.append(temp_p)
        psds,freq = mne.time_frequency.psd_multitaper(epochs,fmin=11,fmax=16,tmin=0,tmax=3,low_bias=True,)
        psds = 10* np.log10(psds)
        features = pd.DataFrame(np.concatenate((np.array(full_prop),psds.max(2),freq[np.argmax(psds,2)]),1))
        decision_features = StandardScaler().fit_transform(features.values,auto_label)
        clf = LogisticRegressionCV(Cs=np.logspace(-4,6,11),cv=5,tol=1e-7,max_iter=int(1e7))
        clf.fit(decision_features,auto_label)
        auto_proba=clf.predict_proba(decision_features)[:,-1]
            
    return time_find,mean_peak_power,Duration,mph,mpl,auto_proba,auto_label
Code example #52
def compute_plv_pli_cc(raw,duration,plv_threshold_set,pli_threshold_set,cc_threshold_set,labels,fmin=11,fmax=16):
    # make events
    event_array = mne.make_fixed_length_events(raw,id=1,duration=float(duration))
    event_array[:,-1] = np.arange(1,len(event_array)+1)
    event_array[:,1] = duration * raw.info['sfreq']
    # make epochs
    tmin, tmax = -duration * 0.2, duration  # 20% overlap with the previous epoch
    epochs = mne.Epochs(raw,event_array,tmin=tmin,tmax=tmax,
                       baseline=None,preload=True,proj=False)
    sfreq = raw.info['sfreq']
    
    
    features = ['mean','variance','delta_mean',
          'delta variance','change variance',
         'activity','mobility','complexity',
         'spectral_entropy']#,'time_stamp']
    # initialize folder     
    if not os.path.exists('epoch_COH_%.2f'%duration):
        os.makedirs('epoch_COH_%.2f'%duration)
        
    if not os.path.exists('epoch_PLI_%.2f'%duration):
        os.makedirs('epoch_PLI_%.2f'%duration)
    
    if not os.path.exists('epoch_PLV_%.2f'%duration):
        os.makedirs('epoch_PLV_%.2f'%duration)
    
    
    epochFeatures = {name:[] for name in features}
    time_list=[]
    con_methods=['coh','plv','pli']
    # the entire pipeline
    for ii,epoch_data in enumerate(epochs):
        # easiest way to compute coh
        temp_connection,freqs,times,n_epochs,n_tapers=mne.connectivity.spectral_connectivity(
            epochs[str(ii+2)],
            method='coh',
            mode='multitaper',
            sfreq=sfreq,
            fmin=fmin,
            fmax=fmax,
            faverage=True,
                                            )
        
        temp_connection = [temp_connection[:,:,0]]
        time_list.append(epochs[str(ii+2)].events[0][0])
        print('computing features for epoch %d'%(ii+1))
        #epochFeatures['time_stamp'].append
        epochFeatures['mean'].append(np.mean(epoch_data))
        epochFeatures['variance'].append(np.var(epoch_data))
        epochFeatures['delta_mean'].append(np.mean(-np.diff(epoch_data,axis=1)))
        epochFeatures['delta variance'].append(np.var(np.mean(-np.diff(epoch_data,axis=1))))
        if ii == 0:
            epochFeatures['change variance'].append(0)
        elif ii == 1:
            epochFeatures['change variance'].append(np.mean(np.var(epoch_data - epochFeatures['mean'][ii-1])))
        else:
            epochFeatures['change variance'].append(np.mean(np.var(epoch_data - epochFeatures['mean'][ii-1] - epochFeatures['mean'][ii-2])))  # subtract the two previous epoch means

        activity = np.var(epoch_data)
        epochFeatures['activity'].append(activity)
        tempData = -np.diff(epoch_data,axis=1)
        mobility = np.std(tempData)/np.sqrt(activity)
        epochFeatures['mobility'].append(mobility)

        startRange = epoch_data[:,:-2]
        endRange = epoch_data[:,2:]
        tempData = endRange - startRange
        complexity = (np.std(tempData)/(np.std(-np.diff(epoch_data,axis=1)))) /((np.std(-np.diff(epoch_data,axis=1)))/np.sqrt(activity))
        epochFeatures['complexity'].append(complexity)

        specEnt = np.zeros(shape=(len(raw.ch_names),))
        for ch in range(len(raw.ch_names)):
            ampSpec = np.abs(np.fft.fft(epoch_data[ch,:])) / np.sum(np.abs(np.fft.fft(epoch_data[ch,:])))
            specEnt[ch] = -np.sum(ampSpec * np.log2(ampSpec))
        epochFeatures['spectral_entropy'].append(np.mean(specEnt))

        dist_list_plv = np.zeros(shape=(len(raw.ch_names),len(raw.ch_names)))
        dist_list_pli = np.zeros(shape=(len(raw.ch_names),len(raw.ch_names)))
        for node_1 in range(len(raw.ch_names)):
            for node_2 in range(len(raw.ch_names)):
                if node_1 != node_2:
                    data_1 = epoch_data[node_1,:]
                    data_2 = epoch_data[node_2,:]
                    PLV=phase_locking_value(np.angle(signal.hilbert(data_1,axis=0)),
                                             np.angle(signal.hilbert(data_2,axis=0)))
                    dist_list_plv[node_1,node_2]=np.abs(np.mean(PLV))
                    PLI=np.angle(signal.hilbert(data_1,axis=0))-np.angle(signal.hilbert(data_2,axis=0))
                    dist_list_pli[node_1,node_2]=np.abs(np.mean(np.sign(PLI)))

        temp_connection.append(dist_list_plv)        
        temp_connection.append(dist_list_pli)
        
        try:
            if labels[ii+2] ==1:
                title_label='spindle'
            else:
                title_label=''
        except:
            title_label=''
           
        #fig,ax = plt.subplots(figsize=(10,25))
        con_res=dict()
        for method, c in zip(con_methods,temp_connection):
            con_res[method] = c
        colors=plt.cm.rainbow(np.linspace(0,1,len(raw.ch_names)))
        onset_sec = epochs[str(ii+2)].events[0][0] / raw.info['sfreq']
        time_plot = np.linspace(onset_sec - 0.2 * duration,
                                onset_sec + duration,
                                epoch_data.shape[1])  # matches tmin=-0.2*duration, tmax=duration
        for plv_threshold,pli_threshold,cc_threshold in zip(plv_threshold_set,pli_threshold_set,cc_threshold_set):
            thresholds = {'plv':plv_threshold,'pli':pli_threshold,'coh':cc_threshold}
            for jj, method in enumerate(con_methods):
                fig,ax = plt.subplots(figsize=(16,16))
                mne.viz.plot_connectivity_circle(np.array(con_res[method]>thresholds[method],dtype=int),raw.ch_names,fig=fig,show=False,
                                                 title='%s,threshold:%.2f,%s'%(method,thresholds[method],title_label),facecolor='black',textcolor='white',
                                                                     colorbar=False,fontsize_title=22,fontsize_names=22,
                                                                     subplot=221,node_colors=colors,
                                                 )
                adjacency_df = pd.DataFrame(np.array(con_res[method]>thresholds[method],dtype=int),columns=np.arange(1,len(raw.ch_names)+1))
                adjacency_df.to_csv('epoch_%s_%.2f\\epoch_%d_%.2f(%s).csv'%(method.upper(),duration,ii+1,thresholds[method],title_label))
                
                axes = fig.add_subplot(212)
                ax   = fig.add_subplot(222)
                for kk,(name,color) in enumerate(zip(raw.ch_names,colors)):
                    ax.plot(time_plot,epoch_data[kk,:],label=name,color=color)
                    band_pass_data = mne.filter.filter_data(epoch_data[kk,:],raw.info['sfreq'],fmin,fmax)
                    axes.plot(time_plot,band_pass_data,label=name,color=color)
                ax.legend(loc='upper right')
                ax.set_title('%.2f-%.2f sec %s'%(time_plot.min(),time_plot.max(),title_label),color='w')
                ax.set_xlabel('Time',color='w')
                ax.set_ylabel(r'$\mu$V', color='w')
                plt.setp(plt.getp(ax, 'yticklabels'), color='w') #set yticklabels color
                plt.setp(plt.getp(ax, 'xticklabels'), color='w') #set xticklabels color
                axes.legend(loc='upper right')
                axes.set_title('band pass data %.2f-%.2f Hz %s'%(fmin,fmax,title_label),color='w')
                axes.set_xlabel('Time',color='w')
                axes.set_ylabel(r'$\mu$V', color='w')
                plt.setp(plt.getp(axes, 'yticklabels'), color='w') #set yticklabels color
                plt.setp(plt.getp(axes, 'xticklabels'), color='w') #set xticklabels color
                
                

                fig.set_facecolor('black')
                fig.savefig('epoch_%s_%.2f\\epoch_%d_%.2f(%s).png'%(method.upper(),duration,ii+1,thresholds[method],title_label),
                                                          facecolor=fig.get_facecolor(), edgecolor='none')
                plt.close('all') 
        #connection.append(temp_connection)

    epochFeatures = pd.DataFrame(epochFeatures)
    a,b=mne.time_frequency.psd_multitaper(epochs,fmin=8,fmax=16)
    epochFeatures['skewness_of_amplitude_spectrum']=np.mean(stats.skew(a,axis=2),1)
    epochFeatures['spindle']=labels[1:]
    

    return epochFeatures
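# phase_locking_value is not defined in this snippet. Given the call site
# (np.abs(np.mean(PLV))), it plausibly returns the complex unit vector of the
# instantaneous phase difference per sample; a minimal sketch under that
# assumption:
import numpy as np

def phase_locking_value(theta1, theta2):
    # per-sample complex phase difference; the caller averages these and
    # takes the magnitude to obtain the PLV
    return np.exp(1j * (theta1 - theta2))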
Code example #53
evoked_data = np.mean(epochs_data, axis=0)

evokeds = mne.EvokedArray(evoked_data, info=info, tmin=-0.2,
                          comment='Arbitrary', nave=nave)
evokeds.plot(picks=picks, show=True, units={'mag': '-'},
             titles={'mag': 'sin and cos averaged'}, time_unit='s')

###############################################################################
# Create epochs by windowing the raw data.

# The events are spaced evenly every 1 second.
duration = 1.

# create a fixed size events array
# start=0 and stop=None by default
events = mne.make_fixed_length_events(raw, event_id, duration=duration)
print(events)

# for fixed size events no start time before and after event
tmin = 0.
tmax = 0.99  # inclusive tmax, 1 second epochs

# create :class:`Epochs <mne.Epochs>` object
epochs = mne.Epochs(raw, events=events, event_id=event_id, tmin=tmin,
                    tmax=tmax, baseline=None, verbose=True)
epochs.plot(scalings='auto', block=True)

###############################################################################
# Create overlapping epochs using :func:`mne.make_fixed_length_events` (50 %
# overlap). This also roughly doubles the amount of events compared to the
# previous event list.
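# The snippet ends before the overlap call it describes; a plausible version
# using the overlap parameter of mne.make_fixed_length_events:
events_overlap = mne.make_fixed_length_events(raw, event_id, duration=duration,
                                              overlap=duration / 2.)
print(len(events_overlap), 'overlapping events vs', len(events), 'non-overlapping')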
Code example #54
File: test_cov.py Project: jhouck/mne-python
def test_low_rank():
    """Test low-rank covariance matrix estimation."""
    raw = read_raw_fif(raw_fname).set_eeg_reference(projection=True).crop(0, 3)
    raw = maxwell_filter(raw, regularize=None)  # heavily reduce the rank
    sss_proj_rank = 139  # 80 MEG + 60 EEG - 1 proj
    n_ch = 366
    proj_rank = 365  # one EEG proj
    events = make_fixed_length_events(raw)
    methods = ('empirical', 'diagonal_fixed', 'oas')
    epochs = Epochs(raw, events, tmin=-0.2, tmax=0, preload=True)
    bounds = {
        'None': dict(empirical=(-6000, -5000),
                     diagonal_fixed=(-1500, -500),
                     oas=(-700, -600)),
        'full': dict(empirical=(-9000, -8000),
                     diagonal_fixed=(-2000, -1600),
                     oas=(-1600, -1000)),
    }
    for rank in ('full', None):
        covs = compute_covariance(
            epochs, method=methods, return_estimators=True,
            verbose='error', rank=rank)
        for cov in covs:
            method = cov['method']
            these_bounds = bounds[str(rank)][method]
            this_rank = _cov_rank(cov, epochs.info)
            if rank is None or method == 'empirical':
                assert this_rank == sss_proj_rank
            else:
                assert this_rank == proj_rank
            assert these_bounds[0] < cov['loglik'] < these_bounds[1], \
                (rank, method)
            if method == 'empirical':
                emp_cov = cov  # save for later, rank param does not matter

    # Test equivalence with mne.cov.regularize subspace
    with pytest.raises(ValueError, match='are dependent.*must equal'):
        regularize(emp_cov, epochs.info, rank=None, mag=0.1, grad=0.2)
    assert _cov_rank(emp_cov, epochs.info) == sss_proj_rank
    reg_cov = regularize(emp_cov, epochs.info, proj=True, rank='full')
    assert _cov_rank(reg_cov, epochs.info) == proj_rank
    del reg_cov
    with catch_logging() as log:
        reg_r_cov = regularize(emp_cov, epochs.info, proj=True, rank=None,
                               verbose=True)
    log = log.getvalue()
    assert 'jointly' in log
    assert _cov_rank(reg_r_cov, epochs.info) == sss_proj_rank
    reg_r_only_cov = regularize(emp_cov, epochs.info, proj=False, rank=None)
    assert _cov_rank(reg_r_only_cov, epochs.info) == sss_proj_rank
    assert_allclose(reg_r_only_cov['data'], reg_r_cov['data'])
    del reg_r_only_cov, reg_r_cov

    # test that rank=306 is same as rank='full'
    epochs_meg = epochs.copy().pick_types()
    assert len(epochs_meg.ch_names) == 306
    epochs_meg.info.update(bads=[], projs=[])
    cov_full = compute_covariance(epochs_meg, method='oas',
                                  rank='full', verbose='error')
    assert _cov_rank(cov_full, epochs_meg.info) == 306
    cov_dict = compute_covariance(epochs_meg, method='oas',
                                  rank=306, verbose='error')
    assert _cov_rank(cov_dict, epochs_meg.info) == 306
    assert_allclose(cov_full['data'], cov_dict['data'])

    # Work with just EEG data to simplify projection / rank reduction
    raw.pick_types(meg=False, eeg=True)
    n_proj = 2
    raw.add_proj(compute_proj_raw(raw, n_eeg=n_proj))
    n_ch = len(raw.ch_names)
    rank = n_ch - n_proj - 1  # plus avg proj
    assert len(raw.info['projs']) == 3
    epochs = Epochs(raw, events, tmin=-0.2, tmax=0, preload=True)
    assert len(raw.ch_names) == n_ch
    emp_cov = compute_covariance(epochs, rank='full', verbose='error')
    assert _cov_rank(emp_cov, epochs.info) == rank
    reg_cov = regularize(emp_cov, epochs.info, proj=True, rank='full')
    assert _cov_rank(reg_cov, epochs.info) == rank
    reg_r_cov = regularize(emp_cov, epochs.info, proj=False, rank=None)
    assert _cov_rank(reg_r_cov, epochs.info) == rank
    dia_cov = compute_covariance(epochs, rank=None, method='diagonal_fixed',
                                 verbose='error')
    assert _cov_rank(dia_cov, epochs.info) == rank
    assert_allclose(dia_cov['data'], reg_cov['data'])
    # test our deprecation: can simply remove later
    epochs.pick_channels(epochs.ch_names[:103])
    # degenerate
    with pytest.raises(ValueError, match='can.*only be used with rank="full"'):
        compute_covariance(epochs, rank=None, method='pca')
    with pytest.raises(ValueError, match='can.*only be used with rank="full"'):
        compute_covariance(epochs, rank=None, method='factor_analysis')
Code example #55
# define parameters
fname = data_path() + '/SubjectCMC.ds'
raw = mne.io.read_raw_ctf(fname)
raw.crop(50., 250.).load_data()  # crop for memory purposes

# Filter muscular activity to only keep high frequencies
emg = raw.copy().pick_channels(['EMGlft'])
emg.filter(20., None)

# Filter MEG data to focus on alpha band
raw.pick_types(meg=True, ref_meg=True, eeg=False, eog=False)
raw.filter(15., 30., method='iir')

# Build epochs as sliding windows over the continuous raw file
events = mne.make_fixed_length_events(raw, id=1, duration=.250)

# Epoch length is 1.5 seconds
meg_epochs = Epochs(raw, events, tmin=0., tmax=1.500, baseline=None,
                    detrend=1, decim=8)
emg_epochs = Epochs(emg, events, tmin=0., tmax=1.500, baseline=None)

# Prepare classification
X = meg_epochs.get_data()
y = emg_epochs.get_data().var(axis=2)[:, 0]  # target is EMG power

# Classification pipeline with SPoC spatial filtering and Ridge Regression
clf = make_pipeline(SPoC(n_components=2, log=True, reg='oas'), Ridge())

# Define a two fold cross-validation
cv = KFold(n_splits=2, shuffle=False)
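# A plausible continuation (not part of the original snippet): predict EMG
# power across the two folds and check agreement with the true target.
import numpy as np
from sklearn.model_selection import cross_val_predict

y_preds = cross_val_predict(clf, X, y, cv=cv)
print('corr(pred, true) = %.3f' % np.corrcoef(y_preds, y)[0, 1])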
Code example #56
if not os.path.exists('example'):
    os.makedirs('example')
os.chdir('example')
raw_file = 'D:\\NING - spindle\\training set\\suj29_l5nap_day1.fif'
annotation_file = 'D:\\NING - spindle\\training set\\suj29_nap_day1_edited_annotations.txt'

raw = mne.io.read_raw_fif(raw_file,preload=True,)
annotation = pd.read_csv(annotation_file)
montage = "standard_1020"
montage = mne.channels.read_montage(montage)
raw.set_montage(montage)
raw.set_channel_types({'LOc':'eog','ROc':'eog'})
channelList = ['F3','F4','C3','C4','O1','O2']
raw.pick_channels(channelList)
picks = mne.pick_types(raw.info,meg=False,eeg=True,eog=False,stim=False)
raw.notch_filter(np.arange(60,241,60),picks=picks,filter_length='auto',phase='zero')
raw.filter(1,40,filter_length='auto',phase='zero',h_trans_bandwidth='auto')


for duration in epoch_set:
    
    gold_standard = read_annotation(raw,annotation)
    manual_labels = discritized_onset_label_manual(raw,gold_standard,duration)
    event_array = mne.make_fixed_length_events(raw,id=1,duration=float(duration))
    #event_array[:,1] = duration * raw.info['sfreq']
    #event_array[:,-1] = np.arange(1,len(event_array)+1)
    epochFeatures=compute_plv_pli_cc(raw,duration,plv_threshold_set,pli_threshold_set,cc_threshold_set,manual_labels)
    if not os.path.exists('feature'):
        os.makedirs('feature')
    epochFeatures.to_csv('feature\\feature_%.2f.csv'%(duration))