Ejemplo n.º 1
0
def test_baseline():
    """Check apply_baseline corrections (default, zscore, percent, dB, normchange)."""
    dset = Dataset(micromed_file)
    onset = [m['start'] for m in dset.read_markers()][1]
    first_two_chans = dset.header['chan_name'][:2]
    data = dset.read_data(events=onset, pre=1, post=1, chan=first_two_chans)

    t_win = (-.5, -.1)

    def mean_over_time(corrected):
        # average the corrected signal within the baseline window
        return math(select(corrected, time=t_win), operator_name='mean', axis='time')

    out = apply_baseline(data, time=t_win)
    assert_array_almost_equal(mean_over_time(out)(trial=0), array([1, 1]))

    out = apply_baseline(data, time=t_win, baseline='zscore')
    assert_array_almost_equal(mean_over_time(out)(trial=0), array([0, 0]))

    out = apply_baseline(data, time=t_win, baseline='percent')
    assert_array_almost_equal(mean_over_time(out)(trial=0), array([0, 0]))

    f_win = (10, 15)
    tf = frequency(data)
    out = apply_baseline(tf, freq=f_win, baseline='dB')
    mout = math(select(out, freq=f_win), operator_name='mean', axis='freq')
    assert_array_almost_equal(mout(trial=0), array([0, 0]))

    out = apply_baseline(tf, freq=f_win, baseline='normchange')
    # normchange is bounded in [-1, 1]
    assert out.data[0].max() <= 1
    assert out.data[0].min() >= -1
Ejemplo n.º 2
0
def test_blackrock_ns4_02():
    """Reading past the end of a Blackrock ns4 file pads the tail with NaN."""
    # the original constructed the Dataset twice back to back; once is enough
    d = Dataset(ns4_file)
    n_smp = d.header['n_samples']
    data = d.read_data(begsam=n_smp - 1, endsam=n_smp + 10)
    # the last 10 requested samples fall beyond the recording
    assert isnan(data.data[0][0, -10:]).all()
def test_wrong_variable_name():
    """Both fieldtrip and hdf5 readers raise KeyError on an unknown variable."""
    fieldtrip.VAR = 'unknown'
    for bad_file in (fieldtrip_file, hdf5_file):
        with raises(KeyError):
            Dataset(bad_file)
Ejemplo n.º 4
0
def test_bci2000_data():
    """The bci2000 fixture exists, has no markers and a known first sample."""
    assert bci2000_file.exists()

    dset = Dataset(bci2000_file)
    assert not dset.read_markers()

    first_sample = dset.read_data().data[0][0, 0]
    assert first_sample == 179.702
def test_write_read_fieldtrip():
    """Round-trip: export to FieldTrip format, read back identical data."""
    original = create_data(n_trial=1, n_chan=2)
    original.export(fieldtrip_file, export_format='fieldtrip')

    reread = Dataset(fieldtrip_file)
    assert_array_equal(original.data[0], reread.read_data().data[0])
    assert len(reread.read_markers()) == 0
def test_openephys_markers():
    """Session 1 of the openephys recording carries ten known markers."""
    mrk = Dataset(filename, session=1).read_markers()

    assert len(mrk) == 10
    assert mrk[0]['name'] == 'START RECORDING #0'
    assert mrk[7]['name'] == 'Network Event'
    assert mrk[7]['end'] == 95.402
    assert mrk[-1]['name'] == 'END RECORDING #1'
Ejemplo n.º 7
0
def test_dataset_events():
    """Epoching on every 30th marker yields 512-sample trials."""
    dset = Dataset(micromed_file)
    onsets = [m['start'] for m in dset.read_markers()][::30]

    data = dset.read_data(events=onsets)

    n_times = data.time[0].shape[0]
    assert n_times == 512
    assert n_times == data.data[0].shape[1]
    assert (data.number_of('time') == 512).all()
def test_brainvision_dataset_01():
    """BrainVision reading: known first sample, marker count, NaN padding."""
    dset = Dataset(brainvision_dir / 'test.vhdr')

    segment = dset.read_data(begtime=1, endtime=2)
    assert segment.data[0][0, 0] == -24.0
    assert len(dset.read_markers()) == 12

    # a negative begsam must be padded with NaN at the start
    padded = dset.read_data(begsam=-1, endsam=5)
    assert isnan(padded.data[0][0, 0])
Ejemplo n.º 9
0
def test_ioeeg_mobert_end():
    """Reading across the end of the Moberg file: last sample real, rest NaN."""
    dset = Dataset(moberg_file)
    last = dset.header['n_samples']
    data = dset.read_data(begsam=last - 1, endsam=last + 1)

    fp1 = data(trial=0, chan='Fp1')
    assert fp1[0] == -12302.22384929657
    assert isnan(fp1[1])

    assert not dset.read_markers()
def test_brainvision_dataset_02():
    """Old-layout latin1 BrainVision file: values, NaN padding, full shape."""
    dset = Dataset(brainvision_dir / 'test_old_layout_latin1_software_filter.vhdr')

    one_sample = dset.read_data(begsam=1, endsam=2)
    assert_almost_equal(one_sample.data[0][0, 0], 5.1)

    # requesting beyond the recording pads the tail with NaN
    beyond = dset.read_data(endsam=255)
    assert isnan(beyond.data[0][0, -1])

    full = dset.read_data()
    assert full.data[0].shape == (29, 251)
Ejemplo n.º 11
0
def test_xltek_videos():
    """XLTEK video lookup: early interval raises, later interval returns two files."""
    dset = Dataset(ktlx_file)

    with raises(IndexError):
        dset.read_videos(0, 2)

    videos, beg, end = dset.read_videos(100, 200)
    assert len(videos) == 2
    assert beg == 58.410209
    assert end == 37.177808
Ejemplo n.º 12
0
def test_micromed_header():
    """Micromed header fields, markers, boundary reads and video error."""
    dset = Dataset(micromed_file)

    names = dset.header['chan_name']
    assert names[-1] == 'EMG'
    assert names[:5] == ['FP1', 'FP2', 'AF7', 'AF3', 'AFz']
    assert dset.header['subj_id'] == 'Tp_metrologie Tp_metrologie'
    assert dset.header['orig']['dvideo_begin'] == iinfo('u4').max

    mrk = dset.read_markers()
    assert len(mrk) == 444
    assert mrk[0]['name'] == '1'
    assert mrk[0]['start'] == mrk[0]['end']

    pair = ('FP1', 'AFz')
    data = dset.read_data(chan=pair, begsam=10, endsam=20)
    assert data.data[0][1, 5] == -334.27734375

    # reads that extend before the start are NaN-padded at the front
    data = dset.read_data(chan=pair, begsam=-10, endsam=20)
    assert all(isnan(data.data[0][1, :10]))

    # reads that extend past the end are NaN-padded at the back
    data = dset.read_data(chan=pair, begsam=900, endsam=1010)
    assert all(isnan(data.data[0][1, -10:]))

    # a read entirely outside the recording is all NaN
    data = dset.read_data(chan=pair, begsam=-10, endsam=-2)
    assert all(isnan(data.data[0][1, :]))

    with raises(OSError):
        dset.read_videos(10, 20)
Ejemplo n.º 13
0
def test_fetch():
    """fetch() segment counts for locked epochs, concatenation and NREM1."""
    dset = Dataset(str(gui_file))
    notes = Annotations(str(annot_psg_path))

    segments = fetch(dset, notes, stage=['NREM2', 'NREM3'], epoch='locked',
                     reject_epoch=True, reject_artf=True)
    assert len(segments) == 356

    segments = fetch(dset, notes, cat=(0, 1, 0, 0), stage=['NREM2', 'NREM3'],
                     reject_epoch=True, reject_artf=True)
    assert len(segments) == 31
    assert segments[14]['times'][0] == (34380, 34410)

    segments = fetch(dset, notes, cat=(0, 0, 1, 0), stage=['NREM1'])
    segments.read_data(['EEG Fpz-Cz'], ref_chan=['EEG Pz-Oz'])
    first_trace = segments[0]['data']()[0][0]
    assert first_trace.shape == (297000, )
    assert approx(first_trace[100]) == -4.3201466
Ejemplo n.º 14
0
def compute_md5(p):
    """Compute the md5sum for a file.

    If the file is .gz (in the case of .nii.gz) the archived content is
    hashed instead, because the .gz wrapper contains metadata (a timestamp)
    that changes every time. Similar special cases: .vhdr skips the first
    two lines (they contain the write time), .eeg hashes the decoded data,
    .npy hashes the raw array bytes.

    Parameters
    ----------
    p : pathlib.Path
        path of the file to hash

    Returns
    -------
    str
        hexadecimal md5 digest of the (normalized) content
    """
    # original opened files without a context manager, leaking the handle
    # if a read raised; `with` guarantees the file is closed
    if p.suffix == '.gz':
        with gzip.open(p, 'rb') as f:
            val = f.read()

    elif p.suffix == '.vhdr':
        # skip first two lines because they contain the time the file was written
        with p.open('rb') as f:
            val = b'\n'.join(f.read().split(b'\n')[2:])

    elif p.suffix == '.eeg':
        val = Dataset(p).read_data().data[0].astype(int).tobytes()

    elif p.suffix == '.npy':
        val = load(p).tobytes()

    else:
        with p.open('rb') as f:
            val = f.read()

    md5_ = md5()
    md5_.update(val)
    return md5_.hexdigest()
Ejemplo n.º 15
0
def test_events():
    """Event types and events: add, duplicate, remove, and rater guard."""
    dset = Dataset(ns2_file)
    create_empty_annotations(annot_file, dset)

    annot = Annotations(annot_file)
    # without a rater, adding an event type must fail
    with raises(IndexError):
        annot.add_event_type('spindle')

    annot.add_rater('test')
    annot.add_event_type('spindle')
    # adding the same type twice does not create a second entry
    annot.add_event_type('spindle')
    assert len(annot.event_types) == 1

    annot.add_event('slowwave', (1, 2), chan=('FP1', ))
    annot.add_event('spindle', (3, 4))

    def counts():
        return len(annot.event_types), len(annot.get_events())

    assert counts() == (2, 2)

    annot.remove_event_type('spindle')
    assert counts() == (1, 1)

    annot.remove_event('slowwave')
    assert counts() == (1, 0)
Ejemplo n.º 16
0
def test_epochs():
    """Epoch counts, overall and restricted to a time window."""
    dset = Dataset(ns2_file)
    create_empty_annotations(annot_file, dset)

    scores = Annotations(annot_file)
    scores.add_rater('test')

    assert len(scores.get_epochs()) == 50
    assert len(scores.get_epochs(time=(1000, 2000))) == 16
Ejemplo n.º 17
0
def load_and_slice_data_for_feature_extraction(
        edf_filepath: str,
        epochstages: List[str],
        bad_segments: List[int] = None,
        epochoffset_secs: float = None,
        end_offset: float = None,
        chans_to_consider: List[str] = None,
        epoch_len=pysleep_defaults.epoch_len,
        stages_to_consider=pysleep_defaults.nrem_stages):
    """Read an EDF recording and slice it to the epochs in the wanted stages.

    Epochs flagged in ``bad_segments`` are relabelled 'artifact' and thereby
    excluded; contiguous kept epochs are merged into single segments before
    reading.  The returned data object carries the segment boundaries as
    ``data.starts`` and ``data.ends`` (seconds).

    :param edf_filepath: path of the EDF file to read
    :param epochstages: per-epoch stage labels
    :param bad_segments: indices into epochstages to mark as 'artifact'
    :param epochoffset_secs: offset (s) of the first epoch into the recording
    :param end_offset: recording time (s) after which epochs are dropped
    :param chans_to_consider: channel names to read (None = all)
    :param epoch_len: epoch length in seconds
    :param stages_to_consider: stage labels to keep
    :raises EEGError: if the signal amplitude suggests wrong units
    """
    if epochoffset_secs is None:
        epochoffset_secs = 0
    if end_offset is not None:
        # keep only epochs that end before end_offset
        last_good_epoch = int((end_offset - epochoffset_secs) / epoch_len)
        epochstages = epochstages[0:last_good_epoch]

    d = Dataset(edf_filepath)

    eeg_data = d.read_data().data[0]
    # crude unit sanity check: mean absolute amplitude must fall in (1, 200)
    if not (1 < np.sum(np.abs(eeg_data)) / eeg_data.size < 200):
        raise EEGError(
            "edf data should be in mV, please rescale units in edf file")

    if bad_segments is not None:
        # NOTE(review): when end_offset is None this mutates the caller's
        # epochstages list in place — confirm callers do not reuse it
        for bad_epoch in bad_segments:
            epochstages[bad_epoch] = 'artifact'
    # convert labels to an events table with onset/duration/description columns
    epochstages = pysleep_utils.convert_epochstages_to_eegevents(
        epochstages, start_offset=epochoffset_secs)
    epochstages_to_consider = epochstages.loc[
        epochstages['description'].isin(stages_to_consider), :]
    starts = epochstages_to_consider['onset'].tolist()
    ends = (epochstages_to_consider['onset'] +
            epochstages_to_consider['duration']).tolist()

    # merge back-to-back epochs into single continuous segments
    # (iterate backwards so deletions do not shift pending indices)
    for i in range(len(starts) - 1, 0, -1):
        if starts[i] == ends[i - 1]:
            del starts[i]
            del ends[i - 1]

    data = d.read_data(begtime=starts, endtime=ends, chan=chans_to_consider)
    data.starts = starts
    data.ends = ends
    return data
Ejemplo n.º 18
0
def read_ieeg_block(filename, electrode_file, conditions, minimalduration,
                    output_dir):
    """Read iEEG data per condition group and pickle each block to disk.

    For every entry in ``conditions`` (mapping a group label to a list of
    marker names), collect the intervals of markers belonging to that group
    that last at least ``minimalduration``, read the corresponding data on
    the channels that survive rejection, and dump the result to a .pkl file
    in ``output_dir``.

    Parameters
    ----------
    filename : path to the BIDS recording
    electrode_file : path to the electrodes file
    conditions : dict mapping a label to the marker names of that condition
    minimalduration : minimum marker duration (s) for an interval to be kept
    output_dir : directory receiving one pickle per condition group

    Returns
    -------
    list of paths of the written pickle files
    """
    d = Dataset(filename, bids=True)
    markers = d.read_markers()

    electrodes = Electrodes(electrode_file)
    elec_names = [x['name'] for x in electrodes.electrodes.tsv]
    elec_names = [x for x in elec_names if x in d.header['chan_name']
                  ]  # exclude elec location that have no corresponding channel

    all_conditions = [x for v in conditions.values() for x in v]
    clean_labels = _reject_channels(d, elec_names, all_conditions,
                                    minimalduration)

    outputs = []
    for active_baseline, data_conds in conditions.items():
        block_beg = []
        block_end = []

        for mrk in markers:

            # keep only markers of this condition that last long enough
            if mrk['name'] in data_conds:
                dur = (mrk['end'] - mrk['start'])
                if dur >= minimalduration:
                    block_beg.append(mrk['start'])
                    block_end.append(mrk['end'])

        data = d.read_data(begtime=block_beg,
                           endtime=block_end,
                           chan=clean_labels)

        # output name: task name of the input with the condition label appended
        output_task = Task(filename)
        output_task.extension = '.pkl'
        output_task.task += active_baseline
        output_file = output_dir / output_task.get_filename()
        with output_file.open('wb') as f:
            dump(data, f)
        outputs.append(output_file)

    return outputs
Ejemplo n.º 19
0
def test_ioeeg_eeglab_hdf5():
    """Both EEGLAB hdf5 variants yield the same samples; markers present."""
    first = Dataset(eeglab_hdf5_1_file)
    second = Dataset(eeglab_hdf5_2_file)

    sample1 = first.read_data(begsam=100, endsam=200).data[0][0, 0]
    sample2 = second.read_data(begsam=100, endsam=200).data[0][0, 0]
    assert sample1 == sample2

    assert len(first.read_markers()) == 2
Ejemplo n.º 20
0
def test_ioeeg_eeglab():
    """Both EEGLAB variants yield the same first sample; markers present."""
    first = Dataset(eeglab_1_file)
    second = Dataset(eeglab_2_file)

    sample1 = first.read_data().data[0][0, 0]
    sample2 = second.read_data().data[0][0, 0]
    assert sample1 == sample2

    assert len(first.read_markers()) == 2
Ejemplo n.º 21
0
def test_openephys_dataset_01():
    """Open Ephys reading by time and by sample; no markers expected."""
    dset = Dataset(openephys_dir)
    data = dset.read_data(begtime=1, endtime=2)

    # reopen and read a single channel by sample index
    dset = Dataset(openephys_dir)
    data = dset.read_data(chan=['CH1'], begsam=10, endsam=1400)
    assert data.data[0][0, 0] == -132.6

    assert not dset.read_markers()
Ejemplo n.º 22
0
def test_bookmarks():
    """Bookmarks require a rater; add then remove one bookmark."""
    dset = Dataset(ns2_file)
    create_empty_annotations(annot_file, dset)

    annot = Annotations(annot_file)

    # without a rater, both access and mutation fail
    with raises(IndexError):
        annot.current_rater
    with raises(IndexError):
        annot.add_bookmark('bookmark', (1, 2), ('Fpz', ))

    annot.add_rater('test')
    annot.add_bookmark('bookmark', (1, 2), ('Fpz', ))
    assert len(annot.get_bookmarks()) == 1

    annot.remove_bookmark('bookmark')
    assert not annot.get_bookmarks()
Ejemplo n.º 23
0
def test_get_cycles():
    """Cycle markers define cycles; clearing them yields None."""
    dset = Dataset(ns2_file)
    create_empty_annotations(annot_file, dset)

    annot = Annotations(annot_file)
    annot.add_rater('test')

    for marker_time in (510, 540, 570):
        annot.set_cycle_mrkr(marker_time)
    annot.set_cycle_mrkr(600, end=True)

    cycles = annot.get_cycles()
    assert len(cycles) == 3
    assert cycles[-1] == (570, 600, 3)

    annot.remove_cycle_mrkr(510)
    annot.clear_cycles()
    assert annot.get_cycles() is None
Ejemplo n.º 24
0
def test_ioeeg_eeglab_begsam():
    """A negative begsam is padded with NaN at the start."""
    padded = Dataset(eeglab_1_file).read_data(begsam=-10, endsam=1)
    assert isnan(padded.data[0][0, 0])
Ejemplo n.º 25
0
from pytest import approx

from wonambi import Dataset
from wonambi.detect.slowwave import DetectSlowWave

from .paths import psg_file

# Module-level fixture: read 30 s (27930-27960 s) of two EEG channels once,
# shared by the slow-wave detection tests below.
d = Dataset(psg_file)
data = d.read_data(chan=('EEG Fpz-Cz', 'EEG Pz-Oz'),
                   begtime=27930,
                   endtime=27960)


def test_detect_slowwave_Massimini2004():
    """Default (Massimini2004) slow-wave detector finds one event."""
    detector = DetectSlowWave()
    detector.invert = True
    assert repr(detector) == 'detsw_Massimini2004_0.10-4.00Hz'

    detected = detector(data)
    assert len(detected.events) == 1


def test_detect_slowwave_AASM_Massimini2004():
    """AASM/Massimini2004 slow-wave detector finds fifteen events."""
    detector = DetectSlowWave(method='AASM/Massimini2004')
    detector.invert = True
    assert repr(detector) == 'detsw_AASM/Massimini2004_0.10-4.00Hz'

    detected = detector(data)
    assert len(detected.events) == 15

Ejemplo n.º 26
0
def extract_band_power(
        edf_filepath: str,
        bands: dict = pysleep_defaults.default_freq_bands,
        chans_to_consider: List[str] = None,
        epochoffset_secs: float = None,
        end_time: float = None,
        epoch_len: int = pysleep_defaults.epoch_len) -> pd.DataFrame:
    """

    :param edf_filepath: The edf to extract bandpower for
    :param bands: bands to extract power in, if None, then defaults will be used i.e.
        bands = {
            'delta': (1, 4),
            'theta': (4, 7),
            'alpha': (8, 12),
            'sigma': (11, 16),
            'slow_sigma': (11, 13),
            'fast_sigma': (13, 16),
            'beta': (13, 30)
        }
    :param chans_to_consider: which channels to consider
    :param epochoffset_secs: start time of the recording to extract band power for (when do epochs start), onset is measured from this
    :param end_time: end time of the recording to extract band power for
    :param epoch_len: how long a time bin you want your power to be averaged over
    :return: chan_epoch_band as a numpy array, and times, bands, chans
    """

    d = Dataset(edf_filepath)
    if not (epochoffset_secs is None or epochoffset_secs >= 0):
        raise error_handling.EEGError('Epochoffset is negative!' +
                                      str(epochoffset_secs))
    if not ((end_time is None) or
            (end_time <= d.header['n_samples'] / d.header['s_freq'])):
        raise error_handling.EEGError("end time (" + str(end_time) +
                                      ") larger than record end!" +
                                      str(d.header['n_samples'] /
                                          d.header['s_freq']))
    data = d.read_data(begtime=epochoffset_secs,
                       endtime=end_time,
                       chan=chans_to_consider)
    power = timefrequency(data, method='spectrogram')
    abs_power = math(power, operator_name='abs')
    chan_time_freq = abs_power.data[0]
    all_chans = np.ones((chan_time_freq.shape[0], ), dtype=bool)
    epochoffset_secs = 0 if epochoffset_secs is None else epochoffset_secs
    time_axis = np.round(abs_power.axis['time'][0], 2) - epochoffset_secs
    freq_axis = np.round(abs_power.axis['freq'][0], 2)
    chan_axis = abs_power.axis['chan'][0]
    freq_binsize = freq_axis[1] - freq_axis[0]
    assert epoch_len > 0, "epoch len must be greater than zero"
    times = np.arange(0, time_axis[-1], epoch_len)
    cont = []
    for band, freqs in bands.items():
        freq_mask = (freqs[0] <= freq_axis) & (freqs[1] >= freq_axis)
        for win_start in times:
            time_mask = (win_start < time_axis) & (time_axis <
                                                   win_start + epoch_len)
            # BUG FIX: np.ix_ returns a non-empty tuple, which is always
            # truthy, so the old `if idx:` never took the zero-fill branch
            # (and mean() over an empty selection yields NaN).  Test the
            # masks directly instead, and size the zero vector from the data
            # so it also works when chans_to_consider is None.
            if time_mask.any() and freq_mask.any():
                idx = np.ix_(all_chans, time_mask, freq_mask)
                chan_epoch_per_band = chan_time_freq[idx].mean(axis=1).mean(
                    axis=1) / freq_binsize
            else:
                chan_epoch_per_band = np.zeros((chan_time_freq.shape[0], ))
            # loop variable renamed from `power` so it no longer shadows the
            # timefrequency result above
            for chan, band_power_value in zip(chan_axis, chan_epoch_per_band):
                cont.append(
                    pd.Series({
                        'onset': win_start,
                        'duration': epoch_len,
                        'band': band.split('_')[0],
                        'chan': chan,
                        'power': band_power_value
                    }))
    band_power = pd.concat(
        cont, axis=1).T.apply(lambda x: pd.to_numeric(x, errors='ignore'))
    return band_power
Ejemplo n.º 27
0
from numpy import isnan

from wonambi import Dataset

from .paths import axon_abf_file


# Module-level fixture: Axon ABF dataset shared by the abf tests below.
d = Dataset(axon_abf_file)


def test_abf_read():
    """ABF header fields, a known sample value, and no markers."""
    header = d.header
    assert len(header['chan_name']) == 1
    assert header['start_time'].minute == 47

    one_second = d.read_data(begtime=1, endtime=2)
    assert one_second.data[0][0, 0] == 2.1972655922581912

    assert not d.read_markers()


def test_abf_boundary():
    """Reads beyond either end of the ABF recording are NaN-padded."""
    before = d.read_data(begsam=-10, endsam=5)
    assert isnan(before.data[0][0, :10]).all()

    total = d.header['n_samples']
    after = d.read_data(begsam=total - 2, endsam=total + 10)
    assert isnan(after.data[0][0, 2:]).all()
Ejemplo n.º 28
0
from pytest import approx, raises

from wonambi import Dataset
from wonambi.detect.spindle import DetectSpindle

from .paths import psg_file

# Module-level fixture: 30 s (35790-35820 s) of two EEG channels,
# shared by the spindle detection tests below.
d = Dataset(psg_file)
data = d.read_data(chan=('EEG Fpz-Cz', 'EEG Pz-Oz'), begtime=35790, endtime=35820)


def test_detect_spindle_Moelle2011():
    """Default (Moelle2011) spindle detector finds four events."""
    detector = DetectSpindle()
    assert repr(detector) == 'detsp_Moelle2011_12-15Hz_00.5-03.0s'

    detected = detector(data)
    assert len(detected.events) == 4


def test_detect_spindle_Nir2011():
    """Nir2011 spindle detector finds four events."""
    detected = DetectSpindle(method='Nir2011')(data)
    assert len(detected.events) == 4


def test_detect_spindle_Wamsley2012():
    """Wamsley2012 spindle detector finds four events."""
    detected = DetectSpindle(method='Wamsley2012')(data)
    assert len(detected.events) == 4
Ejemplo n.º 29
0
from numpy import isnan

from wonambi import Dataset

from .paths import mff_file

# Module-level fixture: mff dataset shared by the mff tests below.
d = Dataset(mff_file)


def test_mff_read():
    # smoke test: reading a 10-s window must not raise
    d.read_data(begtime=10, endtime=20)


def test_mff_before_start():
    """Samples requested before the recording start are NaN."""
    padded = d.read_data(begsam=-100, endsam=10)
    assert isnan(padded.data[0][0, 0])


def test_mff_after_end():
    """Samples requested past the recording end are NaN."""
    total = d.header['n_samples']
    padded = d.read_data(begsam=total - 100, endsam=total + 100)
    assert isnan(padded.data[0][0, -1])


def test_mff_markers():
    """The mff dataset carries exactly eight markers."""
    assert len(d.read_markers()) == 8
Ejemplo n.º 30
0
def test_create_annot():
    d = Dataset(ns2_file)
    create_empty_annotations(annot_file, d)