Example #1
def test_plot_topomap_mne():
    "Test plot.Topomap with MNE data"
    ds = datasets.get_mne_sample(sub=[0, 1], sns=True)
    p = plot.Topomap(ds['meg'].summary(time=(.1, .12)), proj='left', show=False)
    p.close()
    # grad
    ds = datasets.get_mne_sample(sub=[0], sns='grad')
    assert_raises(NotImplementedError, plot.Topomap, 'meg.sub(time=.1)', ds=ds, show=False)
Example #2
def test_plot_topomap_mne():
    "Test plot.Topomap with MNE data"
    ds = datasets.get_mne_sample(sub=[0, 1], sns=True)
    p = plot.Topomap(ds['meg'].summary(time=(.1, .12)), proj='left')
    p.close()
    # grad
    ds = datasets.get_mne_sample(sub=[0], sns='grad')
    with pytest.raises(NotImplementedError), pytest.warns(RuntimeWarning):
        plot.Topomap('meg.sub(time=.1)', ds=ds)
Example #3
def test_plot_topomap_mne():
    "Test plot.Topomap with MNE data"
    ds = datasets.get_mne_sample(sub=[0, 1], sns=True)
    p = plot.Topomap(ds['meg'].summary(time=(.1, .12)), proj='left')
    p.close()
    # grad
    ds = datasets.get_mne_sample(sub=[0], sns='grad')
    with pytest.raises(NotImplementedError), pytest.warns(RuntimeWarning):
        plot.Topomap('meg.sub(time=.1)', ds=ds)
Example #4
def test_plot_topomap_mne():
    "Test plot.Topomap with MNE data"
    ds = datasets.get_mne_sample(sub=[0, 1], sns=True)
    p = plot.Topomap(ds['meg'].summary(time=(.1, .12)),
                     proj='left',
                     show=False)
    p.close()
Example #5
def test_epoch_trigger_shift():
    "Test the shift_mne_epoch_trigger() function"
    epochs = datasets.get_mne_sample(sns=True, sub="[1,2,3]")['epochs']
    n_lost_start = np.sum(epochs.times < epochs.tmin + 0.05)
    n_lost_end = np.sum(epochs.times > epochs.tmax - 0.05)
    data = epochs.get_data()

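    # don't shift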
    epochs_s = shift_mne_epoch_trigger(epochs, [0, 0, 0])
    assert_array_equal(epochs_s.get_data(), data)

    epochs_s = shift_mne_epoch_trigger(epochs, [-0.05, 0., 0.05])
    data_s = epochs_s.get_data()
    assert_array_equal(data_s[0], data[0, :, : -(n_lost_end + n_lost_start)])
    assert_array_equal(data_s[1], data[1, :, n_lost_start: -n_lost_end])
    assert_array_equal(data_s[2], data[2, :, n_lost_end + n_lost_start:])
    assert_allclose(epochs_s.times, epochs.times[n_lost_start: -n_lost_end],
                    rtol=1e-1, atol=1e-3)  # ms accuracy

    epochs_s = shift_mne_epoch_trigger(epochs, [0.05, 0., 0.05])
    data_s = epochs_s.get_data()
    assert_array_equal(data_s[0], data[0, :, n_lost_end:])
    assert_array_equal(data_s[1], data[1, :, :-n_lost_end])
    assert_array_equal(data_s[2], data[2, :, n_lost_end:])
    assert_allclose(epochs_s.times, epochs.times[:-n_lost_end],
                    rtol=1e-1, atol=1e-3)  # ms accuracy
Example #6
def test_epoch_trigger_shift():
    "Test the shift_mne_epoch_trigger() function"
    epochs = datasets.get_mne_sample(sns=True, sub="[1,2,3]")['epochs']
    epochs.info['projs'] = []
    n_lost_start = np.sum(epochs.times < epochs.tmin + 0.05)
    n_lost_end = np.sum(epochs.times > epochs.tmax - 0.05)
    data = epochs.get_data()

    # don't shift
    epochs_s = shift_mne_epoch_trigger(epochs, [0, 0, 0])
    assert_array_equal(epochs_s.get_data(), data)

    epochs_s = shift_mne_epoch_trigger(epochs, [-0.05, 0., 0.05])
    data_s = epochs_s.get_data()
    assert_array_equal(data_s[0], data[0, :, :-(n_lost_end + n_lost_start)])
    assert_array_equal(data_s[1], data[1, :, n_lost_start:-n_lost_end])
    assert_array_equal(data_s[2], data[2, :, n_lost_end + n_lost_start:])
    assert_allclose(epochs_s.times,
                    epochs.times[n_lost_start:-n_lost_end],
                    rtol=1e-1,
                    atol=1e-3)  # ms accuracy

    epochs_s = shift_mne_epoch_trigger(epochs, [0.05, 0., 0.05])
    data_s = epochs_s.get_data()
    assert_array_equal(data_s[0], data[0, :, n_lost_end:])
    assert_array_equal(data_s[1], data[1, :, :-n_lost_end])
    assert_array_equal(data_s[2], data[2, :, n_lost_end:])
    assert_allclose(epochs_s.times,
                    epochs.times[:-n_lost_end],
                    rtol=1e-1,
                    atol=1e-3)  # ms accuracy
Example #7
def test_dataobjects():
    "Test handing MNE-objects as data-objects"
    ds = datasets.get_mne_sample(sns=True)
    ds['C'] = Factor(ds['index'] > 155, labels={False: 'a', True: 'b'})
    sds = ds.sub("side % C != ('L', 'b')")
    ads = sds.aggregate('side % C')
    eq_(ads.n_cases, 3)
Example #8
def test_xhemi():
    y = datasets.get_mne_stc(ndvar=True)
    data_dir = mne.datasets.sample.data_path()
    subjects_dir = os.path.join(data_dir, 'subjects')
    load.update_subjects_dir(y, subjects_dir)

    lh, rh = xhemi(y, mask=False)
    assert lh.source.rh_n == 0
    assert rh.source.rh_n == 0
    assert lh.max() == pytest.approx(10.80, abs=1e-2)
    assert rh.max() == pytest.approx(7.91, abs=1e-2)

    # volume source space
    ds = datasets.get_mne_sample(src='vol', ori='vector', hpf=1)
    y = ds[0, 'src']
    with pytest.raises(NotImplementedError):
        xhemi(y)
    # make symmetric
    coords = list(map(tuple, y.source.coordinates))
    index = [r == 0 or (-r, a, s) in coords for r, a, s in coords]
    y = y.sub(source=np.array(index))
    # test xhemi
    yl, yr = xhemi(y)
    assert yl.source == yr.source
    # test vector mirroring
    r, a, s = coords[10]
    assert r  # make sure it's not on midline
    for i_orig, coords in enumerate(y.source.coordinates):
        if tuple(coords) == (r, a, s):
            break
    for i_flipped, coords in enumerate(yr.source.coordinates):
        if tuple(coords) == (-r, a, s):
            break
    assert_array_equal(yr.x[i_flipped], y.x[i_orig] * [[-1], [1], [1]])
Example #9
def test_vec_source():
    "Test vector source space"
    ds = datasets.get_mne_sample(0, 0.1, src='vol', sub="(modality=='A') & (side == 'L')", ori='vector', stc=True)
    # conversion: vector
    stc = ds[0, 'stc']
    stc2 = load.fiff.stc_ndvar([stc, stc], ds.info['subject'], 'vol-10', ds.info['subjects_dir'])
    assert_dataobj_equal(stc2[1], ds[0, 'src'], name=False)
    # non-vector
    if hasattr(stc, 'magnitude'):  # added in mne 0.18
        stc = stc.magnitude()
        ndvar = load.fiff.stc_ndvar(stc, ds.info['subject'], 'vol-10', ds.info['subjects_dir'])
        assert_dataobj_equal(ndvar, ds[0, 'src'].norm('space'), name=False)
    # test
    res = testnd.Vector('src', ds=ds, samples=2)
    clusters = res.find_clusters()
    assert_array_equal(clusters['n_sources'], [799, 1, 7, 1, 2, 1])
    # NDVar
    v = ds['src']
    assert v.sub(source='lh', time=0).shape == (72, 712, 3)
    # parc
    v = ds[0, 'src']
    v = set_parc(v, Factor('abcdefg', repeat=227))
    v1 = v.sub(source='a')
    assert len(v1.source) == 227
    v2 = v.sub(source=('b', 'c'))
    assert len(v2.source) == 454
    assert 'b' in v2.source.parc
    assert 'd' not in v2.source.parc
    with pytest.raises(IndexError):
        v.sub(source='ab')
    with pytest.raises(IndexError):
        v.sub(source=['a', 'bc'])
Example #10
def test_source_ndvar():
    "Test NDVar with source dimension"
    ds = datasets.get_mne_sample(-0.1, 0.1, src='ico', sub='index<=1')
    v = ds['src', 0]
    assert v.source.parc.name == 'aparc'
    v_2009 = set_parc(v, 'aparc.a2009s')
    assert v_2009.source.parc.name == 'aparc.a2009s'
    conn = v_2009.source.connectivity()
    assert np.sum(v.source.parc == v_2009.source.parc) < len(v.source)
    v_back = set_parc(v_2009, 'aparc')
    assert v_back.source.parc.name == 'aparc'
    assert_array_equal(v.source.parc, v_back.source.parc)
    assert v.x is v_back.x
    assert_array_equal(v_back.source.connectivity(), conn)

    # labels_from_cluster
    v1, v2 = ds['src']
    v1 = v1 * (v1 > 15)
    labels1 = labels_from_clusters(v1)
    assert len(labels1) == 1
    labels1s = labels_from_clusters(v1.sum('time'))
    assert len(labels1s) == 1
    assert_label_equal(labels1s[0], labels1[0])
    v2 = v2 * (v2 > 2)
    labels2 = labels_from_clusters(concatenate((v1, v2), 'case'))
    assert len(labels2) == 2
    assert_label_equal(labels1[0], labels2[0])
Example #11
def test_morphing():
    mne.set_log_level('warning')
    data_dir = mne.datasets.sample.data_path()
    subjects_dir = os.path.join(data_dir, 'subjects')

    sss = datasets._mne_source_space('fsaverage', 'ico-4', subjects_dir)
    vertices_to = [sss[0]['vertno'], sss[1]['vertno']]
    ds = datasets.get_mne_sample(-0.1,
                                 0.1,
                                 src='ico',
                                 sub='index==0',
                                 stc=True)
    stc = ds['stc', 0]
    morph_mat = mne.compute_morph_matrix('sample', 'fsaverage', stc.vertices,
                                         vertices_to, None, subjects_dir)
    ndvar = ds['src']

    morphed_ndvar = morph_source_space(ndvar, 'fsaverage')
    morphed_stc = mne.morph_data_precomputed('sample', 'fsaverage', stc,
                                             vertices_to, morph_mat)
    assert_array_equal(morphed_ndvar.x[0], morphed_stc.data)
    morphed_stc_ndvar = load.fiff.stc_ndvar([morphed_stc],
                                            'fsaverage',
                                            'ico-4',
                                            subjects_dir,
                                            'dSPM',
                                            False,
                                            'src',
                                            parc=None)
    assert_dataobj_equal(morphed_ndvar, morphed_stc_ndvar)
Example #12
def test_plot_brain():
    """Test plot.brain plots"""
    src = datasets.get_mne_sample(src='ico', sub=[0])['src']

    p = plot.brain.dspm(src)
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()
    # not closing figures leads to weird interactions with the QT backend

    p = plot.brain.dspm(src, hemi='lh')
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()

    p = plot.brain.cluster(src, hemi='rh', views='parietal')
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()

    image = plot.brain.bin_table(src, tstart=0.1, tstop=0.3, tstep=0.1)
    print(repr(image))
    print(image)

    # plot p-map
    pmap = src.abs()
    pmap /= src.max()
    p = plot.brain.p_map(pmap, src)
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()
Example #13
def test_source_ndvar():
    "Test NDVar with source dimension"
    ds = datasets.get_mne_sample(-0.1, 0.1, src='ico', sub='index<=1')
    v = ds['src', 0]
    assert v.source.parc.name == 'aparc'
    v_2009 = set_parc(v, 'aparc.a2009s')
    assert v_2009.source.parc.name == 'aparc.a2009s'
    conn = v_2009.source.connectivity()
    assert np.sum(v.source.parc == v_2009.source.parc) < len(v.source)
    v_back = set_parc(v_2009, 'aparc')
    assert v_back.source.parc.name == 'aparc'
    assert_array_equal(v.source.parc, v_back.source.parc)
    assert v.x is v_back.x
    assert_array_equal(v_back.source.connectivity(), conn)

    # labels_from_cluster
    v1, v2 = ds['src']
    v1 = v1 * (v1 > 15)
    labels1 = labels_from_clusters(v1)
    assert len(labels1) == 1
    labels1s = labels_from_clusters(v1.sum('time'))
    assert len(labels1s) == 1
    assert_label_equal(labels1s[0], labels1[0])
    v2 = v2 * (v2 > 2)
    labels2 = labels_from_clusters(concatenate((v1, v2), 'case'))
    assert len(labels2) == 2
    assert_label_equal(labels1[0], labels2[0])
Example #14
def test_dataobjects():
    "Test handing MNE-objects as data-objects"
    ds = datasets.get_mne_sample(sns=True)
    ds['C'] = Factor(ds['index'] > 155, labels={False: 'a', True: 'b'})
    sds = ds.sub("side % C != ('L', 'b')")
    ads = sds.aggregate('side % C')
    eq_(ads.n_cases, 3)
Example #15
def test_anova_parc():
    "Test ANOVA with parc argument and source space data"
    set_log_level('warning', 'mne')
    ds = datasets.get_mne_sample(src='ico', sub="side.isin(('L', 'R'))")
    y = ds['src'].sub(source=('lateraloccipital-lh', 'cuneus-lh'))
    y1 = y.sub(source='lateraloccipital-lh')
    y2 = y.sub(source='cuneus-lh')
    kwa = dict(ds=ds, tstart=0.2, tstop=0.3, samples=100)

    resp = testnd.anova(y, "side*modality", pmin=0.05, parc='source', **kwa)
    c1p = resp.find_clusters(source='lateraloccipital-lh')
    c2p = resp.find_clusters(source='cuneus-lh')
    del c1p['p_parc', 'id']
    del c2p['p_parc', 'id']
    res1 = testnd.anova(y1, "side*modality", pmin=0.05, **kwa)
    c1 = res1.find_clusters()
    del c1['id']
    res2 = testnd.anova(y2, "side*modality", pmin=0.05, **kwa)
    c2 = res2.find_clusters()
    del c2['id']
    assert_dataset_equal(c1p, c1)
    assert_dataset_equal(c2p, c2)
    assert_array_equal(c2['p'], [
        0.85, 0.88, 0.97, 0.75, 0.99, 0.99, 0.98, 0.0, 0.12, 0.88, 0.25, 0.97,
        0.34, 0.96
    ])

    # without multiprocessing
    configure(n_workers=0)
    ress = testnd.anova(y, "side*modality", pmin=0.05, parc='source', **kwa)
    c1s = ress.find_clusters(source='lateraloccipital-lh')
    c2s = ress.find_clusters(source='cuneus-lh')
    del c1s['p_parc', 'id']
    del c2s['p_parc', 'id']
    assert_dataset_equal(c1s, c1)
    assert_dataset_equal(c2s, c2)
    configure(n_workers=True)

    # parc but single label
    resp2 = testnd.anova(y2, "side*modality", pmin=0.05, parc='source', **kwa)
    c2sp = resp2.find_clusters(source='cuneus-lh')
    del c2sp['p_parc', 'id']
    assert_dataset_equal(c2sp, c2)

    # not defined
    assert_raises(NotImplementedError,
                  testnd.anova,
                  y,
                  "side*modality",
                  tfce=True,
                  parc='source',
                  **kwa)
Example #16
def test_dataobjects():
    "Test handing MNE-objects as data-objects"
    ds = datasets.get_mne_sample(sns=True)
    ds['C'] = Factor(ds['index'] > 155, labels={False: 'a', True: 'b'})
    sds = ds.sub("side % C != ('L', 'b')")
    ads = sds.aggregate('side % C')
    eq_(ads.n_cases, 3)

    # connectivity
    sensor = ds['sns'].sensor
    c = sensor.connectivity()
    assert_array_equal(c[:, 0] < c[:, 1], True)
    eq_(c.max(), len(sensor) - 1)
Example #17
def test_morphing():
    mne.set_log_level('warning')
    data_dir = mne.datasets.sample.data_path()
    subjects_dir = os.path.join(data_dir, 'subjects')
    sss = datasets._mne_source_space('fsaverage', 'ico-4', subjects_dir)
    vertices_to = [sss[0]['vertno'], sss[1]['vertno']]
    ds = datasets.get_mne_sample(-0.1, 0.1, src='ico', sub='index==0', stc=True)
    stc = ds['stc', 0]
    morph_mat = mne.compute_morph_matrix('sample', 'fsaverage', stc.vertno,
                                         vertices_to, None, subjects_dir)
    ndvar = ds['src']

    morphed_ndvar = morph_source_space(ndvar, 'fsaverage')
    morphed_stc = mne.morph_data_precomputed('sample', 'fsaverage', stc,
                                             vertices_to, morph_mat)
    assert_array_equal(morphed_ndvar.x[0], morphed_stc.data)
    morphed_stc_ndvar = load.fiff.stc_ndvar([morphed_stc], 'fsaverage', 'ico-4',
                                            subjects_dir, 'src', parc=None)
    assert_dataobj_equal(morphed_ndvar, morphed_stc_ndvar)
Example #18
def test_anova_parc():
    "Test ANOVA with parc argument and source space data"
    set_log_level('warning', 'mne')
    ds = datasets.get_mne_sample(src='ico', sub="side.isin(('L', 'R'))")
    y = ds['src'].sub(source=('lateraloccipital-lh', 'cuneus-lh'))
    y1 = y.sub(source='lateraloccipital-lh')
    y2 = y.sub(source='cuneus-lh')
    kwa = dict(ds=ds, tstart=0.2, tstop=0.3, samples=100)

    resp = testnd.anova(y, "side*modality", pmin=0.05, parc='source', **kwa)
    c1p = resp.find_clusters(source='lateraloccipital-lh')
    c2p = resp.find_clusters(source='cuneus-lh')
    del c1p['p_parc', 'id']
    del c2p['p_parc', 'id']
    res1 = testnd.anova(y1, "side*modality", pmin=0.05, **kwa)
    c1 = res1.find_clusters()
    del c1['id']
    res2 = testnd.anova(y2, "side*modality", pmin=0.05, **kwa)
    c2 = res2.find_clusters()
    del c2['id']
    assert_dataset_equal(c1p, c1)
    assert_dataset_equal(c2p, c2)
    assert_array_equal(c2['p'], [0.85, 0.88, 0.97, 0.75, 0.99, 0.99, 0.98, 0.0,
                                 0.12, 0.88, 0.25, 0.97, 0.34, 0.96])

    # without multiprocessing
    testnd.configure(0)
    ress = testnd.anova(y, "side*modality", pmin=0.05, parc='source', **kwa)
    c1s = ress.find_clusters(source='lateraloccipital-lh')
    c2s = ress.find_clusters(source='cuneus-lh')
    del c1s['p_parc', 'id']
    del c2s['p_parc', 'id']
    assert_dataset_equal(c1s, c1)
    assert_dataset_equal(c2s, c2)
    testnd.configure(-1)

    # parc but single label
    resp2 = testnd.anova(y2, "side*modality", pmin=0.05, parc='source', **kwa)
    c2sp = resp2.find_clusters(source='cuneus-lh')
    del c2sp['p_parc', 'id']
    assert_dataset_equal(c2sp, c2)

    # not defined
    assert_raises(NotImplementedError, testnd.anova, y, "side*modality",
                  tfce=True, parc='source', **kwa)
Example #19
def test_plot_topomap():
    "Test plot.Topomap"
    ds = datasets.get_uts(utsnd=True)
    topo = ds.eval('utsnd.summary(time=(0.075, 0.125))')

    p = plot.Topomap(topo, ds=ds, show=False)
    p.close()
    p = plot.Topomap(topo, ds=ds, vmax=0.2, w=2, show=False)
    p.close()
    p = plot.Topomap(topo, 'A%B', ds=ds, axw=2, show=False)
    p.close()
    p = plot.Topomap(topo, ds=ds, sensorlabels=None, show=False)
    p.close()

    # MNE data
    ds = datasets.get_mne_sample(sub=[0, 1], sns=True)
    p = plot.Topomap(ds['sns'].summary(time=(.1, .12)), proj='left', show=False)
    p.close()
Example #20
def test_plot_brain():
    """Test plot.brain plots"""
    src = datasets.get_mne_sample(src='ico', sub=[0])['src']

    # size
    b = plot.brain.brain(src.source, hemi='rh', w=400, h=300, mask=False)
    eq_(b.screenshot().shape, (300, 400, 3))
    b.set_size(200, 150)
    eq_(b.screenshot().shape, (150, 200, 3))
    b.close()
    # both hemispheres
    b = plot.brain.brain(src.source, w=600, h=300, mask=False)
    eq_(b.screenshot().shape, (300, 600, 3))
    b.set_size(400, 150)
    eq_(b.screenshot().shape, (150, 400, 3))
    b.close()

    # plot shortcuts
    p = plot.brain.dspm(src)
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()

    p = plot.brain.dspm(src, hemi='lh')
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()

    p = plot.brain.cluster(src, hemi='rh', views='parietal')
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()

    image = plot.brain.bin_table(src, tstart=0.1, tstop=0.3, tstep=0.1)
    print(repr(image))
    print(image)

    # plot p-map
    pmap = src.abs()
    pmap /= src.max()
    p = plot.brain.p_map(pmap, src)
    cb = p.plot_colorbar(show=False)
    cb.close()
    p.close()
Example #21
def test_interpolation():
    "Test MNE channel interpolation by epoch"
    ds = datasets.get_mne_sample(sub=[0, 1, 2, 3])
    bads1 = ['MEG 0531', 'MEG 1321']
    bads3 = ['MEG 0531', 'MEG 2231']
    bads_list = [[], bads1, [], bads3]
    test_epochs = ds['epochs']
    epochs1 = test_epochs.copy()
    epochs3 = test_epochs.copy()

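    # interpolating bad channels per epoch should match MNE's interpolation
    # of the whole Epochs object with the corresponding bads set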
    _interpolate_bads_meg(test_epochs, bads_list, {})
    assert_array_equal(test_epochs._data[0], epochs1._data[0])
    assert_array_equal(test_epochs._data[2], epochs1._data[2])
    epochs1.info['bads'] = bads1
    epochs1.interpolate_bads(mode='accurate')
    assert_array_almost_equal(test_epochs._data[1], epochs1._data[1], 25)
    epochs3.info['bads'] = bads3
    epochs3.interpolate_bads(mode='accurate')
    assert_array_almost_equal(test_epochs._data[3], epochs3._data[3], 25)
Example #22
def test_plot_brain():
    """Test plot.brain plots"""
    src = datasets.get_mne_sample(src='ico', sub=[0])['src']

    p = plot.brain.dspm(src)
    cb = p.plot_colorbar(show=False)
    cb.close()

    p = plot.brain.dspm(src, hemi='lh')
    cb = p.plot_colorbar(show=False)
    cb.close()

    p = plot.brain.cluster(src, hemi='rh', views='parietal')
    cb = p.plot_colorbar(show=False)
    cb.close()

    image = plot.brain.bin_table(src, tstart=0.1, tstop=0.3, tstep=0.1)
    print(repr(image))
    print(image)
Example #23
def test_vec_source():
    "Test vector source space"
    ds = datasets.get_mne_sample(0,
                                 0.1, (0, 0),
                                 src='vol',
                                 sub="(modality=='A') & (side == 'L')",
                                 ori='vector',
                                 stc=True)
    # conversion: vector
    stc = ds[0, 'stc']
    stc2 = load.fiff.stc_ndvar([stc, stc], ds.info['subject'], 'vol-10',
                               ds.info['subjects_dir'])
    assert_dataobj_equal(stc2[1], ds[0, 'src'], name=False)
    # non-vector
    if hasattr(stc, 'magnitude'):  # added in mne 0.18
        stc = stc.magnitude()
        ndvar = load.fiff.stc_ndvar(stc, ds.info['subject'], 'vol-10',
                                    ds.info['subjects_dir'])
        assert_dataobj_equal(ndvar, ds[0, 'src'].norm('space'), name=False)
    # test
    res = testnd.Vector('src', ds=ds, samples=2)
    clusters = res.find_clusters()
    assert_array_equal(clusters['n_sources'], [799, 1, 7, 1, 2, 1])
    # NDVar
    v = ds['src']
    assert v.sub(source='lh', time=0).shape == (72, 712, 3)
    # parc
    v = ds[0, 'src']
    v = set_parc(v, Factor('abcdefg', repeat=227))
    v1 = v.sub(source='a')
    assert len(v1.source) == 227
    v2 = v.sub(source=('b', 'c'))
    assert len(v2.source) == 454
    assert 'b' in v2.source.parc
    assert 'd' not in v2.source.parc
    with pytest.raises(IndexError):
        v.sub(source='ab')
    with pytest.raises(IndexError):
        v.sub(source=['a', 'bc'])
Example #24
def test_select_components():
    "Test Select-Epochs GUI Document"
    tempdir = TempDir()
    PATH = join(tempdir, 'test-ica.fif')

    ds = datasets.get_mne_sample()
    ds['epochs'] = ds['epochs'].pick_types('mag')
    ica = mne.preprocessing.ICA(0.95)
    ica.fit(ds['epochs'])
    ica.save(PATH)

    frame = gui.select_components(PATH, ds)
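    # toggling component 1 should mark it for exclusion on save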
    frame.model.toggle(1)
    frame.OnSave(None)
    ica = mne.preprocessing.read_ica(PATH)
    assert ica.exclude == [1]

    frame.OnUndo(None)
    frame.OnSave(None)
    ica = mne.preprocessing.read_ica(PATH)
    assert ica.exclude == []

    frame.Close()
Example #25
def test_dataobjects():
    "Test handing MNE-objects as data-objects"
    shift = np.array([0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                      0.0, 0.0, 0.0, 0.1, -0.1])
    epochs = datasets.get_mne_epochs()
    ds = Dataset(('a', Factor('ab', repeat=8)),
                 ('epochs', epochs))
    ds['ets'] = shift_mne_epoch_trigger(epochs, shift, min(shift), max(shift))

    # ds operations
    sds = ds.sub("a == 'a'")
    ads = ds.aggregate('a')

    # asndvar
    ndvar = asndvar(ds['epochs'])
    ndvar = asndvar(ds['ets'])

    # connectivity
    ds = datasets.get_mne_sample(sub=[0], sns=True)
    sensor = ds['meg'].sensor
    c = sensor.connectivity()
    assert_array_equal(c[:, 0] < c[:, 1], True)
    eq_(c.max(), len(sensor) - 1)
Example #26
def test_dataobjects():
    "Test handing MNE-objects as data-objects"
    shift = np.array([0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                      0.0, 0.0, 0.0, 0.1, -0.1])
    epochs = datasets.get_mne_epochs()
    ds = Dataset(('a', Factor('ab', repeat=8)),
                 ('epochs', epochs))
    ds['ets'] = shift_mne_epoch_trigger(epochs, shift, min(shift), max(shift))

    # ds operations
    sds = ds.sub("a == 'a'")
    ads = ds.aggregate('a')

    # asndvar
    ndvar = asndvar(ds['epochs'])
    ndvar = asndvar(ds['ets'])

    # connectivity
    ds = datasets.get_mne_sample(sub=[0], sns=True)
    sensor = ds['meg'].sensor
    c = sensor.connectivity()
    assert_array_equal(c[:, 0] < c[:, 1], True)
    eq_(c.max(), len(sensor) - 1)
Example #27
def test_select_epochs():
    "Test Select-Epochs GUI Document"
    set_log_level('warning', 'mne')
    ds = datasets.get_mne_sample(sns=True)
    tempdir = TempDir()
    path = os.path.join(tempdir, 'rej.pickled')

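    # Test Document
    # =============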
    # create a file
    doc = Document(ds, 'meg')
    doc.set_path(path)
    doc.set_case(1, False, 'tag', None)
    doc.set_case(2, None, None, ['2'])
    doc.set_bad_channels([1])
    # check modifications
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)
    # save
    doc.save()

    # check the file
    ds_ = load.unpickle(path)
    eq_(doc.epochs.sensor.dimindex(ds_.info['bad_channels']), [1])

    # load the file
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg', path=path)
    # modification checks
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)

    # Test model
    # ==========
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg')
    model = Model(doc)

    # accept
    model.set_case(0, False, None, None)
    eq_(doc.accept[0], False)
    model.history.undo()
    eq_(doc.accept[0], True)
    model.history.redo()
    eq_(doc.accept[0], False)

    # interpolate
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['3'])
    model.toggle_interpolation(2, '4')
    eq_(doc.interpolate[2], ['3', '4'])
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['4'])
    model.toggle_interpolation(3, '3')
    eq_(doc.interpolate[2], ['4'])
    eq_(doc.interpolate[3], ['3'])
    model.history.undo()
    model.history.undo()
    eq_(doc.interpolate[2], ['3', '4'])
    eq_(doc.interpolate[3], [])
    model.history.redo()
    eq_(doc.interpolate[2], ['4'])

    # bad channels
    model.set_bad_channels([1])
    model.set_bad_channels([1, 10])
    eq_(doc.bad_channels, [1, 10])
    model.history.undo()
    eq_(doc.bad_channels, [1])
    model.history.redo()
    eq_(doc.bad_channels, [1, 10])

    # reload to reset
    model.load(path)
    # tests
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)
Example #28
def test_source_estimate():
    "Test SourceSpace dimension"
    mne.set_log_level('warning')
    ds = datasets.get_mne_sample(src='ico')
    dsa = ds.aggregate('side')

    # test auto-conversion
    asndvar('epochs', ds=ds)
    asndvar('epochs', ds=dsa)
    asndvar(dsa['epochs'][0])

    # source space clustering
    res = testnd.ttest_ind('src',
                           'side',
                           ds=ds,
                           samples=0,
                           pmin=0.05,
                           tstart=0.05,
                           mintime=0.02,
                           minsource=10)
    assert res.clusters.n_cases == 52

    # test disconnecting parc
    src = ds['src']
    source = src.source
    parc = source.parc
    orig_conn = set(map(tuple, source.connectivity()))
    disc_conn = set(map(tuple, source.connectivity(True)))
    assert len(disc_conn) < len(orig_conn)
    for pair in orig_conn:
        s, d = pair
        if pair in disc_conn:
            assert parc[s] == parc[d]
        else:
            assert parc[s] != parc[d]

    # threshold-based test with parc
    srcl = src.sub(source='lh')
    res = testnd.ttest_ind(srcl,
                           'side',
                           ds=ds,
                           samples=10,
                           pmin=0.05,
                           tstart=0.05,
                           mintime=0.02,
                           minsource=10,
                           parc='source')
    assert res._cdist.dist.shape[1] == len(srcl.source.parc.cells)
    label = 'superiortemporal-lh'
    c_all = res.find_clusters(maps=True)
    c_label = res.find_clusters(maps=True, source=label)
    assert_array_equal(c_label['location'], label)
    for case in c_label.itercases():
        id_ = case['id']
        idx = c_all['id'].index(id_)[0]
        assert case['v'] == c_all[idx, 'v']
        assert case['tstart'] == c_all[idx, 'tstart']
        assert case['tstop'] == c_all[idx, 'tstop']
        assert case['p'] <= c_all[idx, 'p']
        assert_dataobj_equal(case['cluster'],
                             c_all[idx, 'cluster'].sub(source=label))

    # threshold-free test with parc
    res = testnd.ttest_ind(srcl,
                           'side',
                           ds=ds,
                           samples=10,
                           tstart=0.05,
                           parc='source')
    cl = res.find_clusters(0.05)
    assert cl.eval("p.min()") == res.p.min()
    mp = res.masked_parameter_map()
    assert mp.min() == res.t.min()
    assert mp.max() == res.t.max(res.p <= 0.05)
    assert mp.max() == pytest.approx(-4.95817732)

    # indexing source space
    s_sub = src.sub(source='fusiform-lh')
    idx = source.index_for_label('fusiform-lh')
    s_idx = src[idx]
    assert_dataobj_equal(s_sub, s_idx)

    # concatenate
    src_reconc = concatenate((src.sub(source='lh'), src.sub(source='rh')),
                             'source')
    assert_dataobj_equal(src_reconc, src)
Example #29
def test_plot_topomap_mne():
    "Test plot.Topomap with MNE data"
    ds = datasets.get_mne_sample(sub=[0, 1], sns=True)
    p = plot.Topomap(ds["meg"].summary(time=(0.1, 0.12)), proj="left", show=False)
    p.close()
Example #30
def test_source_estimate():
    "Test SourceSpace dimension"
    mne.set_log_level('warning')
    ds = datasets.get_mne_sample(src='ico')
    dsa = ds.aggregate('side')

    # test auto-conversion
    asndvar('epochs', ds=ds)
    asndvar('epochs', ds=dsa)
    asndvar(dsa['epochs'][0])

    # source space clustering
    res = testnd.ttest_ind('src', 'side', ds=ds, samples=0, pmin=0.05,
                           tstart=0.05, mintime=0.02, minsource=10)
    assert_equal(res.clusters.n_cases, 52)

    # test disconnecting parc
    src = ds['src']
    source = src.source
    parc = source.parc
    orig_conn = set(map(tuple, source.connectivity()))
    disc_conn = set(map(tuple, source.connectivity(True)))
    assert_true(len(disc_conn) < len(orig_conn))
    for pair in orig_conn:
        s, d = pair
        if pair in disc_conn:
            assert_equal(parc[s], parc[d])
        else:
            assert_not_equal(parc[s], parc[d])

    # threshold-based test with parc
    srcl = src.sub(source='lh')
    res = testnd.ttest_ind(srcl, 'side', ds=ds, samples=10, pmin=0.05,
                           tstart=0.05, mintime=0.02, minsource=10,
                           parc='source')
    assert_equal(res._cdist.dist.shape[1], len(srcl.source.parc.cells))
    label = 'superiortemporal-lh'
    c_all = res._clusters(maps=True)
    c_label = res._clusters(maps=True, source=label)
    assert_array_equal(c_label['location'], label)
    for case in c_label.itercases():
        id_ = case['id']
        idx = c_all['id'].index(id_)[0]
        assert_equal(case['v'], c_all[idx, 'v'])
        assert_equal(case['tstart'], c_all[idx, 'tstart'])
        assert_equal(case['tstop'], c_all[idx, 'tstop'])
        assert_less_equal(case['p'], c_all[idx, 'p'])
        assert_dataobj_equal(case['cluster'],
                             c_all[idx, 'cluster'].sub(source=label))

    # threshold-free test with parc
    res = testnd.ttest_ind(srcl, 'side', ds=ds, samples=10, tstart=0.05,
                           parc='source')
    cl = res._clusters(0.05)
    assert_equal(cl.eval("p.min()"), res.p.min())
    mp = res.masked_parameter_map()
    assert_in(mp.min(), (0, res.t.min()))
    assert_in(mp.max(), (0, res.t.max()))

    # indexing source space
    s_sub = src.sub(source='fusiform-lh')
    idx = source.index_for_label('fusiform-lh')
    s_idx = src[idx]
    assert_dataobj_equal(s_sub, s_idx)
Example #31
def test_select_epochs():
    "Test Select-Epochs GUI Document"
    set_log_level('warning', 'mne')
    ds = datasets.get_mne_sample(sns=True)
    tempdir = TempDir()
    path = os.path.join(tempdir, 'rej.pickled')

    # Test Document
    # =============
    # create a file
    doc = Document(ds, 'meg')
    doc.set_path(path)
    doc.set_case(1, False, 'tag', None)
    doc.set_case(2, None, None, ['2'])
    doc.set_bad_channels([1])
    # check modifications
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)
    # save
    doc.save()

    # check the file
    ds_ = load.unpickle(path)
    eq_(doc.epochs.sensor._array_index(ds_.info['bad_channels']), [1])

    # load the file
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg', path=path)
    # modification checks
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)

    # Test Model
    # ==========
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg')
    model = Model(doc)

    # accept
    model.set_case(0, False, None, None)
    eq_(doc.accept[0], False)
    model.history.undo()
    eq_(doc.accept[0], True)
    model.history.redo()
    eq_(doc.accept[0], False)

    # interpolate
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['3'])
    model.toggle_interpolation(2, '4')
    eq_(doc.interpolate[2], ['3', '4'])
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['4'])
    model.toggle_interpolation(3, '3')
    eq_(doc.interpolate[2], ['4'])
    eq_(doc.interpolate[3], ['3'])
    model.history.undo()
    model.history.undo()
    eq_(doc.interpolate[2], ['3', '4'])
    eq_(doc.interpolate[3], [])
    model.history.redo()
    eq_(doc.interpolate[2], ['4'])

    # bad channels
    model.set_bad_channels([1])
    model.set_bad_channels([1, 10])
    eq_(doc.bad_channels, [1, 10])
    model.history.undo()
    eq_(doc.bad_channels, [1])
    model.history.redo()
    eq_(doc.bad_channels, [1, 10])

    # reload to reset
    model.load(path)
    # tests
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)

    # Test GUI
    # ========
    frame = gui.select_epochs(ds)
    assert_false(frame.CanBackward())
    ok_(frame.CanForward())
    frame.OnForward(None)
Example #32
import os
import timeit

import mne
from eelbrain import datasets, save

mne.set_log_level("warning")

fname = "temp.pickled"
if not os.path.exists(fname):
    ds = datasets.get_mne_sample(-0.1, 0.2, src="ico", sub="modality == 'A'")
    source = ds["src"].source
    y = ds["src"][0].x
    save.pickle((y, source), fname)


setup = (
    """
from itertools import izip
import numpy as np
import scipy as sp
from eelbrain.lab import stats, load

y, source = load.unpickle(%r)

out = np.empty(y.shape, np.uint32)
bin_buff = np.empty(y.shape, np.bool_)
int_buff = np.empty(y.shape, np.uint32)
threshold = 1
tail = 0
struct = sp.ndimage.generate_binary_structure(y.ndim, 1)
Example #33
import os
import timeit

import mne
from eelbrain import datasets, save

mne.set_log_level('warning')

fname = 'temp.pickled'
if not os.path.exists(fname):
    ds = datasets.get_mne_sample(-0.1, 0.2, src='ico', sub="modality == 'A'")
    source = ds['src'].source
    y = ds['src'][0].x
    save.pickle((y, source), fname)


setup = '''
from itertools import izip
import numpy as np
import scipy as sp
from eelbrain.lab import stats, load

y, source = load.unpickle(%r)

out = np.empty(y.shape, np.uint32)
bin_buff = np.empty(y.shape, np.bool_)
int_buff = np.empty(y.shape, np.uint32)
threshold = 1
tail = 0
struct = sp.ndimage.generate_binary_structure(y.ndim, 1)
struct[::2] = False