def test_label_in_src():
    """Test label in src."""
    src = read_source_spaces(src_fname)
    label = read_label(v1_label_fname)

    # construct label from source space vertices
    vert_in_src = np.intersect1d(label.vertices, src[0]['vertno'], True)
    where = np.in1d(label.vertices, vert_in_src)
    pos_in_src = label.pos[where]
    values_in_src = label.values[where]
    label_src = Label(vert_in_src, pos_in_src, values_in_src,
                      hemi='lh').fill(src)

    # check label vertices
    vertices_status = np.in1d(src[0]['nearest'], label.vertices)
    vertices_in = np.nonzero(vertices_status)[0]
    vertices_out = np.nonzero(np.logical_not(vertices_status))[0]
    assert_array_equal(label_src.vertices, vertices_in)
    assert_array_equal(np.in1d(vertices_out, label_src.vertices), False)

    # check values
    value_idx = np.digitize(src[0]['nearest'][vertices_in], vert_in_src, True)
    assert_array_equal(label_src.values, values_in_src[value_idx])

    # test exception
    vertices = np.append([-1], vert_in_src)
    pytest.raises(ValueError, Label(vertices, hemi='lh').fill, src)

    # test filling empty label
    label = Label([], hemi='lh')
    label.fill(src)
    assert_array_equal(label.vertices, np.array([], int))
def test_label_addition():
    """Test label addition."""
    pos = np.random.rand(10, 3)
    values = np.arange(10.) / 10
    idx0 = list(range(7))
    idx1 = list(range(7, 10))  # non-overlapping
    idx2 = list(range(5, 10))  # overlapping
    l0 = Label(idx0, pos[idx0], values[idx0], 'lh', color='red')
    l1 = Label(idx1, pos[idx1], values[idx1], 'lh')
    l2 = Label(idx2, pos[idx2], values[idx2], 'lh', color=(0, 1, 0, .5))

    assert_equal(len(l0), len(idx0))

    # adding non-overlapping labels
    l01 = l0 + l1
    assert_equal(len(l01), len(l0) + len(l1))
    assert_array_equal(l01.values[:len(l0)], l0.values)
    assert_equal(l01.color, l0.color)
    # subtraction
    assert_labels_equal(l01 - l0, l1, comment=False, color=False)
    assert_labels_equal(l01 - l1, l0, comment=False, color=False)

    # adding overlapping labels
    l = l0 + l2
    i0 = np.where(l0.vertices == 6)[0][0]
    i2 = np.where(l2.vertices == 6)[0][0]
    i = np.where(l.vertices == 6)[0][0]
    assert_equal(l.values[i], l0.values[i0] + l2.values[i2])
    assert_equal(l.values[0], l0.values[0])
    assert_array_equal(np.unique(l.vertices), np.unique(idx0 + idx2))
    assert_equal(l.color, _blend_colors(l0.color, l2.color))

    # adding lh and rh
    l2.hemi = 'rh'  # this now has deprecated behavior
    bhl = l0 + l2
    assert_equal(bhl.hemi, 'both')
    assert_equal(len(bhl), len(l0) + len(l2))
    assert_equal(bhl.color, l.color)
    # subtraction
    assert_labels_equal(bhl - l0, l2)
    assert_labels_equal(bhl - l2, l0)

    bhl2 = l1 + bhl
    assert_labels_equal(bhl2.lh, l01)
    assert_equal(bhl2.color, _blend_colors(l1.color, bhl.color))
    # subtraction
    bhl_ = bhl2 - l1
    assert_labels_equal(bhl_.lh, bhl.lh, comment=False, color=False)
    assert_labels_equal(bhl_.rh, bhl.rh)
    assert_labels_equal(bhl2 - l2, l0 + l1)
    assert_labels_equal(bhl2 - l1 - l0, l2)
def test_label_center_of_mass():
    """Test computing the center of mass of a label."""
    stc = read_source_estimate(stc_fname)
    stc.lh_data[:] = 0
    vertex_stc = stc.center_of_mass('sample', subjects_dir=subjects_dir)[0]
    assert_equal(vertex_stc, 124791)
    label = Label(stc.vertices[1], pos=None, values=stc.rh_data.mean(axis=1),
                  hemi='rh', subject='sample')
    vertex_label = label.center_of_mass(subjects_dir=subjects_dir)
    assert_equal(vertex_label, vertex_stc)

    labels = read_labels_from_annot('sample', parc='aparc.a2009s',
                                    subjects_dir=subjects_dir)
    src = read_source_spaces(src_fname)
    # Try a couple of random ones, one from left and one from right
    # Visually verified in about the right place using mne_analyze
    for label, expected in zip([labels[2], labels[3], labels[-5]],
                               [141162, 145221, 55979]):
        label.values[:] = -1
        pytest.raises(ValueError, label.center_of_mass,
                      subjects_dir=subjects_dir)
        label.values[:] = 0
        pytest.raises(ValueError, label.center_of_mass,
                      subjects_dir=subjects_dir)
        label.values[:] = 1
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir),
                     expected)
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir,
                                          restrict_vertices=label.vertices),
                     expected)
        # restrict to source space
        idx = 0 if label.hemi == 'lh' else 1
        # this simple nearest version is not equivalent, but is probably
        # close enough for many labels (including the test ones):
        pos = label.pos[np.where(label.vertices == expected)[0][0]]
        pos = (src[idx]['rr'][src[idx]['vertno']] - pos)
        pos = np.argmin(np.sum(pos * pos, axis=1))
        src_expected = src[idx]['vertno'][pos]
        # see if we actually get the same one
        src_restrict = np.intersect1d(label.vertices, src[idx]['vertno'])
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir,
                                          restrict_vertices=src_restrict),
                     src_expected)
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir,
                                          restrict_vertices=src),
                     src_expected)
    # degenerate cases
    pytest.raises(ValueError, label.center_of_mass,
                  subjects_dir=subjects_dir, restrict_vertices='foo')
    pytest.raises(TypeError, label.center_of_mass,
                  subjects_dir=subjects_dir, surf=1)
    pytest.raises(IOError, label.center_of_mass,
                  subjects_dir=subjects_dir, surf='foo')
def test_label_sign_flip():
    """Test label sign flip computation."""
    src = read_source_spaces(src_fname)
    label = Label(vertices=src[0]['vertno'][:5], hemi='lh')
    src[0]['nn'][label.vertices] = np.array(
        [[1., 0., 0.],
         [0., 1., 0.],
         [0, 0, 1.],
         [1. / np.sqrt(2), 1. / np.sqrt(2), 0.],
         [1. / np.sqrt(2), 1. / np.sqrt(2), 0.]])
    known_flips = np.array([1, 1, np.nan, 1, 1])
    idx = [0, 1, 3, 4]  # indices that are usable (third row is orthogonal)
    flip = label_sign_flip(label, src)
    # Need the abs here because the direction is arbitrary
    assert_array_almost_equal(np.abs(np.dot(flip[idx], known_flips[idx])),
                              len(idx))
def test_label_fill_restrict(fname):
    """Test label in fill and restrict."""
    src = read_source_spaces(src_fname)
    label = read_label(fname)

    # construct label from source space vertices
    label_src = label.restrict(src)
    vert_in_src = label_src.vertices
    values_in_src = label_src.values
    if check_version('scipy', '1.3') and fname == real_label_fname:
        # Check that we can auto-fill patch info quickly for one condition
        for s in src:
            s['nearest'] = None
        with pytest.warns(None):
            label_src = label_src.fill(src)
    else:
        label_src = label_src.fill(src)
    assert src[0]['nearest'] is not None

    # check label vertices
    vertices_status = np.in1d(src[0]['nearest'], label.vertices)
    vertices_in = np.nonzero(vertices_status)[0]
    vertices_out = np.nonzero(np.logical_not(vertices_status))[0]
    assert_array_equal(label_src.vertices, vertices_in)
    assert_array_equal(np.in1d(vertices_out, label_src.vertices), False)

    # check values
    value_idx = np.digitize(src[0]['nearest'][vertices_in], vert_in_src, True)
    assert_array_equal(label_src.values, values_in_src[value_idx])

    # test exception
    vertices = np.append([-1], vert_in_src)
    with pytest.raises(ValueError, match='does not contain all of the label'):
        Label(vertices, hemi='lh').fill(src)

    # test filling empty label
    label = Label([], hemi='lh')
    label.fill(src)
    assert_array_equal(label.vertices, np.array([], int))
def test_label_sign_flip():
    """Test label sign flip computation."""
    src = read_source_spaces(src_fname)
    label = Label(vertices=src[0]['vertno'][:5], hemi='lh')
    src[0]['nn'][label.vertices] = np.array(
        [[1., 0., 0.],
         [0., 1., 0.],
         [0, 0, 1.],
         [1. / np.sqrt(2), 1. / np.sqrt(2), 0.],
         [1. / np.sqrt(2), 1. / np.sqrt(2), 0.]])
    known_flips = np.array([1, 1, np.nan, 1, 1])
    idx = [0, 1, 3, 4]  # indices that are usable (third row is orthogonal)
    flip = label_sign_flip(label, src)
    assert_array_almost_equal(np.dot(flip[idx], known_flips[idx]), len(idx))
    bi_label = label + Label(vertices=src[1]['vertno'][:5], hemi='rh')
    src[1]['nn'][src[1]['vertno'][:5]] = -src[0]['nn'][label.vertices]
    flip = label_sign_flip(bi_label, src)
    known_flips = np.array([1, 1, np.nan, 1, 1, 1, 1, np.nan, 1, 1])
    idx = [0, 1, 3, 4, 5, 6, 8, 9]
    assert_array_almost_equal(np.dot(flip[idx], known_flips[idx]), 0.)
    src[1]['nn'][src[1]['vertno'][:5]] *= -1
    flip = label_sign_flip(bi_label, src)
    assert_array_almost_equal(np.dot(flip[idx], known_flips[idx]), len(idx))
def test_annot_io():
    """Test I/O from and to *.annot files."""
    # copy necessary files from fsaverage to tempdir
    tempdir = _TempDir()
    subject = 'fsaverage'
    label_src = os.path.join(subjects_dir, 'fsaverage', 'label')
    surf_src = os.path.join(subjects_dir, 'fsaverage', 'surf')
    label_dir = os.path.join(tempdir, subject, 'label')
    surf_dir = os.path.join(tempdir, subject, 'surf')
    os.makedirs(label_dir)
    os.mkdir(surf_dir)
    shutil.copy(os.path.join(label_src, 'lh.PALS_B12_Lobes.annot'), label_dir)
    shutil.copy(os.path.join(label_src, 'rh.PALS_B12_Lobes.annot'), label_dir)
    shutil.copy(os.path.join(surf_src, 'lh.white'), surf_dir)
    shutil.copy(os.path.join(surf_src, 'rh.white'), surf_dir)

    # read original labels
    with pytest.raises(IOError, match='\nPALS_B12_Lobes$'):
        read_labels_from_annot(subject, 'PALS_B12_Lobesey',
                               subjects_dir=tempdir)
    labels = read_labels_from_annot(subject, 'PALS_B12_Lobes',
                                    subjects_dir=tempdir)

    # test saving parcellation only covering one hemisphere
    parc = [label for label in labels if label.name == 'LOBE.TEMPORAL-lh']
    write_labels_to_annot(parc, subject, 'myparc', subjects_dir=tempdir)
    parc1 = read_labels_from_annot(subject, 'myparc', subjects_dir=tempdir)
    parc1 = [label for label in parc1 if not label.name.startswith('unknown')]
    assert_equal(len(parc1), len(parc))
    for lt, rt in zip(parc1, parc):
        assert_labels_equal(lt, rt)

    # test saving only one hemisphere
    parc = [label for label in labels if label.name.startswith('LOBE')]
    write_labels_to_annot(parc, subject, 'myparc2', hemi='lh',
                          subjects_dir=tempdir)
    annot_fname = os.path.join(tempdir, subject, 'label', '%sh.myparc2.annot')
    assert os.path.isfile(annot_fname % 'l')
    assert not os.path.isfile(annot_fname % 'r')
    parc1 = read_labels_from_annot(subject, 'myparc2',
                                   annot_fname=annot_fname % 'l',
                                   subjects_dir=tempdir)
    parc_lh = [label for label in parc if label.name.endswith('lh')]
    for lt, rt in zip(parc1, parc_lh):
        assert_labels_equal(lt, rt)

    # test that the annotation is complete (test Label() support)
    rr = read_surface(op.join(surf_dir, 'lh.white'))[0]
    label = sum(labels, Label(hemi='lh', subject='fsaverage')).lh
    assert_array_equal(label.vertices, np.arange(len(rr)))
def test_label_addition():
    """Test label addition."""
    pos = np.random.rand(10, 3)
    values = np.arange(10.) / 10
    # use lists so that concatenation (idx0 + idx2) works under Python 3
    idx0 = list(range(7))
    idx1 = list(range(7, 10))  # non-overlapping
    idx2 = list(range(5, 10))  # overlapping
    l0 = Label(idx0, pos[idx0], values[idx0], 'lh')
    l1 = Label(idx1, pos[idx1], values[idx1], 'lh')
    l2 = Label(idx2, pos[idx2], values[idx2], 'lh')

    assert len(l0) == len(idx0)

    # adding non-overlapping labels
    l01 = l0 + l1
    assert len(l01) == len(l0) + len(l1)
    assert_array_equal(l01.values[:len(l0)], l0.values)

    # adding overlapping labels
    l = l0 + l2
    i0 = np.where(l0.vertices == 6)[0][0]
    i2 = np.where(l2.vertices == 6)[0][0]
    i = np.where(l.vertices == 6)[0][0]
    assert l.values[i] == l0.values[i0] + l2.values[i2]
    assert l.values[0] == l0.values[0]
    assert_array_equal(np.unique(l.vertices), np.unique(idx0 + idx2))

    # adding lh and rh
    l2.hemi = 'rh'  # this now has deprecated behavior
    bhl = l0 + l2
    assert bhl.hemi == 'both'
    assert len(bhl) == len(l0) + len(l2)

    bhl = l1 + bhl
    assert_labels_equal(bhl.lh, l01)
def test_generate_stc_single_hemi(_get_fwd_labels):
    """Test generation of source estimate, single hemi."""
    fwd, labels = _get_fwd_labels
    labels_single_hemi = labels[1:]  # keep only labels in one hemisphere

    mylabels = []
    for i, label in enumerate(labels_single_hemi):
        new_label = Label(vertices=label.vertices,
                          pos=label.pos,
                          values=2 * i * np.ones(len(label.values)),
                          hemi=label.hemi,
                          comment=label.comment)
        mylabels.append(new_label)

    n_times = 10
    tmin = 0
    tstep = 1e-3

    stc_data = np.ones((len(labels_single_hemi), n_times))
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep)

    for label in labels_single_hemi:
        idx = _get_idx_label_stc(label, stc)
        assert (np.all(stc.data[idx] == 1.0))
        assert (stc.data[idx].shape[1] == n_times)

    # test with function
    def fun(x):
        return x ** 2
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep, fun)

    # the first label has value 0, the second value 2, the third value 6
    for i, label in enumerate(labels_single_hemi):
        if label.hemi == 'lh':
            hemi_idx = 0
        else:
            hemi_idx = 1
        idx = np.intersect1d(stc.vertices[hemi_idx], label.vertices)
        idx = np.searchsorted(stc.vertices[hemi_idx], idx)
        if hemi_idx == 1:
            idx += len(stc.vertices[0])
        res = ((2. * i) ** 2.) * np.ones((len(idx), n_times))
        assert_array_almost_equal(stc.data[idx], res)
def test_simulate_stc(_get_fwd_labels):
    """Test generation of source estimate."""
    fwd, labels = _get_fwd_labels
    mylabels = []
    for i, label in enumerate(labels):
        new_label = Label(vertices=label.vertices,
                          pos=label.pos,
                          values=2 * i * np.ones(len(label.values)),
                          hemi=label.hemi,
                          comment=label.comment)
        mylabels.append(new_label)

    n_times = 10
    tmin = 0
    tstep = 1e-3

    stc_data = np.ones((len(labels), n_times))
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep)
    assert_equal(stc.subject, 'sample')

    for label in labels:
        idx = _get_idx_label_stc(label, stc)
        assert (np.all(stc.data[idx] == 1.0))
        assert (stc.data[idx].shape[1] == n_times)

    # test with function
    def fun(x):
        return x ** 2
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep, fun)

    # the first label has value 0, the second value 2, the third value 6
    for i, label in enumerate(labels):
        idx = _get_idx_label_stc(label, stc)
        res = ((2. * i) ** 2.) * np.ones((len(idx), n_times))
        assert_array_almost_equal(stc.data[idx], res)

    # degenerate conditions
    label_subset = mylabels[:2]
    data_subset = stc_data[:2]
    stc = simulate_stc(fwd['src'], label_subset, data_subset, tmin, tstep, fun)

    pytest.raises(ValueError, simulate_stc, fwd['src'],
                  label_subset, data_subset[:-1], tmin, tstep, fun)
    pytest.raises(RuntimeError, simulate_stc, fwd['src'], label_subset * 2,
                  np.concatenate([data_subset] * 2, axis=0), tmin, tstep, fun)
def test_simulate_stc_labels_overlap():
    """Test generation of source estimate, overlapping labels."""
    fwd = read_forward_solution_meg(fname_fwd, force_fixed=True, use_cps=True)
    labels = [read_label(op.join(data_path, 'MEG', 'sample', 'labels',
                                 '%s.label' % label))
              for label in label_names]
    mylabels = []
    for i, label in enumerate(labels):
        new_label = Label(vertices=label.vertices,
                          pos=label.pos,
                          values=2 * i * np.ones(len(label.values)),
                          hemi=label.hemi,
                          comment=label.comment)
        mylabels.append(new_label)
    # Adding the last label twice
    mylabels.append(new_label)

    n_times = 10
    tmin = 0
    tstep = 1e-3

    stc_data = np.ones((len(mylabels), n_times))

    # Test False
    with pytest.raises(RuntimeError, match='must be non-overlapping'):
        simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep,
                     allow_overlap=False)
    # test True
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep,
                       allow_overlap=True)
    assert_equal(stc.subject, 'sample')
    assert (stc.data.shape[1] == n_times)
    # Some of the elements should be equal to 2 since we have duplicate labels
    assert (2 in stc.data)
def test_simulate_stc_labels_overlap(_get_fwd_labels):
    """Test generation of source estimate, overlapping labels."""
    fwd, labels = _get_fwd_labels
    mylabels = []
    for i, label in enumerate(labels):
        new_label = Label(vertices=label.vertices,
                          pos=label.pos,
                          values=2 * i * np.ones(len(label.values)),
                          hemi=label.hemi,
                          comment=label.comment)
        mylabels.append(new_label)
    # Adding the last label twice
    mylabels.append(new_label)

    n_times = 10
    tmin = 0
    tstep = 1e-3

    stc_data = np.ones((len(mylabels), n_times))

    # Test False
    with pytest.raises(RuntimeError, match='must be non-overlapping'):
        simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep,
                     allow_overlap=False)
    # test True
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep,
                       allow_overlap=True)
    assert_equal(stc.subject, 'sample')
    assert (stc.data.shape[1] == n_times)
    # Some of the elements should be equal to 2 since we have duplicate labels
    assert (2 in stc.data)
def test_label_center_of_mass():
    """Test computing the center of mass of a label."""
    stc = read_source_estimate(stc_fname)
    stc.lh_data[:] = 0
    vertex_stc = stc.center_of_mass('sample', subjects_dir=subjects_dir)[0]
    assert_equal(vertex_stc, 124791)
    label = Label(stc.vertices[1], pos=None, values=stc.rh_data.mean(axis=1),
                  hemi='rh', subject='sample')
    vertex_label = label.center_of_mass(subjects_dir=subjects_dir)
    assert_equal(vertex_label, vertex_stc)

    labels = read_labels_from_annot('sample', parc='aparc.a2009s',
                                    subjects_dir=subjects_dir)
    src = read_source_spaces(src_fname)
    # Try a couple of random ones, one from left and one from right
    # Visually verified in about the right place using mne_analyze
    for label, expected in zip([labels[2], labels[3], labels[-5]],
                               [141162, 145221, 55979]):
        label.values[:] = -1
        assert_raises(ValueError, label.center_of_mass,
                      subjects_dir=subjects_dir)
        label.values[:] = 0
        assert_raises(ValueError, label.center_of_mass,
                      subjects_dir=subjects_dir)
        label.values[:] = 1
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir),
                     expected)
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir,
                                          restrict_vertices=label.vertices),
                     expected)
        # restrict to source space
        idx = 0 if label.hemi == 'lh' else 1
        # this simple nearest version is not equivalent, but is probably
        # close enough for many labels (including the test ones):
        pos = label.pos[np.where(label.vertices == expected)[0][0]]
        pos = (src[idx]['rr'][src[idx]['vertno']] - pos)
        pos = np.argmin(np.sum(pos * pos, axis=1))
        src_expected = src[idx]['vertno'][pos]
        # see if we actually get the same one
        src_restrict = np.intersect1d(label.vertices, src[idx]['vertno'])
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir,
                                          restrict_vertices=src_restrict),
                     src_expected)
        assert_equal(label.center_of_mass(subjects_dir=subjects_dir,
                                          restrict_vertices=src),
                     src_expected)
    # degenerate cases
    assert_raises(ValueError, label.center_of_mass,
                  subjects_dir=subjects_dir, restrict_vertices='foo')
    assert_raises(TypeError, label.center_of_mass,
                  subjects_dir=subjects_dir, surf=1)
    assert_raises(IOError, label.center_of_mass,
                  subjects_dir=subjects_dir, surf='foo')
def _stc_to_label(stc, src, smooth, subjects_dir=None):
    """Compute a label from the non-zero sources in an stc object.

    Parameters
    ----------
    stc : SourceEstimate
        The source estimates.
    src : SourceSpaces | str | None
        The source space over which the source estimates are defined.
        If it's a string it should be the subject name (e.g. fsaverage).
        Can be None if stc.subject is not None.
    smooth : int
        Number of smoothing iterations.
    subjects_dir : str | None
        Path to SUBJECTS_DIR if it is not set in the environment.

    Returns
    -------
    labels : list of Labels | list of list of Labels
        The generated labels. If connected is False, it returns
        a list of Labels (one per hemisphere). If no Label is available
        in a hemisphere, None is returned. If connected is True,
        it returns for each hemisphere a list of connected labels
        ordered in decreasing order depending on the maximum value in
        the stc. If no Label is available in a hemisphere, an empty list
        is returned.
    """
    src = stc.subject if src is None else src

    if isinstance(src, string_types):
        subject = src
    else:
        subject = stc.subject

    if isinstance(src, string_types):
        subjects_dir = get_subjects_dir(subjects_dir)
        surf_path_from = op.join(subjects_dir, src, 'surf')
        rr_lh, tris_lh = read_surface(op.join(surf_path_from, 'lh.white'))
        rr_rh, tris_rh = read_surface(op.join(surf_path_from, 'rh.white'))
        rr = [rr_lh, rr_rh]
        tris = [tris_lh, tris_rh]
    else:
        if not isinstance(src, SourceSpaces):
            raise TypeError('src must be a string or a set of source spaces')
        if len(src) != 2:
            raise ValueError('source space should contain the 2 hemispheres')
        rr = [1e3 * src[0]['rr'], 1e3 * src[1]['rr']]
        tris = [src[0]['tris'], src[1]['tris']]

    labels = []
    cnt = 0
    for hemi_idx, (hemi, this_vertno, this_tris, this_rr) in enumerate(
            zip(['lh', 'rh'], stc.vertices, tris, rr)):
        this_data = stc.data[cnt:cnt + len(this_vertno)]
        e = mesh_edges(this_tris)
        e.data[e.data == 2] = 1
        n_vertices = e.shape[0]
        e = e + sparse.eye(n_vertices, n_vertices)

        clusters = [this_vertno[np.any(this_data, axis=1)]]

        cnt += len(this_vertno)

        clusters = [c for c in clusters if len(c) > 0]

        if len(clusters) == 0:
            this_labels = None
        else:
            this_labels = []
            colors = _n_colors(len(clusters))
            for c, color in zip(clusters, colors):
                idx_use = c
                for k in range(smooth):
                    e_use = e[:, idx_use]
                    data1 = e_use * np.ones(len(idx_use))
                    idx_use = np.where(data1)[0]

                label = Label(idx_use, this_rr[idx_use], None, hemi,
                              'Label from stc', subject=subject,
                              color=color)

                this_labels.append(label)

            this_labels = this_labels[0]

        labels.append(this_labels)

    return labels
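# Hedged usage sketch for _stc_to_label (not part of the original code): it
# assumes the module-level `stc_fname` and `subjects_dir` names used elsewhere
# in this file. The helper returns one Label per hemisphere, built from the
# non-zero vertices of the source estimate, or None for a hemisphere without
# any non-zero sources.
def _example_stc_to_label_usage():
    stc = read_source_estimate(stc_fname)
    lh_label, rh_label = _stc_to_label(stc, src='sample', smooth=2,
                                       subjects_dir=subjects_dir)
    return lh_label, rh_label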
def test_label_addition():
    """Test label addition."""
    pos = np.random.RandomState(0).rand(10, 3)
    values = np.arange(10.) / 10
    idx0 = list(range(7))
    idx1 = list(range(7, 10))  # non-overlapping
    idx2 = list(range(5, 10))  # overlapping
    l0 = Label(idx0, pos[idx0], values[idx0], 'lh', color='red')
    l1 = Label(idx1, pos[idx1], values[idx1], 'lh')
    l2 = Label(idx2, pos[idx2], values[idx2], 'lh', color=(0, 1, 0, .5))

    assert_equal(len(l0), len(idx0))

    l_good = l0.copy()
    l_good.subject = 'sample'
    l_bad = l1.copy()
    l_bad.subject = 'foo'
    assert_raises(ValueError, l_good.__add__, l_bad)
    assert_raises(TypeError, l_good.__add__, 'foo')
    assert_raises(ValueError, l_good.__sub__, l_bad)
    assert_raises(TypeError, l_good.__sub__, 'foo')

    # adding non-overlapping labels
    l01 = l0 + l1
    assert_equal(len(l01), len(l0) + len(l1))
    assert_array_equal(l01.values[:len(l0)], l0.values)
    assert_equal(l01.color, l0.color)
    # subtraction
    assert_labels_equal(l01 - l0, l1, comment=False, color=False)
    assert_labels_equal(l01 - l1, l0, comment=False, color=False)

    # adding overlapping labels
    l = l0 + l2
    i0 = np.where(l0.vertices == 6)[0][0]
    i2 = np.where(l2.vertices == 6)[0][0]
    i = np.where(l.vertices == 6)[0][0]
    assert_equal(l.values[i], l0.values[i0] + l2.values[i2])
    assert_equal(l.values[0], l0.values[0])
    assert_array_equal(np.unique(l.vertices), np.unique(idx0 + idx2))
    assert_equal(l.color, _blend_colors(l0.color, l2.color))

    # adding lh and rh
    l2.hemi = 'rh'

    bhl = l0 + l2
    assert_equal(bhl.hemi, 'both')
    assert_equal(len(bhl), len(l0) + len(l2))
    assert_equal(bhl.color, l.color)
    assert_true('BiHemiLabel' in repr(bhl))
    # subtraction
    assert_labels_equal(bhl - l0, l2)
    assert_labels_equal(bhl - l2, l0)

    bhl2 = l1 + bhl
    assert_labels_equal(bhl2.lh, l01)
    assert_equal(bhl2.color, _blend_colors(l1.color, bhl.color))
    assert_array_equal((l2 + bhl).rh.vertices, bhl.rh.vertices)  # rh label
    assert_array_equal((bhl + bhl).lh.vertices, bhl.lh.vertices)
    assert_raises(TypeError, bhl.__add__, 5)

    # subtraction
    bhl_ = bhl2 - l1
    assert_labels_equal(bhl_.lh, bhl.lh, comment=False, color=False)
    assert_labels_equal(bhl_.rh, bhl.rh)
    assert_labels_equal(bhl2 - l2, l0 + l1)
    assert_labels_equal(bhl2 - l1 - l0, l2)

    bhl_ = bhl2 - bhl2
    assert_array_equal(bhl_.vertices, [])
def test_source_simulator(_get_fwd_labels):
    """Test Source Simulator."""
    fwd, _ = _get_fwd_labels

    src = fwd['src']
    hemi_to_ind = {'lh': 0, 'rh': 1}
    tstep = 1. / 6.

    label_vertices = [[], [], []]
    label_vertices[0] = np.arange(1000)
    label_vertices[1] = np.arange(500, 1500)
    label_vertices[2] = np.arange(1000)

    hemis = ['lh', 'lh', 'rh']

    mylabels = []
    src_vertices = []
    for i, vert in enumerate(label_vertices):
        new_label = Label(vertices=vert, hemi=hemis[i])
        mylabels.append(new_label)
        src_vertices.append(np.intersect1d(
            src[hemi_to_ind[hemis[i]]]['vertno'],
            new_label.vertices))

    wfs = [[], [], []]
    wfs[0] = np.array([0, 1., 0])  # 1d array
    wfs[1] = [np.array([0, 1., 0]),  # list
              np.array([0, 1.5, 0])]
    wfs[2] = np.array([[1, 1, 1.]])  # 2d array

    events = [[], [], []]
    events[0] = np.array([[0, 0, 1], [3, 0, 1]])
    events[1] = np.array([[0, 0, 1], [3, 0, 1]])
    events[2] = np.array([[0, 0, 1], [2, 0, 1]])

    verts_lh = np.intersect1d(range(1500), src[0]['vertno'])
    verts_rh = np.intersect1d(range(1000), src[1]['vertno'])
    diff_01 = len(np.setdiff1d(src_vertices[0], src_vertices[1]))
    diff_10 = len(np.setdiff1d(src_vertices[1], src_vertices[0]))
    inter_10 = len(np.intersect1d(src_vertices[1], src_vertices[0]))

    output_data_lh = np.zeros([len(verts_lh), 6])
    tmp = np.array([0, 1., 0, 0, 1, 0])
    output_data_lh[:diff_01, :] = np.tile(tmp, (diff_01, 1))

    tmp = np.array([0, 2, 0, 0, 2.5, 0])
    output_data_lh[diff_01:diff_01 + inter_10, :] = np.tile(tmp, (inter_10, 1))
    tmp = np.array([0, 1, 0, 0, 1.5, 0])
    output_data_lh[diff_01 + inter_10:, :] = np.tile(tmp, (diff_10, 1))

    data_rh_wf = np.array([1., 1, 2, 1, 1, 0])
    output_data_rh = np.tile(data_rh_wf, (len(src_vertices[2]), 1))
    output_data = np.vstack([output_data_lh, output_data_rh])

    ss = SourceSimulator(src, tstep)
    for i in range(3):
        ss.add_data(mylabels[i], wfs[i], events[i])

    stc = ss.get_stc()
    stim_channel = ss.get_stim_channel()

    # Stim channel data must have the same size as stc time samples
    assert len(stim_channel) == stc.data.shape[1]

    stim_channel = ss.get_stim_channel(0, 0)
    assert len(stim_channel) == 0

    assert np.all(stc.vertices[0] == verts_lh)
    assert np.all(stc.vertices[1] == verts_rh)
    assert_array_almost_equal(stc.lh_data, output_data_lh)
    assert_array_almost_equal(stc.rh_data, output_data_rh)
    assert_array_almost_equal(stc.data, output_data)

    counter = 0
    for stc, stim in ss:
        assert stc.data.shape[1] == 6
        counter += 1
    assert counter == 1

    half_ss = SourceSimulator(src, tstep, duration=0.5)
    for i in range(3):
        half_ss.add_data(mylabels[i], wfs[i], events[i])
    half_stc = half_ss.get_stc()
    assert_array_almost_equal(stc.data[:, :3], half_stc.data)

    ss = SourceSimulator(src)
    with pytest.raises(ValueError, match='No simulation parameters'):
        ss.get_stc()
    with pytest.raises(ValueError, match='label must be a Label'):
        ss.add_data(1, wfs, events)
    with pytest.raises(ValueError, match='Number of waveforms and events '
                       'should match'):
        ss.add_data(mylabels[0], wfs[:2], events)

    # Verify that the chunks have the correct length.
    source_simulator = SourceSimulator(src, tstep=tstep, duration=10 * tstep)
    source_simulator.add_data(mylabels[0], np.array([1, 1, 1]), [[0, 0, 0]])

    source_simulator._chk_duration = 6  # Quick hack to get short chunks.
    stcs = [stc for stc, _ in source_simulator]
    assert len(stcs) == 2
    assert stcs[0].data.shape[1] == 6
    assert stcs[1].data.shape[1] == 4
def test_label_addition():
    """Test label addition."""
    pos = np.random.RandomState(0).rand(10, 3)
    values = np.arange(10.) / 10
    idx0 = list(range(7))
    idx1 = list(range(7, 10))  # non-overlapping
    idx2 = list(range(5, 10))  # overlapping
    l0 = Label(idx0, pos[idx0], values[idx0], 'lh', color='red')
    l1 = Label(idx1, pos[idx1], values[idx1], 'lh')
    l2 = Label(idx2, pos[idx2], values[idx2], 'lh', color=(0, 1, 0, .5))

    assert_equal(len(l0), len(idx0))

    l_good = l0.copy()
    l_good.subject = 'sample'
    l_bad = l1.copy()
    l_bad.subject = 'foo'
    assert_raises(ValueError, l_good.__add__, l_bad)
    assert_raises(TypeError, l_good.__add__, 'foo')
    assert_raises(ValueError, l_good.__sub__, l_bad)
    assert_raises(TypeError, l_good.__sub__, 'foo')

    # adding non-overlapping labels
    l01 = l0 + l1
    assert_equal(len(l01), len(l0) + len(l1))
    assert_array_equal(l01.values[:len(l0)], l0.values)
    assert_equal(l01.color, l0.color)
    # subtraction
    assert_labels_equal(l01 - l0, l1, comment=False, color=False)
    assert_labels_equal(l01 - l1, l0, comment=False, color=False)

    # adding overlapping labels
    l = l0 + l2
    i0 = np.where(l0.vertices == 6)[0][0]
    i2 = np.where(l2.vertices == 6)[0][0]
    i = np.where(l.vertices == 6)[0][0]
    assert_equal(l.values[i], l0.values[i0] + l2.values[i2])
    assert_equal(l.values[0], l0.values[0])
    assert_array_equal(np.unique(l.vertices), np.unique(idx0 + idx2))
    assert_equal(l.color, _blend_colors(l0.color, l2.color))

    # adding lh and rh
    l2.hemi = 'rh'  # this now has deprecated behavior

    bhl = l0 + l2
    assert_equal(bhl.hemi, 'both')
    assert_equal(len(bhl), len(l0) + len(l2))
    assert_equal(bhl.color, l.color)
    assert_true('BiHemiLabel' in repr(bhl))
    # subtraction
    assert_labels_equal(bhl - l0, l2)
    assert_labels_equal(bhl - l2, l0)

    bhl2 = l1 + bhl
    assert_labels_equal(bhl2.lh, l01)
    assert_equal(bhl2.color, _blend_colors(l1.color, bhl.color))
    assert_array_equal((l2 + bhl).rh.vertices, bhl.rh.vertices)  # rh label
    assert_array_equal((bhl + bhl).lh.vertices, bhl.lh.vertices)
    assert_raises(TypeError, bhl.__add__, 5)

    # subtraction
    bhl_ = bhl2 - l1
    assert_labels_equal(bhl_.lh, bhl.lh, comment=False, color=False)
    assert_labels_equal(bhl_.rh, bhl.rh)
    assert_labels_equal(bhl2 - l2, l0 + l1)
    assert_labels_equal(bhl2 - l1 - l0, l2)

    bhl_ = bhl2 - bhl2
    assert_array_equal(bhl_.vertices, [])
def labels_from_clusters(clusters, names=None):
    """Create Labels from source space clusters.

    Parameters
    ----------
    clusters : NDVar
        NDVar which is non-zero on the cluster. Can have a case dimension
        to define multiple labels (one label per case).
    names : None | list of str | str
        Label names corresponding to clusters (default is "cluster%i").

    Returns
    -------
    labels : list of mne.Label
        One label for each cluster.

    See Also
    --------
    NDVar.label_clusters : clusters from thresholding data
    """
    from mne.label import _n_colors

    if isinstance(names, str):
        names = [names]

    source = clusters.source
    source_space = clusters.source.get_source_space()
    subject = source.subject
    collapse = tuple(dim for dim in clusters.dimnames
                     if dim not in ('case', 'source'))
    if collapse:
        clusters_index = clusters.any(collapse)
    else:
        clusters_index = clusters != 0

    if clusters_index.has_case:
        n_clusters = len(clusters)
    else:
        n_clusters = 1
        clusters_index = (clusters_index,)

    if names is None:
        names = ("cluster%i" % i for i in range(n_clusters))
    elif len(names) != n_clusters:
        err = "Number of names differs from the number of clusters."
        raise ValueError(err)

    colors = _n_colors(n_clusters)
    labels = []
    for cluster, color, name in zip(clusters_index, colors, names):
        lh_vertices = source.lh_vertices[cluster.x[:source.lh_n]]
        rh_vertices = source.rh_vertices[cluster.x[source.lh_n:]]
        if len(lh_vertices) and len(rh_vertices):
            lh = Label(lh_vertices, hemi='lh', name=name + '-lh',
                       subject=subject, color=color).fill(source_space)
            rh = Label(rh_vertices, hemi='rh', name=name + '-rh',
                       subject=subject, color=color).fill(source_space)
            label = BiHemiLabel(lh, rh, name, color)
        elif len(lh_vertices):
            label = Label(lh_vertices, hemi='lh', name=name + '-lh',
                          subject=subject, color=color).fill(source_space)
        elif len(rh_vertices):
            # right-hemisphere-only labels get the '-rh' suffix
            label = Label(rh_vertices, hemi='rh', name=name + '-rh',
                          subject=subject, color=color).fill(source_space)
        else:
            raise ValueError("Empty Cluster")
        labels.append(label)

    return labels
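# Hedged usage sketch for labels_from_clusters (illustrative only, not from
# the original code): assuming `clusters` is an Eelbrain NDVar returned by a
# source-space cluster test (non-zero on the significant clusters), one
# mne.Label / BiHemiLabel per cluster can be built and inspected like this:
#
#     cluster_labels = labels_from_clusters(clusters)
#     for lbl in cluster_labels:
#         print(lbl.name)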
def test_generate_stc():
    """Test generation of source estimate."""
    fwd = read_forward_solution_meg(fname_fwd, force_fixed=True)
    labels = [read_label(op.join(data_path, 'MEG', 'sample', 'labels',
                                 '%s.label' % label))
              for label in label_names]
    mylabels = []
    for i, label in enumerate(labels):
        new_label = Label(vertices=label.vertices,
                          pos=label.pos,
                          values=2 * i * np.ones(len(label.values)),
                          hemi=label.hemi,
                          comment=label.comment)
        mylabels.append(new_label)

    n_times = 10
    tmin = 0
    tstep = 1e-3

    stc_data = np.ones((len(labels), n_times))
    stc = generate_stc(fwd['src'], mylabels, stc_data, tmin, tstep)

    for label in labels:
        if label.hemi == 'lh':
            hemi_idx = 0
        else:
            hemi_idx = 1
        idx = np.intersect1d(stc.vertices[hemi_idx], label.vertices)
        idx = np.searchsorted(stc.vertices[hemi_idx], idx)
        if hemi_idx == 1:
            idx += len(stc.vertices[0])
        assert_true(np.all(stc.data[idx] == 1.0))
        assert_true(stc.data[idx].shape[1] == n_times)

    # test with function
    def fun(x):
        return x ** 2
    stc = generate_stc(fwd['src'], mylabels, stc_data, tmin, tstep, fun)

    # the first label has value 0, the second value 2, the third value 6
    for i, label in enumerate(labels):
        if label.hemi == 'lh':
            hemi_idx = 0
        else:
            hemi_idx = 1
        idx = np.intersect1d(stc.vertices[hemi_idx], label.vertices)
        idx = np.searchsorted(stc.vertices[hemi_idx], idx)
        if hemi_idx == 1:
            idx += len(stc.vertices[0])
        res = ((2. * i) ** 2.) * np.ones((len(idx), n_times))
        assert_array_almost_equal(stc.data[idx], res)
def test_simulate_stc():
    """Test generation of source estimate."""
    fwd = read_forward_solution_meg(fname_fwd, force_fixed=True, use_cps=True)
    labels = [read_label(op.join(data_path, 'MEG', 'sample', 'labels',
                                 '%s.label' % label))
              for label in label_names]
    mylabels = []
    for i, label in enumerate(labels):
        new_label = Label(vertices=label.vertices,
                          pos=label.pos,
                          values=2 * i * np.ones(len(label.values)),
                          hemi=label.hemi,
                          comment=label.comment)
        mylabels.append(new_label)

    n_times = 10
    tmin = 0
    tstep = 1e-3

    stc_data = np.ones((len(labels), n_times))
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep)
    assert_equal(stc.subject, 'sample')

    for label in labels:
        if label.hemi == 'lh':
            hemi_idx = 0
        else:
            hemi_idx = 1
        idx = np.intersect1d(stc.vertices[hemi_idx], label.vertices)
        idx = np.searchsorted(stc.vertices[hemi_idx], idx)
        if hemi_idx == 1:
            idx += len(stc.vertices[0])
        assert (np.all(stc.data[idx] == 1.0))
        assert (stc.data[idx].shape[1] == n_times)

    # test with function
    def fun(x):
        return x ** 2
    stc = simulate_stc(fwd['src'], mylabels, stc_data, tmin, tstep, fun)

    # the first label has value 0, the second value 2, the third value 6
    for i, label in enumerate(labels):
        if label.hemi == 'lh':
            hemi_idx = 0
        else:
            hemi_idx = 1
        idx = np.intersect1d(stc.vertices[hemi_idx], label.vertices)
        idx = np.searchsorted(stc.vertices[hemi_idx], idx)
        if hemi_idx == 1:
            idx += len(stc.vertices[0])
        res = ((2. * i) ** 2.) * np.ones((len(idx), n_times))
        assert_array_almost_equal(stc.data[idx], res)

    # degenerate conditions
    label_subset = mylabels[:2]
    data_subset = stc_data[:2]
    stc = simulate_stc(fwd['src'], label_subset, data_subset, tmin, tstep, fun)

    pytest.raises(ValueError, simulate_stc, fwd['src'],
                  label_subset, data_subset[:-1], tmin, tstep, fun)
    pytest.raises(RuntimeError, simulate_stc, fwd['src'], label_subset * 2,
                  np.concatenate([data_subset] * 2, axis=0), tmin, tstep, fun)
def test_source_simulator(_get_fwd_labels):
    """Test Source Simulator."""
    fwd, _ = _get_fwd_labels

    src = fwd['src']
    hemi_to_ind = {'lh': 0, 'rh': 1}
    tstep = 1. / 6.

    label_vertices = [[], [], []]
    label_vertices[0] = np.arange(1000)
    label_vertices[1] = np.arange(500, 1500)
    label_vertices[2] = np.arange(1000)

    hemis = ['lh', 'lh', 'rh']

    mylabels = []
    src_vertices = []
    for i, vert in enumerate(label_vertices):
        new_label = Label(vertices=vert, hemi=hemis[i])
        mylabels.append(new_label)
        src_vertices.append(np.intersect1d(
            src[hemi_to_ind[hemis[i]]]['vertno'],
            new_label.vertices))

    wfs = [[], [], []]
    wfs[0] = np.array([0, 1., 0])  # 1d array
    wfs[1] = [np.array([0, 1., 0]),  # list
              np.array([0, 1.5, 0])]
    wfs[2] = np.array([[1, 1, 1.]])  # 2d array

    events = [[], [], []]
    events[0] = np.array([[0, 0, 1], [3, 0, 1]])
    events[1] = np.array([[0, 0, 1], [3, 0, 1]])
    events[2] = np.array([[0, 0, 1], [2, 0, 1]])

    verts_lh = np.intersect1d(range(1500), src[0]['vertno'])
    verts_rh = np.intersect1d(range(1000), src[1]['vertno'])
    diff_01 = len(np.setdiff1d(src_vertices[0], src_vertices[1]))
    diff_10 = len(np.setdiff1d(src_vertices[1], src_vertices[0]))
    inter_10 = len(np.intersect1d(src_vertices[1], src_vertices[0]))

    output_data_lh = np.zeros([len(verts_lh), 6])
    tmp = np.array([0, 1., 0, 0, 1, 0])
    output_data_lh[:diff_01, :] = np.tile(tmp, (diff_01, 1))

    tmp = np.array([0, 2, 0, 0, 2.5, 0])
    output_data_lh[diff_01:diff_01 + inter_10, :] = np.tile(tmp, (inter_10, 1))
    tmp = np.array([0, 1, 0, 0, 1.5, 0])
    output_data_lh[diff_01 + inter_10:, :] = np.tile(tmp, (diff_10, 1))

    data_rh_wf = np.array([1., 1, 2, 1, 1, 0])
    output_data_rh = np.tile(data_rh_wf, (len(src_vertices[2]), 1))
    output_data = np.vstack([output_data_lh, output_data_rh])

    ss = SourceSimulator(src, tstep)
    for i in range(3):
        ss.add_data(mylabels[i], wfs[i], events[i])

    stc = ss.get_stc()
    stim_channel = ss.get_stim_channel()

    # Make some size checks.
    assert ss.duration == 1.0
    assert ss.n_times == 6
    assert ss.last_samp == 5
    assert len(stim_channel) == stc.data.shape[1]

    assert np.all(stc.vertices[0] == verts_lh)
    assert np.all(stc.vertices[1] == verts_rh)
    assert_array_almost_equal(stc.lh_data, output_data_lh)
    assert_array_almost_equal(stc.rh_data, output_data_rh)
    assert_array_almost_equal(stc.data, output_data)

    counter = 0
    for stc, stim in ss:
        assert stc.data.shape[1] == 6
        counter += 1
    assert counter == 1

    # Check validity of setting duration and start/stop parameters.
    half_ss = SourceSimulator(src, tstep, duration=0.5)
    for i in range(3):
        half_ss.add_data(mylabels[i], wfs[i], events[i])
    half_stc = half_ss.get_stc()
    assert_array_almost_equal(stc.data[:, :3], half_stc.data)

    part_stc = ss.get_stc(start_sample=1, stop_sample=4)
    assert part_stc.shape == (24, 4)
    assert part_stc.times[0] == tstep

    # Check validity of other arguments.
    with pytest.raises(ValueError, match='start_sample must be'):
        ss.get_stc(2, 0)

    ss = SourceSimulator(src)
    with pytest.raises(ValueError, match='No simulation parameters'):
        ss.get_stc()
    with pytest.raises(ValueError, match='label must be a Label'):
        ss.add_data(1, wfs, events)
    with pytest.raises(ValueError, match='Number of waveforms and events '
                       'should match'):
        ss.add_data(mylabels[0], wfs[:2], events)
    with pytest.raises(ValueError, match='duration must be None or'):
        ss = SourceSimulator(src, tstep, tstep / 2)

    # Verify first_samp functionality.
    ss = SourceSimulator(src, tstep)
    offset = 50
    for i in range(3):
        # events are offset, but first_samp = 0
        events[i][:, 0] += offset
        ss.add_data(mylabels[i], wfs[i], events[i])
    offset_stc = ss.get_stc()
    assert ss.n_times == 56
    assert ss.first_samp == 0
    assert offset_stc.data.shape == (stc.data.shape[0],
                                     stc.data.shape[1] + offset)

    ss = SourceSimulator(src, tstep, first_samp=offset)
    for i in range(3):
        # events still offset, but first_samp > 0
        ss.add_data(mylabels[i], wfs[i], events[i])
    offset_stc = ss.get_stc()
    assert ss.n_times == 6
    assert ss.first_samp == offset
    assert ss.last_samp == offset + 5
    assert offset_stc.data.shape == stc.data.shape

    # Verify that the chunks have the correct length.
    source_simulator = SourceSimulator(src, tstep=tstep, duration=10 * tstep)
    source_simulator.add_data(mylabels[0], np.array([1, 1, 1]), [[0, 0, 0]])

    source_simulator._chk_duration = 6  # Quick hack to get short chunks.
    stcs = [stc for stc, _ in source_simulator]
    assert len(stcs) == 2
    assert stcs[0].data.shape[1] == 6
    assert stcs[1].data.shape[1] == 4
def test_simulate_raw_bem(raw_data):
    """Test simulation of raw data with BEM."""
    raw, src_ss, stc, trans, sphere = raw_data
    src = setup_source_space('sample', 'oct1', subjects_dir=subjects_dir)
    for s in src:
        s['nuse'] = 3
        s['vertno'] = src[1]['vertno'][:3]
        s['inuse'].fill(0)
        s['inuse'][s['vertno']] = 1
    # use different / more complete STC here
    vertices = [s['vertno'] for s in src]
    stc = SourceEstimate(np.eye(sum(len(v) for v in vertices)), vertices,
                         0, 1. / raw.info['sfreq'])
    stcs = [stc] * 15
    raw_sim_sph = simulate_raw(raw.info, stcs, trans, src, sphere)
    raw_sim_bem = simulate_raw(raw.info, stcs, trans, src, bem_fname)
    # some components (especially radial) might not match that well,
    # so just make sure that most components have high correlation
    assert_array_equal(raw_sim_sph.ch_names, raw_sim_bem.ch_names)
    picks = pick_types(raw.info, meg=True, eeg=True)
    n_ch = len(picks)
    corr = np.corrcoef(raw_sim_sph[picks][0], raw_sim_bem[picks][0])
    assert_array_equal(corr.shape, (2 * n_ch, 2 * n_ch))
    med_corr = np.median(np.diag(corr[:n_ch, -n_ch:]))
    assert med_corr > 0.65
    # do some round-trip localization
    for s in src:
        transform_surface_to(s, 'head', trans)
    locs = np.concatenate([s['rr'][s['vertno']] for s in src])
    tmax = (len(locs) - 1) / raw.info['sfreq']
    cov = make_ad_hoc_cov(raw.info)
    # The tolerance for the BEM is surprisingly high (28) but I get the same
    # result when using MNE-C and Xfit, even when using a proper 5120 BEM :(
    for use_raw, bem, tol in ((raw_sim_sph, sphere, 2),
                              (raw_sim_bem, bem_fname, 31)):
        events = find_events(use_raw, 'STI 014')
        assert len(locs) == 6
        evoked = Epochs(use_raw, events, 1, 0, tmax, baseline=None).average()
        assert len(evoked.times) == len(locs)
        fits = fit_dipole(evoked, cov, bem, trans, min_dist=1.)[0].pos
        diffs = np.sqrt(np.sum((locs - fits) ** 2, axis=-1)) * 1000
        med_diff = np.median(diffs)
        assert med_diff < tol, '%s: %s' % (bem, med_diff)

    # also test event timings with SourceSimulator
    first_samp = raw.first_samp
    events = find_events(raw, initial_event=True, verbose=False)
    evt_times = events[:, 0]
    assert len(events) == 3
    labels_sim = [[], [], []]  # random l+r hemisphere points
    labels_sim[0] = Label([src_ss[0]['vertno'][1]], hemi='lh')
    labels_sim[1] = Label([src_ss[0]['vertno'][4]], hemi='lh')
    labels_sim[2] = Label([src_ss[1]['vertno'][2]], hemi='rh')
    wf_sim = np.array([2, 1, 0])
    for this_fs in (0, first_samp):
        ss = SourceSimulator(src_ss, 1. / raw.info['sfreq'],
                             first_samp=this_fs)
        for i in range(3):
            ss.add_data(labels_sim[i], wf_sim, events[np.newaxis, i])
        assert ss.n_times == evt_times[-1] + len(wf_sim) - this_fs
    raw_sim = simulate_raw(raw.info, ss, src=src_ss, bem=bem_fname,
                           first_samp=first_samp)
    data = raw_sim.get_data()
    amp0 = data[:, evt_times - first_samp].max()
    amp1 = data[:, evt_times + 1 - first_samp].max()
    amp2 = data[:, evt_times + 2 - first_samp].max()
    assert_allclose(amp0 / amp1, wf_sim[0] / wf_sim[1], rtol=1e-5)
    assert amp2 == 0
    assert raw_sim.n_times == ss.n_times
def test_source_simulator():
    """Test Source Simulator."""
    fwd = read_forward_solution_meg(fname_fwd, force_fixed=True, use_cps=True)

    src = fwd['src']
    hemi_to_ind = {'lh': 0, 'rh': 1}
    tmin = 0
    tstep = 1. / 6.

    label_vertices = [[], [], []]
    label_vertices[0] = np.arange(1000)
    label_vertices[1] = np.arange(500, 1500)
    label_vertices[2] = np.arange(1000)

    hemis = ['lh', 'lh', 'rh']

    mylabels = []
    src_vertices = []
    for i, vert in enumerate(label_vertices):
        new_label = Label(vertices=vert, hemi=hemis[i])
        mylabels.append(new_label)
        src_vertices.append(np.intersect1d(
            src[hemi_to_ind[hemis[i]]]['vertno'],
            new_label.vertices))

    wfs = [[], [], []]
    wfs[0] = np.array([0, 1., 0])
    wfs[1] = [np.array([0, 1., 0]),
              np.array([0, 1.5, 0])]
    wfs[2] = np.array([1, 1, 1.])

    events = [[], [], []]
    events[0] = np.array([[0, 0, 1], [3, 0, 1]])
    events[1] = np.array([[0, 0, 1], [3, 0, 1]])
    events[2] = np.array([[0, 0, 1], [2, 0, 1]])

    verts_lh = np.intersect1d(range(1500), src[0]['vertno'])
    verts_rh = np.intersect1d(range(1000), src[1]['vertno'])
    diff_01 = len(np.setdiff1d(src_vertices[0], src_vertices[1]))
    diff_10 = len(np.setdiff1d(src_vertices[1], src_vertices[0]))
    inter_10 = len(np.intersect1d(src_vertices[1], src_vertices[0]))

    output_data_lh = np.zeros([len(verts_lh), 6])
    tmp = np.array([0, 1., 0, 0, 1, 0])
    output_data_lh[:diff_01, :] = np.tile(tmp, (diff_01, 1))

    tmp = np.array([0, 2, 0, 0, 2.5, 0])
    output_data_lh[diff_01:diff_01 + inter_10, :] = np.tile(tmp, (inter_10, 1))
    tmp = np.array([0, 1, 0, 0, 1.5, 0])
    output_data_lh[diff_01 + inter_10:, :] = np.tile(tmp, (diff_10, 1))

    data_rh_wf = np.array([1., 1, 2, 1, 1, 0])
    output_data_rh = np.tile(data_rh_wf, (len(src_vertices[2]), 1))
    output_data = np.vstack([output_data_lh, output_data_rh])

    ss = SourceSimulator(src, tmin, tstep)
    for i in range(3):
        ss.add_data(mylabels[i], wfs[i], events[i])

    stc = ss.get_stc()
    stim_channel = ss.get_stim_channel()

    # Stim channel data must have the same size as stc time samples
    assert (len(stim_channel) == stc.data.shape[1])

    stim_channel = ss.get_stim_channel(0., 0.)
    assert (len(stim_channel) == 0)

    assert (np.all(stc.vertices[0] == verts_lh))
    assert (np.all(stc.vertices[1] == verts_rh))
    assert_array_almost_equal(stc.lh_data, output_data_lh)
    assert_array_almost_equal(stc.rh_data, output_data_rh)
    assert_array_almost_equal(stc.data, output_data)

    counter = 0
    for stc, stim in ss:
        counter += 1
    assert counter == 1

    half_ss = SourceSimulator(src, tmin, tstep, duration=0.5)
    for i in range(3):
        half_ss.add_data(mylabels[i], wfs[i], events[i])
    half_stc = half_ss.get_stc()
    assert_array_almost_equal(stc.data[:, :3], half_stc.data)

    ss = SourceSimulator(src)
    with pytest.raises(ValueError, match='No simulation parameters'):
        ss.get_stc()
    with pytest.raises(ValueError, match='label must be a Label'):
        ss.add_data(1, wfs, events)
    with pytest.raises(ValueError, match='Number of waveforms and events '
                       'should match'):
        ss.add_data(mylabels[0], wfs[:2], events)