def test_add_patch_info():
    """Check that patch information gets attached to a source space."""
    # load the small source space twice: one reference, one to recompute
    src = read_source_spaces(fname_small)
    src_new = read_source_spaces(fname_small)
    # wipe any patch info the file already carried
    for ss in src_new:
        for key in ('nearest', 'nearest_dist', 'pinfo'):
            ss[key] = None
    # a tiny dist_limit must leave the patch fields untouched
    try:
        add_source_space_distances(src_new, dist_limit=0.00001)
    except RuntimeError:  # raised when the scipy version is too old
        pass
    else:
        assert_true(all(ss['nearest'] is None for ss in src_new))
        assert_true(all(ss['nearest_dist'] is None for ss in src_new))
        assert_true(all(ss['pinfo'] is None for ss in src_new))
    # a proper run should reproduce the patch info stored in the file
    add_source_space_distances(src_new)
    for s_ref, s_cmp in zip(src, src_new):
        assert_array_equal(s_ref['nearest'], s_cmp['nearest'])
        assert_allclose(s_ref['nearest_dist'], s_cmp['nearest_dist'],
                        atol=1e-7)
        assert_equal(len(s_ref['pinfo']), len(s_cmp['pinfo']))
        for p_ref, p_cmp in zip(s_ref['pinfo'], s_cmp['pinfo']):
            assert_array_equal(p_ref, p_cmp)
def test_add_patch_info(monkeypatch): """Test adding patch info to source space.""" # let's setup a small source space src = read_source_spaces(fname_small) src_new = read_source_spaces(fname_small) for s in src_new: s['nearest'] = None s['nearest_dist'] = None s['pinfo'] = None # test that no patch info is added for small dist_limit add_source_space_distances(src_new, dist_limit=0.00001) assert all(s['nearest'] is None for s in src_new) assert all(s['nearest_dist'] is None for s in src_new) assert all(s['pinfo'] is None for s in src_new) # now let's use one that works (and test our warning-throwing) monkeypatch.setattr(mne.source_space, '_DIST_WARN_LIMIT', 1) with pytest.warns(RuntimeWarning, match='Computing distances for 258'): add_source_space_distances(src_new) for s1, s2 in zip(src, src_new): assert_array_equal(s1['nearest'], s2['nearest']) assert_allclose(s1['nearest_dist'], s2['nearest_dist'], atol=1e-7) assert_equal(len(s1['pinfo']), len(s2['pinfo'])) for p1, p2 in zip(s1['pinfo'], s2['pinfo']): assert_array_equal(p1, p2)
def test_add_source_space_distances_limited():
    """Test adding distances to source space with a dist_limit."""
    tempdir = _TempDir()
    src = read_source_spaces(fname)
    src_new = read_source_spaces(fname)
    # drop the stored distances; they get recomputed below
    del src_new[0]['dist']
    del src_new[1]['dist']
    n_do = 200  # limit this for speed
    src_new[0]['vertno'] = src_new[0]['vertno'][:n_do].copy()
    src_new[1]['vertno'] = src_new[1]['vertno'][:n_do].copy()
    out_name = op.join(tempdir, 'temp-src.fif')
    try:
        add_source_space_distances(src_new, dist_limit=0.007)
    except RuntimeError:  # what we throw when scipy version is wrong
        raise SkipTest('dist_limit requires scipy > 0.13')
    # round-trip through disk to also exercise the I/O path
    write_source_spaces(out_name, src_new)
    src_new = read_source_spaces(out_name)
    for so, sn in zip(src, src_new):
        # the stored file carries -0.007; the freshly computed one +0.007
        # (sign appears to mark how the limit was applied -- TODO confirm)
        assert_array_equal(so['dist_limit'], np.array([-0.007], np.float32))
        assert_array_equal(sn['dist_limit'], np.array([0.007], np.float32))
        do = so['dist']
        dn = sn['dist']
        # clean out distances > 0.007 in C code
        do.data[do.data > 0.007] = 0
        do.eliminate_zeros()
        # make sure we have some comparable distances
        assert_true(np.sum(do.data < 0.007) > 400)
        # do comparison over the region computed
        d = (do - dn)[:sn['vertno'][n_do - 1]][:, :sn['vertno'][n_do - 1]]
        assert_allclose(np.zeros_like(d.data), d.data, rtol=0, atol=1e-6)
def test_add_source_space_distances_limited(tmpdir):
    """Test adding distances to source space with a dist_limit."""
    tempdir = str(tmpdir)
    src = read_source_spaces(fname)
    src_new = read_source_spaces(fname)
    n_do = 200  # limit this for speed
    # drop stored distances and restrict the vertex count per hemisphere
    for hemi_idx in (0, 1):
        del src_new[hemi_idx]['dist']
        src_new[hemi_idx]['vertno'] = \
            src_new[hemi_idx]['vertno'][:n_do].copy()
    out_name = op.join(tempdir, 'temp-src.fif')
    add_source_space_distances(src_new, dist_limit=0.007)
    # round-trip through disk before comparing
    write_source_spaces(out_name, src_new)
    src_new = read_source_spaces(out_name)
    for s_ref, s_cut in zip(src, src_new):
        assert_array_equal(s_ref['dist_limit'],
                           np.array([-0.007], np.float32))
        assert_array_equal(s_cut['dist_limit'],
                           np.array([0.007], np.float32))
        dist_ref, dist_cut = s_ref['dist'], s_cut['dist']
        # emulate the limited run by zeroing long reference distances
        dist_ref.data[dist_ref.data > 0.007] = 0
        dist_ref.eliminate_zeros()
        # at least some distances must survive for the comparison below
        assert np.sum(dist_ref.data < 0.007) > 400
        # compare only within the region that was actually computed
        stop = s_cut['vertno'][n_do - 1]
        diff = (dist_ref - dist_cut)[:stop][:, :stop]
        assert_allclose(np.zeros_like(diff.data), diff.data, rtol=0,
                        atol=1e-6)
def test_add_patch_info():
    """Test adding patch info to source space."""
    # reference copy keeps the stored patch info; the other is recomputed
    src = read_source_spaces(fname_small)
    src_new = read_source_spaces(fname_small)
    patch_keys = ('nearest', 'nearest_dist', 'pinfo')
    for ss in src_new:
        for key in patch_keys:
            ss[key] = None
    # a tiny dist_limit should not produce any patch info
    try:
        add_source_space_distances(src_new, dist_limit=0.00001)
    except RuntimeError:  # raised when the scipy version is too old
        pass
    else:
        assert all(ss['nearest'] is None for ss in src_new)
        assert all(ss['nearest_dist'] is None for ss in src_new)
        assert all(ss['pinfo'] is None for ss in src_new)
    # a full run must reproduce the patch info stored in the file
    add_source_space_distances(src_new)
    for s_ref, s_cmp in zip(src, src_new):
        assert_array_equal(s_ref['nearest'], s_cmp['nearest'])
        assert_allclose(s_ref['nearest_dist'], s_cmp['nearest_dist'],
                        atol=1e-7)
        assert_equal(len(s_ref['pinfo']), len(s_cmp['pinfo']))
        for p_ref, p_cmp in zip(s_ref['pinfo'], s_cmp['pinfo']):
            assert_array_equal(p_ref, p_cmp)
def test_add_source_space_distances_limited():
    """Test adding distances to source space with a dist_limit."""
    tempdir = _TempDir()
    src = read_source_spaces(fname)
    src_new = read_source_spaces(fname)
    # drop the stored distances; they get recomputed below
    del src_new[0]['dist']
    del src_new[1]['dist']
    n_do = 200  # limit this for speed
    src_new[0]['vertno'] = src_new[0]['vertno'][:n_do].copy()
    src_new[1]['vertno'] = src_new[1]['vertno'][:n_do].copy()
    out_name = op.join(tempdir, 'temp-src.fif')
    try:
        add_source_space_distances(src_new, dist_limit=0.007)
    except RuntimeError:  # what we throw when scipy version is wrong
        raise SkipTest('dist_limit requires scipy > 0.13')
    # round-trip through disk to also exercise the I/O path
    write_source_spaces(out_name, src_new)
    src_new = read_source_spaces(out_name)
    for so, sn in zip(src, src_new):
        # stored file carries -0.007; the freshly computed one +0.007
        assert_array_equal(so['dist_limit'], np.array([-0.007], np.float32))
        assert_array_equal(sn['dist_limit'], np.array([0.007], np.float32))
        do = so['dist']
        dn = sn['dist']
        # clean out distances > 0.007 in C code
        do.data[do.data > 0.007] = 0
        do.eliminate_zeros()
        # make sure we have some comparable distances
        assert np.sum(do.data < 0.007) > 400
        # do comparison over the region computed
        d = (do - dn)[:sn['vertno'][n_do - 1]][:, :sn['vertno'][n_do - 1]]
        assert_allclose(np.zeros_like(d.data), d.data, rtol=0, atol=1e-6)
def test_scale_mri(): """Test creating fsaverage and scaling it""" # create fsaverage tempdir = _TempDir() create_default_subject(subjects_dir=tempdir) is_mri = _is_mri_subject('fsaverage', tempdir) assert_true(is_mri, "Creating fsaverage failed") fid_path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-fiducials.fif') os.remove(fid_path) create_default_subject(update=True, subjects_dir=tempdir) assert_true(os.path.exists(fid_path), "Updating fsaverage") # remove redundant label files label_temp = os.path.join(tempdir, 'fsaverage', 'label', '*.label') label_paths = glob(label_temp) for label_path in label_paths[1:]: os.remove(label_path) # create source space path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-ico-0-src.fif') src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir, add_dist=False) src_path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-ico-0-src.fif') write_source_spaces(src_path, src) # scale fsaverage os.environ['_MNE_FEW_SURFACES'] = 'true' scale_mri('fsaverage', 'flachkopf', [1, .2, .8], True, subjects_dir=tempdir) del os.environ['_MNE_FEW_SURFACES'] is_mri = _is_mri_subject('flachkopf', tempdir) assert_true(is_mri, "Scaling fsaverage failed") src_path = os.path.join(tempdir, 'flachkopf', 'bem', 'flachkopf-ico-0-src.fif') assert_true(os.path.exists(src_path), "Source space was not scaled") scale_labels('flachkopf', subjects_dir=tempdir) # scale source space separately os.remove(src_path) scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir) assert_true(os.path.exists(src_path), "Source space was not scaled") # add distances to source space src = mne.read_source_spaces(path) mne.add_source_space_distances(src) src.save(path, overwrite=True) # scale with distances os.remove(src_path) scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir) assert_true(os.path.exists(src_path), "Source space was not scaled")
def test_scale_mri(): """Test creating fsaverage and scaling it""" # create fsaverage tempdir = _TempDir() create_default_subject(subjects_dir=tempdir) is_mri = _is_mri_subject('fsaverage', tempdir) assert_true(is_mri, "Creating fsaverage failed") fid_path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-fiducials.fif') os.remove(fid_path) create_default_subject(update=True, subjects_dir=tempdir) assert_true(os.path.exists(fid_path), "Updating fsaverage") # remove redundant label files label_temp = os.path.join(tempdir, 'fsaverage', 'label', '*.label') label_paths = glob(label_temp) for label_path in label_paths[1:]: os.remove(label_path) # create source space path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-ico-0-src.fif') mne.setup_source_space('fsaverage', path, 'ico0', overwrite=True, subjects_dir=tempdir, add_dist=False) # scale fsaverage os.environ['_MNE_FEW_SURFACES'] = 'true' scale_mri('fsaverage', 'flachkopf', [1, .2, .8], True, subjects_dir=tempdir) del os.environ['_MNE_FEW_SURFACES'] is_mri = _is_mri_subject('flachkopf', tempdir) assert_true(is_mri, "Scaling fsaverage failed") src_path = os.path.join(tempdir, 'flachkopf', 'bem', 'flachkopf-ico-0-src.fif') assert_true(os.path.exists(src_path), "Source space was not scaled") scale_labels('flachkopf', subjects_dir=tempdir) # scale source space separately os.remove(src_path) scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir) assert_true(os.path.exists(src_path), "Source space was not scaled") # add distances to source space src = mne.read_source_spaces(path) mne.add_source_space_distances(src) src.save(path) # scale with distances os.remove(src_path) scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
def test_add_patch_info(monkeypatch): """Test adding patch info to source space.""" # let's setup a small source space src = _read_small_src(remove=False) src_new = _read_small_src() # test that no patch info is added for small dist_limit add_source_space_distances(src_new, dist_limit=0.00001) assert all(s['nearest'] is None for s in src_new) assert all(s['nearest_dist'] is None for s in src_new) assert all(s['pinfo'] is None for s in src_new) # now let's use one that works (and test our warning-throwing) with monkeypatch.context() as m: m.setattr(mne.source_space, '_DIST_WARN_LIMIT', 1) with pytest.warns(RuntimeWarning, match='Computing distances for 258'): add_source_space_distances(src_new) _compare_source_spaces(src, src_new, 'approx') # Old SciPy can't do patch info only src_new = _read_small_src() with monkeypatch.context() as m: m.setattr(scipy, '__version__', '1.0') with pytest.raises(RuntimeError, match='required to calculate patch '): add_source_space_distances(src_new, dist_limit=0) # New SciPy can if check_version('scipy', '1.3'): src_nodist = src.copy() for s in src_nodist: for key in ('dist', 'dist_limit'): s[key] = None add_source_space_distances(src_new, dist_limit=0) _compare_source_spaces(src, src_new, 'approx')
def _get_distance_matrix(src, dist_lim, n_jobs=1): """Get vertex-to-vertex distance matrix from source space. During inverse computation, the source space was downsampled (i.e. using ico4). Construct vertex-to-vertex distance matrices using only the vertices that are defined in the source solution. Parameters ---------- src : mne.SourceSpaces The source space to get the distance matrix for. dist_lim : float Maximum distance required. We don't care about distances beyond this maximum. n_jobs : int Number of CPU cores to use if distance computation is necessary. Defaults to 1. Returns ------- dist : ndarray (n_vertices, n_vertices) The vertex-to-vertex distance matrix. """ dist = [] # Check if distances have been pre-computed in the given source space. Give # a warning if the pre-computed distances may have had a too limited # dist_lim setting. needs_distance_computation = False for hemi in src: if 'dist' not in hemi or hemi['dist'] is None: needs_distance_computation = True else: if hemi['dist_limit'][0] < dist_lim: warn(f'Source space has pre-computed distances, but all ' f'distances are smaller than the searchlight radius ' f'({dist_lim}). You may want to consider recomputing ' f'the source space distances using the ' f'mne.add_source_space_distances function.') if needs_distance_computation: if dist_lim is None: dist_lim = np.inf if src.kind == 'volume': src = _add_volume_source_space_distances(src, dist_lim) else: src = mne.add_source_space_distances(src, dist_lim, n_jobs=n_jobs) for hemi in src: inuse = np.flatnonzero(hemi['inuse']) dist.append(hemi['dist'][np.ix_(inuse, inuse)].toarray()) # Collect the distances in a single matrix dist = block_diag(*dist) dist[dist == 0] = np.inf # Across hemisphere distance is infinity dist.flat[::dist.shape[0] + 1] = 0 # Distance to yourself is zero return dist
def test_scale_mri(): """Test creating fsaverage and scaling it""" # create fsaverage tempdir = _TempDir() create_default_subject(subjects_dir=tempdir) is_mri = _is_mri_subject("fsaverage", tempdir) assert_true(is_mri, "Creating fsaverage failed") fid_path = os.path.join(tempdir, "fsaverage", "bem", "fsaverage-fiducials.fif") os.remove(fid_path) create_default_subject(update=True, subjects_dir=tempdir) assert_true(os.path.exists(fid_path), "Updating fsaverage") # remove redundant label files label_temp = os.path.join(tempdir, "fsaverage", "label", "*.label") label_paths = glob(label_temp) for label_path in label_paths[1:]: os.remove(label_path) # create source space path = os.path.join(tempdir, "fsaverage", "bem", "fsaverage-ico-0-src.fif") mne.setup_source_space("fsaverage", path, "ico0", overwrite=True, subjects_dir=tempdir, add_dist=False) # scale fsaverage os.environ["_MNE_FEW_SURFACES"] = "true" scale_mri("fsaverage", "flachkopf", [1, 0.2, 0.8], True, subjects_dir=tempdir) del os.environ["_MNE_FEW_SURFACES"] is_mri = _is_mri_subject("flachkopf", tempdir) assert_true(is_mri, "Scaling fsaverage failed") src_path = os.path.join(tempdir, "flachkopf", "bem", "flachkopf-ico-0-src.fif") assert_true(os.path.exists(src_path), "Source space was not scaled") scale_labels("flachkopf", subjects_dir=tempdir) # scale source space separately os.remove(src_path) scale_source_space("flachkopf", "ico-0", subjects_dir=tempdir) assert_true(os.path.exists(src_path), "Source space was not scaled") # add distances to source space src = mne.read_source_spaces(path) mne.add_source_space_distances(src) src.save(path) # scale with distances os.remove(src_path) scale_source_space("flachkopf", "ico-0", subjects_dir=tempdir)
def test_add_source_space_distances(tmpdir):
    """Test adding distances to source space."""
    src = read_source_spaces(fname)
    src_new = read_source_spaces(fname)
    # drop the stored distances; they get recomputed below
    del src_new[0]['dist']
    del src_new[1]['dist']
    n_do = 19  # limit this for speed
    src_new[0]['vertno'] = src_new[0]['vertno'][:n_do].copy()
    src_new[1]['vertno'] = src_new[1]['vertno'][:n_do].copy()
    out_name = tmpdir.join('temp-src.fif')
    n_jobs = 2
    # uneven split across jobs exercises the parallel chunking path
    assert n_do % n_jobs != 0
    # a negative dist_limit must be rejected
    with pytest.raises(ValueError, match='non-negative'):
        add_source_space_distances(src_new, dist_limit=-1)
    add_source_space_distances(src_new, n_jobs=n_jobs)
    # round-trip through disk to also exercise the I/O path
    write_source_spaces(out_name, src_new)
    src_new = read_source_spaces(out_name)
    # iterate over both hemispheres
    for so, sn in zip(src, src_new):
        v = so['vertno'][:n_do]
        assert_array_equal(so['dist_limit'], np.array([-0.007], np.float32))
        assert_array_equal(sn['dist_limit'], np.array([np.inf], np.float32))
        do = so['dist']
        dn = sn['dist']
        # clean out distances > 0.007 in C code (some residual), and Python
        ds = list()
        for d in [do, dn]:
            d.data[d.data > 0.007] = 0
            d = d[v][:, v]
            d.eliminate_zeros()
            ds.append(d)
        # make sure we actually calculated some comparable distances
        assert np.sum(ds[0].data < 0.007) > 10
        # do comparison
        d = ds[0] - ds[1]
        assert_allclose(np.zeros_like(d.data), d.data, rtol=0, atol=1e-9)
def test_add_source_space_distances():
    """Test adding distances to source space."""
    tempdir = _TempDir()
    src = read_source_spaces(fname)
    src_new = read_source_spaces(fname)
    n_do = 19  # limit this for speed
    # strip the stored distances and shrink each hemisphere
    for hemi in (0, 1):
        del src_new[hemi]['dist']
        src_new[hemi]['vertno'] = src_new[hemi]['vertno'][:n_do].copy()
    out_name = op.join(tempdir, 'temp-src.fif')
    n_jobs = 2
    assert n_do % n_jobs != 0  # uneven split exercises parallel chunking
    add_source_space_distances(src_new, n_jobs=n_jobs)
    # round-trip through disk before comparing
    write_source_spaces(out_name, src_new)
    src_new = read_source_spaces(out_name)
    # iterate over both hemispheres
    for s_ref, s_cmp in zip(src, src_new):
        verts = s_ref['vertno'][:n_do]
        assert_array_equal(s_ref['dist_limit'],
                           np.array([-0.007], np.float32))
        assert_array_equal(s_cmp['dist_limit'],
                           np.array([np.inf], np.float32))
        # clean out distances > 0.007 in C code (some residual), and Python
        trimmed = []
        for mat in (s_ref['dist'], s_cmp['dist']):
            mat.data[mat.data > 0.007] = 0
            sub = mat[verts][:, verts]
            sub.eliminate_zeros()
            trimmed.append(sub)
        # make sure we actually calculated some comparable distances
        assert np.sum(trimmed[0].data < 0.007) > 10
        # do comparison
        diff = trimmed[0] - trimmed[1]
        assert_allclose(np.zeros_like(diff.data), diff.data, rtol=0,
                        atol=1e-9)
def test_add_source_space_distances():
    """Test adding distances to source space"""
    tempdir = _TempDir()
    src = read_source_spaces(fname)
    src_new = read_source_spaces(fname)
    # drop the stored distances; they get recomputed below
    del src_new[0]['dist']
    del src_new[1]['dist']
    n_do = 20  # limit this for speed
    src_new[0]['vertno'] = src_new[0]['vertno'][:n_do].copy()
    src_new[1]['vertno'] = src_new[1]['vertno'][:n_do].copy()
    out_name = op.join(tempdir, 'temp-src.fif')
    add_source_space_distances(src_new)
    # round-trip through disk to also exercise the I/O path
    write_source_spaces(out_name, src_new)
    src_new = read_source_spaces(out_name)
    # iterate over both hemispheres
    for so, sn in zip(src, src_new):
        v = so['vertno'][:n_do]
        assert_array_equal(so['dist_limit'], np.array([-0.007], np.float32))
        assert_array_equal(sn['dist_limit'], np.array([np.inf], np.float32))
        do = so['dist']
        dn = sn['dist']
        # clean out distances > 0.007 in C code (some residual), and Python
        ds = list()
        for d in [do, dn]:
            d.data[d.data > 0.007] = 0
            d = d[v][:, v]
            d.eliminate_zeros()
            ds.append(d)
        # make sure we actually calculated some comparable distances
        assert_true(np.sum(ds[0].data < 0.007) > 10)
        # do comparison
        d = ds[0] - ds[1]
        assert_allclose(np.zeros_like(d.data), d.data, rtol=0, atol=1e-9)
def test_scale_mri():
    """Test creating fsaverage and scaling it."""
    # create fsaverage using the testing "fsaverage" instead of the FreeSurfer
    # one
    tempdir = _TempDir()
    fake_home = testing.data_path()
    create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
                           verbose=True)
    assert _is_mri_subject('fsaverage', tempdir), "Creating fsaverage failed"
    # removing the fiducials file and updating should restore it
    fid_path = op.join(tempdir, 'fsaverage', 'bem',
                       'fsaverage-fiducials.fif')
    os.remove(fid_path)
    create_default_subject(update=True, subjects_dir=tempdir,
                           fs_home=fake_home)
    assert op.exists(fid_path), "Updating fsaverage"
    # copy MRI file from sample data (shouldn't matter that it's incorrect,
    # so here choose a small one)
    path_from = op.join(testing.data_path(), 'subjects', 'sample', 'mri',
                        'T1.mgz')
    path_to = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    copyfile(path_from, path_to)
    # remove redundant label files
    label_temp = op.join(tempdir, 'fsaverage', 'label', '*.label')
    label_paths = glob(label_temp)
    for label_path in label_paths[1:]:
        os.remove(label_path)
    # create source space
    print('Creating surface source space')
    path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
    src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
                                 add_dist=False)
    mri = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    print('Creating volume source space')
    vsrc = mne.setup_volume_source_space(
        'fsaverage', pos=50, mri=mri, subjects_dir=tempdir,
        add_interpolator=False)
    write_source_spaces(path % 'vol-50', vsrc)
    # scale fsaverage: uniform and per-axis scale factors
    for scale in (.9, [1, .2, .8]):
        write_source_spaces(path % 'ico-0', src, overwrite=True)
        os.environ['_MNE_FEW_SURFACES'] = 'true'
        with pytest.warns(None):  # sometimes missing nibabel
            scale_mri('fsaverage', 'flachkopf', scale, True,
                      subjects_dir=tempdir, verbose='debug')
        del os.environ['_MNE_FEW_SURFACES']
        assert _is_mri_subject('flachkopf', tempdir), "Scaling failed"
        spath = op.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')
        assert op.exists(spath % 'ico-0'), "Source space ico-0 was not scaled"
        assert os.path.isfile(os.path.join(tempdir, 'flachkopf', 'surf',
                                           'lh.sphere.reg'))
        # the scaled volume source space transform must match scaling a point
        vsrc_s = mne.read_source_spaces(spath % 'vol-50')
        pt = np.array([0.12, 0.41, -0.22])
        assert_array_almost_equal(
            apply_trans(vsrc_s[0]['src_mri_t'], pt * np.array(scale)),
            apply_trans(vsrc[0]['src_mri_t'], pt))
        scale_labels('flachkopf', subjects_dir=tempdir)
        # add distances to source space after hacking the properties to make
        # it run *much* faster
        src_dist = src.copy()
        for s in src_dist:
            s.update(rr=s['rr'][s['vertno']], nn=s['nn'][s['vertno']],
                     tris=s['use_tris'])
            s.update(np=len(s['rr']), ntri=len(s['tris']),
                     vertno=np.arange(len(s['rr'])),
                     inuse=np.ones(len(s['rr']), int))
        mne.add_source_space_distances(src_dist)
        write_source_spaces(path % 'ico-0', src_dist, overwrite=True)
        # scale with distances
        os.remove(spath % 'ico-0')
        scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
        ssrc = mne.read_source_spaces(spath % 'ico-0')
        assert ssrc[0]['dist'] is not None
def make_mne_forward(anatomy_path, subject, recordings_path,
                     info_from=(('data_type', 'rest'), ('run_index', 0)),
                     fwd_params=None, src_params=None, hcp_path=op.curdir,
                     n_jobs=1):
    """Convenience script for conducting standard MNE analyses.

    Parameters
    ----------
    anatomy_path : str
        The directory containing the extracted HCP subject data.
    subject : str
        The subject name.
    recordings_path : str
        The path where MEG data and transformations are stored.
    info_from : tuple of tuples | dict
        The reader info concerning the data from which sensor positions
        should be read. Must not be empty room as sensor positions are in
        head coordinates for 4D systems, hence not available in that case.
        Note that differences between the sensor positions across runs
        are smaller than 12 digits, hence negligible.
    fwd_params : None | dict
        The forward parameters
    src_params : None | dict
        The src params. Defaults to:

        dict(subject='fsaverage', fname=None, spacing='oct6', n_jobs=2,
             surface='white', subjects_dir=anatomy_path, add_dist=True)
    hcp_path : str
        The prefix of the path of the HCP data.
    n_jobs : int
        The number of jobs to use in parallel.

    Returns
    -------
    dict
        Keys: ``fwd``, ``src_subject``, ``src_fsaverage``, ``bem_sol``,
        ``info`` (the objects built along the way).
    """
    if isinstance(info_from, tuple):
        info_from = dict(info_from)

    head_mri_t = mne.read_trans(
        op.join(recordings_path, subject, '{}-head_mri-trans.fif'.format(
            subject)))

    # merge user-supplied src parameters over the defaults
    src_params = _update_dict_defaults(
        src_params,
        dict(subject='fsaverage', fname=None, spacing='oct6', n_jobs=n_jobs,
             surface='white', subjects_dir=anatomy_path, add_dist=True))

    # defer distance computation until after morphing: we want the
    # distances on the morphed (subject) space, not on fsaverage
    add_source_space_distances = False
    if src_params['add_dist']:  # we want the distances on the morphed space
        src_params['add_dist'] = False
        add_source_space_distances = True
    src_fsaverage = mne.setup_source_space(**src_params)
    src_subject = mne.morph_source_spaces(
        src_fsaverage, subject, subjects_dir=anatomy_path)

    if add_source_space_distances:  # and here we compute them post hoc.
        src_subject = mne.add_source_space_distances(
            src_subject, n_jobs=n_jobs)

    bems = mne.make_bem_model(subject, conductivity=(0.3,),
                              subjects_dir=anatomy_path,
                              ico=None)  # ico = None for morphed SP.
    bem_sol = mne.make_bem_solution(bems)

    info = read_info_hcp(subject=subject, hcp_path=hcp_path, **info_from)
    picks = _pick_data_channels(info, with_ref_meg=False)
    info = pick_info(info, picks)

    # here we assume that as a result of our MNE-HCP processing
    # all other transforms in info are identity
    for trans in ['dev_head_t', 'ctf_head_t']:
        #  'dev_ctf_t' is not identity
        assert np.sum(info[trans]['trans'] - np.eye(4)) == 0

    fwd = mne.make_forward_solution(
        info, trans=head_mri_t, bem=bem_sol, src=src_subject,
        n_jobs=n_jobs)
    return dict(fwd=fwd, src_subject=src_subject,
                src_fsaverage=src_fsaverage,
                bem_sol=bem_sol, info=info)
def test_scale_mri(tmp_path, few_surfaces, scale):
    """Test creating fsaverage and scaling it."""
    # create fsaverage using the testing "fsaverage" instead of the FreeSurfer
    # one
    tempdir = str(tmp_path)
    fake_home = testing.data_path()
    create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
                           verbose=True)
    assert _is_mri_subject('fsaverage', tempdir), "Creating fsaverage failed"
    # removing the fiducials file and updating should restore it
    fid_path = op.join(tempdir, 'fsaverage', 'bem',
                       'fsaverage-fiducials.fif')
    os.remove(fid_path)
    create_default_subject(update=True, subjects_dir=tempdir,
                           fs_home=fake_home)
    assert op.exists(fid_path), "Updating fsaverage"
    # copy MRI file from sample data (shouldn't matter that it's incorrect,
    # so here choose a small one)
    path_from = op.join(testing.data_path(), 'subjects', 'sample', 'mri',
                        'T1.mgz')
    path_to = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    copyfile(path_from, path_to)
    # remove redundant label files
    label_temp = op.join(tempdir, 'fsaverage', 'label', '*.label')
    label_paths = glob(label_temp)
    for label_path in label_paths[1:]:
        os.remove(label_path)
    # create source space
    print('Creating surface source space')
    path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
    src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
                                 add_dist=False)
    mri = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    print('Creating volume source space')
    vsrc = mne.setup_volume_source_space('fsaverage', pos=50, mri=mri,
                                         subjects_dir=tempdir,
                                         add_interpolator=False)
    write_source_spaces(path % 'vol-50', vsrc)
    # scale fsaverage
    write_source_spaces(path % 'ico-0', src, overwrite=True)
    with pytest.warns(None):  # sometimes missing nibabel
        scale_mri('fsaverage', 'flachkopf', scale, True,
                  subjects_dir=tempdir, verbose='debug')
    assert _is_mri_subject('flachkopf', tempdir), "Scaling failed"
    spath = op.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')
    assert op.exists(spath % 'ico-0'), "Source space ico-0 was not scaled"
    assert os.path.isfile(os.path.join(tempdir, 'flachkopf', 'surf',
                                       'lh.sphere.reg'))
    vsrc_s = mne.read_source_spaces(spath % 'vol-50')
    # check that voxel -> MRI transforms are consistent for both the
    # original and the scaled volume source space at several voxels
    for vox in ([0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 2, 3]):
        idx = np.ravel_multi_index(vox, vsrc[0]['shape'], order='F')
        err_msg = f'idx={idx} @ {vox}, scale={scale}'
        assert_allclose(apply_trans(vsrc[0]['src_mri_t'], vox),
                        vsrc[0]['rr'][idx], err_msg=err_msg)
        assert_allclose(apply_trans(vsrc_s[0]['src_mri_t'], vox),
                        vsrc_s[0]['rr'][idx], err_msg=err_msg)
    scale_labels('flachkopf', subjects_dir=tempdir)
    # add distances to source space after hacking the properties to make
    # it run *much* faster
    src_dist = src.copy()
    for s in src_dist:
        s.update(rr=s['rr'][s['vertno']], nn=s['nn'][s['vertno']],
                 tris=s['use_tris'])
        s.update(np=len(s['rr']), ntri=len(s['tris']),
                 vertno=np.arange(len(s['rr'])),
                 inuse=np.ones(len(s['rr']), int))
    mne.add_source_space_distances(src_dist)
    write_source_spaces(path % 'ico-0', src_dist, overwrite=True)
    # scale with distances
    os.remove(spath % 'ico-0')
    scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
    ssrc = mne.read_source_spaces(spath % 'ico-0')
    assert ssrc[0]['dist'] is not None
    assert ssrc[0]['nearest'] is not None
    # check patch info computation (only if SciPy is new enough to be fast)
    if check_version('scipy', '1.3'):
        for s in src_dist:
            for key in ('dist', 'dist_limit'):
                s[key] = None
        write_source_spaces(path % 'ico-0', src_dist, overwrite=True)
        # scale with distances
        os.remove(spath % 'ico-0')
        scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
        ssrc = mne.read_source_spaces(spath % 'ico-0')
        assert ssrc[0]['dist'] is None
        assert ssrc[0]['nearest'] is not None
# Batch script: build a source space (with full distance information) for
# each NLR subject and write it next to the subject's BEM files.
# NOTE(review): `fs_dir` (the FreeSurfer subjects dir) is defined elsewhere
# in this file -- confirm it is set before this block runs.
raw_dir = '/mnt/scratch/NLR_MEG4'
os.chdir(raw_dir)

subs = ['NLR_102_RS','NLR_103_AC','NLR_105_BB','NLR_110_HH','NLR_127_AM',
        'NLR_130_RW','NLR_132_WP','NLR_133_ML','NLR_145_AC','NLR_150_MG','NLR_151_RD',
        'NLR_152_TC','NLR_160_EK','NLR_161_AK','NLR_162_EF','NLR_163_LF','NLR_164_SF',
        'NLR_170_GM','NLR_172_TH','NLR_174_HS','NLR_179_GM','NLR_180_ZD','NLR_187_NB',
        'NLR_201_GS','NLR_202_DD','NLR_203_AM','NLR_204_AM','NLR_205_AC','NLR_206_LM',
        'NLR_207_AH','NLR_210_SB','NLR_211_LB'
        ]

for n, s in enumerate(subs):
    subject = s

    # Create source space
    os.chdir(os.path.join(fs_dir,subject,'bem'))

    """ NLR_205: Head is too small to create ico5 """
    # fall back to a coarser spacing for subjects whose head is too small
    if s == 'NLR_205_AC' or s == 'NLR_JB227':
        spacing='oct6' # ico5 = 10242, oct6 = 4098 ...8196 = 4098 * 2
        fn2 = subject + '-' + 'oct-6' + '-src.fif'
    else:
        spacing='ico5' # 10242 * 2
        fn2 = subject + '-' + 'ico-5' + '-src.fif'

    src = mne.setup_source_space(subject=subject, spacing=spacing, # source spacing = 5 mm
                                 subjects_dir=fs_dir, add_dist=False, n_jobs=18)
    # compute all vertex-to-vertex distances (no limit), then save
    src = mne.add_source_space_distances(src, dist_limit=np.inf, n_jobs=18,
                                         verbose=None)
    mne.write_source_spaces(fn2, src, overwrite=True)
def test_scale_mri(tmpdir, few_surfaces):
    """Test creating fsaverage and scaling it.

    Builds a minimal 'fsaverage' subject in a temp dir, scales it to a new
    subject 'flachkopf' (scalar and per-axis scales), and checks that source
    spaces, labels, and source-space distances survive the scaling.
    """
    # create fsaverage using the testing "fsaverage" instead of the FreeSurfer
    # one
    tempdir = str(tmpdir)
    fake_home = testing.data_path()
    create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
                           verbose=True)
    assert _is_mri_subject('fsaverage', tempdir), "Creating fsaverage failed"

    # Removing the fiducials file lets us exercise the update=True path.
    fid_path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-fiducials.fif')
    os.remove(fid_path)
    create_default_subject(update=True, subjects_dir=tempdir,
                           fs_home=fake_home)
    assert op.exists(fid_path), "Updating fsaverage"

    # copy MRI file from sample data (shouldn't matter that it's incorrect,
    # so here choose a small one)
    path_from = op.join(testing.data_path(), 'subjects', 'sample', 'mri',
                        'T1.mgz')
    path_to = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    copyfile(path_from, path_to)

    # remove redundant label files to keep the test fast
    label_temp = op.join(tempdir, 'fsaverage', 'label', '*.label')
    label_paths = glob(label_temp)
    for label_path in label_paths[1:]:
        os.remove(label_path)

    # create source space
    print('Creating surface source space')
    path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
    src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
                                 add_dist=False)
    mri = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    print('Creating volume source space')
    vsrc = mne.setup_volume_source_space(
        'fsaverage', pos=50, mri=mri, subjects_dir=tempdir,
        add_interpolator=False)
    write_source_spaces(path % 'vol-50', vsrc)

    # scale fsaverage: once uniformly, once with a different factor per axis
    for scale in (.9, [1, .2, .8]):
        write_source_spaces(path % 'ico-0', src, overwrite=True)
        with pytest.warns(None):  # sometimes missing nibabel
            scale_mri('fsaverage', 'flachkopf', scale, True,
                      subjects_dir=tempdir, verbose='debug')
        assert _is_mri_subject('flachkopf', tempdir), "Scaling failed"
        spath = op.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')

        assert op.exists(spath % 'ico-0'), "Source space ico-0 was not scaled"
        assert os.path.isfile(os.path.join(tempdir, 'flachkopf', 'surf',
                                           'lh.sphere.reg'))
        # Check that the scaled volume src transform maps a test point the
        # same way as the original transform applied to the unscaled point.
        vsrc_s = mne.read_source_spaces(spath % 'vol-50')
        pt = np.array([0.12, 0.41, -0.22])
        assert_array_almost_equal(
            apply_trans(vsrc_s[0]['src_mri_t'], pt * np.array(scale)),
            apply_trans(vsrc[0]['src_mri_t'], pt))
        scale_labels('flachkopf', subjects_dir=tempdir)

        # add distances to source space after hacking the properties to make
        # it run *much* faster (restrict rr/nn/tris to the in-use vertices)
        src_dist = src.copy()
        for s in src_dist:
            s.update(rr=s['rr'][s['vertno']], nn=s['nn'][s['vertno']],
                     tris=s['use_tris'])
            s.update(np=len(s['rr']), ntri=len(s['tris']),
                     vertno=np.arange(len(s['rr'])),
                     inuse=np.ones(len(s['rr']), int))
        mne.add_source_space_distances(src_dist)
        write_source_spaces(path % 'ico-0', src_dist, overwrite=True)

        # scale with distances
        os.remove(spath % 'ico-0')
        scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
        ssrc = mne.read_source_spaces(spath % 'ico-0')
        assert ssrc[0]['dist'] is not None
info = raw_AEF.info # Here we look at the head only. # mne.viz.plot_alignment(info, trans, subject=subject, dig=True, # meg=['helmet', 'sensors'], subjects_dir=subjects_dir, # surfaces='head') srcfile = data_path + '/subjects/bst_auditory/bem/bst_auditory-ico-4-src.fif' if os.path.isfile(srcfile): src = mne.read_source_spaces(srcfile) else: src = mne.setup_source_space(subject, spacing='ico4', subjects_dir=subjects_dir, add_dist=False) mne.add_source_space_distances(src) mne.write_source_spaces(srcfile, src, overwrite=True) # needed for smoothing print(src) fwdfile = data_path + '/subjects/bst_auditory/bem/bst_auditory-ico-4-fwd.fif' if os.path.isfile(fwdfile): fwd = mne.read_forward_solution(fwdfile) else: ## Compute Forward Solution conductivity = (0.3, ) # for single layer # conductivity = (0.3, 0.006, 0.3) # for three layers model = mne.make_bem_model(subject=subject, ico=4, conductivity=conductivity, subjects_dir=subjects_dir)
# Batch script: create source spaces (with full distances) for a list of NLR
# subjects. NOTE(review): relies on `raw_dir`, `fs_dir`, `mne`, `np`, and `os`
# defined earlier in the file — confirm before running standalone.
os.chdir(raw_dir)

subs = ['NLR_102_RS','NLR_103_AC','NLR_105_BB','NLR_110_HH','NLR_127_AM',
        'NLR_130_RW','NLR_132_WP','NLR_133_ML','NLR_145_AC','NLR_150_MG','NLR_151_RD',
        'NLR_152_TC','NLR_160_EK','NLR_161_AK','NLR_162_EF','NLR_163_LF','NLR_164_SF',
        'NLR_170_GM','NLR_172_TH','NLR_174_HS','NLR_179_GM','NLR_180_ZD','NLR_187_NB',
        'NLR_201_GS','NLR_202_DD','NLR_203_AM','NLR_204_AM','NLR_205_AC','NLR_206_LM',
        'NLR_207_AH','NLR_210_SB','NLR_211_LB'
        ]
# NOTE(review): `subs` is reassigned twice below, so the lists above are dead
# code — only the final three-subject list is processed. Presumably earlier
# batches were kept as a record of previous runs; confirm intent.
subs = ['NLR_GB310','NLR_KB218','NLR_JB423','NLR_GB267','NLR_JB420','NLR_HB275','NLR_197_BK','NLR_GB355','NLR_GB387']
subs = ['NLR_HB205','NLR_IB319','NLR_JB227','NLR_JB486','NLR_KB396']
subs = ['NLR_JB227','NLR_JB486','NLR_KB396']

for n, s in enumerate(subs):
    subject = s
    # Create source space in the subject's bem/ folder.
    os.chdir(os.path.join(fs_dir,subject,'bem'))
    """ NLR_205: Head is too small to create ico5 """
    if s == 'NLR_205_AC' or s == 'NLR_JB227':
        # Fall back to a coarser octahedral spacing for small heads.
        spacing='oct6' # ico5 = 10242, oct6 = 4098 ...8196 = 4098 * 2
        fn2 = subject + '-' + 'oct-6' + '-src.fif'
    else:
        spacing='ico5' # 10242 * 2
        fn2 = subject + '-' + 'ico-5' + '-src.fif'
    # NOTE(review): `overwrite=True` for setup_source_space was removed in
    # newer MNE releases — confirm the pinned MNE version supports it.
    src = mne.setup_source_space(subject=subject, spacing=spacing, # source spacing = 5 mm
                                 subjects_dir=fs_dir, add_dist=False,
                                 n_jobs=18, overwrite=True)
    src = mne.add_source_space_distances(src, dist_limit=np.inf, n_jobs=18,
                                         verbose=None)
    mne.write_source_spaces(fn2, src, overwrite=True)
def compute_src_distances(mri_sub, n_jobs):
    """Load a subject's source space, add inter-source distances, and save.

    Parameters
    ----------
    mri_sub : object
        Project MRI-subject wrapper providing ``load_source_space()`` and
        ``save_source_space()`` (exact type defined elsewhere).
    n_jobs : int
        Number of parallel jobs forwarded to
        ``mne.add_source_space_distances``.
    """
    source_space = mri_sub.load_source_space()
    # add_source_space_distances returns the (modified) source space;
    # persist that result via the project's save helper.
    with_distances = mne.add_source_space_distances(source_space,
                                                    n_jobs=n_jobs)
    mri_sub.save_source_space(with_distances)
def test_scale_mri():
    """Test creating fsaverage and scaling it"""
    # create fsaverage
    tempdir = _TempDir()
    create_default_subject(subjects_dir=tempdir)
    assert_true(_is_mri_subject('fsaverage', tempdir),
                "Creating fsaverage failed")

    # Removing the fiducials file lets us exercise the update=True path.
    fid_path = os.path.join(tempdir, 'fsaverage', 'bem',
                            'fsaverage-fiducials.fif')
    os.remove(fid_path)
    create_default_subject(update=True, subjects_dir=tempdir)
    assert_true(os.path.exists(fid_path), "Updating fsaverage")

    # copy MRI file from sample data
    path = os.path.join('%s', 'fsaverage', 'mri', 'orig.mgz')
    sample_sdir = os.path.join(mne.datasets.sample.data_path(), 'subjects')
    copyfile(path % sample_sdir, path % tempdir)

    # remove redundant label files to keep the test fast
    label_temp = os.path.join(tempdir, 'fsaverage', 'label', '*.label')
    label_paths = glob(label_temp)
    for label_path in label_paths[1:]:
        os.remove(label_path)

    # create source space (note: `path` is rebound to the src-file template)
    path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
    src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
                                 add_dist=False)
    write_source_spaces(path % 'ico-0', src)
    mri = os.path.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    vsrc = mne.setup_volume_source_space('fsaverage', pos=50, mri=mri,
                                         subjects_dir=tempdir,
                                         add_interpolator=False)
    write_source_spaces(path % 'vol-50', vsrc)

    # scale fsaverage
    # NOTE(review): if scale_mri raises, the env var below is never removed
    # (no try/finally) — leaks into later tests; consider fixing upstream.
    os.environ['_MNE_FEW_SURFACES'] = 'true'
    scale = np.array([1, .2, .8])
    scale_mri('fsaverage', 'flachkopf', scale, True, subjects_dir=tempdir)
    del os.environ['_MNE_FEW_SURFACES']
    assert_true(_is_mri_subject('flachkopf', tempdir),
                "Scaling fsaverage failed")
    spath = os.path.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')

    assert_true(os.path.exists(spath % 'ico-0'),
                "Source space ico-0 was not scaled")
    # The scaled volume src transform applied to a scaled point must match the
    # original transform applied to the unscaled point.
    vsrc_s = mne.read_source_spaces(spath % 'vol-50')
    pt = np.array([0.12, 0.41, -0.22])
    assert_array_almost_equal(apply_trans(vsrc_s[0]['src_mri_t'], pt * scale),
                              apply_trans(vsrc[0]['src_mri_t'], pt))
    scale_labels('flachkopf', subjects_dir=tempdir)

    # add distances to source space
    mne.add_source_space_distances(src)
    src.save(path % 'ico-0', overwrite=True)

    # scale with distances
    os.remove(spath % 'ico-0')
    scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
    ssrc = mne.read_source_spaces(spath % 'ico-0')
    assert_is_not(ssrc[0]['dist'], None)
def compute_forward_stack(subjects_dir,
                          subject,
                          recordings_path,
                          info_from=(('data_type', 'rest'),
                                     ('run_index', 0)),
                          fwd_params=None, src_params=None,
                          hcp_path=op.curdir, n_jobs=1, verbose=None):
    """
    Convenience function for conducting standard MNE analyses.

    .. note::
       this function computes bem solutions, source spaces and forward models
       optimized for connectivity computation, i.e., the fsaverage space
       is morphed onto the subject's space.

    Parameters
    ----------
    subject : str
        The subject name.
    hcp_path : str
        The directory containing the HCP data.
    recordings_path : str
        The path where MEG data and transformations are stored.
    subjects_dir : str
        The directory containing the extracted HCP subject data.
    info_from : tuple of tuples | dict
        The reader info concerning the data from which sensor positions
        should be read.
        Must not be empty room as sensor positions are in head
        coordinates for 4D systems, hence not available in that case.
        Note that differences between the sensor positions across runs
        are smaller than 12 digits, hence negligible.
    fwd_params : None | dict
        The forward parameters
    src_params : None | dict
        The src params. Defaults to:

        dict(subject='fsaverage', fname=None, spacing='oct6', n_jobs=2,
             surface='white', subjects_dir=subjects_dir, add_dist=True)
    hcp_path : str
        The prefix of the path of the HCP data.
    n_jobs : int
        The number of jobs to use in parallel.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose)

    Returns
    -------
    out : dict
        A dictionary with the following keys:
            fwd : instance of mne.Forward
                The forward solution.
            src_subject : instance of mne.SourceSpace
                The source model on the subject's surface
            src_fsaverage : instance of mne.SourceSpace
                The source model on fsaverage's surface
            bem_sol : dict
                The BEM.
            info : instance of mne.io.meas_info.Info
                The actual measurement info used.
    """
    # Allow info_from as a tuple of (key, value) pairs for hashability.
    if isinstance(info_from, tuple):
        info_from = dict(info_from)

    head_mri_t = mne.read_trans(
        op.join(recordings_path, subject, '{}-head_mri-trans.fif'.format(
            subject)))

    src_defaults = dict(subject='fsaverage', spacing='oct6', n_jobs=n_jobs,
                        surface='white', subjects_dir=subjects_dir,
                        add_dist=True)
    if 'fname' in mne.fixes._get_args(mne.setup_source_space):
        # needed for mne-0.14 and below
        src_defaults.update(dict(fname=None))
    else:
        # remove 'fname' argument (if necessary) when using mne-0.15+
        # BUGFIX: src_params defaults to None, so membership-testing it
        # directly raised ``TypeError: argument of type 'NoneType' is not
        # iterable`` on this branch; guard against None first.
        if src_params is not None and 'fname' in src_params:
            del src_params['fname']
    src_params = _update_dict_defaults(src_params, src_defaults)

    add_source_space_distances = False
    if src_params['add_dist']:
        # we want the distances on the morphed space, so defer computation
        src_params['add_dist'] = False
        add_source_space_distances = True

    src_fsaverage = mne.setup_source_space(**src_params)
    src_subject = mne.morph_source_spaces(
        src_fsaverage, subject, subjects_dir=subjects_dir)

    if add_source_space_distances:
        # and here we compute them post hoc.
        src_subject = mne.add_source_space_distances(
            src_subject, n_jobs=n_jobs)

    bems = mne.make_bem_model(subject, conductivity=(0.3,),
                              subjects_dir=subjects_dir,
                              ico=None)  # ico = None for morphed SP.
    bem_sol = mne.make_bem_solution(bems)
    # presumably forces the surface into the MRI coordinate frame (FIFF
    # coord-frame code 5) — TODO confirm against FIFF constants.
    bem_sol['surfs'][0]['coord_frame'] = 5

    info = read_info(subject=subject, hcp_path=hcp_path, **info_from)
    picks = _pick_data_channels(info, with_ref_meg=False)
    info = pick_info(info, picks)

    # here we assume that as a result of our MNE-HCP processing
    # all other transforms in info are identity
    for trans in ['dev_head_t', 'ctf_head_t']:
        #  'dev_ctf_t' is not identity
        assert np.sum(info[trans]['trans'] - np.eye(4)) == 0

    fwd = mne.make_forward_solution(
        info, trans=head_mri_t, bem=bem_sol, src=src_subject,
        n_jobs=n_jobs)

    return dict(fwd=fwd, src_subject=src_subject,
                src_fsaverage=src_fsaverage,
                bem_sol=bem_sol, info=info)
def test_scale_mri():
    """Test creating fsaverage and scaling it."""
    # create fsaverage using the testing "fsaverage" instead of the FreeSurfer
    # one
    tempdir = _TempDir()
    fake_home = testing.data_path()
    create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
                           verbose=True)
    assert _is_mri_subject('fsaverage', tempdir), "Creating fsaverage failed"

    # Removing the fiducials file lets us exercise the update=True path.
    fid_path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-fiducials.fif')
    os.remove(fid_path)
    create_default_subject(update=True, subjects_dir=tempdir,
                           fs_home=fake_home)
    assert op.exists(fid_path), "Updating fsaverage"

    # copy MRI file from sample data (shouldn't matter that it's incorrect,
    # so here choose a small one)
    path_from = op.join(testing.data_path(), 'subjects', 'sample', 'mri',
                        'T1.mgz')
    path_to = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    copyfile(path_from, path_to)

    # remove redundant label files to keep the test fast
    label_temp = op.join(tempdir, 'fsaverage', 'label', '*.label')
    label_paths = glob(label_temp)
    for label_path in label_paths[1:]:
        os.remove(label_path)

    # create source space
    print('Creating surface source space')
    path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
    src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
                                 add_dist=False)
    write_source_spaces(path % 'ico-0', src)
    mri = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    print('Creating volume source space')
    vsrc = mne.setup_volume_source_space(
        'fsaverage', pos=50, mri=mri, subjects_dir=tempdir,
        add_interpolator=False)
    write_source_spaces(path % 'vol-50', vsrc)

    # scale fsaverage
    # NOTE(review): if scale_mri raises, the env var below is never removed
    # (no try/finally) — leaks into later tests; consider fixing upstream.
    os.environ['_MNE_FEW_SURFACES'] = 'true'
    scale = np.array([1, .2, .8])
    scale_mri('fsaverage', 'flachkopf', scale, True, subjects_dir=tempdir,
              verbose='debug')
    del os.environ['_MNE_FEW_SURFACES']
    assert _is_mri_subject('flachkopf', tempdir), "Scaling fsaverage failed"
    spath = op.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')

    assert op.exists(spath % 'ico-0'), "Source space ico-0 was not scaled"
    assert os.path.isfile(os.path.join(tempdir, 'flachkopf', 'surf',
                                       'lh.sphere.reg'))
    # The scaled volume src transform applied to a scaled point must match the
    # original transform applied to the unscaled point.
    vsrc_s = mne.read_source_spaces(spath % 'vol-50')
    pt = np.array([0.12, 0.41, -0.22])
    assert_array_almost_equal(apply_trans(vsrc_s[0]['src_mri_t'], pt * scale),
                              apply_trans(vsrc[0]['src_mri_t'], pt))
    scale_labels('flachkopf', subjects_dir=tempdir)

    # add distances to source space
    mne.add_source_space_distances(src)
    src.save(path % 'ico-0', overwrite=True)

    # scale with distances
    os.remove(spath % 'ico-0')
    scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
    ssrc = mne.read_source_spaces(spath % 'ico-0')
    assert ssrc[0]['dist'] is not None
def compute_forward_stack(subjects_dir,
                          subject,
                          recordings_path,
                          info_from=(('data_type', 'rest'),
                                     ('run_index', 0)),
                          fwd_params=None, src_params=None,
                          hcp_path=op.curdir, n_jobs=1, verbose=None):
    """
    Convenience function for conducting standard MNE analyses.

    .. note::
       this function computes bem solutions, source spaces and forward models
       optimized for connectivity computation, i.e., the fsaverage space
       is morphed onto the subject's space.

    Parameters
    ----------
    subject : str
        The subject name.
    hcp_path : str
        The directory containing the HCP data.
    recordings_path : str
        The path where MEG data and transformations are stored.
    subjects_dir : str
        The directory containing the extracted HCP subject data.
    info_from : tuple of tuples | dict
        The reader info concerning the data from which sensor positions
        should be read.
        Must not be empty room as sensor positions are in head
        coordinates for 4D systems, hence not available in that case.
        Note that differences between the sensor positions across runs
        are smaller than 12 digits, hence negligible.
    fwd_params : None | dict
        The forward parameters
    src_params : None | dict
        The src params. Defaults to:

        dict(subject='fsaverage', fname=None, spacing='oct6', n_jobs=2,
             surface='white', subjects_dir=subjects_dir, add_dist=True)
    hcp_path : str
        The prefix of the path of the HCP data.
    n_jobs : int
        The number of jobs to use in parallel.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose)

    Returns
    -------
    out : dict
        A dictionary with the following keys:
            fwd : instance of mne.Forward
                The forward solution.
            src_subject : instance of mne.SourceSpace
                The source model on the subject's surface
            src_fsaverage : instance of mne.SourceSpace
                The source model on fsaverage's surface
            bem_sol : dict
                The BEM.
            info : instance of mne.io.meas_info.Info
                The actual measurement info used.
    """
    # Allow info_from as a tuple of (key, value) pairs for hashability.
    if isinstance(info_from, tuple):
        info_from = dict(info_from)

    head_mri_t = mne.read_trans(
        op.join(recordings_path, subject,
                '{}-head_mri-trans.fif'.format(subject)))

    # Merge caller-supplied src params over the defaults.
    src_params = _update_dict_defaults(
        src_params,
        dict(subject='fsaverage', spacing='oct6', n_jobs=n_jobs,
             surface='white', subjects_dir=subjects_dir, add_dist=True))

    add_source_space_distances = False
    if src_params['add_dist']:
        # we want the distances on the morphed space, so defer computation
        src_params['add_dist'] = False
        add_source_space_distances = True
    src_fsaverage = mne.setup_source_space(**src_params)

    src_subject = mne.morph_source_spaces(src_fsaverage, subject,
                                          subjects_dir=subjects_dir)

    if add_source_space_distances:
        # and here we compute them post hoc.
        src_subject = mne.add_source_space_distances(src_subject,
                                                     n_jobs=n_jobs)

    bems = mne.make_bem_model(subject, conductivity=(0.3, ),
                              subjects_dir=subjects_dir,
                              ico=None)  # ico = None for morphed SP.
    bem_sol = mne.make_bem_solution(bems)
    # presumably forces the surface into the MRI coordinate frame (FIFF
    # coord-frame code 5) — TODO confirm against FIFF constants.
    bem_sol['surfs'][0]['coord_frame'] = 5

    info = read_info(subject=subject, hcp_path=hcp_path, **info_from)
    picks = _pick_data_channels(info, with_ref_meg=False)
    info = pick_info(info, picks)

    # here we assume that as a result of our MNE-HCP processing
    # all other transforms in info are identity
    for trans in ['dev_head_t', 'ctf_head_t']:
        #  'dev_ctf_t' is not identity
        assert np.sum(info[trans]['trans'] - np.eye(4)) == 0

    fwd = mne.make_forward_solution(info, trans=head_mri_t, bem=bem_sol,
                                    src=src_subject, n_jobs=n_jobs)

    return dict(fwd=fwd, src_subject=src_subject,
                src_fsaverage=src_fsaverage,
                bem_sol=bem_sol, info=info)
raise RuntimeError( 'Multiple files found for {stimulus}'.format(stimulus=stimulus)) else: raise RuntimeError( 'Cannot find STC for {stimulus}'.format(stimulus=stimulus)) if args.verbose: pbar.close() # Load source space src = mne.read_source_spaces(args.source) # During inverse computation, the source space was downsampled (i.e. using ico4). # Construct vertex-to-vertex distance matrices using only the vertices that # are defined in the source solution. dist = [] if ('dist' in src[0]) == False: src = mne.add_source_space_distances(src, dist_limit=spatial_radius) for hemi in [0, 1]: inuse = np.flatnonzero(src[0]['inuse']) dist.append(src[hemi]['dist'][np.ix_(stc.vertices[hemi], stc.vertices[hemi])].toarray()) # Load control variables if needed if args.control is not None: print 'Controlling for given variables' m_c = loadmat(args.control) control = m_c['sorted']['mat'][0][0] # Make control variables be in the correct order control_words = [x[0][0] for x in m_c['sorted']['word'][0][0]] order = [control_words.index(w) for w in stimuli]