def test_morph_source_spaces():
    """Test morphing of source spaces."""
    src = read_source_spaces(fname_fs)
    src_morph = read_source_spaces(fname_morph)
    src_morph_py = morph_source_spaces(src, 'sample',
                                       subjects_dir=subjects_dir)
    _compare_source_spaces(src_morph, src_morph_py, mode='approx')
def compute_fwd(subject, src_ref, info, trans_fname, bem_fname,
                meg=True, eeg=True, mindist=3, subjects_dir=None,
                n_jobs=1, verbose=None):
    src = mne.morph_source_spaces(src_ref, subject_to=subject,
                                  verbose=verbose,
                                  subjects_dir=subjects_dir)
    bem = mne.read_bem_solution(bem_fname, verbose=verbose)
    fwd = mne.make_forward_solution(info, trans=trans_fname, src=src,
                                    bem=bem, meg=meg, eeg=eeg,
                                    mindist=mindist, verbose=verbose,
                                    n_jobs=n_jobs)
    return fwd
def test_morphed_source_space_return():
    """Test returning a morphed source space to the original subject."""
    # let's create some random data on fsaverage
    data = rng.randn(20484, 1)
    tmin, tstep = 0, 1.
    src_fs = read_source_spaces(fname_fs)
    stc_fs = SourceEstimate(data, [s['vertno'] for s in src_fs],
                            tmin, tstep, 'fsaverage')

    # Create our morph source space
    src_morph = morph_source_spaces(src_fs, 'sample',
                                    subjects_dir=subjects_dir)

    # Morph the data over using standard methods
    stc_morph = stc_fs.morph('sample', [s['vertno'] for s in src_morph],
                             smooth=1, subjects_dir=subjects_dir)

    # We can now pretend like this was real data we got e.g. from an inverse.
    # To be complete, let's remove some vertices
    keeps = [np.sort(rng.permutation(np.arange(len(v)))[:len(v) - 10])
             for v in stc_morph.vertices]
    stc_morph = SourceEstimate(
        np.concatenate([stc_morph.lh_data[keeps[0]],
                        stc_morph.rh_data[keeps[1]]]),
        [v[k] for v, k in zip(stc_morph.vertices, keeps)], tmin, tstep,
        'sample')

    # Return it to the original subject
    stc_morph_return = stc_morph.to_original_src(
        src_fs, subjects_dir=subjects_dir)

    # Compare to the original data
    stc_morph_morph = stc_morph.morph('fsaverage', stc_morph_return.vertices,
                                      smooth=1, subjects_dir=subjects_dir)
    assert_equal(stc_morph_return.subject, stc_morph_morph.subject)
    for ii in range(2):
        assert_array_equal(stc_morph_return.vertices[ii],
                           stc_morph_morph.vertices[ii])
    # These will not match perfectly because morphing pushes data around
    corr = np.corrcoef(stc_morph_return.data[:, 0],
                       stc_morph_morph.data[:, 0])[0, 1]
    assert_true(corr > 0.99, corr)

    # Degenerate cases
    stc_morph.subject = None  # no .subject provided
    assert_raises(ValueError, stc_morph.to_original_src, src_fs,
                  subject_orig='fsaverage', subjects_dir=subjects_dir)
    stc_morph.subject = 'sample'
    del src_fs[0]['subject_his_id']  # no name in src_fsaverage
    assert_raises(ValueError, stc_morph.to_original_src, src_fs,
                  subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'fsaverage'  # name mismatch
    assert_raises(ValueError, stc_morph.to_original_src, src_fs,
                  subject_orig='foo', subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'sample'
    src = read_source_spaces(fname)  # wrong source space
    assert_raises(RuntimeError, stc_morph.to_original_src, src,
                  subjects_dir=subjects_dir)
def compute_fwd(subject, src_ref, info, trans_fname, bem_fname,
                meg=True, eeg=True, mindist=2, subjects_dir=None,
                n_jobs=1, verbose=None):
    """Morph the source space of fsaverage to a subject.

    Parameters
    ----------
    subject : str
        Name of the subject to which the source space is morphed.
    src_ref : instance of SourceSpaces
        Source space of the reference subject. See `get_src_reference`.
    info : str | instance of mne.Info
        MNE Info object, or path to a raw fif file.
    trans_fname : str
        Path to the trans file of the subject.
    bem_fname : str
        Path to the bem solution of the subject.
    meg : bool
        Include MEG channels or not.
    eeg : bool
        Include EEG channels or not.
    mindist : float
        Safety distance from the outer skull. Sources below `mindist`
        will be discarded in the forward operator.
    subjects_dir : str
        Path to the freesurfer `subjects` directory.
    n_jobs : int
        The number of jobs to run in parallel.
    verbose : None | bool
        Use verbose mode. If None use MNE default.
    """
    print("Processing subject %s" % subject)
    src = mne.morph_source_spaces(src_ref, subject_to=subject,
                                  verbose=verbose,
                                  subjects_dir=subjects_dir)
    bem = mne.read_bem_solution(bem_fname, verbose=verbose)
    fwd = mne.make_forward_solution(info, trans=trans_fname, src=src,
                                    bem=bem, meg=meg, eeg=eeg,
                                    mindist=mindist, verbose=verbose,
                                    n_jobs=n_jobs)
    return fwd
def compute_fwd(subject, src_ref, info, trans_fname, bem_fname,
                mindist=2, subjects_dir=None):
    """Morph source space of fsaverage to subject."""
    print("Processing subject %s" % subject)
    src = mne.morph_source_spaces(src_ref, subject_to=subject,
                                  subjects_dir=subjects_dir)
    bem = mne.read_bem_solution(bem_fname)
    fwd = mne.make_forward_solution(info, trans=trans_fname, src=src,
                                    bem=bem, mindist=mindist, n_jobs=1)
    return fwd
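A minimal usage sketch for the compute_fwd() variant directly above, assuming the MNE 'sample' dataset layout (the paths and the choice of the ico-5 fsaverage source space mirror the sensitivity-map example further down, but are illustrative assumptions rather than part of the original script):

import os.path as op
import mne

data_path = str(mne.datasets.sample.data_path())
subjects_dir = op.join(data_path, 'subjects')
# reference source space defined on fsaverage (assumed to ship with the data)
src_ref = mne.read_source_spaces(
    op.join(subjects_dir, 'fsaverage', 'bem', 'fsaverage-ico-5-src.fif'))
info = mne.io.read_info(op.join(data_path, 'MEG', 'sample',
                                'sample_audvis_raw.fif'))
fwd = compute_fwd(
    'sample', src_ref, info,
    trans_fname=op.join(data_path, 'MEG', 'sample',
                        'sample_audvis_raw-trans.fif'),
    bem_fname=op.join(subjects_dir, 'sample', 'bem',
                      'sample-5120-bem-sol.fif'),
    subjects_dir=subjects_dir)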
fname_trans = op.join(data_path, 'MEG', 'sample',
                      'sample_audvis_raw-trans.fif')
fname_bem = op.join(subjects_dir, 'sample', 'bem',
                    'sample-5120-bem-sol.fif')
fname_src_fs = op.join(subjects_dir, 'fsaverage', 'bem',
                       'fsaverage-ico-5-src.fif')
raw_fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_raw.fif')

# Get relevant channel information
info = mne.io.read_info(raw_fname)
info = mne.pick_info(info, mne.pick_types(info, meg=True, eeg=False,
                                          exclude=[]))

# Morph fsaverage's source space to sample
src_fs = mne.read_source_spaces(fname_src_fs)
src_morph = mne.morph_source_spaces(src_fs, subject_to='sample',
                                    subjects_dir=subjects_dir)

# Compute the forward with our morphed source space
fwd = mne.make_forward_solution(info, trans=fname_trans, src=src_morph,
                                bem=fname_bem)
mag_map = mne.sensitivity_map(fwd, ch_type='mag')

# Return this SourceEstimate (on sample's surfaces) to fsaverage's surfaces
mag_map_fs = mag_map.to_original_src(src_fs, subjects_dir=subjects_dir)

# Plot the result, which tracks the sulcal-gyral folding
# outliers may occur, we'll place the cutoff at 99 percent.
kwargs = dict(clim=dict(kind='percent', lims=[0, 50, 99]),
              # no smoothing, let's see the dipoles on the cortex.
              smoothing_steps=1, hemi='rh', views=['lat'])
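A quick sanity check that can be added after the morph step above; it mirrors the vertex-count assertion used in the test suite below and only relies on fields already present in the two source spaces:

# Illustrative check: morphing preserves the number of vertices per hemisphere
n_fs = [len(s['vertno']) for s in src_fs]
n_morph = [len(s['vertno']) for s in src_morph]
assert n_fs == n_morph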
def test_morphed_source_space_return():
    """Test returning a morphed source space to the original subject."""
    # let's create some random data on fsaverage
    data = rng.randn(20484, 1)
    tmin, tstep = 0, 1.
    src_fs = read_source_spaces(fname_fs)
    stc_fs = SourceEstimate(data, [s['vertno'] for s in src_fs],
                            tmin, tstep, 'fsaverage')
    n_verts_fs = sum(len(s['vertno']) for s in src_fs)

    # Create our morph source space
    src_morph = morph_source_spaces(src_fs, 'sample',
                                    subjects_dir=subjects_dir)
    n_verts_sample = sum(len(s['vertno']) for s in src_morph)
    assert n_verts_fs == n_verts_sample

    # Morph the data over using standard methods
    stc_morph = compute_source_morph(
        src_fs, 'fsaverage', 'sample',
        spacing=[s['vertno'] for s in src_morph], smooth=1,
        subjects_dir=subjects_dir, warn=False).apply(stc_fs)
    assert stc_morph.data.shape[0] == n_verts_sample

    # We can now pretend like this was real data we got e.g. from an inverse.
    # To be complete, let's remove some vertices
    keeps = [np.sort(rng.permutation(np.arange(len(v)))[:len(v) - 10])
             for v in stc_morph.vertices]
    stc_morph = SourceEstimate(
        np.concatenate([stc_morph.lh_data[keeps[0]],
                        stc_morph.rh_data[keeps[1]]]),
        [v[k] for v, k in zip(stc_morph.vertices, keeps)], tmin, tstep,
        'sample')

    # Return it to the original subject
    stc_morph_return = stc_morph.to_original_src(
        src_fs, subjects_dir=subjects_dir)

    # This should fail (has too many verts in SourceMorph)
    with pytest.warns(RuntimeWarning, match='vertices not included'):
        morph = compute_source_morph(
            src_morph, subject_from='sample',
            spacing=stc_morph_return.vertices, smooth=1,
            subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='vertices do not match'):
        morph.apply(stc_morph)

    # Compare to the original data
    with pytest.warns(RuntimeWarning, match='vertices not included'):
        stc_morph_morph = compute_source_morph(
            src=stc_morph, subject_from='sample',
            spacing=stc_morph_return.vertices, smooth=1,
            subjects_dir=subjects_dir).apply(stc_morph)
    assert_equal(stc_morph_return.subject, stc_morph_morph.subject)
    for ii in range(2):
        assert_array_equal(stc_morph_return.vertices[ii],
                           stc_morph_morph.vertices[ii])
    # These will not match perfectly because morphing pushes data around
    corr = np.corrcoef(stc_morph_return.data[:, 0],
                       stc_morph_morph.data[:, 0])[0, 1]
    assert corr > 0.99, corr

    # Explicitly test having two vertices map to the same target vertex. We
    # simulate this by having two vertices be at the same position.
    src_fs2 = src_fs.copy()
    vert1, vert2 = src_fs2[0]['vertno'][:2]
    src_fs2[0]['rr'][vert1] = src_fs2[0]['rr'][vert2]
    stc_morph_return = stc_morph.to_original_src(
        src_fs2, subjects_dir=subjects_dir)

    # test to_original_src method result equality
    for ii in range(2):
        assert_array_equal(stc_morph_return.vertices[ii],
                           stc_morph_morph.vertices[ii])
    # These will not match perfectly because morphing pushes data around
    corr = np.corrcoef(stc_morph_return.data[:, 0],
                       stc_morph_morph.data[:, 0])[0, 1]
    assert corr > 0.99, corr

    # Degenerate cases
    stc_morph.subject = None  # no .subject provided
    pytest.raises(ValueError, stc_morph.to_original_src, src_fs,
                  subject_orig='fsaverage', subjects_dir=subjects_dir)
    stc_morph.subject = 'sample'
    del src_fs[0]['subject_his_id']  # no name in src_fsaverage
    pytest.raises(ValueError, stc_morph.to_original_src, src_fs,
                  subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'fsaverage'  # name mismatch
    pytest.raises(ValueError, stc_morph.to_original_src, src_fs,
                  subject_orig='foo', subjects_dir=subjects_dir)
    src_fs[0]['subject_his_id'] = 'sample'
    src = read_source_spaces(fname)  # wrong source space
    pytest.raises(RuntimeError, stc_morph.to_original_src, src,
                  subjects_dir=subjects_dir)
##############################################################################
# Now we can set up our source model.
# Note that spacing has to be set to 'all' since no common MNE resampling
# scheme has been employed in the HCP pipelines.
# Since this will take a very long time to compute, and at this point no
# other decimation scheme is available inside MNE, we will compute the source
# space on fsaverage, the freesurfer average brain, and morph it onto
# the subject's native space. With `oct6` we have ~8000 dipole locations.

src_fsaverage = mne.setup_source_space(
    subject='fsaverage', subjects_dir=subjects_dir, add_dist=False,
    spacing='oct6', overwrite=True)

# now we morph it onto the subject.
src_subject = mne.morph_source_spaces(
    src_fsaverage, subject, subjects_dir=subjects_dir)

##############################################################################
# For the same reason `ico` has to be set to `None` when computing the bem.
# The headshape is not computed with MNE and has a non-standard configuration.

bems = mne.make_bem_model(subject, conductivity=(0.3,),
                          subjects_dir=subjects_dir,
                          ico=None)  # ico = None for morphed SP.
bem_sol = mne.make_bem_solution(bems)
bem_sol['surfs'][0]['coord_frame'] = 5

##############################################################################
# Now we can read the channels that we want to map to the cortical locations.
# Then we can compute the forward solution.
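A minimal sketch of the forward step that the last comment block announces. It assumes a measurement `info` and a head-to-MRI transform `head_mri_t` have already been loaded (the names are chosen for illustration and mirror the HCP helper functions further down):

# keep only MEG data channels (assumes `info` was read beforehand)
picks = mne.pick_types(info, meg=True, ref_meg=False)
info_meg = mne.pick_info(info, picks)
# combine the morphed source space and BEM into a forward operator
fwd = mne.make_forward_solution(info_meg, trans=head_mri_t,
                                bem=bem_sol, src=src_subject)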
fname_trans = op.join(data_path, 'MEG', 'sample',
                      'sample_audvis_raw-trans.fif')
fname_bem = op.join(subjects_dir, 'sample', 'bem',
                    'sample-5120-bem-sol.fif')
fname_src_fs = op.join(subjects_dir, 'fsaverage', 'bem',
                       'fsaverage-ico-5-src.fif')
raw_fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_raw.fif')

# Get relevant channel information
info = mne.io.read_info(raw_fname)
info = mne.pick_info(info, mne.pick_types(info, meg=True, eeg=False,
                                          exclude=[]))

# Morph fsaverage's source space to sample
src_fs = mne.read_source_spaces(fname_src_fs)
src_morph = mne.morph_source_spaces(src_fs, subject_to='sample',
                                    subjects_dir=subjects_dir)

# Compute the forward with our morphed source space
fwd = mne.make_forward_solution(info, trans=fname_trans, src=src_morph,
                                bem=fname_bem)
# fwd = mne.convert_forward_solution(fwd, surf_ori=True, force_fixed=True)
mag_map = mne.sensitivity_map(fwd, ch_type='mag')

# Return this SourceEstimate (on sample's surfaces) to fsaverage's surfaces
mag_map_fs = mag_map.to_original_src(src_fs, subjects_dir=subjects_dir)

# Plot the result, which tracks the sulcal-gyral folding
# outliers may occur, we'll place the cutoff at 99 percent.
kwargs = dict(clim=dict(kind='percent', lims=[0, 50, 99]),
              # no smoothing, let's see the dipoles on the cortex.
              smoothing_steps=1, hemi='rh', views=['lat'])
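The kwargs dict defined above is presumably handed to the plotting calls that follow in the full example; a sketch of how it might be used (requires a 3D plotting backend such as PySurfer/pyvista):

# sensitivity map on the subject's own surfaces
brain_subject = mag_map.plot(subject='sample', subjects_dir=subjects_dir,
                             **kwargs)
# the same map returned to fsaverage's surfaces
brain_fs = mag_map_fs.plot(subject='fsaverage', subjects_dir=subjects_dir,
                           **kwargs)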
# Be verbose
mne.set_log_level('INFO')

# Handle command line arguments
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('subject', metavar='sub###',
                    help='The subject to process')
args = parser.parse_args()
subject = args.subject
print('Processing subject:', subject)

fsaverage = mne.read_source_spaces(fname.fsaverage_src)

# Morph the source space to the current subject
subject_src = mne.morph_source_spaces(fsaverage, subject,
                                      subjects_dir=fname.subjects_dir)

# Save the source space
mne.write_source_spaces(fname.src(subject=subject), subject_src,
                        overwrite=True)

# Create the forward model. We use a single layer BEM model for this.
bem_model = mne.make_bem_model(subject, ico=4,
                               subjects_dir=fname.subjects_dir,
                               conductivity=(0.3,))
bem = mne.make_bem_solution(bem_model)
info = mne.io.read_info(fname.epo(subject=subject))
fwd = mne.make_forward_solution(info,
def compute_forward_stack(subjects_dir, subject, recordings_path,
                          info_from=(('data_type', 'rest'),
                                     ('run_index', 0)),
                          fwd_params=None, src_params=None,
                          hcp_path=op.curdir, n_jobs=1, verbose=None):
    """Convenience function for conducting standard MNE analyses.

    .. note::
       this function computes bem solutions, source spaces
       and forward models optimized for connectivity computation, i.e.,
       the fsaverage space is morphed onto the subject's space.

    Parameters
    ----------
    subject : str
        The subject name.
    hcp_path : str
        The directory containing the HCP data.
    recordings_path : str
        The path where MEG data and transformations are stored.
    subjects_dir : str
        The directory containing the extracted HCP subject data.
    info_from : tuple of tuples | dict
        The reader info concerning the data from which sensor positions
        should be read.
        Must not be empty room as sensor positions are in head
        coordinates for 4D systems, hence not available in that case.
        Note that differences between the sensor positions across runs
        are smaller than 12 digits, hence negligible.
    fwd_params : None | dict
        The forward parameters
    src_params : None | dict
        The src params. Defaults to:

        dict(subject='fsaverage', fname=None, spacing='oct6', n_jobs=2,
             surface='white', subjects_dir=subjects_dir, add_dist=True)
    hcp_path : str
        The prefix of the path of the HCP data.
    n_jobs : int
        The number of jobs to use in parallel.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose)

    Returns
    -------
    out : dict
        A dictionary with the following keys:
            fwd : instance of mne.Forward
                The forward solution.
            src_subject : instance of mne.SourceSpace
                The source model on the subject's surface
            src_fsaverage : instance of mne.SourceSpace
                The source model on fsaverage's surface
            bem_sol : dict
                The BEM.
            info : instance of mne.io.meas_info.Info
                The actual measurement info used.
    """
    if isinstance(info_from, tuple):
        info_from = dict(info_from)

    head_mri_t = mne.read_trans(
        op.join(recordings_path, subject,
                '{}-head_mri-trans.fif'.format(subject)))

    src_defaults = dict(subject='fsaverage', spacing='oct6', n_jobs=n_jobs,
                        surface='white', subjects_dir=subjects_dir,
                        add_dist=True)
    if 'fname' in mne.fixes._get_args(mne.setup_source_space):
        # needed for mne-0.14 and below
        src_defaults.update(dict(fname=None))
    else:
        # remove 'fname' argument (if necessary) when using mne-0.15+
        if 'fname' in src_params:
            del src_params['fname']
    src_params = _update_dict_defaults(src_params, src_defaults)

    add_source_space_distances = False
    if src_params['add_dist']:  # we want the distances on the morphed space
        src_params['add_dist'] = False
        add_source_space_distances = True

    src_fsaverage = mne.setup_source_space(**src_params)
    src_subject = mne.morph_source_spaces(
        src_fsaverage, subject, subjects_dir=subjects_dir)

    if add_source_space_distances:  # and here we compute them post hoc.
        src_subject = mne.add_source_space_distances(
            src_subject, n_jobs=n_jobs)

    bems = mne.make_bem_model(subject, conductivity=(0.3,),
                              subjects_dir=subjects_dir,
                              ico=None)  # ico = None for morphed SP.
    bem_sol = mne.make_bem_solution(bems)
    bem_sol['surfs'][0]['coord_frame'] = 5

    info = read_info(subject=subject, hcp_path=hcp_path, **info_from)
    picks = _pick_data_channels(info, with_ref_meg=False)
    info = pick_info(info, picks)

    # here we assume that as a result of our MNE-HCP processing
    # all other transforms in info are identity
    for trans in ['dev_head_t', 'ctf_head_t']:  # 'dev_ctf_t' is not identity
        assert np.sum(info[trans]['trans'] - np.eye(4)) == 0

    fwd = mne.make_forward_solution(
        info, trans=head_mri_t, bem=bem_sol, src=src_subject,
        n_jobs=n_jobs)

    return dict(fwd=fwd, src_subject=src_subject,
                src_fsaverage=src_fsaverage,
                bem_sol=bem_sol, info=info)
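A hedged usage sketch for compute_forward_stack() as defined above; it assumes an MNE-HCP style layout, and the subject ID and directory paths are placeholders for illustration only:

out = compute_forward_stack(
    subjects_dir='/data/hcp-subjects',        # extracted FreeSurfer anatomy
    subject='100307',                          # example HCP subject ID
    recordings_path='/data/hcp-recordings',    # contains *-head_mri-trans.fif
    hcp_path='/data/HCP',                      # unprocessed HCP download
    src_params=dict(add_dist=False), n_jobs=2)
fwd = out['fwd']
src_fsaverage = out['src_fsaverage']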
# cfg.reducerank = 2;
# leadfield2d = ft_prepare_leadfield(cfg);

import mne

head_mri_t = mne.read_trans(
    os.path.join(recordings_path, subject,
                 '{}-head_mri-trans.fif'.format(subject)))

# Source space: set it up on fsaverage, then morph the fsaverage source
# model onto the subject.
src_fsaverage = mne.setup_source_space(subject='fsaverage',
                                       subjects_dir=fs_path,
                                       add_dist=False, spacing='oct6')
src_subject = mne.morph_source_spaces(src_fsaverage, subject,
                                      subjects_dir=fs_path)

# BEM
bems = mne.make_bem_model(subject, conductivity=(0.3,),
                          subjects_dir=fs_path, ico=4)
bem_sol = mne.make_bem_solution(bems)

picks = mne.pick_types(info, meg=True, ref_meg=False)
info = mne.pick_info(info, picks)

# Forward (computed on the morphed, subject-level source space)
fwd = mne.make_forward_solution(info, trans=head_mri_t, bem=bem_sol,
                                src=src_subject)
def make_mne_forward(anatomy_path, subject, recordings_path,
                     info_from=(('data_type', 'rest'), ('run_index', 0)),
                     fwd_params=None, src_params=None,
                     hcp_path=op.curdir, n_jobs=1):
    """Convenience script for conducting standard MNE analyses.

    Parameters
    ----------
    subject : str
        The subject name.
    hcp_path : str
        The directory containing the HCP data.
    recordings_path : str
        The path where MEG data and transformations are stored.
    anatomy_path : str
        The directory containing the extracted HCP subject data.
    info_from : tuple of tuples | dict
        The reader info concerning the data from which sensor positions
        should be read.
        Must not be empty room as sensor positions are in head
        coordinates for 4D systems, hence not available in that case.
        Note that differences between the sensor positions across runs
        are smaller than 12 digits, hence negligible.
    fwd_params : None | dict
        The forward parameters
    src_params : None | dict
        The src params. Defaults to:

        dict(subject='fsaverage', fname=None, spacing='oct6', n_jobs=2,
             surface='white', subjects_dir=anatomy_path, add_dist=True)
    hcp_path : str
        The prefix of the path of the HCP data.
    n_jobs : int
        The number of jobs to use in parallel.
    """
    if isinstance(info_from, tuple):
        info_from = dict(info_from)

    head_mri_t = mne.read_trans(
        op.join(recordings_path, subject,
                '{}-head_mri-trans.fif'.format(subject)))

    src_params = _update_dict_defaults(
        src_params,
        dict(subject='fsaverage', fname=None, spacing='oct6',
             n_jobs=n_jobs, surface='white', subjects_dir=anatomy_path,
             add_dist=True))

    add_source_space_distances = False
    if src_params['add_dist']:  # we want the distances on the morphed space
        src_params['add_dist'] = False
        add_source_space_distances = True

    src_fsaverage = mne.setup_source_space(**src_params)
    src_subject = mne.morph_source_spaces(
        src_fsaverage, subject, subjects_dir=anatomy_path)

    if add_source_space_distances:  # and here we compute them post hoc.
        src_subject = mne.add_source_space_distances(
            src_subject, n_jobs=n_jobs)

    bems = mne.make_bem_model(subject, conductivity=(0.3,),
                              subjects_dir=anatomy_path,
                              ico=None)  # ico = None for morphed SP.
    bem_sol = mne.make_bem_solution(bems)

    info = read_info_hcp(subject=subject, hcp_path=hcp_path, **info_from)
    picks = _pick_data_channels(info, with_ref_meg=False)
    info = pick_info(info, picks)

    # here we assume that as a result of our MNE-HCP processing
    # all other transforms in info are identity
    for trans in ['dev_head_t', 'ctf_head_t']:  # 'dev_ctf_t' is not identity
        assert np.sum(info[trans]['trans'] - np.eye(4)) == 0

    fwd = mne.make_forward_solution(
        info, trans=head_mri_t, bem=bem_sol, src=src_subject,
        n_jobs=n_jobs)

    return dict(fwd=fwd, src_subject=src_subject,
                src_fsaverage=src_fsaverage, bem_sol=bem_sol, info=info)
import conpy, mne  # Import required Python modules

# Define source space on average brain, morph to subject
src_avg = mne.setup_source_space('fsaverage', spacing='ico4')
src_sub = mne.morph_source_spaces(src_avg, subject='sub002')

# Discard deep sources
info = mne.io.read_info('sub002-epo.fif')  # Read information about the sensors
verts = conpy.select_vertices_in_sensor_range(src_sub, dist=0.07, info=info)
src_sub = conpy.restrict_src_to_vertices(src_sub, verts)

# Create a one-layer BEM model
bem_model = mne.make_bem_model('sub002', ico=4, conductivity=(0.3,))
bem = mne.make_bem_solution(bem_model)

# Make the forward model
trans = 'sub002-trans.fif'  # File containing the MRI<->Head transformation
fwd = mne.make_forward_solution(info, trans, src_sub, bem,
                                meg=True, eeg=False)

# Only retain orientations tangential to a sphere approximation of the head
fwd = conpy.forward_to_tangential(fwd)
def run():
    """Run command."""
    from mne.commands.utils import get_optparser, _add_verbose_flag

    parser = get_optparser(__file__)

    parser.add_option('-s', '--subject', dest='subject',
                      help='Subject name (required)', default=None)
    parser.add_option('--src', dest='fname',
                      help='Output file name. Use a name <dir>/<name>-src.fif',
                      metavar='FILE', default=None)
    parser.add_option('--morph', dest='subject_to',
                      help='morph the source space to this subject',
                      default=None)
    parser.add_option('--surf', dest='surface',
                      help='The surface to use. (default to white)',
                      default='white', type='string')
    parser.add_option('--spacing', dest='spacing',
                      help='Specifies the approximate grid spacing of the '
                           'source space in mm. (default to 7mm)',
                      default=None, type='int')
    parser.add_option('--ico', dest='ico',
                      help='use the recursively subdivided icosahedron '
                           'to create the source space.',
                      default=None, type='int')
    parser.add_option('--oct', dest='oct',
                      help='use the recursively subdivided octahedron '
                           'to create the source space.',
                      default=None, type='int')
    parser.add_option('-d', '--subjects-dir', dest='subjects_dir',
                      help='Subjects directory', default=None)
    parser.add_option('-n', '--n-jobs', dest='n_jobs',
                      help='The number of jobs to run in parallel '
                           '(default 1). Requires the joblib package. '
                           'Will use at most 2 jobs'
                           ' (one for each hemisphere).',
                      default=1, type='int')
    parser.add_option('-o', '--overwrite', dest='overwrite',
                      help='to write over existing files', default=None,
                      action="store_true")
    _add_verbose_flag(parser)

    options, args = parser.parse_args()

    if options.subject is None:
        parser.print_help()
        sys.exit(1)

    subject = options.subject
    subject_to = options.subject_to
    fname = options.fname
    subjects_dir = options.subjects_dir
    spacing = options.spacing
    ico = options.ico
    oct = options.oct
    surface = options.surface
    n_jobs = options.n_jobs
    verbose = True if options.verbose is not None else False
    overwrite = True if options.overwrite is not None else False

    # Parse source spacing option
    spacing_options = [ico, oct, spacing]
    n_options = len([x for x in spacing_options if x is not None])
    if n_options > 1:
        raise ValueError('Only one spacing option can be set at the same '
                         'time')
    elif n_options == 0:
        # Default to oct6
        use_spacing = 'oct6'
    elif n_options == 1:
        if ico is not None:
            use_spacing = "ico" + str(ico)
        elif oct is not None:
            use_spacing = "oct" + str(oct)
        elif spacing is not None:
            use_spacing = spacing

    # Generate filename
    if fname is None:
        if subject_to is None:
            fname = subject + '-' + str(use_spacing) + '-src.fif'
        else:
            fname = (subject_to + '-' + subject + '-' +
                     str(use_spacing) + '-src.fif')
    else:
        if not (fname.endswith('_src.fif') or fname.endswith('-src.fif')):
            fname = fname + "-src.fif"

    # Create source space
    src = mne.setup_source_space(subject=subject, spacing=use_spacing,
                                 surface=surface, subjects_dir=subjects_dir,
                                 n_jobs=n_jobs, verbose=verbose)

    # Morph source space if --morph is set
    if subject_to is not None:
        src = mne.morph_source_spaces(src, subject_to=subject_to,
                                      subjects_dir=subjects_dir,
                                      surf=surface, verbose=verbose)

    # Save source space to file
    src.save(fname=fname, overwrite=overwrite)
def compute_forward_stack(subjects_dir, subject, recordings_path,
                          info_from=(('data_type', 'rest'),
                                     ('run_index', 0)),
                          fwd_params=None, src_params=None,
                          hcp_path=op.curdir, n_jobs=1, verbose=None):
    """Convenience function for conducting standard MNE analyses.

    .. note::
       this function computes bem solutions, source spaces
       and forward models optimized for connectivity computation, i.e.,
       the fsaverage space is morphed onto the subject's space.

    Parameters
    ----------
    subject : str
        The subject name.
    hcp_path : str
        The directory containing the HCP data.
    recordings_path : str
        The path where MEG data and transformations are stored.
    subjects_dir : str
        The directory containing the extracted HCP subject data.
    info_from : tuple of tuples | dict
        The reader info concerning the data from which sensor positions
        should be read.
        Must not be empty room as sensor positions are in head
        coordinates for 4D systems, hence not available in that case.
        Note that differences between the sensor positions across runs
        are smaller than 12 digits, hence negligible.
    fwd_params : None | dict
        The forward parameters
    src_params : None | dict
        The src params. Defaults to:

        dict(subject='fsaverage', fname=None, spacing='oct6', n_jobs=2,
             surface='white', subjects_dir=subjects_dir, add_dist=True)
    hcp_path : str
        The prefix of the path of the HCP data.
    n_jobs : int
        The number of jobs to use in parallel.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose)

    Returns
    -------
    out : dict
        A dictionary with the following keys:
            fwd : instance of mne.Forward
                The forward solution.
            src_subject : instance of mne.SourceSpace
                The source model on the subject's surface
            src_fsaverage : instance of mne.SourceSpace
                The source model on fsaverage's surface
            bem_sol : dict
                The BEM.
            info : instance of mne.io.meas_info.Info
                The actual measurement info used.
    """
    if isinstance(info_from, tuple):
        info_from = dict(info_from)

    head_mri_t = mne.read_trans(
        op.join(recordings_path, subject,
                '{}-head_mri-trans.fif'.format(subject)))

    src_params = _update_dict_defaults(
        src_params,
        dict(subject='fsaverage', spacing='oct6', n_jobs=n_jobs,
             surface='white', subjects_dir=subjects_dir, add_dist=True))

    add_source_space_distances = False
    if src_params['add_dist']:  # we want the distances on the morphed space
        src_params['add_dist'] = False
        add_source_space_distances = True

    src_fsaverage = mne.setup_source_space(**src_params)
    src_subject = mne.morph_source_spaces(src_fsaverage, subject,
                                          subjects_dir=subjects_dir)

    if add_source_space_distances:  # and here we compute them post hoc.
        src_subject = mne.add_source_space_distances(src_subject,
                                                     n_jobs=n_jobs)

    bems = mne.make_bem_model(subject, conductivity=(0.3,),
                              subjects_dir=subjects_dir,
                              ico=None)  # ico = None for morphed SP.
    bem_sol = mne.make_bem_solution(bems)
    bem_sol['surfs'][0]['coord_frame'] = 5

    info = read_info(subject=subject, hcp_path=hcp_path, **info_from)
    picks = _pick_data_channels(info, with_ref_meg=False)
    info = pick_info(info, picks)

    # here we assume that as a result of our MNE-HCP processing
    # all other transforms in info are identity
    for trans in ['dev_head_t', 'ctf_head_t']:  # 'dev_ctf_t' is not identity
        assert np.sum(info[trans]['trans'] - np.eye(4)) == 0

    fwd = mne.make_forward_solution(info, trans=head_mri_t, bem=bem_sol,
                                    src=src_subject, n_jobs=n_jobs)

    return dict(fwd=fwd, src_subject=src_subject,
                src_fsaverage=src_fsaverage,
                bem_sol=bem_sol, info=info)
"beta_high": 30, "gamma": 35, "gamma_high": 35 } # build common fsaverage ico4 source space to morph from, then back to later fs_src = mne.setup_source_space('fsaverage', spacing='ico4', surface="white", subjects_dir=mri_dir, n_jobs=4) fs_src.save("{}fsaverage_ico4-src.fif".format(meg_dir)) for meg, mri in sub_dict.items(): # morph fsaverage ico4 source space to subject and save src = mne.morph_source_spaces(fs_src, mri, subjects_dir=mri_dir) src.save("{}nc_{}_from-fs_ico4-src.fif".format(meg_dir, meg)) # create forward model and save # read trans file and BEM model that have been saved trans = "{dir}{mri}_{meg}-trans.fif".format(dir=trans_dir, mri=mri, meg=meg) bem = mne.read_bem_solution("{dir}nc_{meg}-bem.fif".format(dir=meg_dir, meg=meg)) # load and prepare the MEG data rest = mne.read_epochs("{dir}nc_{sub}_1_ica-epo.fif".format(dir=meg_dir, sub=meg)) ton = mne.read_epochs("{dir}nc_{sub}_2_ica-epo.fif".format(dir=meg_dir, sub=meg)) epo_a = mne.read_epochs("{dir}nc_{sub}_3_ica-epo.fif".format(dir=meg_dir, sub=meg))