def __init_with_mne_sample_data(self):
    """Initialize MEG/BEM/source-space/transform paths from the MNE sample data.

    Sets path attributes (raw, bem, src, trans, cov, empty-room) on *self*
    and loads the raw measurement info.  Requires ``self.subjects_dir`` and
    ``self.spacing`` to be set beforehand.
    """
    # the sample data root is the parent of the subjects directory
    self.data_path = os.path.abspath(join(self.subjects_dir, os.pardir))
    self.subject_name = 'sample'
    self.raw_path = join(self.data_path, 'MEG', 'sample',
                         'sample_audvis_raw.fif')
    self.info = mne.io.read_info(self.raw_path)
    self.bem_path = join(self.data_path, 'subjects', 'sample', 'bem',
                         'sample-5120-5120-5120-bem-sol.fif')
    # e.g. spacing 'oct6' -> 'sample-oct-6-src.fif'
    src_file = ('sample-' + self.spacing[0:3] + '-' + self.spacing[-1] +
                '-src.fif')
    src_path = join(self.data_path, 'subjects', 'sample', 'bem', src_file)
    if not os.path.isfile(src_path):
        print(
            "Source space file not found, creating new source space file..."
        )
        # BUG FIX: the computed source space used to be discarded, so the
        # promised file was never actually created; write it to the path
        # that self.src_path is about to point at.
        src = mne.setup_source_space('sample', spacing=self.spacing,
                                     subjects_dir=self.subjects_dir)
        mne.write_source_spaces(src_path, src)
    self.src_path = src_path
    self.trans_path = join(self.data_path, 'MEG', 'sample',
                           'sample_audvis_raw-trans.fif')
    self.cov_path = join(self.data_path, 'MEG', 'sample',
                         'sample_audvis-cov.fif')
    self.raw_empty_room_path = join(self.data_path, 'MEG', 'sample',
                                    'ernoise_raw.fif')
def test_setup_source_space():
    """Test setting up ico, oct, and all source spaces."""
    # NOTE(review): depends on module-level globals (data_path, subjects_dir,
    # tempdir, fname) defined elsewhere in this file — confirm before reuse.
    fname_all = op.join(data_path, "subjects", "sample", "bem",
                        "sample-all-src.fif")
    fname_ico = op.join(data_path, "subjects", "fsaverage", "bem",
                        "fsaverage-ico-5-src.fif")
    # first lets test some input params
    assert_raises(ValueError, setup_source_space, "sample", spacing="oct")
    assert_raises(ValueError, setup_source_space, "sample", spacing="octo")
    assert_raises(ValueError, setup_source_space, "sample", spacing="oct6e")
    assert_raises(ValueError, setup_source_space, "sample", spacing="7emm")
    assert_raises(ValueError, setup_source_space, "sample", spacing="alls")
    assert_raises(IOError, setup_source_space, "sample", spacing="oct6",
                  subjects_dir=subjects_dir)
    # ico 5 (fsaverage) - write to temp file
    src = read_source_spaces(fname_ico)
    temp_name = op.join(tempdir, "temp-src.fif")
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        src_new = setup_source_space("fsaverage", temp_name, spacing="ico5",
                                     subjects_dir=subjects_dir)
    _compare_source_spaces(src, src_new, mode="approx")
    # oct-6 (sample) - auto filename + IO
    src = read_source_spaces(fname)
    temp_name = op.join(tempdir, "temp-src.fif")
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        src_new = setup_source_space("sample", temp_name, spacing="oct6",
                                     subjects_dir=subjects_dir,
                                     overwrite=True)
    _compare_source_spaces(src, src_new, mode="approx")
    src_new = read_source_spaces(temp_name)
    _compare_source_spaces(src, src_new, mode="approx")
    # all source points - no file writing
    src = read_source_spaces(fname_all)
    src_new = setup_source_space("sample", None, spacing="all",
                                 subjects_dir=subjects_dir)
    _compare_source_spaces(src, src_new, mode="approx")
def get_src_reference(subject="fsaverage", spacing="ico5", subjects_dir=None):
    """Compute source space of the reference subject.

    Parameters
    ----------
    subject : str
        Name of the reference subject.
    spacing : str
        The spacing to use. Can be ``'ico#'`` for a recursively subdivided
        icosahedron, ``'oct#'`` for a recursively subdivided octahedron,
        ``'all'`` for all points, or an integer to use approximate
        distance-based spacing (in mm).

        .. versionchanged:: 0.18
           Support for integers for distance-based spacing.
    subjects_dir : str | None
        Path to the FreeSurfer subjects directory.

    Returns
    -------
    src : SourceSpaces
        The source space for each hemisphere.
    """
    fname_src = op.join(subjects_dir, subject, 'bem',
                        '%s-%s-src.fif' % (subject, spacing))
    if os.path.isfile(fname_src):
        # cached on disk — just read it back
        src_ref = mne.read_source_spaces(fname_src)
    elif os.path.exists(op.join(subjects_dir, subject)):
        # BUG FIX: was ``subjects_dir + subject`` (missing path separator),
        # so an existing subject directory was never detected and fsaverage
        # was needlessly re-fetched below.
        src_ref = mne.setup_source_space(subject=subject, spacing=spacing,
                                         subjects_dir=subjects_dir,
                                         add_dist=False)
    else:
        mne.datasets.fetch_fsaverage(subjects_dir)
        src_ref = mne.setup_source_space(subject=subject, spacing=spacing,
                                         subjects_dir=subjects_dir,
                                         add_dist=False)
    return src_ref
def test_scale_mri(): """Test creating fsaverage and scaling it""" # create fsaverage tempdir = _TempDir() create_default_subject(subjects_dir=tempdir) is_mri = _is_mri_subject('fsaverage', tempdir) assert_true(is_mri, "Creating fsaverage failed") fid_path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-fiducials.fif') os.remove(fid_path) create_default_subject(update=True, subjects_dir=tempdir) assert_true(os.path.exists(fid_path), "Updating fsaverage") # remove redundant label files label_temp = os.path.join(tempdir, 'fsaverage', 'label', '*.label') label_paths = glob(label_temp) for label_path in label_paths[1:]: os.remove(label_path) # create source space path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-ico-0-src.fif') mne.setup_source_space('fsaverage', path, 'ico0', overwrite=True, subjects_dir=tempdir, add_dist=False) # scale fsaverage os.environ['_MNE_FEW_SURFACES'] = 'true' scale_mri('fsaverage', 'flachkopf', [1, .2, .8], True, subjects_dir=tempdir) del os.environ['_MNE_FEW_SURFACES'] is_mri = _is_mri_subject('flachkopf', tempdir) assert_true(is_mri, "Scaling fsaverage failed") src_path = os.path.join(tempdir, 'flachkopf', 'bem', 'flachkopf-ico-0-src.fif') assert_true(os.path.exists(src_path), "Source space was not scaled") scale_labels('flachkopf', subjects_dir=tempdir) # scale source space separately os.remove(src_path) scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir) assert_true(os.path.exists(src_path), "Source space was not scaled") # add distances to source space src = mne.read_source_spaces(path) mne.add_source_space_distances(src) src.save(path) # scale with distances os.remove(src_path) scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
def create_source_space(sbj_dir, sbj_id):
    """Build the ico-5 surface source space for *sbj_id* if it is missing.

    Nothing is done when the expected file already exists.  NOTE: relies on
    the legacy ``fname=True`` MNE API, which presumably writes the file into
    the subject's ``bem`` directory automatically — verify against the
    installed MNE version.
    """
    import os.path as op
    import mne

    expected = op.join(sbj_dir, sbj_id, 'bem', '%s-ico-5-src.fif' % sbj_id)
    if op.isfile(expected):
        return
    mne.setup_source_space(sbj_id, fname=True, spacing='ico5',
                           subjects_dir=sbj_dir, overwrite=True, n_jobs=2)
def test_setup_source_space():
    """Test setting up ico, oct, and all source spaces."""
    # NOTE(review): depends on module-level globals (data_path, subjects_dir,
    # fname) defined elsewhere in this file.
    tempdir = _TempDir()
    fname_ico = op.join(data_path, 'subjects', 'fsaverage', 'bem',
                        'fsaverage-ico-5-src.fif')
    # first lets test some input params
    assert_raises(ValueError, setup_source_space, 'sample', spacing='oct',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='octo',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='oct6e',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='7emm',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='alls',
                  add_dist=False)
    assert_raises(IOError, setup_source_space, 'sample', spacing='oct6',
                  subjects_dir=subjects_dir, add_dist=False)
    # ico 5 (fsaverage) - write to temp file
    src = read_source_spaces(fname_ico)
    temp_name = op.join(tempdir, 'temp-src.fif')
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        warnings.simplefilter('always')
        src_new = setup_source_space('fsaverage', temp_name, spacing='ico5',
                                     subjects_dir=subjects_dir,
                                     add_dist=False, overwrite=True)
    _compare_source_spaces(src, src_new, mode='approx')
    assert_equal(repr(src), repr(src_new))
    assert_equal(repr(src).count('surface ('), 2)
    # ico5 yields 10242 vertices per hemisphere, all in use
    assert_array_equal(src[0]['vertno'], np.arange(10242))
    assert_array_equal(src[1]['vertno'], np.arange(10242))
    # oct-6 (sample) - auto filename + IO
    src = read_source_spaces(fname)
    temp_name = op.join(tempdir, 'temp-src.fif')
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        warnings.simplefilter('always')
        src_new = setup_source_space('sample', temp_name, spacing='oct6',
                                     subjects_dir=subjects_dir,
                                     overwrite=True, add_dist=False)
    _compare_source_spaces(src, src_new, mode='approx', nearest=False)
    src_new = read_source_spaces(temp_name)
    _compare_source_spaces(src, src_new, mode='approx', nearest=False)
    # all source points - no file writing
    src_new = setup_source_space('sample', None, spacing='all',
                                 subjects_dir=subjects_dir, add_dist=False)
    assert_true(src_new[0]['nuse'] == len(src_new[0]['rr']))
    assert_true(src_new[1]['nuse'] == len(src_new[1]['rr']))
    # dense source space to hit surf['inuse'] lines of _create_surf_spacing
    assert_raises(RuntimeError, setup_source_space, 'sample', None,
                  spacing='ico6', subjects_dir=subjects_dir, add_dist=False)
def test_setup_source_space():
    """Test setting up ico, oct, and all source spaces.

    Relies on module-level ``data_path``, ``subjects_dir``, ``tempdir`` and
    ``fname`` defined elsewhere in this file.
    """
    fname_all = op.join(data_path, 'subjects', 'sample', 'bem',
                        'sample-all-src.fif')
    fname_ico = op.join(data_path, 'subjects', 'fsaverage', 'bem',
                        'fsaverage-ico-5-src.fif')
    # first lets test some input params
    assert_raises(ValueError, setup_source_space, 'sample', spacing='oct')
    assert_raises(ValueError, setup_source_space, 'sample', spacing='octo')
    assert_raises(ValueError, setup_source_space, 'sample', spacing='oct6e')
    assert_raises(ValueError, setup_source_space, 'sample', spacing='7emm')
    assert_raises(ValueError, setup_source_space, 'sample', spacing='alls')
    assert_raises(IOError, setup_source_space, 'sample', spacing='oct6',
                  subjects_dir=subjects_dir)
    # ico 5 (fsaverage) - write to temp file
    src = read_source_spaces(fname_ico)
    temp_name = op.join(tempdir, 'temp-src.fif')
    # BUG FIX: ``catch_warnings(True)`` passed ``record`` positionally, but
    # the parameter is keyword-only in Python 3 (TypeError at runtime).
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        src_new = setup_source_space('fsaverage', temp_name, spacing='ico5',
                                     subjects_dir=subjects_dir)
    _compare_source_spaces(src, src_new, mode='approx')
    # oct-6 (sample) - auto filename + IO
    src = read_source_spaces(fname)
    temp_name = op.join(tempdir, 'temp-src.fif')
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        src_new = setup_source_space('sample', temp_name, spacing='oct6',
                                     subjects_dir=subjects_dir,
                                     overwrite=True)
    _compare_source_spaces(src, src_new, mode='approx')
    src_new = read_source_spaces(temp_name)
    _compare_source_spaces(src, src_new, mode='approx')
    # all source points - no file writing
    src = read_source_spaces(fname_all)
    src_new = setup_source_space('sample', None, spacing='all',
                                 subjects_dir=subjects_dir)
    _compare_source_spaces(src, src_new, mode='approx')
def test_setup_source_space():
    """Test setting up ico, oct, and all source spaces."""
    # NOTE(review): depends on module-level globals (data_path, subjects_dir,
    # tempdir, fname) defined elsewhere in this file.
    fname_all = op.join(data_path, 'subjects', 'sample', 'bem',
                        'sample-all-src.fif')
    fname_ico = op.join(data_path, 'subjects', 'fsaverage', 'bem',
                        'fsaverage-ico-5-src.fif')
    # first lets test some input params
    assert_raises(ValueError, setup_source_space, 'sample', spacing='oct',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='octo',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='oct6e',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='7emm',
                  add_dist=False)
    assert_raises(ValueError, setup_source_space, 'sample', spacing='alls',
                  add_dist=False)
    assert_raises(IOError, setup_source_space, 'sample', spacing='oct6',
                  subjects_dir=subjects_dir, add_dist=False)
    # ico 5 (fsaverage) - write to temp file
    src = read_source_spaces(fname_ico)
    temp_name = op.join(tempdir, 'temp-src.fif')
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        warnings.simplefilter('always')
        src_new = setup_source_space('fsaverage', temp_name, spacing='ico5',
                                     subjects_dir=subjects_dir,
                                     add_dist=False, overwrite=True)
    _compare_source_spaces(src, src_new, mode='approx')
    # oct-6 (sample) - auto filename + IO
    src = read_source_spaces(fname)
    temp_name = op.join(tempdir, 'temp-src.fif')
    with warnings.catch_warnings(record=True):  # sklearn equiv neighbors
        warnings.simplefilter('always')
        src_new = setup_source_space('sample', temp_name, spacing='oct6',
                                     subjects_dir=subjects_dir,
                                     overwrite=True, add_dist=False)
    _compare_source_spaces(src, src_new, mode='approx')
    src_new = read_source_spaces(temp_name)
    _compare_source_spaces(src, src_new, mode='approx')
    # all source points - no file writing
    src = read_source_spaces(fname_all)
    src_new = setup_source_space('sample', None, spacing='all',
                                 subjects_dir=subjects_dir, add_dist=False)
    _compare_source_spaces(src, src_new, mode='approx')
def process_subject_source_space(subject):
    """Build a single-shell BEM model/solution and a surface source space.

    All outputs are written into the subject's ``bem`` folder.  Relies on
    module-level ``subjects_dir``, ``bem_ico`` and ``spacing``.  Watershed
    BEM surfaces must already exist (``mne.bem.make_watershed_bem``).
    """
    bem_dir = op.join(subjects_dir, subject, 'bem')
    bem_surf_fname = op.join(bem_dir, f'{subject}-ico{bem_ico}-bem.fif')
    bem_sol_fname = op.join(bem_dir, f'{subject}-ico{bem_ico}-bem-sol.fif')
    src_fname = op.join(bem_dir, f'{subject}-ico{bem_ico}-src.fif')

    # one-layer (0.3 S/m) conductor model — sufficient for MEG data
    bem_surf = mne.make_bem_model(
        subject,
        ico=bem_ico,
        conductivity=[0.3],
        subjects_dir=subjects_dir)
    mne.write_bem_surfaces(bem_surf_fname, bem_surf)

    bem_sol = mne.make_bem_solution(bem_surf)
    mne.write_bem_solution(bem_sol_fname, bem_sol)

    # cortical surface source space
    src = mne.setup_source_space(subject, spacing,
                                 subjects_dir=subjects_dir)
    mne.write_source_spaces(src_fname, src, overwrite=True)
def create_src_space(sbj_dir, sbj_id, spacing):
    """Return the surface source space for *sbj_id*, computing it on demand.

    The result is cached as ``<sbj_id>-<spacing>-src.fif`` in the subject's
    ``bem`` folder; later calls read the cached file instead of recomputing.
    """
    import os.path as op
    import mne

    src_fname = op.join(sbj_dir, sbj_id, 'bem',
                        '%s-%s-src.fif' % (sbj_id, spacing))
    if op.isfile(src_fname):
        print(('\n*** source space file %s exists!!!\n' % src_fname))
        return mne.read_source_spaces(src_fname)

    # strip dashes from the spacing tag (e.g. 'ico-5' -> 'ico5') before
    # handing it to MNE
    src = mne.setup_source_space(sbj_id, subjects_dir=sbj_dir,
                                 spacing=spacing.replace('-', ''),
                                 add_dist=False, n_jobs=2)
    mne.write_source_spaces(src_fname, src, overwrite=True)
    print(('\n*** source space file %s written ***\n' % src_fname))
    return src
def _mne_source_space(subject, src_tag, subjects_dir):
    """Load mne source space, building it when the cached file is missing.

    Parameters
    ----------
    subject : str
        Subject name.
    src_tag : str
        Spacing tag (e.g., 'ico-4' or 'vol-10').
    subjects_dir : str
        FreeSurfer subjects directory.
    """
    src_file = os.path.join(subjects_dir, subject, 'bem',
                            '%s-%s-src.fif' % (subject, src_tag))
    src, spacing = src_tag.split('-')
    if os.path.exists(src_file):
        return mne.read_source_spaces(src_file, False)
    elif src == 'ico':
        # NOTE(review): passes src_file positionally as the legacy ``fname``
        # argument — only valid on old MNE versions; confirm.
        return mne.setup_source_space(subject, src_file, src + spacing,
                                      subjects_dir=subjects_dir,
                                      add_dist=True)
    elif src == 'vol':
        mri_file = os.path.join(subjects_dir, subject, 'mri', 'orig.mgz')
        bem_file = os.path.join(subjects_dir, subject, 'bem',
                                'sample-5120-5120-5120-bem-sol.fif')
        return mne.setup_volume_source_space(subject, src_file,
                                             float(spacing), mri=mri_file,
                                             bem=bem_file, mindist=0.,
                                             exclude=0.,
                                             subjects_dir=subjects_dir)
    else:
        raise ValueError("src_tag=%s" % repr(src_tag))
def test_setup_source_space_spacing(tmpdir, spacing):
    """Compare the command-line tool's output against the Python port."""
    copytree(op.join(subjects_dir, 'sample'), str(tmpdir.join('sample')))
    # no --spacing flag is passed when spacing == 7 (exercises the default)
    cmd = ['mne_setup_source_space']
    if spacing != 7:
        cmd += ['--spacing', str(spacing)]
    with modified_env(SUBJECTS_DIR=str(tmpdir), SUBJECT='sample'):
        run_subprocess(cmd)
    fname_tool = tmpdir.join('sample', 'bem', 'sample-%d-src.fif' % spacing)
    src = read_source_spaces(fname_tool)
    src_new = setup_source_space('sample', spacing=spacing, add_dist=False,
                                 subjects_dir=subjects_dir)
    _compare_source_spaces(src, src_new, mode='approx', nearest=True)
    # Degenerate conditions
    with pytest.raises(TypeError, match='spacing must be.*got.*float.*'):
        setup_source_space('sample', 7., subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='spacing must be >= 2, got 1'):
        setup_source_space('sample', 1, subjects_dir=subjects_dir)
def _compute_GGT(subject, kind):
    """Return ``{'ggt': G @ G.T}`` for the MEG leadfield of *subject*/*kind*."""
    # geometry: source space, head transform, BEM solution path
    src = mne.setup_source_space(subject, spacing='oct6', add_dist=False,
                                 subjects_dir=cfg.mne_camcan_freesurfer_path)
    trans = trans_map[subject]
    bem = cfg.mne_camcan_freesurfer_path + \
        "/%s/bem/%s-meg-bem.fif" % (subject, subject)

    # raw MEG recording
    fname = op.join(
        cfg.camcan_meg_raw_path, subject, kind, '%s_raw.fif' % kind)
    raw = mne.io.read_raw_fif(fname)
    mne.channels.fix_mag_coil_types(raw.info)

    # fixed-length events spanning the recording
    win_dur = 5.
    rec_end = raw.times[-1]
    events = mne.make_fixed_length_events(
        raw, duration=win_dur, start=0, stop=rec_end - win_dur)

    # Epochs is only instantiated to obtain a decimated measurement info
    info = mne.Epochs(raw, events=events, tmin=0, tmax=win_dur,
                      baseline=None, reject=None, preload=False,
                      decim=10).info
    fwd = mne.make_forward_solution(info, trans, src, bem)
    gain = fwd['sol']['data']
    return {'ggt': np.dot(gain, gain.T)}
def createBem(subj):
    """Create BEM surfaces and a single-shell solution for *subj*, then plot.

    Runs the watershed algorithm through the ``mne`` command-line tool,
    builds a one-layer (0.3 S/m) BEM model and solution, writes the solution
    to ``<subj>-5120-5120-5120-bem-sol.fif`` in the working directory, and
    shows a QC plot.

    Returns
    -------
    src : SourceSpaces
        The surface source space.  (Previously computed but silently
        discarded; returning it lets callers reuse it — existing callers
        that ignore the return value are unaffected.)
    """
    src = mne.setup_source_space(subj, n_jobs=2)
    subprocess.call(['mne', 'watershed_bem', '-s', subj])
    model = mne.make_bem_model(subj, conductivity=[0.3])
    bem = mne.make_bem_solution(model)
    mne.write_bem_solution(subj + '-5120-5120-5120-bem-sol.fif', bem)
    mne.viz.plot_bem(subj)
    return src
def _mne_source_space(subject, src_tag, subjects_dir):
    """Return a cached source space, or build one for an 'ico'/'vol' tag.

    ``src_tag`` is a spacing tag such as "ico-4" (surface) or "vol-10"
    (volumetric); anything else raises ValueError.
    """
    bem_dir = os.path.join(subjects_dir, subject, "bem")
    src_file = os.path.join(bem_dir, "%s-%s-src.fif" % (subject, src_tag))
    kind, spacing = src_tag.split("-")
    if os.path.exists(src_file):
        return mne.read_source_spaces(src_file, False)
    if kind == "ico":
        # legacy API: src_file is passed positionally as fname
        return mne.setup_source_space(subject, src_file, kind + spacing,
                                      subjects_dir=subjects_dir,
                                      add_dist=True)
    if kind == "vol":
        mri_file = os.path.join(subjects_dir, subject, "mri", "orig.mgz")
        bem_file = os.path.join(bem_dir,
                                "sample-5120-5120-5120-bem-sol.fif")
        return mne.setup_volume_source_space(
            subject, src_file, float(spacing), mri=mri_file, bem=bem_file,
            mindist=0.0, exclude=0.0, subjects_dir=subjects_dir)
    raise ValueError("src_tag=%s" % repr(src_tag))
def create_src_space(sbj_dir, sbj_id, spacing, is_blind): import os.path as op import mne bem_dir = op.join(sbj_dir, sbj_id, 'bem') # check if source space exists, if not it creates using mne-python fun # we have to create the cortical surface source space even when aseg is # True if is_blind: # if is_blind we have to precomputed the source space sincw we had # to remove some labels src_fname = op.join(bem_dir, '%s-blind-%s-src.fif' % (sbj_id, spacing)) if not op.isfile(src_fname): raise '\n *** you have to compute the source space blind!!! ***\n' else: print '\n*** source space file %s exists!!!\n' % src_fname src = mne.read_source_spaces(src_fname) else: src_fname = op.join(bem_dir, '%s-%s-src.fif' % (sbj_id, spacing)) if not op.isfile(src_fname): src = mne.setup_source_space(sbj_id, subjects_dir=sbj_dir, fname=True, spacing=spacing.replace('-', ''), add_dist=False, overwrite=True, n_jobs=2) print '\n*** source space file %s written ***\n' % src_fname else: print '\n*** source space file %s exists!!!\n' % src_fname src = mne.read_source_spaces(src_fname) return src
def process_subject_bem(subject, subjects_dir='/cluster/transcend/MRI/WMA/recons', spacing='ico4'):
    """Create watershed BEM + source-space files for *subject* (best-effort).

    Any exception is caught, printed, and swallowed so that batch
    processing of many subjects can continue past failures.
    """
    try:
        # NOTE(review): this BEM-solution filename ends in '-src.fif', the
        # same suffix used for source spaces — looks like a typo for
        # '-bem.fif'; confirm before relying on these files.
        bem_fname = op.join(subjects_dir,subject,'bem', '%s-src.fif' % subject)
        # NOTE(review): the source-space name is '<spacing>-src.fif' with no
        # subject in it; the path is still per-subject, but the basename
        # collides across subjects.
        src_fname = op.join(subjects_dir, subject, 'bem', '%s-src.fif' % spacing)
        #headsurf_log = op.join(subjects_dir, subject, 'bem', subject + '_headsurf.log')
        if not os.path.isfile(bem_fname):
            mne.bem.make_watershed_bem(subject=subject,
                                       subjects_dir=subjects_dir,
                                       overwrite=True, volume='T1',
                                       atlas=True, gcaatlas=False,
                                       preflood=None)
            # single-shell conductor model
            conductivity = (0.3,)
            model = mne.make_bem_model(subject=subject, ico=4,
                                       conductivity=conductivity,
                                       subjects_dir=subjects_dir)
            bem = mne.make_bem_solution(model)
            mne.write_bem_solution(bem_fname, bem=bem)
        if not os.path.isfile(src_fname):
            src = mne.setup_source_space(subject, spacing=spacing,
                                         subjects_dir=subjects_dir,
                                         add_dist=False)
            mne.write_source_spaces(src_fname, src=src, overwrite=True)
    except Exception as ee:
        error = str(ee)
        print(subject, error)
        pass
def _mne_source_space(subject, src_tag, subjects_dir):
    """Load mne source space, building it if the cached file is missing."""
    src_file = os.path.join(subjects_dir, subject, 'bem',
                            '%s-%s-src.fif' % (subject, src_tag))
    src = src_tag[:3]  # kind prefix: 'ico' or 'vol'
    if os.path.exists(src_file):
        return mne.read_source_spaces(src_file, False)
    elif src == 'ico':
        # NOTE(review): spacing is hard-coded to 'ico4' here (and pos=10.
        # below) regardless of the number in src_tag — confirm that only
        # 'ico-4' / 'vol-10' tags are ever passed in.  Also uses the legacy
        # positional ``fname`` argument.
        return mne.setup_source_space(subject, src_file, 'ico4',
                                      subjects_dir=subjects_dir,
                                      add_dist=True)
    elif src == 'vol':
        mri_file = os.path.join(subjects_dir, subject, 'mri', 'orig.mgz')
        bem_file = os.path.join(subjects_dir, subject, 'bem',
                                'sample-5120-5120-5120-bem-sol.fif')
        return mne.setup_volume_source_space(subject, src_file, pos=10.,
                                             mri=mri_file, bem=bem_file,
                                             mindist=0., exclude=0.,
                                             subjects_dir=subjects_dir)
    else:
        raise ValueError("src_tag=%s" % repr(src_tag))
def _mne_source_space(subject, src_tag, subjects_dir):
    """Return the source space for *subject*, computing and caching on disk.

    ``src_tag`` encodes kind and spacing, e.g. 'ico-4' (surface) or
    'vol-10' (volumetric grid); the computed space is written to the
    subject's ``bem`` folder before being returned.
    """
    src_file = os.path.join(subjects_dir, subject, 'bem',
                            '%s-%s-src.fif' % (subject, src_tag))
    kind, spacing = src_tag.split('-')
    if os.path.exists(src_file):
        return mne.read_source_spaces(src_file, False)

    if kind == 'ico':
        ss = mne.setup_source_space(subject, spacing=kind + spacing,
                                    subjects_dir=subjects_dir,
                                    add_dist=True)
    elif kind == 'vol':
        mri_file = os.path.join(subjects_dir, subject, 'mri', 'orig.mgz')
        bem_file = os.path.join(subjects_dir, subject, 'bem',
                                'sample-5120-5120-5120-bem-sol.fif')
        ss = mne.setup_volume_source_space(subject, pos=float(spacing),
                                           mri=mri_file, bem=bem_file,
                                           mindist=0., exclude=0.,
                                           subjects_dir=subjects_dir)
    else:
        raise ValueError("src_tag=%s" % repr(src_tag))
    mne.write_source_spaces(src_file, ss)
    return ss
def setup_src_space():
    """Return the oct-6 source space, reading the cached copy when present.

    The result is cached at ``source_space/src_space.fif``.  Robustness fix:
    the cache directory is now created on first use — previously
    ``src.save`` failed when ``source_space/`` did not exist yet.
    """
    cache = 'source_space/src_space.fif'
    if os.path.exists(cache):
        return mne.read_source_spaces(cache)
    os.makedirs('source_space', exist_ok=True)
    src = mne.setup_source_space(MNE_Repo_Mat.subject, spacing='oct6')
    src.save(cache)
    return src
def run_forward(subject_id):
    """Compute and write the MEG forward solution for one study subject."""
    subject = "sub%03d" % subject_id
    print("processing subject: %s" % subject)

    # input/output paths (spacing, meg_dir, study_path, subjects_dir,
    # mindist come from module scope)
    data_path = op.join(meg_dir, subject)
    fname_ave = op.join(data_path, '%s-ave.fif' % subject)
    fname_fwd = op.join(data_path, '%s-meg-%s-fwd.fif' % (subject, spacing))
    fname_trans = op.join(study_path, 'ds117', subject, 'MEG',
                          '%s-trans.fif' % subject)

    # source space (legacy API: overwrite kwarg), cached next to the MRI
    src = mne.setup_source_space(subject, spacing=spacing,
                                 subjects_dir=subjects_dir, overwrite=True,
                                 n_jobs=1, add_dist=False)
    src_fname = op.join(subjects_dir, subject, '%s-src.fif' % spacing)
    mne.write_source_spaces(src_fname, src)

    # single-shell (0.3 S/m) BEM
    bem_model = mne.make_bem_model(subject, ico=4,
                                   subjects_dir=subjects_dir,
                                   conductivity=(0.3,))
    bem = mne.make_bem_solution(bem_model)

    # MEG-only forward model from the evoked measurement info
    info = mne.read_evokeds(fname_ave, condition=0).info
    fwd = mne.make_forward_solution(info, trans=fname_trans, src=src,
                                    bem=bem, fname=None, meg=True,
                                    eeg=False, mindist=mindist, n_jobs=1,
                                    overwrite=True)
    fwd = mne.convert_forward_solution(fwd, surf_ori=True)
    mne.write_forward_solution(fname_fwd, fwd, overwrite=True)
def compute_SourceSpace(subject, subjects_dir, src_fname, source_voxel_coords,
                        plot=False, ss='volume', volume_spacing=10):
    """Compute and save a surface or volume source space for *subject*.

    ``ss`` selects 'surface' (ico-5) or 'volume' (grid at *volume_spacing*
    mm bounded by the inner-skull surface).  The result is saved to
    *src_fname* and returned.  In the volume branch, a coordinate pickle is
    moved to *source_voxel_coords* as a side effect.
    """
    src = None
    if ss == 'surface':
        # NOTE(review): add_dist=None — presumably intended as False;
        # confirm against the MNE setup_source_space signature.
        src = mne.setup_source_space(subject, spacing='ico5', add_dist=None,
                                     subjects_dir=subjects_dir)
        src.save(src_fname, overwrite=True)
        if plot:
            mne.viz.plot_bem(subject=subject, subjects_dir=subjects_dir,
                             src=src, orientation='coronal')
    elif ss == 'volume':
        surface = op.join(subjects_dir, subject, 'bem', 'inner_skull.surf')
        src = mne.setup_volume_source_space(subject,
                                            subjects_dir=subjects_dir,
                                            pos=volume_spacing,
                                            surface=surface, verbose=True)
        src.save(src_fname, overwrite=True)
        if plot:
            fig = mne.viz.plot_bem(subject=subject,
                                   subjects_dir=subjects_dir,
                                   brain_surfaces='white', src=src,
                                   orientation='coronal', show=True)
            plt.close()
        # NOTE(review): shells out to `mv` (non-portable; silent no-op on
        # failure) — shutil.move/os.replace would be the portable choice,
        # kept as-is here to preserve the best-effort behavior.
        old_file_name = f'{subjects_dir}/{subject}/mne_files/coords.pkl'
        bashCommand = f'mv {old_file_name} {source_voxel_coords}'
        process = subprocess.Popen(bashCommand.split(),
                                   stdout=subprocess.PIPE)
        output, error = process.communicate()
    return src
def _mixed_morph_srcs():
    """Build a mixed (surface + volume) 'sample' source space, a matching
    fsaverage destination space, and the morph between them.

    Returns ``(morph, src, src_fs)``.
    """
    # create a mixed source space
    labels_vol = ['Left-Cerebellum-Cortex', 'Right-Cerebellum-Cortex']
    src = mne.setup_source_space('sample', spacing='oct3', add_dist=False,
                                 subjects_dir=subjects_dir)
    src += mne.setup_volume_source_space(
        'sample', mri=fname_aseg, pos=10.0, volume_label=labels_vol,
        subjects_dir=subjects_dir, add_interpolator=True, verbose=True)
    # create the destination space
    src_fs = mne.read_source_spaces(
        op.join(subjects_dir, 'fsaverage', 'bem',
                'fsaverage-ico-5-src.fif'))
    src_fs += mne.setup_volume_source_space(
        'fsaverage', pos=7., volume_label=labels_vol,
        subjects_dir=subjects_dir, add_interpolator=False, verbose=True)
    del labels_vol
    # morphing a mixed space without an explicit destination must fail
    with pytest.raises(ValueError, match='src_to must be provided .* mixed'):
        mne.compute_source_morph(
            src=src, subject_from='sample', subject_to='fsaverage',
            subjects_dir=subjects_dir)
    # minimal iteration counts keep this fixture fast
    with pytest.warns(RuntimeWarning, match='not included in smoothing'):
        morph = mne.compute_source_morph(
            src=src, subject_from='sample', subject_to='fsaverage',
            subjects_dir=subjects_dir, niter_affine=[1, 0, 0],
            niter_sdr=[1, 0, 0], src_to=src_fs, smooth=5, verbose=True)
    return morph, src, src_fs
def run_strural(subject, bem_ico=4, spacing='ico5', n_jobs=4,
                subjects_dir='/cluster/transcend/MRI/WMA/recons'):
    """Prepare structural inputs (source space + BEM solution) for *subject*.

    Watershed BEM surfaces are regenerated unconditionally; the source
    space and BEM solution are cached on disk and re-read when present.

    Returns
    -------
    (src, bem, src_fname, bem_fname)
    """
    mne.bem.make_watershed_bem(subject, subjects_dir=subjects_dir,
                               overwrite=True)

    src_fname = op.join(subjects_dir, subject,
                        '%s-pyimpress-src.fif' % spacing)
    if os.path.isfile(src_fname):
        src = mne.read_source_spaces(src_fname)
    else:
        # legacy API: overwrite kwarg
        src = mne.setup_source_space(subject, spacing=spacing,
                                     subjects_dir=subjects_dir,
                                     overwrite=True, n_jobs=n_jobs,
                                     add_dist=True)
        mne.write_source_spaces(src_fname, src)

    bem_fname = op.join(subjects_dir, subject,
                        '%s-pyimpress-bem.fif' % bem_ico)
    if os.path.isfile(bem_fname):
        bem = mne.read_bem_solution(bem_fname)
    else:
        # single-shell conductor model
        bem_model = mne.make_bem_model(subject, ico=bem_ico,
                                       subjects_dir=subjects_dir,
                                       conductivity=(0.3, ))
        bem = mne.make_bem_solution(bem_model)
        mne.write_bem_solution(bem_fname, bem)
    return src, bem, src_fname, bem_fname
def run_forward(subject):
    """Compute the MEG-only forward solution for *subject* and save it."""
    print("processing subject: %s" % subject)
    meg_subject_dir = op.join(config.meg_dir, subject)
    fname_ave = op.join(meg_subject_dir, '%s-ave.fif' % subject)
    fname_fwd = op.join(meg_subject_dir,
                        '%s-%s-fwd.fif' % (subject, config.spacing))
    fname_trans = op.join(meg_subject_dir,
                          '%s_audvis_raw-trans.fif' % subject)

    src = mne.setup_source_space(subject, spacing=config.spacing,
                                 subjects_dir=config.subjects_dir,
                                 add_dist=False)
    evoked = mne.read_evokeds(fname_ave, condition=0)

    # Here we only use 1-layer BEM because the 3-layer is unreliable
    if 'eeg' in evoked:
        bem_name = '%s-5120-5120-5120-bem-sol.fif' % subject
    else:
        bem_name = '%s-5120-bem-sol.fif' % subject
    fname_bem = op.join(config.subjects_dir, subject, 'bem', bem_name)

    # Because we use a 1-layer BEM, we do MEG only
    fwd = mne.make_forward_solution(evoked.info, fname_trans, src,
                                    fname_bem, mindist=config.mindist)
    mne.write_forward_solution(fname_fwd, fwd, overwrite=True)
def src_computation(
        subject: str,
        subjects_dir: str,
        bem: mne.bem.ConductorModel,
        volume: Optional[bool] = False,
        _subject_tree: Optional[SubjectTree] = None,
        _priority: Optional[int] = None
) -> Union[mne.SourceSpaces, List[mne.SourceSpaces]]:
    """Compute an ico-5 surface source space, optionally extended with a
    10 mm volumetric grid over a fixed set of subcortical labels.

    Uses the :func:`nodestimation.project.read_or_write` decorator.

    Parameters
    ----------
    subject : patient's ID.
    subjects_dir : path to the directory with the patient's files.
    bem : BEM solution used to bound the volume source space.
    volume : when True, append a volume source space built from the
        subject's ``aseg.mgz``; default False.
    _subject_tree : representation of the patient's file structure,
        optional (consumed by the decorator).
    _priority : which file to choose when several are read; if None, read
        all of them, optional.

    Returns
    -------
    The surface source space, or surface + volume when *volume* is True.
    """
    surf_src = mne.setup_source_space(subject, spacing='ico5',
                                      add_dist='patch',
                                      subjects_dir=subjects_dir)
    if not volume:
        return surf_src

    # subcortical structures covered by the volumetric grid
    subcortical = [
        'Left-Amygdala',
        'Left-Thalamus-Proper',
        'Left-Cerebellum-Cortex',
        'Brain-Stem',
        'Right-Amygdala',
        'Right-Thalamus-Proper',
        'Right-Cerebellum-Cortex',
    ]
    aseg_path = os.path.join(subjects_dir, subject, 'mri', 'aseg.mgz')
    vol_src = mne.setup_volume_source_space(subject, mri=aseg_path,
                                            pos=10.0, bem=bem,
                                            add_interpolator=True,
                                            volume_label=subcortical,
                                            subjects_dir=subjects_dir)
    return surf_src + vol_src
def test_setup_source_space_spacing(tmpdir, spacing):
    """Test setting up surface source spaces using a given spacing."""
    # work on a scratch copy of the 'sample' subject
    tempdir = str(tmpdir)
    copytree(op.join(subjects_dir, 'sample'), op.join(tempdir, 'sample'))
    # no --spacing flag is passed when spacing == 7
    args = [] if spacing == 7 else ['--spacing', str(spacing)]
    with modified_env(SUBJECTS_DIR=tempdir, SUBJECT='sample'):
        run_subprocess(['mne_setup_source_space'] + args)
    src = read_source_spaces(op.join(tempdir, 'sample', 'bem',
                                     'sample-%d-src.fif' % spacing))
    src_new = setup_source_space('sample', spacing=spacing, add_dist=False,
                                 subjects_dir=subjects_dir)
    _compare_source_spaces(src, src_new, mode='approx', nearest=True)
    # Degenerate conditions
    with pytest.raises(TypeError, match='spacing must be.*got.*float.*'):
        setup_source_space('sample', 7., subjects_dir=subjects_dir)
    with pytest.raises(ValueError, match='spacing must be >= 2, got 1'):
        setup_source_space('sample', 1, subjects_dir=subjects_dir)
def test_scale_mri(): """Test creating fsaverage and scaling it""" # create fsaverage tempdir = _TempDir() create_default_subject(subjects_dir=tempdir) is_mri = _is_mri_subject("fsaverage", tempdir) assert_true(is_mri, "Creating fsaverage failed") fid_path = os.path.join(tempdir, "fsaverage", "bem", "fsaverage-fiducials.fif") os.remove(fid_path) create_default_subject(update=True, subjects_dir=tempdir) assert_true(os.path.exists(fid_path), "Updating fsaverage") # remove redundant label files label_temp = os.path.join(tempdir, "fsaverage", "label", "*.label") label_paths = glob(label_temp) for label_path in label_paths[1:]: os.remove(label_path) # create source space path = os.path.join(tempdir, "fsaverage", "bem", "fsaverage-ico-0-src.fif") mne.setup_source_space("fsaverage", path, "ico0", overwrite=True, subjects_dir=tempdir, add_dist=False) # scale fsaverage os.environ["_MNE_FEW_SURFACES"] = "true" scale_mri("fsaverage", "flachkopf", [1, 0.2, 0.8], True, subjects_dir=tempdir) del os.environ["_MNE_FEW_SURFACES"] is_mri = _is_mri_subject("flachkopf", tempdir) assert_true(is_mri, "Scaling fsaverage failed") src_path = os.path.join(tempdir, "flachkopf", "bem", "flachkopf-ico-0-src.fif") assert_true(os.path.exists(src_path), "Source space was not scaled") scale_labels("flachkopf", subjects_dir=tempdir) # scale source space separately os.remove(src_path) scale_source_space("flachkopf", "ico-0", subjects_dir=tempdir) assert_true(os.path.exists(src_path), "Source space was not scaled") # add distances to source space src = mne.read_source_spaces(path) mne.add_source_space_distances(src) src.save(path) # scale with distances os.remove(src_path) scale_source_space("flachkopf", "ico-0", subjects_dir=tempdir)
def test_setup_source_space_spacing(tmp_path, spacing, monkeypatch):
    """Check the Python spacing implementation against the C tool."""
    copytree(op.join(subjects_dir, 'sample'), tmp_path / 'sample')
    monkeypatch.setenv('SUBJECTS_DIR', str(tmp_path))
    monkeypatch.setenv('SUBJECT', 'sample')
    cmd = ['mne_setup_source_space']
    if spacing != 7:  # no --spacing flag is passed for 7
        cmd.extend(['--spacing', str(spacing)])
    run_subprocess(cmd)
    fname = tmp_path / 'sample' / 'bem' / ('sample-%d-src.fif' % spacing)
    src = read_source_spaces(fname)
    # No need to pass subjects_dir here because we've setenv'ed it
    src_new = setup_source_space('sample', spacing=spacing, add_dist=False)
    _compare_source_spaces(src, src_new, mode='approx', nearest=True)
    # Degenerate conditions
    with pytest.raises(TypeError, match='spacing must be.*got.*float.*'):
        setup_source_space('sample', 7.)
    with pytest.raises(ValueError, match='spacing must be >= 2, got 1'):
        setup_source_space('sample', 1)
def create_source_space(subject, fsMRI_dir):
    """Compute a surface source space for ``subject`` and write it to disk.

    Parameters
    ----------
    subject : str
        FreeSurfer subject name.
    fsMRI_dir : str
        FreeSurfer subjects directory.
    """
    print('Subject ' + subject + ': create_source_space ======================')
    meg_subject_dir = op.join(config.meg_dir, subject)
    # Create source space
    src = mne.setup_source_space(subject, spacing=config.spacing,
                                 subjects_dir=fsMRI_dir)
    # BUG FIX: the output filename previously hardcoded '-oct6-src.fif'
    # regardless of config.spacing; derive the tag from the spacing actually
    # used so the file name always matches its contents.
    src_fname = op.join(meg_subject_dir,
                        '%s-%s-src.fif' % (subject, config.spacing))
    mne.write_source_spaces(src_fname, src, overwrite=True)
def test_simulate_raw_bem(raw_data):
    """Test simulation of raw data with BEM."""
    # NOTE(review): an identical test_simulate_raw_bem definition appears
    # twice in this file; the later one shadows this one at import time.
    raw, src, stc, trans, sphere = raw_data
    src = setup_source_space('sample', 'oct1', subjects_dir=subjects_dir)
    # restrict each hemisphere to 3 in-use vertices for speed
    for s in src:
        s['nuse'] = 3
        s['vertno'] = src[1]['vertno'][:3]
        s['inuse'].fill(0)
        s['inuse'][s['vertno']] = 1
    # use different / more complete STC here
    vertices = [s['vertno'] for s in src]
    stc = SourceEstimate(np.eye(sum(len(v) for v in vertices)), vertices,
                         0, 1. / raw.info['sfreq'])
    with pytest.deprecated_call():
        raw_sim_sph = simulate_raw(raw, stc, trans, src, sphere, cov=None,
                                   verbose=True)
    with pytest.deprecated_call():
        raw_sim_bem = simulate_raw(raw, stc, trans, src, bem_fname, cov=None,
                                   n_jobs=2)
    # some components (especially radial) might not match that well,
    # so just make sure that most components have high correlation
    assert_array_equal(raw_sim_sph.ch_names, raw_sim_bem.ch_names)
    picks = pick_types(raw.info, meg=True, eeg=True)
    n_ch = len(picks)
    corr = np.corrcoef(raw_sim_sph[picks][0], raw_sim_bem[picks][0])
    assert_array_equal(corr.shape, (2 * n_ch, 2 * n_ch))
    med_corr = np.median(np.diag(corr[:n_ch, -n_ch:]))
    assert med_corr > 0.65
    # do some round-trip localization
    for s in src:
        transform_surface_to(s, 'head', trans)
    locs = np.concatenate([s['rr'][s['vertno']] for s in src])
    tmax = (len(locs) - 1) / raw.info['sfreq']
    cov = make_ad_hoc_cov(raw.info)
    # The tolerance for the BEM is surprisingly high (28) but I get the same
    # result when using MNE-C and Xfit, even when using a proper 5120 BEM :(
    for use_raw, bem, tol in ((raw_sim_sph, sphere, 2),
                              (raw_sim_bem, bem_fname, 31)):
        events = find_events(use_raw, 'STI 014')
        assert len(locs) == 6
        evoked = Epochs(use_raw, events, 1, 0, tmax, baseline=None).average()
        assert len(evoked.times) == len(locs)
        fits = fit_dipole(evoked, cov, bem, trans, min_dist=1.)[0].pos
        diffs = np.sqrt(np.sum((locs - fits)**2, axis=-1)) * 1000
        med_diff = np.median(diffs)
        assert med_diff < tol, '%s: %s' % (bem, med_diff)
def run_forward(subject, session=None):
    """Compute and save trans, source space, BEM, and forward solution
    for one subject/session using BIDSPath-derived filenames."""
    deriv_path = config.get_subject_deriv_path(subject=subject,
                                               session=session,
                                               kind=config.get_kind())
    bids_basename = BIDSPath(subject=subject, session=session,
                             task=config.get_task(), acquisition=config.acq,
                             run=None, recording=config.rec,
                             space=config.space, prefix=deriv_path,
                             check=False)
    fname_evoked = bids_basename.copy().update(kind='ave', extension='.fif')
    fname_trans = bids_basename.copy().update(kind='trans', extension='.fif')
    fname_fwd = bids_basename.copy().update(kind='fwd', extension='.fif')
    msg = f'Input: {fname_evoked}, Output: {fname_fwd}'
    logger.info(gen_log_message(message=msg, step=10, subject=subject,
                                session=session))
    # Find the raw data file; the trans lives with the first run in bids_root
    trans = get_head_mri_trans(bids_basename=(bids_basename.copy().update(
        run=config.get_runs()[0], prefix=None)), bids_root=config.bids_root)
    mne.write_trans(fname_trans, trans)
    src = mne.setup_source_space(subject, spacing=config.spacing,
                                 subjects_dir=config.get_fs_subjects_dir(),
                                 add_dist=False)
    evoked = mne.read_evokeds(fname_evoked, condition=0)
    # Here we only use 3-layers BEM only if EEG is available.
    if 'eeg' in config.ch_types:
        model = mne.make_bem_model(subject, ico=4,
                                   conductivity=(0.3, 0.006, 0.3),
                                   subjects_dir=config.get_fs_subjects_dir())
    else:
        model = mne.make_bem_model(subject, ico=4, conductivity=(0.3, ),
                                   subjects_dir=config.get_fs_subjects_dir())
    bem = mne.make_bem_solution(model)
    fwd = mne.make_forward_solution(evoked.info, trans, src, bem,
                                    mindist=config.mindist)
    mne.write_forward_solution(fname_fwd, fwd, overwrite=True)
def run_forward(subject, session=None):
    """Compute and save trans, source space, BEM, and forward solution
    for one subject/session using make_bids_basename-derived filenames."""
    deriv_path = config.get_subject_deriv_path(subject=subject,
                                               session=session,
                                               kind=config.get_kind())
    bids_basename = make_bids_basename(subject=subject, session=session,
                                       task=config.get_task(),
                                       acquisition=config.acq, run=None,
                                       processing=config.proc,
                                       recording=config.rec,
                                       space=config.space)
    fname_evoked = op.join(deriv_path, bids_basename + '-ave.fif')
    fname_trans = op.join(deriv_path, 'sub-{}'.format(subject) + '-trans.fif')
    fname_fwd = op.join(deriv_path, bids_basename + '-fwd.fif')
    msg = f'Input: {fname_evoked}, Output: {fname_fwd}'
    logger.info(gen_log_message(message=msg, step=10, subject=subject,
                                session=session))
    # Find the raw data file
    # XXX : maybe simplify
    # rebuild the basename with the first run to locate the trans in bids_root
    bids_basename = make_bids_basename(subject=subject, session=session,
                                       task=config.get_task(),
                                       acquisition=config.acq,
                                       run=config.get_runs()[0],
                                       processing=config.proc,
                                       recording=config.rec,
                                       space=config.space)
    trans = get_head_mri_trans(bids_basename=bids_basename,
                               bids_root=config.bids_root)
    mne.write_trans(fname_trans, trans)
    src = mne.setup_source_space(subject, spacing=config.spacing,
                                 subjects_dir=config.get_fs_subjects_dir(),
                                 add_dist=False)
    evoked = mne.read_evokeds(fname_evoked, condition=0)
    # Here we only use 3-layers BEM only if EEG is available.
    if 'eeg' in config.ch_types:
        model = mne.make_bem_model(subject, ico=4,
                                   conductivity=(0.3, 0.006, 0.3),
                                   subjects_dir=config.get_fs_subjects_dir())
    else:
        model = mne.make_bem_model(subject, ico=4, conductivity=(0.3,),
                                   subjects_dir=config.get_fs_subjects_dir())
    bem = mne.make_bem_solution(model)
    fwd = mne.make_forward_solution(evoked.info, trans, src, bem,
                                    mindist=config.mindist)
    mne.write_forward_solution(fname_fwd, fwd, overwrite=True)
def get_source_space(subject):
    """Return source space.

    Mainly a helper function to provide caching of source space computation.
    """
    # Fixed oct6 spacing; distances are skipped to keep this fast.
    options = dict(spacing='oct6', subjects_dir=subjects_dir, add_dist=False)
    return mne.setup_source_space(subject, **options)
def createSrc(subj):
    """Load the subject's ico4 source space, creating and caching it if absent."""
    root = os.environ['CAMCAN_ROOT']
    fname = (root + 'processed/cc700/mri/pipeline/release004/BIDSsep/megraw/'
             + subj + '/meg/' + subj + '-ico4-src.fif')
    try:
        # fast path: already computed on a previous run
        return mne.read_source_spaces(fname)
    except IOError:
        # not on disk yet: build it, persist for next time, then return it
        source_space = mne.setup_source_space(subj, spacing='ico4')
        mne.write_source_spaces(fname, source_space)
        return source_space
def _get_bem_src_trans(p, info, subj, struc):
    """Resolve (bem, src, trans, bem_type) for a subject.

    Uses a spherical conductor when ``struc`` is None; otherwise loads the
    head<->MRI trans (fif, falling back to a legacy .txt name), and loads or
    creates an 'oct' surface or 'vol' volume source space.
    """
    subjects_dir = get_subjects_dir(p.subjects_dir, raise_error=True)
    assert isinstance(subjects_dir, str)
    if struc is None:  # spherical case
        bem, src, trans = _spherical_conductor(info, subj, p.src_pos)
        bem_type = 'spherical-model'
    else:
        from mne.transforms import _ensure_trans
        trans = op.join(p.work_dir, subj, p.trans_dir, subj + '-trans.fif')
        if not op.isfile(trans):
            # fall back to the legacy text-file trans name
            old = trans
            trans = op.join(p.work_dir, subj, p.trans_dir,
                            subj + '-trans_head2mri.txt')
            if not op.isfile(trans):
                raise IOError('Unable to find head<->MRI trans files in:\n'
                              '%s\n%s' % (old, trans))
        trans = read_trans(trans)
        trans = _ensure_trans(trans, 'mri', 'head')
        this_src = _handle_dict(p.src, subj)
        assert isinstance(this_src, str)
        if this_src.startswith('oct'):
            kind = 'oct'
        elif this_src.startswith('vol'):
            kind = 'vol'
        else:
            raise RuntimeError('Unknown source space type %s, must be '
                               'oct or vol' % (this_src, ))
        # e.g. 'oct6' or 'oct-6' -> 6
        num = int(this_src.split(kind)[-1].split('-')[-1])
        bem = op.join(subjects_dir, struc, 'bem',
                      '%s-%s-bem-sol.fif' % (struc, p.bem_type))
        # accept both 'oct6' and 'oct-6' style filenames on disk
        for mid in ('', '-'):
            src_space_file = op.join(
                subjects_dir, struc, 'bem',
                '%s-%s%s%s-src.fif' % (struc, kind, mid, num))
            if op.isfile(src_space_file):
                break
        else:  # if neither exists, use last filename
            print(' Creating %s%s source space for %s...'
                  % (kind, num, subj))
            if kind == 'oct':
                src = setup_source_space(struc, spacing='%s%s' % (kind, num),
                                         subjects_dir=p.subjects_dir,
                                         n_jobs=p.n_jobs)
            else:
                assert kind == 'vol'
                src = setup_volume_source_space(struc, pos=num, bem=bem,
                                                subjects_dir=p.subjects_dir)
            write_source_spaces(src_space_file, src)
        src = read_source_spaces(src_space_file)
        bem = read_bem_solution(bem, verbose=False)
        bem_type = ('%s-layer BEM' % len(bem['surfs']))
    return bem, src, trans, bem_type
def run_forward(subject, session=None):
    """Compute and save trans, source space, BEM, and forward solution
    for one subject/session, reading/writing BIDS derivative paths."""
    bids_path = BIDSPath(subject=subject,
                         session=session,
                         task=config.get_task(),
                         acquisition=config.acq,
                         run=None,
                         recording=config.rec,
                         space=config.space,
                         extension='.fif',
                         datatype=config.get_datatype(),
                         root=config.deriv_root,
                         check=False)
    fname_evoked = bids_path.copy().update(suffix='ave')
    fname_trans = bids_path.copy().update(suffix='trans')
    fname_fwd = bids_path.copy().update(suffix='fwd')
    msg = f'Input: {fname_evoked}, Output: {fname_fwd}'
    logger.info(
        gen_log_message(message=msg, step=10, subject=subject,
                        session=session))
    # Retrieve the head -> MRI transformation matrix from the MRI sidecar file
    # in the input data, and save it to an MNE "trans" file in the derivatives
    # folder.
    trans = get_head_mri_trans(bids_path.copy().update(
        run=config.get_runs()[0], root=config.bids_root))
    mne.write_trans(fname_trans, trans)
    src = mne.setup_source_space(subject, spacing=config.spacing,
                                 subjects_dir=config.get_fs_subjects_dir(),
                                 add_dist=False)
    evoked = mne.read_evokeds(fname_evoked, condition=0)
    # Here we only use 3-layers BEM only if EEG is available.
    if 'eeg' in config.ch_types:
        model = mne.make_bem_model(subject, ico=4,
                                   conductivity=(0.3, 0.006, 0.3),
                                   subjects_dir=config.get_fs_subjects_dir())
    else:
        model = mne.make_bem_model(subject, ico=4, conductivity=(0.3, ),
                                   subjects_dir=config.get_fs_subjects_dir())
    bem = mne.make_bem_solution(model)
    fwd = mne.make_forward_solution(evoked.info, trans, src, bem,
                                    mindist=config.mindist)
    mne.write_forward_solution(fname_fwd, fwd, overwrite=True)
def make_forward_solution(experiment, subject, spacing,
                          process_slug=DEFAULT_PROC):
    """Compute and save an MNE forward solution for one subject.

    Parameters
    ----------
    experiment : str
        Experiment name used in the filename templates.
    subject : str
        Subject identifier.
    spacing : str
        Source-space spacing (e.g. 'oct6'); embedded in the src/fwd
        filenames and used to set up the source space.
    process_slug : str
        Tag of the preprocessed raw file to load.

    Returns
    -------
    fwd : instance of mne.Forward
        The computed forward solution (also written to disk).

    Raises
    ------
    IOError
        If the FreeSurfer reconstruction, coregistration, or BEM solution
        for this subject is missing.
    """
    struct = sub_to_struct[experiment][subject]
    if struct == 'NA':
        raise IOError(
            'Freesurfer Reconstruction has not yet been done for this subject. See the Freesurfer Recommended Reconstruction page.')

    trans_fname = TRANS_FNAME.format(experiment=experiment, subject=subject,
                                     struct=struct)
    if not os.path.isfile(trans_fname):
        raise IOError(
            'Coregistration has not yet been done for this subject. Use mne_analyze on big-brain and follow MNE handbook chapter 7.')
    trans = mne.read_trans(trans_fname)

    fwd_path = FWD_PATH.format(experiment=experiment, subject=subject)
    # FIX: os.makedirs creates any missing parents in one call; the previous
    # mkdir + bare `except:` silently swallowed unrelated OSErrors.
    os.makedirs(fwd_path, exist_ok=True)

    fwd_fname = FWD_FNAME.format(fwd_path=fwd_path, subject=subject,
                                 experiment=experiment,
                                 process_slug=process_slug, struct=struct,
                                 spacing=spacing)
    raw = mne.io.Raw(PROC_FNAME.format(experiment=experiment,
                                       subject=subject,
                                       process_slug=process_slug))

    bem_path = [fn for fn in os.listdir(BEM_PATH.format(struct=struct))
                if fnmatch.fnmatch(fn, '*-bem-sol.fif')]
    if len(bem_path) == 0:
        raise IOError('BEM has not yet been done for this subject. See MNE_pipeline_2018.sh')
    bem_fname = pjoin(BEM_PATH.format(struct=struct), bem_path[0])

    src_file = SRC_FNAME.format(struct=struct, spacing=spacing)
    mne.set_config('SUBJECTS_DIR', SUBJ_DIR)  # Not sure how to make sure this runs effectively
    if os.path.isfile(src_file):
        src = mne.read_source_spaces(src_file)
    else:
        # BUG FIX: the source space was always created with spacing='oct6'
        # even though `spacing` is a parameter and the src filename embeds it.
        src = mne.setup_source_space(struct, spacing=spacing)
        mne.write_source_spaces(src_file, src)

    fwd = mne.make_forward_solution(raw.info, trans, src=src, bem=bem_fname)
    mne.write_forward_solution(fwd_fname, fwd)
    return fwd
def get_surface(spacing, subjects_dir):
    """Compute an fsaverage surface source space; return (points, tris)
    for the left hemisphere."""
    print('Computing source space ...')
    src = mne.setup_source_space(subject="fsaverage", spacing=spacing,
                                 subjects_dir=subjects_dir, add_dist=False,
                                 verbose=False)
    left_hemi = src[0]
    in_use = left_hemi["vertno"]
    # positions restricted to the vertices kept by the decimation
    return left_hemi["rr"][in_use], left_hemi["use_tris"]
def test_forward_mixed_source_space(): """Test making the forward solution for a mixed source space """ # get bem file fname_bem = op.join(subjects_dir, 'sample', 'bem', 'sample-5120-5120-5120-bem-sol.fif') # get the aseg file fname_aseg = op.join(subjects_dir, 'sample', 'mri', 'aseg.mgz') # get the surface source space surf = setup_source_space('sample', fname=None, spacing='ico2') # setup two volume source spaces label_names = get_volume_labels_from_aseg(fname_aseg) vol_labels = [label_names[int(np.random.rand() * len(label_names))] for _ in range(2)] vol1 = setup_volume_source_space('sample', fname=None, pos=20., mri=fname_aseg, volume_label=vol_labels[0]) vol2 = setup_volume_source_space('sample', fname=None, pos=20., mri=fname_aseg, volume_label=vol_labels[1]) # merge surfaces and volume src = surf + vol1 + vol2 # calculate forward solution fwd = make_forward_solution(fname_raw, mri=fname_mri, src=src, bem=fname_bem, fname=None) # extract source spaces src_from_fwd = fwd['src'] # get the coordinate frame of each source space coord_frames = np.array([s['coord_frame'] for s in src_from_fwd]) # assert that all source spaces are in head coordinates assert_true((coord_frames == FIFF.FIFFV_COORD_HEAD).all()) # run tests for SourceSpaces.export_volume fname_img = op.join(temp_dir, 'temp-image.mgz') # head coordinates and mri_resolution, but trans file assert_raises(ValueError, src_from_fwd.export_volume, fname_img, mri_resolution=True, trans=None) # head coordinates and mri_resolution, but wrong trans file vox_mri_t = vol1[0]['vox_mri_t'] assert_raises(RuntimeError, src_from_fwd.export_volume, fname_img, mri_resolution=True, trans=vox_mri_t)
def test_simulate_raw_bem(raw_data):
    """Test simulation of raw data with BEM."""
    # NOTE(review): this is a byte-near duplicate of an earlier
    # test_simulate_raw_bem in this file; this later definition wins.
    raw, src, stc, trans, sphere = raw_data
    src = setup_source_space('sample', 'oct1', subjects_dir=subjects_dir)
    # restrict each hemisphere to 3 in-use vertices for speed
    for s in src:
        s['nuse'] = 3
        s['vertno'] = src[1]['vertno'][:3]
        s['inuse'].fill(0)
        s['inuse'][s['vertno']] = 1
    # use different / more complete STC here
    vertices = [s['vertno'] for s in src]
    stc = SourceEstimate(np.eye(sum(len(v) for v in vertices)), vertices,
                         0, 1. / raw.info['sfreq'])
    with pytest.deprecated_call():
        raw_sim_sph = simulate_raw(raw, stc, trans, src, sphere, cov=None,
                                   verbose=True)
    with pytest.deprecated_call():
        raw_sim_bem = simulate_raw(raw, stc, trans, src, bem_fname, cov=None,
                                   n_jobs=2)
    # some components (especially radial) might not match that well,
    # so just make sure that most components have high correlation
    assert_array_equal(raw_sim_sph.ch_names, raw_sim_bem.ch_names)
    picks = pick_types(raw.info, meg=True, eeg=True)
    n_ch = len(picks)
    corr = np.corrcoef(raw_sim_sph[picks][0], raw_sim_bem[picks][0])
    assert_array_equal(corr.shape, (2 * n_ch, 2 * n_ch))
    med_corr = np.median(np.diag(corr[:n_ch, -n_ch:]))
    assert med_corr > 0.65
    # do some round-trip localization
    for s in src:
        transform_surface_to(s, 'head', trans)
    locs = np.concatenate([s['rr'][s['vertno']] for s in src])
    tmax = (len(locs) - 1) / raw.info['sfreq']
    cov = make_ad_hoc_cov(raw.info)
    # The tolerance for the BEM is surprisingly high (28) but I get the same
    # result when using MNE-C and Xfit, even when using a proper 5120 BEM :(
    for use_raw, bem, tol in ((raw_sim_sph, sphere, 2),
                              (raw_sim_bem, bem_fname, 31)):
        events = find_events(use_raw, 'STI 014')
        assert len(locs) == 6
        evoked = Epochs(use_raw, events, 1, 0, tmax, baseline=None).average()
        assert len(evoked.times) == len(locs)
        fits = fit_dipole(evoked, cov, bem, trans, min_dist=1.)[0].pos
        diffs = np.sqrt(np.sum((locs - fits) ** 2, axis=-1)) * 1000
        med_diff = np.median(diffs)
        assert med_diff < tol, '%s: %s' % (bem, med_diff)
def test_make_forward_solution_compensation():
    """Test making forward solution from python with compensation """
    fname_ctf_raw = op.join(op.dirname(__file__), '..', '..', 'fiff', 'tests',
                            'data', 'test_ctf_comp_raw.fif')
    fname_bem = op.join(subjects_dir, 'sample', 'bem',
                        'sample-5120-bem-sol.fif')
    fname_src = op.join(temp_dir, 'oct2-src.fif')
    src = setup_source_space('sample', fname_src, 'oct2',
                             subjects_dir=subjects_dir)
    # python implementation
    fwd_py = make_forward_solution(fname_ctf_raw, mindist=0.0, src=src,
                                   eeg=False, meg=True, bem=fname_bem,
                                   mri=fname_mri)
    # MNE-C reference implementation
    fwd = do_forward_solution('sample', fname_ctf_raw, src=fname_src,
                              mindist=0.0, bem=fname_bem, mri=fname_mri,
                              eeg=False, meg=True, subjects_dir=subjects_dir)
    # both solutions should agree (274 MEG channels, 108 sources)
    _compare_forwards(fwd, fwd_py, 274, 108)
def create_src_space(sbj_dir, sbj_id, spacing):
    # NOTE(review): Python 2 syntax (print statements) — this block predates
    # a Python 3 port.
    """Load or create the cortical surface source space for a subject."""
    import os.path as op
    import mne
    bem_dir = op.join(sbj_dir, sbj_id, 'bem')
    # check if source space exists, if not it creates using mne-python fun
    # we have to create the cortical surface source space even when aseg is
    # True
    src_fname = op.join(bem_dir, '%s-%s-src.fif' % (sbj_id, spacing))
    if not op.isfile(src_fname):
        # e.g. 'oct-6' -> 'oct6' for the spacing argument
        src = mne.setup_source_space(sbj_id, subjects_dir=sbj_dir, fname=True,
                                     spacing=spacing.replace('-', ''),
                                     add_dist=False, overwrite=True,
                                     n_jobs=2)
        print '*** source space file %s written ***' % src_fname
    else:
        print '*** source space file %s exists!!!' % src_fname
        src = mne.read_source_spaces(src_fname)
    return src
def test_source_estimate():
    "Test SourceSpace dimension"
    ds = datasets.get_mne_sample(src='ico')
    dsa = ds.aggregate('side')

    # test auto-conversion
    asndvar('epochs', ds=ds)
    asndvar('epochs', ds=dsa)
    asndvar(dsa['epochs'][0])

    # source space clustering
    res = testnd.ttest_ind('src', 'side', ds=ds, samples=0, pmin=0.05,
                           tstart=0.05, mintime=0.02, minsource=10)
    assert_equal(res.clusters.n_cases, 52)

    # test morphing: build an mne reference morph and compare against
    # morph_source_space
    dsa = ds.aggregate('side')
    ndvar = dsa['src']
    stc = mne.SourceEstimate(ndvar.x[0], ndvar.source.vertno,
                             ndvar.time.tmin, ndvar.time.tstep,
                             ndvar.source.subject)
    subjects_dir = ndvar.source.subjects_dir
    path = ndvar.source._src_pattern.format(subject='fsaverage',
                                            src=ndvar.source.src,
                                            subjects_dir=subjects_dir)
    if os.path.exists(path):
        src_to = mne.read_source_spaces(path)
    else:
        src_to = mne.setup_source_space('fsaverage', path, 'ico4',
                                        subjects_dir=subjects_dir)
    vertices_to = [src_to[0]['vertno'], src_to[1]['vertno']]
    mm = mne.compute_morph_matrix('sample', 'fsaverage', ndvar.source.vertno,
                                  vertices_to, None, subjects_dir)
    stc_to = mne.morph_data_precomputed('sample', 'fsaverage', stc,
                                        vertices_to, mm)
    ndvar_m = morph_source_space(ndvar, 'fsaverage')
    assert_array_equal(ndvar_m.x[0], stc_to.data)
############################################################################### # Set up our source space. # List substructures we are interested in. We select only the # sub structures we want to include in the source space labels_vol = ['Left-Amygdala', 'Left-Thalamus-Proper', 'Left-Cerebellum-Cortex', 'Brain-Stem', 'Right-Amygdala', 'Right-Thalamus-Proper', 'Right-Cerebellum-Cortex'] # Get a surface-based source space, here with few source points for speed # in this demonstration, in general you should use oct6 spacing! src = mne.setup_source_space(subject, spacing='oct5', add_dist=False, subjects_dir=subjects_dir) # Now we create a mixed src space by adding the volume regions specified in the # list labels_vol. First, read the aseg file and the source space bounds # using the inner skull surface (here using 10mm spacing to save time, # we recommend something smaller like 5.0 in actual analyses): vol_src = mne.setup_volume_source_space( subject, mri=fname_aseg, pos=10.0, bem=fname_model, volume_label=labels_vol, subjects_dir=subjects_dir, add_interpolator=False, # just for speed, usually this should be True verbose=True) # Generate the mixed source space src += vol_src
trans = mne.read_trans(trans_fname) ############################################################################### # To save time and memory, the forward solution is read from a file. Set # ``use_precomputed=False`` in the beginning of this script to build the # forward solution from scratch. The head surfaces for constructing a BEM # solution are read from a file. Since the data only contains MEG channels, we # only need the inner skull surface for making the forward solution. For more # information: :ref:`CHDBBCEJ`, :func:`mne.setup_source_space`, # :ref:`create_bem_model`, :func:`mne.bem.make_watershed_bem`. if use_precomputed: fwd_fname = op.join(data_path, 'MEG', 'bst_auditory', 'bst_auditory-meg-oct-6-fwd.fif') fwd = mne.read_forward_solution(fwd_fname) else: src = mne.setup_source_space(subject, spacing='ico4', subjects_dir=subjects_dir, overwrite=True) model = mne.make_bem_model(subject=subject, ico=4, conductivity=[0.3], subjects_dir=subjects_dir) bem = mne.make_bem_solution(model) fwd = mne.make_forward_solution(evoked_std.info, trans=trans, src=src, bem=bem) inv = mne.minimum_norm.make_inverse_operator(evoked_std.info, fwd, cov) snr = 3.0 lambda2 = 1.0 / snr ** 2 del fwd ############################################################################### # The sources are computed using dSPM method and plotted on an inflated brain # surface. For interactive controls over the image, use keyword # ``time_viewer=True``.
def test_scale_mri_xfm():
    """Test scale_mri transforms and MRI scaling."""
    # scale fsaverage
    tempdir = _TempDir()
    os.environ['_MNE_FEW_SURFACES'] = 'true'
    fake_home = testing.data_path()
    # add fsaverage
    create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
                           verbose=True)
    # add sample (with few files)
    sample_dir = op.join(tempdir, 'sample')
    os.mkdir(sample_dir)
    os.mkdir(op.join(sample_dir, 'bem'))
    for dirname in ('mri', 'surf'):
        copytree(op.join(fake_home, 'subjects', 'sample', dirname),
                 op.join(sample_dir, dirname))
    subject_to = 'flachkopf'
    spacing = 'oct2'
    for subject_from in ('fsaverage', 'sample'):
        if subject_from == 'fsaverage':
            scale = 1.  # single dim
        else:
            scale = [0.9, 2, .8]  # separate
        src_from_fname = op.join(tempdir, subject_from, 'bem',
                                 '%s-%s-src.fif' % (subject_from, spacing))
        src_from = mne.setup_source_space(
            subject_from, spacing, subjects_dir=tempdir, add_dist=False)
        write_source_spaces(src_from_fname, src_from)
        print(src_from_fname)
        vertices_from = np.concatenate([s['vertno'] for s in src_from])
        assert len(vertices_from) == 36
        # NOTE(review): both terms index src_from[0]; the symmetric oct
        # decimation makes the two hemisphere counts equal, but confirm
        # src_from[1] wasn't intended for the second term.
        hemis = ([0] * len(src_from[0]['vertno']) +
                 [1] * len(src_from[0]['vertno']))
        mni_from = mne.vertex_to_mni(vertices_from, hemis, subject_from,
                                     subjects_dir=tempdir)
        if subject_from == 'fsaverage':  # identity transform
            source_rr = np.concatenate([s['rr'][s['vertno']]
                                        for s in src_from]) * 1e3
            assert_allclose(mni_from, source_rr)
        if subject_from == 'fsaverage':
            overwrite = skip_fiducials = False
        else:
            # missing fiducials must raise unless explicitly skipped
            with pytest.raises(IOError, match='No fiducials file'):
                scale_mri(subject_from, subject_to, scale,
                          subjects_dir=tempdir)
            skip_fiducials = True
            # destination exists from the previous loop iteration
            with pytest.raises(IOError, match='already exists'):
                scale_mri(subject_from, subject_to, scale,
                          subjects_dir=tempdir,
                          skip_fiducials=skip_fiducials)
            overwrite = True
        scale_mri(subject_from, subject_to, scale, subjects_dir=tempdir,
                  verbose='debug', overwrite=overwrite,
                  skip_fiducials=skip_fiducials)
        if subject_from == 'fsaverage':
            assert _is_mri_subject(subject_to, tempdir), "Scaling failed"
        src_to_fname = op.join(tempdir, subject_to, 'bem',
                               '%s-%s-src.fif' % (subject_to, spacing))
        assert op.exists(src_to_fname), "Source space was not scaled"
        # Check MRI scaling
        fname_mri = op.join(tempdir, subject_to, 'mri', 'T1.mgz')
        assert op.exists(fname_mri), "MRI was not scaled"
        # Check MNI transform
        src = mne.read_source_spaces(src_to_fname)
        vertices = np.concatenate([s['vertno'] for s in src])
        assert_array_equal(vertices, vertices_from)
        mni = mne.vertex_to_mni(vertices, hemis, subject_to,
                                subjects_dir=tempdir)
        assert_allclose(mni, mni_from, atol=1e-3)  # 0.001 mm
    del os.environ['_MNE_FEW_SURFACES']
def test_scale_mri():
    """Test creating fsaverage and scaling it."""
    # create fsaverage using the testing "fsaverage" instead of the FreeSurfer
    # one
    tempdir = _TempDir()
    fake_home = testing.data_path()
    create_default_subject(subjects_dir=tempdir, fs_home=fake_home,
                           verbose=True)
    assert _is_mri_subject('fsaverage', tempdir), "Creating fsaverage failed"

    fid_path = op.join(tempdir, 'fsaverage', 'bem',
                       'fsaverage-fiducials.fif')
    os.remove(fid_path)
    create_default_subject(update=True, subjects_dir=tempdir,
                           fs_home=fake_home)
    assert op.exists(fid_path), "Updating fsaverage"

    # copy MRI file from sample data (shouldn't matter that it's incorrect,
    # so here choose a small one)
    path_from = op.join(testing.data_path(), 'subjects', 'sample', 'mri',
                        'T1.mgz')
    path_to = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    copyfile(path_from, path_to)

    # remove redundant label files
    label_temp = op.join(tempdir, 'fsaverage', 'label', '*.label')
    label_paths = glob(label_temp)
    for label_path in label_paths[1:]:
        os.remove(label_path)

    # create source space
    print('Creating surface source space')
    path = op.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
    src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
                                 add_dist=False)
    mri = op.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    print('Creating volume source space')
    vsrc = mne.setup_volume_source_space(
        'fsaverage', pos=50, mri=mri, subjects_dir=tempdir,
        add_interpolator=False)
    write_source_spaces(path % 'vol-50', vsrc)

    # scale fsaverage, once uniformly and once per-axis
    for scale in (.9, [1, .2, .8]):
        write_source_spaces(path % 'ico-0', src, overwrite=True)
        os.environ['_MNE_FEW_SURFACES'] = 'true'
        with pytest.warns(None):  # sometimes missing nibabel
            scale_mri('fsaverage', 'flachkopf', scale, True,
                      subjects_dir=tempdir, verbose='debug')
        del os.environ['_MNE_FEW_SURFACES']
        assert _is_mri_subject('flachkopf', tempdir), "Scaling failed"
        spath = op.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')

        assert op.exists(spath % 'ico-0'), "Source space ico-0 was not scaled"
        assert os.path.isfile(os.path.join(tempdir, 'flachkopf', 'surf',
                                           'lh.sphere.reg'))
        vsrc_s = mne.read_source_spaces(spath % 'vol-50')
        # scaling the source point and applying the scaled vox->MRI transform
        # must agree with the unscaled chain
        pt = np.array([0.12, 0.41, -0.22])
        assert_array_almost_equal(
            apply_trans(vsrc_s[0]['src_mri_t'], pt * np.array(scale)),
            apply_trans(vsrc[0]['src_mri_t'], pt))
        scale_labels('flachkopf', subjects_dir=tempdir)

        # add distances to source space after hacking the properties to make
        # it run *much* faster
        src_dist = src.copy()
        for s in src_dist:
            s.update(rr=s['rr'][s['vertno']], nn=s['nn'][s['vertno']],
                     tris=s['use_tris'])
            s.update(np=len(s['rr']), ntri=len(s['tris']),
                     vertno=np.arange(len(s['rr'])),
                     inuse=np.ones(len(s['rr']), int))
        mne.add_source_space_distances(src_dist)
        write_source_spaces(path % 'ico-0', src_dist, overwrite=True)

        # scale with distances
        os.remove(spath % 'ico-0')
        scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
        ssrc = mne.read_source_spaces(spath % 'ico-0')
        assert ssrc[0]['dist'] is not None
def mne_anatomy(subject, subjects_dir, overwrite=False):
    """Run the full MNE anatomy pipeline for one subject: watershed BEM,
    scalp surfaces, source space, BEM solution, and fsaverage morph map."""
    import warnings
    # Checks that watershed hasn't already been run
    for fname in ['fiducials.fif', 'head.fif', 'head-dense.fif',
                  'head-medium.fif', 'head-sparse.fif', 'inner_skull.surf',
                  'oct-6-src.fif']:
        fname = op.join(subjects_dir, subject, 'bem',
                        subject + '-' + fname)
        if (not overwrite) and op.exists(fname):
            raise IOError('%s already exists. Set overwrite=True.' % fname)
            # NOTE(review): this return is unreachable after the raise —
            # dead code, or the raise/return pair was meant differently.
            return
    # Create BEM surfaces
    make_watershed_bem(subject=subject, subjects_dir=subjects_dir,
                       overwrite=True, volume='T1', atlas=False,
                       gcaatlas=False, preflood=None)

    # Copy files outside watershed folder in case of bad manipulation
    for surface in ['inner_skull', 'outer_skull', 'outer_skin']:
        from_file = op.join(subjects_dir, subject, 'bem',
                            'watershed/%s_%s_surface' % (subject, surface))
        to_file = op.join(subjects_dir, subject, 'bem',
                          '%s.surf' % surface)
        if op.exists(to_file):
            os.remove(to_file)
        # Update file
        try:
            os.symlink(from_file, to_file)
        except OSError as e:
            # if disk is not NTFS, symoblic link isn't possible
            if e.strerror == 'Operation not permitted':
                from shutil import copyfile
                copyfile(from_file, to_file)

    # Make scalp surfaces
    # NOTE(review): 'store_true' is a truthy *string*, not a bool —
    # presumably copied from an argparse spec; verify intended values.
    make_scalp_surfaces(subjects_dir, subject, force='store_true',
                        overwrite='store_true', verbose=None)

    # Setup source space
    # NOTE(review): missing '-' separator — the pre-check above looks for
    # subject + '-oct-6-src.fif' but this writes subject + 'oct-6-src.fif';
    # confirm which filename is intended.
    src_fname = op.join(subjects_dir, subject, 'bem',
                        subject + 'oct-6-src.fif')
    if not op.isfile(src_fname):
        from mne import setup_source_space
        setup_source_space(subject, subjects_dir=subjects_dir,
                           fname=src_fname, spacing='oct6', surface='white',
                           overwrite=True, add_dist=True, n_jobs=-1,
                           verbose=None)

    # Prepare BEM model
    bem_fname = op.join(subjects_dir, subject, 'bem',
                        subject + '-5120-bem.fif')
    bem_sol_fname = op.join(subjects_dir, subject, 'bem',
                            subject + '-5120-bem-sol.fif')
    if not op.exists(bem_sol_fname):
        from mne.bem import (make_bem_model, write_bem_surfaces,
                             make_bem_solution, write_bem_solution)
        surfs = make_bem_model(subject, subjects_dir=subjects_dir)
        write_bem_surfaces(fname=bem_fname, surfs=surfs)
        bem = make_bem_solution(surfs)
        write_bem_solution(fname=bem_sol_fname, bem=bem)

    # Make morphs to fsaverage if has it
    try:
        read_morph_map(subject, 'fsaverage', subjects_dir=subjects_dir)
    except IOError as e:
        if 'No such file or directory' in e.strerror:
            warnings.warn(e.strerror)
head_mri_t = mne.read_trans( op.join(recordings_path, subject, '{}-head_mri-trans.fif'.format( subject))) ############################################################################## # Now we can setup our source model. # Note that spacing has to be set to 'all' since no common MNE resampling # scheme has been employed in the HCP pipelines. # Since this will take very long time to compute and at this point no other # decimation scheme is available inside MNE, we will compute the source # space on fsaverage, the freesurfer average brain, and morph it onto # the subject's native space. With `oct6` we have ~8000 dipole locations. src_fsaverage = mne.setup_source_space( subject='fsaverage', subjects_dir=subjects_dir, add_dist=False, spacing='oct6', overwrite=True) # now we morph it onto the subject. src_subject = mne.morph_source_spaces( src_fsaverage, subject, subjects_dir=subjects_dir) ############################################################################## # For the same reason `ico` has to be set to `None` when computing the bem. # The headshape is not computed with MNE and has a none standard configuration. bems = mne.make_bem_model(subject, conductivity=(0.3,), subjects_dir=subjects_dir, ico=None) # ico = None for morphed SP. bem_sol = mne.make_bem_solution(bems)
def compute_LF_matrix(sbj_id, sbj_dir, raw_info, aseg, spacing, labels):
    # NOTE(review): Python 2 syntax (print statements) — predates a py3 port.
    """Compute (or reuse) the forward/lead-field matrix for a subject,
    creating the BEM solution and source space on demand; returns the
    forward-solution filename."""
    import os.path as op
    import mne
    from mne.bem import make_watershed_bem
    from mne.report import Report
    from nipype.utils.filemanip import split_filename as split_f
    from neuropype_ephy.compute_fwd_problem import create_mixed_source_space

    report = Report()

    bem_dir = op.join(sbj_dir, sbj_id, 'bem')

    surf_name = 'inner_skull.surf'
    sbj_inner_skull_fname = op.join(bem_dir, sbj_id + '-' + surf_name)
    inner_skull_fname = op.join(bem_dir, surf_name)

    data_path, raw_fname, ext = split_f(raw_info['filename'])

    if aseg:
        fwd_filename = op.join(data_path, '%s-%s-aseg-fwd.fif'
                               % (raw_fname, spacing))
    else:
        fwd_filename = op.join(data_path, '%s-%s-fwd.fif'
                               % (raw_fname, spacing))

    # check if we have just created the fwd matrix
    if not op.isfile(fwd_filename):
        # check if bem-sol was created, if not creates the bem sol using C MNE
        bem_fname = op.join(bem_dir, '%s-5120-bem-sol.fif' % sbj_id)
        model_fname = op.join(bem_dir, '%s-5120-bem.fif' % sbj_id)
        if not op.isfile(bem_fname):
            # chek if inner_skull surf exists, if not BEM computation is
            # performed by MNE python functions mne.bem.make_watershed_bem
            if not (op.isfile(sbj_inner_skull_fname) or
                    op.isfile(inner_skull_fname)):
                print sbj_inner_skull_fname + '---> FILE NOT FOUND!!! ---> BEM is computed'
                make_watershed_bem(sbj_id, sbj_dir, overwrite=True)
            else:
                print '*** inner skull surface exists!!!'

            # Create a BEM model for a subject
            surfaces = mne.make_bem_model(sbj_id, ico=4, conductivity=[0.3],
                                          subjects_dir=sbj_dir)

            # Write BEM surfaces to a fiff file
            mne.write_bem_surfaces(model_fname, surfaces)

            # Create a BEM solution using the linear collocation approach
            bem = mne.make_bem_solution(surfaces)
            mne.write_bem_solution(bem_fname, bem)

            print '*** BEM solution file %s written ***' % bem_fname

            # add BEM figures to a Report
            report.add_bem_to_section(subject=sbj_id, subjects_dir=sbj_dir)
            report_filename = op.join(bem_dir, "BEM_report.html")
            print report_filename
            report.save(report_filename, open_browser=False, overwrite=True)
        else:
            bem = bem_fname
            print '*** BEM solution file %s exists!!!' % bem_fname

        # check if source space exists, if not it creates using mne-python fun
        # we have to create the cortical surface source space even when aseg is
        # True
        src_fname = op.join(bem_dir, '%s-%s-src.fif' % (sbj_id, spacing))
        if not op.isfile(src_fname):
            src = mne.setup_source_space(sbj_id, subjects_dir=sbj_dir,
                                         fname=True,
                                         spacing=spacing.replace('-', ''),
                                         add_dist=False, overwrite=True,
                                         n_jobs=2)
            print '*** source space file %s written ***' % src_fname
        else:
            print '*** source space file %s exists!!!' % src_fname
            src = mne.read_source_spaces(src_fname)

        if aseg:
            src = create_mixed_source_space(sbj_dir, sbj_id, spacing,
                                            labels, src)

        n = sum(src[i]['nuse'] for i in range(len(src)))
        print('il src space contiene %d spaces e %d vertici' % (len(src), n))

        # check if the co-registration file was created
        # if not raise an runtime error
        trans_fname = op.join(data_path, '%s-trans.fif' % raw_fname)
        if not op.isfile(trans_fname):
            raise RuntimeError('coregistration file %s NOT found!!!'
                               % trans_fname)

        # if all is ok creates the fwd matrix
        mne.make_forward_solution(raw_info, trans_fname, src, bem,
                                  fwd_filename,
                                  mindist=5.0,  # ignore sources <= 0mm from inner skull
                                  meg=True, eeg=False,
                                  n_jobs=2,
                                  overwrite=True)
    else:
        print '*** FWD file %s exists!!!' % fwd_filename

    return fwd_filename
raw = raw.crop(0, 150.).load_data()

picks = mne.pick_types(raw.info, meg=True, exclude='bads')
raw.filter(1, 20., n_jobs=1, fir_design='firwin')

events = mne.find_events(raw, stim_channel='UPPT001')
event_ids = {"faces": 1, "scrambled": 2}

tmin, tmax = -0.2, 0.5
baseline = None  # no baseline as high-pass is applied
reject = dict(mag=3e-12)

# Make source space
trans = data_path + '/MEG/spm/SPM_CTF_MEG_example_faces1_3D_raw-trans.fif'
src = mne.setup_source_space('spm', spacing='oct6', subjects_dir=subjects_dir,
                             add_dist=False)
bem = data_path + '/subjects/spm/bem/spm-5120-5120-5120-bem-sol.fif'
forward = mne.make_forward_solution(raw.info, trans, src, bem)
del src  # free memory; only the forward solution is needed below

# inverse parameters
conditions = 'faces', 'scrambled'
snr = 3.0
lambda2 = 1.0 / snr ** 2
method = 'dSPM'
clim = dict(kind='value', lims=[0, 2.5, 5])

###############################################################################
# Estimate covariances
samples_epochs = 5, 15,
def get_mne_sample(tmin=-0.1, tmax=0.4, baseline=(None, 0), sns=False,
                   src=None, sub="modality=='A'", fixed=False, snr=2,
                   method='dSPM'):
    """Load events and epochs from the MNE sample data

    Parameters
    ----------
    tmin, tmax, baseline :
        Epoch parameters.
    sns : bool
        Add sensor space data as NDVar as ``ds['sns']``.
    src : None | 'ico' | 'vol'
        Add source space data as NDVar as ``ds['src']``.
    sub : str | None
        Expression for subset of events to load.
    fixed : bool
        MNE inverse parameter.
    snr : scalar
        MNE inverse parameter.
    method : str
        MNE inverse parameter.

    Returns
    -------
    ds : Dataset
        Dataset with epochs from the MNE sample dataset.
    """
    # Locate the sample dataset (downloads it on first use).
    data_dir = mne.datasets.sample.data_path()
    meg_dir = os.path.join(data_dir, 'MEG', 'sample')
    raw_file = os.path.join(meg_dir, 'sample_audvis_filt-0-40_raw.fif')
    subjects_dir = os.path.join(data_dir, 'subjects')
    subject = 'sample'
    # '%s.label' is a template: filled in later with a label name.
    label_path = os.path.join(subjects_dir, subject, 'label', '%s.label')

    ds = load.fiff.events(raw_file, stim_channel='STI 014')
    ds.info['subjects_dir'] = subjects_dir
    ds.info['subject'] = subject
    ds.info['label'] = label_path

    # get the trigger variable from the dataset for easier access
    trigger = ds['trigger']

    # use trigger to add various labels to the dataset
    # (sample data codes: 1=left/auditory, 2=right/auditory, 3=left/visual,
    # 4=right/visual, 5=smiley, 32=button)
    ds['condition'] = Factor(trigger, labels={1: 'LA', 2: 'RA', 3: 'LV',
                                              4: 'RV', 5: 'smiley',
                                              32: 'button'})
    ds['side'] = Factor(trigger, labels={1: 'L', 2: 'R', 3: 'L', 4: 'R',
                                         5: 'None', 32: 'None'})
    ds['modality'] = Factor(trigger, labels={1: 'A', 2: 'A', 3: 'V', 4: 'V',
                                             5: 'None', 32: 'None'})

    # Optionally restrict to a subset of events before epoching.
    if sub:
        ds = ds.sub(sub)

    load.fiff.add_mne_epochs(ds, tmin, tmax, baseline)
    if sns:
        ds['sns'] = load.fiff.epochs_ndvar(ds['epochs'])

    # Without source estimates the dataset is complete here.
    if not src:
        return ds

    # --- Source space data -------------------------------------------------
    bem_dir = os.path.join(subjects_dir, subject, 'bem')
    bem_file = os.path.join(bem_dir, 'sample-5120-5120-5120-bem-sol.fif')
    trans_file = os.path.join(meg_dir, 'sample_audvis_raw-trans.fif')
    epochs = ds['epochs']
    if src == 'ico':
        src_tag = 'ico-4'
    elif src == 'vol':
        src_tag = 'vol-10'
    else:
        raise ValueError("src = %r" % src)

    # Reuse a cached forward solution when one was written on a previous run.
    fwd_file = os.path.join(meg_dir, 'sample-%s-fwd.fif' % src_tag)
    if os.path.exists(fwd_file):
        fwd = mne.read_forward_solution(fwd_file)
    else:
        src_file = os.path.join(bem_dir, 'sample-%s-src.fif' % src_tag)
        if os.path.exists(src_file):
            src_ = src_file
        elif src == 'ico':
            # NOTE(review): positional args (subject, fname, spacing) match
            # the older mne.setup_source_space signature — confirm against
            # the MNE version this file targets.
            src_ = mne.setup_source_space(subject, src_file, 'ico4',
                                          subjects_dir=subjects_dir)
        elif src == 'vol':
            mri_file = os.path.join(subjects_dir, subject, 'mri', 'orig.mgz')
            src_ = mne.setup_volume_source_space(subject, src_file, pos=10.,
                                                 mri=mri_file, bem=bem_file,
                                                 mindist=0., exclude=0.,
                                                 subjects_dir=subjects_dir)
        fwd = mne.make_forward_solution(epochs.info, trans_file, src_,
                                        bem_file, fwd_file)

    cov_file = os.path.join(meg_dir, 'sample_audvis-cov.fif')
    cov = mne.read_cov(cov_file)
    # NOTE(review): ``mn`` is presumably ``mne.minimum_norm`` imported at
    # module level (not visible in this chunk) — verify the import exists.
    inv = mn.make_inverse_operator(epochs.info, fwd, cov, None, None, fixed)
    ds.info['inv'] = inv
    # lambda2 = 1 / snr**2 per the usual MNE convention.
    stcs = mn.apply_inverse_epochs(epochs, inv, 1. / (snr ** 2), method)
    ds['src'] = load.fiff.stc_ndvar(stcs, subject, src_tag, subjects_dir)
    return ds
def test_make_forward_solution_kit():
    """Test making fwd using KIT, BTI, and CTF (compensated) files

    Compares forward solutions computed by the MNE-C binaries
    (``do_forward_solution``) against the pure-Python implementation
    (``make_forward_solution``) for three vendor formats.
    Relies on module-level fixtures: ``subjects_dir``, ``temp_dir`` and
    ``fname_mri`` (presumably defined at the top of the file — not visible
    in this excerpt).
    """
    fname_bem = op.join(subjects_dir, 'sample', 'bem',
                        'sample-5120-bem-sol.fif')

    # KIT test files shipped with the io.kit test suite.
    kit_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'kit',
                      'tests', 'data')
    sqd_path = op.join(kit_dir, 'test.sqd')
    mrk_path = op.join(kit_dir, 'test_mrk.sqd')
    elp_path = op.join(kit_dir, 'test_elp.txt')
    hsp_path = op.join(kit_dir, 'test_hsp.txt')
    mri_path = op.join(kit_dir, 'trans-sample.fif')
    fname_kit_raw = op.join(kit_dir, 'test_bin_raw.fif')

    # BTi/4D test files shipped with the io.bti test suite.
    bti_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'bti',
                      'tests', 'data')
    bti_pdf = op.join(bti_dir, 'test_pdf_linux')
    bti_config = op.join(bti_dir, 'test_config_linux')
    bti_hs = op.join(bti_dir, 'test_hs_linux')
    fname_bti_raw = op.join(bti_dir, 'exported4D_linux_raw.fif')

    # CTF file with compensation channels.
    fname_ctf_raw = op.join(op.dirname(__file__), '..', '..', 'io', 'tests',
                            'data', 'test_ctf_comp_raw.fif')

    # first set up a testing source space
    # (oct2 keeps the test fast; positional (subject, fname, spacing) matches
    # the older setup_source_space signature — confirm for newer MNE)
    fname_src = op.join(temp_dir, 'oct2-src.fif')
    src = setup_source_space('sample', fname_src, 'oct2',
                             subjects_dir=subjects_dir)

    # first use mne-C: convert file, make forward solution
    fwd = do_forward_solution('sample', fname_kit_raw, src=fname_src,
                              mindist=0.0, bem=fname_bem, mri=mri_path,
                              eeg=False, meg=True, subjects_dir=subjects_dir)
    assert_true(isinstance(fwd, Forward))

    # now let's use python with the same raw file
    fwd_py = make_forward_solution(fname_kit_raw, mindist=0.0, src=src,
                                   eeg=False, meg=True, bem=fname_bem,
                                   mri=mri_path)
    # 157 MEG channels, 108 source points expected for this setup.
    _compare_forwards(fwd, fwd_py, 157, 108)
    assert_true(isinstance(fwd_py, Forward))

    # now let's use mne-python all the way
    raw_py = read_raw_kit(sqd_path, mrk_path, elp_path, hsp_path)
    # without ignore_ref=True, this should throw an error:
    assert_raises(NotImplementedError, make_forward_solution, raw_py.info,
                  mindist=0.0, src=src, eeg=False, meg=True, bem=fname_bem,
                  mri=mri_path)
    fwd_py = make_forward_solution(raw_py.info, mindist=0.0, src=src,
                                   eeg=False, meg=True, bem=fname_bem,
                                   mri=mri_path, ignore_ref=True)
    # Looser tolerances here: ignoring reference channels changes the MEG
    # values slightly relative to the C result.
    _compare_forwards(fwd, fwd_py, 157, 108,
                      meg_rtol=1e-3, meg_atol=1e-7)

    # BTI python end-to-end versus C
    fwd = do_forward_solution('sample', fname_bti_raw, src=fname_src,
                              mindist=0.0, bem=fname_bem, mri=mri_path,
                              eeg=False, meg=True, subjects_dir=subjects_dir)
    raw_py = read_raw_bti(bti_pdf, bti_config, bti_hs)
    fwd_py = make_forward_solution(raw_py.info, mindist=0.0, src=src,
                                   eeg=False, meg=True, bem=fname_bem,
                                   mri=mri_path)
    _compare_forwards(fwd, fwd_py, 248, 108)

    # now let's test CTF w/compensation
    # (``fname_mri`` is a module-level fixture — not the KIT ``mri_path``)
    fwd_py = make_forward_solution(fname_ctf_raw, mindist=0.0, src=src,
                                   eeg=False, meg=True, bem=fname_bem,
                                   mri=fname_mri)
    fwd = do_forward_solution('sample', fname_ctf_raw, src=fname_src,
                              mindist=0.0, bem=fname_bem, mri=fname_mri,
                              eeg=False, meg=True, subjects_dir=subjects_dir)
    _compare_forwards(fwd, fwd_py, 274, 108)

    # CTF with compensation changed in python
    ctf_raw = Raw(fname_ctf_raw, compensation=2)
    fwd_py = make_forward_solution(ctf_raw.info, mindist=0.0, src=src,
                                   eeg=False, meg=True, bem=fname_bem,
                                   mri=fname_mri)
    with warnings.catch_warnings(record=True):
        fwd = do_forward_solution('sample', ctf_raw, src=fname_src,
                                  mindist=0.0, bem=fname_bem, mri=fname_mri,
                                  eeg=False, meg=True,
                                  subjects_dir=subjects_dir)
    _compare_forwards(fwd, fwd_py, 274, 108)
# --- Build a mixed (surface + volume) source space -------------------------
fname_trans = data_dir + '/sample_audvis_raw-trans.fif'
fname_cov = data_dir + '/ernoise-cov.fif'
fname_event = data_dir + '/sample_audvis_filt-0-40_raw-eve.fif'

# List of sub structures we are interested in. We select only the
# sub structures we want to include in the source space
labels_vol = ['Left-Amygdala',
              'Left-Thalamus-Proper',
              'Left-Cerebellum-Cortex',
              'Brain-Stem',
              'Right-Amygdala',
              'Right-Thalamus-Proper',
              'Right-Cerebellum-Cortex']

# Setup a surface-based source space (oct6; distances skipped for speed)
src = setup_source_space(subject, subjects_dir=subjects_dir,
                         spacing='oct6', add_dist=False)

# Setup a volume source space restricted to the aseg labels above
# set pos=7.0 for speed issue
vol_src = setup_volume_source_space(subject, mri=fname_aseg,
                                    pos=7.0,
                                    bem=fname_model,
                                    volume_label=labels_vol,
                                    subjects_dir=subjects_dir)

# Generate the mixed source space (in-place concatenation of the two)
src += vol_src

# compute the fwd matrix
# NOTE(review): this call is truncated at the end of the excerpt — the
# remaining keyword arguments / closing paren continue beyond this chunk.
fwd = make_forward_solution(fname_raw, fname_trans, src, fname_bem,
                            mindist=5.0,  # ignore sources<=5mm from innerskull
                            meg=True, eeg=False,
def test_scale_mri():
    """Test creating fsaverage and scaling it

    End-to-end check of the MRI-scaling pipeline: create a default
    'fsaverage' subject in a temp dir, build small surface and volume
    source spaces, scale the subject, and verify the scaled outputs.
    """
    # create fsaverage
    tempdir = _TempDir()
    create_default_subject(subjects_dir=tempdir)
    assert_true(_is_mri_subject('fsaverage', tempdir),
                "Creating fsaverage failed")

    # Deleting the fiducials file and re-running with update=True checks
    # that updating restores missing files.
    fid_path = os.path.join(tempdir, 'fsaverage', 'bem',
                            'fsaverage-fiducials.fif')
    os.remove(fid_path)
    create_default_subject(update=True, subjects_dir=tempdir)
    assert_true(os.path.exists(fid_path), "Updating fsaverage")

    # copy MRI file from sample data
    # ('%s' is a placeholder filled with the source/destination dirs)
    path = os.path.join('%s', 'fsaverage', 'mri', 'orig.mgz')
    sample_sdir = os.path.join(mne.datasets.sample.data_path(), 'subjects')
    copyfile(path % sample_sdir, path % tempdir)

    # remove redundant label files (keep only one to speed up scaling)
    label_temp = os.path.join(tempdir, 'fsaverage', 'label', '*.label')
    label_paths = glob(label_temp)
    for label_path in label_paths[1:]:
        os.remove(label_path)

    # create source space
    # (ico0 is the coarsest grid — keeps the test fast; 'ico0' is passed as
    # the second positional argument, presumably `spacing` in this MNE
    # version — confirm against the targeted API)
    path = os.path.join(tempdir, 'fsaverage', 'bem', 'fsaverage-%s-src.fif')
    src = mne.setup_source_space('fsaverage', 'ico0', subjects_dir=tempdir,
                                 add_dist=False)
    write_source_spaces(path % 'ico-0', src)
    mri = os.path.join(tempdir, 'fsaverage', 'mri', 'orig.mgz')
    vsrc = mne.setup_volume_source_space('fsaverage', pos=50, mri=mri,
                                         subjects_dir=tempdir,
                                         add_interpolator=False)
    write_source_spaces(path % 'vol-50', vsrc)

    # scale fsaverage
    # (_MNE_FEW_SURFACES limits which surfaces get scaled, for speed;
    # the env var is removed again right after the call)
    os.environ['_MNE_FEW_SURFACES'] = 'true'
    scale = np.array([1, .2, .8])  # anisotropic scaling factors (x, y, z)
    scale_mri('fsaverage', 'flachkopf', scale, True, subjects_dir=tempdir)
    del os.environ['_MNE_FEW_SURFACES']
    assert_true(_is_mri_subject('flachkopf', tempdir),
                "Scaling fsaverage failed")
    spath = os.path.join(tempdir, 'flachkopf', 'bem', 'flachkopf-%s-src.fif')

    assert_true(os.path.exists(spath % 'ico-0'),
                "Source space ico-0 was not scaled")
    # The scaled volume source space must map points consistently with the
    # original: applying the scaled src_mri_t to a scaled point should equal
    # applying the original transform to the original point.
    vsrc_s = mne.read_source_spaces(spath % 'vol-50')
    pt = np.array([0.12, 0.41, -0.22])
    assert_array_almost_equal(apply_trans(vsrc_s[0]['src_mri_t'], pt * scale),
                              apply_trans(vsrc[0]['src_mri_t'], pt))
    scale_labels('flachkopf', subjects_dir=tempdir)

    # add distances to source space
    mne.add_source_space_distances(src)
    src.save(path % 'ico-0', overwrite=True)

    # scale with distances
    # (re-scale after adding distances to check they are scaled too)
    os.remove(spath % 'ico-0')
    scale_source_space('flachkopf', 'ico-0', subjects_dir=tempdir)
    ssrc = mne.read_source_spaces(spath % 'ico-0')
    assert_is_not(ssrc[0]['dist'], None)
# --- Build FreeSurfer source spaces for the NLR subjects -------------------
os.chdir(raw_dir)

# NOTE(review): the original script re-assigned ``subs`` four times in a row,
# so only the final list was ever processed.  The superseded batches are kept
# below as comments for the record instead of as dead assignments:
# subs = ['NLR_102_RS', 'NLR_103_AC', 'NLR_105_BB', 'NLR_110_HH', 'NLR_127_AM',
#         'NLR_130_RW', 'NLR_132_WP', 'NLR_133_ML', 'NLR_145_AC', 'NLR_150_MG',
#         'NLR_151_RD', 'NLR_152_TC', 'NLR_160_EK', 'NLR_161_AK', 'NLR_162_EF',
#         'NLR_163_LF', 'NLR_164_SF', 'NLR_170_GM', 'NLR_172_TH', 'NLR_174_HS',
#         'NLR_179_GM', 'NLR_180_ZD', 'NLR_187_NB', 'NLR_201_GS', 'NLR_202_DD',
#         'NLR_203_AM', 'NLR_204_AM', 'NLR_205_AC', 'NLR_206_LM', 'NLR_207_AH',
#         'NLR_210_SB', 'NLR_211_LB']
# subs = ['NLR_GB310', 'NLR_KB218', 'NLR_JB423', 'NLR_GB267', 'NLR_JB420',
#         'NLR_HB275', 'NLR_197_BK', 'NLR_GB355', 'NLR_GB387']
# subs = ['NLR_HB205', 'NLR_IB319', 'NLR_JB227', 'NLR_JB486', 'NLR_KB396']
subs = ['NLR_JB227', 'NLR_JB486', 'NLR_KB396']

for n, s in enumerate(subs):  # n is unused here; kept to preserve module state
    subject = s

    # Work inside the subject's bem directory so the -src.fif file is written
    # next to the other BEM files.
    os.chdir(os.path.join(fs_dir, subject, 'bem'))

    # NLR_205 / NLR_JB227: head is too small to create an ico5 source space,
    # so fall back to oct6 for those subjects.
    if s in ('NLR_205_AC', 'NLR_JB227'):
        spacing = 'oct6'  # ico5 = 10242, oct6 = 4098 ...8196 = 4098 * 2
        fn2 = subject + '-oct-6-src.fif'
    else:
        spacing = 'ico5'  # 10242 * 2
        fn2 = subject + '-ico-5-src.fif'

    # add_dist=False here because the full distance matrix is computed
    # explicitly below with dist_limit=np.inf.
    src = mne.setup_source_space(subject=subject, spacing=spacing,
                                 subjects_dir=fs_dir, add_dist=False,
                                 n_jobs=18, overwrite=True)
    src = mne.add_source_space_distances(src, dist_limit=np.inf, n_jobs=18,
                                         verbose=None)
    mne.write_source_spaces(fn2, src, overwrite=True)