def visualize_objects(subject, bem, sources, brain, color='marsatlas',
                      json_fname='default'):
    """Render the requested anatomical objects of a subject in one scene.

    Parameters
    ----------
    subject : str
        Name of the subject.
    bem : bool
        If True, add the BEM surfaces to the scene.
    sources : bool
        If True, add the cortical sources to the scene.
    brain : bool
        If True, add the (translucent) brain surface to the scene.
    color : str
        Color scheme passed to the cortical-sources visualization.
    json_fname : str
        Path of the database-coordinates json file; 'default' resolves to
        the packaged ``config/db_coords.json``.

    Returns
    -------
    so : SceneObj
        The scene containing every requested object (also previewed).
    """
    if json_fname == 'default':
        read_dir = op.join(op.abspath(__package__), 'config')
        json_fname = op.join(read_dir, 'db_coords.json')

    # Only src_dir and bem_dir are used here, but the helper returns all eight.
    raw_dir, prep_dir, trans_dir, mri_dir, src_dir, bem_dir, fwd_dir, \
        hga_dir = read_directories(json_fname)

    so = SceneObj()
    im_objects = []

    # Idiom fix: truthiness tests instead of `== True` comparisons.
    if bem:
        im_objects.append(
            visualize_bem(bem_dir, subject, vis_as='src', color='green',
                          preview=False))
    if brain:
        im_objects.append(
            visualize_brain(src_dir, subject, hemi='both', translucent=True,
                            preview=False))
    if sources:
        im_objects.append(
            visualize_cortical_src(src_dir, subject, hemi='both', color=color,
                                   preview=False))

    for im in im_objects:
        so.add_to_subplot(im)
    so.preview()

    return so
def create_bem(json_fname, subject):
    """Create the BEM model and BEM solution from FreeSurfer files.

    Both results are written to the subject's bem directory as
    ``<subject>-bem-model.fif`` and ``<subject>-bem-sol.fif``.

    Parameters
    ----------
    json_fname : str
        Path of the database-coordinates json file.
    subject : str
        Name of the subject to calculate the BEM model for.

    Returns
    -------
    bem_model : list of dict
        BEM surfaces (single-shell model).
    bem_sol : instance of ConductorModel
        BEM solution.
    """
    # Fix: "soultion" -> "solution" in the progress message.
    print('\n---------- Resolving BEM model and BEM solution ----------\n')

    database, project, db_mne, db_bv, db_fs = read_databases(json_fname)
    raw_dir, prep_dir, trans_dir, mri_dir, src_dir, bem_dir, fwd_dir, \
        hga_dir = read_directories(json_fname)

    fname_bem_model = op.join(bem_dir.format(subject),
                              '{0}-bem-model.fif'.format(subject))
    fname_bem_sol = op.join(bem_dir.format(subject),
                            '{0}-bem-sol.fif'.format(subject))

    # Make bem model: single-shell model. Depends on anatomy only.
    bem_model = mne.make_bem_model(subject, ico=None, conductivity=[0.3],
                                   subjects_dir=op.join(db_fs, project))
    mne.write_bem_surfaces(fname_bem_model, bem_model)

    # Make bem solution. Depends on anatomy only.
    bem_sol = mne.make_bem_solution(bem_model)
    mne.write_bem_solution(fname_bem_sol, bem_sol)

    return bem_model, bem_sol
def check_bem(json_fname, subject):
    """Check whether both BEM files already exist for a subject.

    Parameters
    ----------
    json_fname : str
        Path of the database-coordinates json file.
    subject : str
        The name of the subject to check the BEM model for.

    Returns
    -------
    bool
        True if both the BEM model and the BEM solution files exist,
        otherwise False.
    """
    raw_dir, prep_dir, trans_dir, mri_dir, src_dir, bem_dir, fwd_dir, \
        hga_dir = read_directories(json_fname)

    print('\nChecking BEM files\n')
    fname_bem_model = op.join(bem_dir.format(subject),
                              '{0}-bem-model.fif'.format(subject))
    fname_bem_sol = op.join(bem_dir.format(subject),
                            '{0}-bem-sol.fif'.format(subject))

    # Idiom fix: return the boolean expression directly instead of
    # `if ...: return True else: return False`.
    return op.isfile(fname_bem_model) and op.isfile(fname_bem_sol)
def set_marsatlas(subject, src, hemi='both', json_fname='default'):
    """Build a per-source RGB color list from the MarsAtlas parcellation.

    For every source space in `src`, the subject's label files are read and
    each labelled vertex gets the RGB row of the packaged texture arrays
    (``textures/cortical.npy`` for 'surf' spaces, ``textures/subcortical.npy``
    for 'vol' spaces); unlabelled vertices stay white (1., 1., 1.).

    Parameters
    ----------
    subject : str
        Name of the subject (used to locate its label files).
    src : iterable of dict
        Source spaces; each entry is inspected via its 'type'
        ('surf' or 'vol'), 'id' / 'seg_name' and 'np' keys.
    hemi : str
        'lh', 'rh' or 'both' — which hemispheres to color.
    json_fname : str
        Path of the database-coordinates json file; 'default' resolves to
        the packaged ``config/db_coords.json``.

    Returns
    -------
    rgb_marsatlas : list of ndarray
        One (3,) RGB row per source, stacked across all processed spaces.
    """
    if json_fname == 'default':
        read_dir = op.join(op.abspath(__package__), 'config')
        json_fname = op.join(read_dir, 'db_coords.json')
    raw_dir, prep_dir, trans_dir, mri_dir, src_dir, bem_dir, fwd_dir, hga_dir = read_directories(
        json_fname)

    # NOTE(review): texture location derived from this module's own path;
    # assumes this file is named 'vis.py' with a sibling 'textures' dir.
    read_dir = op.abspath(__file__).replace('vis.py', 'textures')
    cortical_text = np.load(op.join(read_dir, 'cortical.npy'))
    subcort_text = np.load(op.join(read_dir, 'subcortical.npy'))

    rgb_marsatlas = []
    for s in src:
        if s['type'] == 'surf':
            textures = cortical_text
            # id 101 — presumably the left-hemisphere surface id; confirm
            if (hemi == 'both' or hemi == 'lh') and s['id'] == 101:
                labels_lh = mne.read_label(
                    op.join(src_dir.format(subject),
                            '{0}_{1}-lab-lh.label'.format(subject, s['type'])))
                # default color: white for every vertex of the space
                all_src = np.full((s['np'], 3), 1.)
                # label values are offset by 1 to index texture rows —
                # presumably the MarsAtlas lh label offset; TODO confirm
                for v, p in zip(labels_lh.vertices, labels_lh.values - 1):
                    all_src[int(v), :] = textures[int(p), :]
                rgb_marsatlas.append(all_src)
            # id 102 — presumably the right-hemisphere surface id; confirm
            if (hemi == 'both' or hemi == 'rh') and s['id'] == 102:
                labels_rh = mne.read_label(
                    op.join(src_dir.format(subject),
                            '{0}_{1}-lab-rh.label'.format(subject, s['type'])))
                all_src = np.full((s['np'], 3), 1.)
                # rh labels offset by 101 into the same texture table
                for v, p in zip(labels_rh.vertices, labels_rh.values - 101):
                    all_src[int(v), :] = textures[int(p), :]
                rgb_marsatlas.append(all_src)
        if s['type'] == 'vol':
            textures = subcort_text
            if (hemi == 'both' or hemi == 'lh') and s['seg_name'].endswith('lh'):
                labels_lh = mne.read_label(
                    op.join(src_dir.format(subject),
                            '{0}_{1}-lab-lh.label'.format(subject, s['type'])))
                # volume spaces size the array from the label positions
                all_src = np.full((labels_lh.pos.shape[0], 3), 1.)
                # subcortical lh labels offset by 200 — TODO confirm offset
                for v, p in zip(labels_lh.vertices, labels_lh.values - 200):
                    all_src[int(v), :] = textures[int(p), :]
                rgb_marsatlas.append(all_src)
            if (hemi == 'both' or hemi == 'rh') and s['seg_name'].endswith('rh'):
                labels_rh = mne.read_label(
                    op.join(src_dir.format(subject),
                            '{0}_{1}-lab-rh.label'.format(subject, s['type'])))
                all_src = np.full((labels_rh.pos.shape[0], 3), 1.)
                # subcortical rh labels offset by 208 — TODO confirm offset
                for v, p in zip(labels_rh.vertices, labels_rh.values - 208):
                    all_src[int(v), :] = textures[int(p), :]
                rgb_marsatlas.append(all_src)

    # flatten all per-space arrays into a single list of (3,) RGB rows
    rgb_marsatlas = np.vstack(tuple(rgb_marsatlas))
    rgb_marsatlas = list(rgb_marsatlas)
    return rgb_marsatlas
def create_forward_models(subject, session=1, event='', src=None,
                          json_fname='default'):
    """Create the forward model(s) for a subject.

    Parameters
    ----------
    subject : str
        Name of the subject.
    session : int | str
        Number of the session.
    event : str
        Name of the event of the epoch file.
    src : str | list | SourceSpaces | None, default None
        Path(s) of the source file(s) or already-loaded SourceSpaces;
        if None every '*src.fif' file in the subject's source directory
        is loaded automatically.
    json_fname : str
        Path of the database-coordinates json file; 'default' resolves to
        the packaged ``config/db_coords.json``.

    Returns
    -------
    fwds : list
        List of forward models, one per source space.

    Raises
    ------
    Exception
        If `src` is of an unsupported type.
    ValueError
        If a source space is neither of type 'surf' nor 'vol'.
    """
    if json_fname == 'default':
        read_dir = op.join(op.abspath(__package__), 'config')
        json_fname = op.join(read_dir, 'db_coords.json')

    raw_dir, prep_dir, trans_dir, mri_dir, src_dir, bem_dir, fwd_dir, \
        hga_dir = read_directories(json_fname)

    # File to align coordinate frames meg2mri computed using mne.analyze
    # (computed with interactive gui)
    fname_trans = op.join(trans_dir.format(subject),
                          '{0}-trans.fif'.format(subject))

    # MEG Epoched data to recover position of channels
    fname_event = op.join(prep_dir.format(subject, session),
                          '{0}_{1}-epo.fif'.format(subject, event))
    if event == '' or event is None:
        # Bug fix: str.replace returns a new string; the original discarded
        # the result, leaving the '_-epo.fif' suffix in place.
        fname_event = fname_event.replace('_-', '-')

    # Take info from epochs, and then free some space
    epochs_event = mne.read_epochs(fname_event)
    info = epochs_event.info
    del epochs_event

    # Resolve `src` into a list of SourceSpaces.
    # Bug fix: the original used identity checks against type objects
    # (`src is str`, `src is list`, `src[0] == str`), which are always
    # False, so any non-None `src` hit the final `raise`.
    if src is None:
        fnames = [
            n for n in os.listdir(src_dir.format(subject))
            if n.endswith('src.fif')
        ]
        src = [
            mne.read_source_spaces(op.join(src_dir.format(subject), n))
            for n in fnames
        ]
    elif isinstance(src, str):
        if op.isfile(src):
            src = [mne.read_source_spaces(src)]
    elif isinstance(src, list):
        if isinstance(src[0], str):
            src = [mne.read_source_spaces(n) for n in src]
        # a list of SourceSpaces is used as-is
    elif isinstance(src, mne.source_space.SourceSpaces):
        src = [src]
    else:
        raise Exception(
            '\nSource space dtype not recognized, use str, list of str, list of SourceSpaces, '
            'or None to automatic research\n')

    # Calculate forward model for each source space
    fwds = []
    for sp in src:
        if sp[0]['type'] == 'surf':
            print(
                '\n---------- Forward Model for cortical sources ----------\n')
            # cortical sources use fixed (surface-normal) orientation
            f_fixed = True
            name = 'surf'
        elif sp[0]['type'] == 'vol':
            print(
                '\n---------- Forward Model for subcortical sources ----------\n'
            )
            f_fixed = False
            name = 'vol'
        else:
            raise ValueError(
                'Unknown Source Space type, it should be \'surf\' or \'vol\'')

        fwd = forward_model(subject, info, fname_trans, sp,
                            force_fixed=f_fixed, name=name,
                            json_fname=json_fname)
        fwds.append(fwd)

    print('\n---------- Forward Models Completed ----------\n')
    return fwds
def forward_model(subject, info, fname_trans, src, force_fixed=False,
                  name='model', json_fname='default'):
    """Compute (and save) the forward model for one source space.

    Creates the BEM model/solution first if it does not exist yet, then
    computes the gain (leadfield) matrix and writes it to
    ``<subject>-<name>-fwd.fif`` in the forward directory.

    Parameters
    ----------
    subject : str
        The name of the subject.
    info : instance of mne.Info
        Measurement info (channel positions) taken from the epoched data.
    fname_trans : str
        The filename of the meg->mri transformation matrix.
    src : instance of SourceSpaces | list
        Sources of each interest hemisphere.
    force_fixed : bool
        Force fixed (surface-normal) source orientation mode.
    name : str
        Tag used in the saved output filename.
    json_fname : str
        Path of the database-coordinates json file; 'default' resolves to
        the packaged ``config/db_coords.json``.

    Returns
    -------
    fwd : instance of mne.Forward
        Forward model.
    """
    if json_fname == 'default':
        read_dir = op.join(op.abspath(__package__), 'config')
        json_fname = op.join(read_dir, 'db_coords.json')
    raw_dir, prep_dir, trans_dir, mri_dir, src_dir, bem_dir, fwd_dir, hga_dir = read_directories(
        json_fname)

    # Files to save
    fname_bem_sol = op.join(bem_dir.format(subject),
                            '{0}-bem-sol.fif'.format(subject))
    fname_fwd = op.join(fwd_dir.format(subject),
                        '{0}-{1}-fwd.fif'.format(subject, name))

    # Making BEM model and BEM solution if it was not done before
    if not check_bem(json_fname, subject):
        create_bem(json_fname, subject)

    # Compute forward, commonly referred to as the gain or leadfield matrix.
    fwd = mne.make_forward_solution(info=info, trans=fname_trans, src=src,
                                    bem=fname_bem_sol, mindist=0.0)

    # Set orientation of cortical sources to surface normals
    if force_fixed:
        # Surface normal
        fwd = mne.forward.convert_forward_solution(fwd, surf_ori=True,
                                                   force_fixed=True)

    # Save fwd model
    mne.write_forward_solution(fname_fwd, fwd, overwrite=True)

    return fwd
def create_source_models(subject, save=False, json_fname='default'):
    """Create cortical and subcortical source models.

    Pipeline for:
        i) importing BrainVISA white meshes for positions and MarsAtlas
           textures for areas
        ii) creating the transformation file from BV to head coordinates
        iii) creating source spaces with cortical and subcortical dipoles

    Parameters
    ----------
    subject : str
        Subject name.
    save : bool, default False
        If True, save source spaces and respective labels in the default
        directory.
    json_fname : str
        Path of the database-coordinates json file; 'default' resolves to
        the packaged ``config/db_coords.json``.

    Returns
    -------
    surf_src : instance of SourceSpace
        Cortical surface source spaces, lh and rh.
    surf_labels : instance of Labels
        Cortical surfaces labels.
    vol_src : instance of VolSourceSpace
        Subcortical volumes source space, lh and rh.
    vol_labels : instance of Labels
        Subcortical volumes labels.
    """
    if json_fname == 'default':
        read_dir = op.join(op.abspath(__package__), 'config')
        # Fix: every other function in this module defaults to
        # 'db_coords.json'; 'db_info.son' was an inconsistent typo.
        json_fname = op.join(read_dir, 'db_coords.json')
    database, project, db_mne, db_bv, db_fs = read_databases(json_fname)
    raw_dir, prep_dir, trans_dir, mri_dir, src_dir, bem_dir, fwd_dir, \
        hga_dir = read_directories(json_fname)

    ###########################################################################
    # -------------------------------------------------------------------------
    # BrainVISA anatomical data
    # -------------------------------------------------------------------------

    # BV decimated white meshes (cortical sources)
    fname_surf_L = op.join(db_bv, project, subject, 't1mri',
                           'default_acquisition', 'default_analysis',
                           'segmentation', 'mesh', 'surface_analysis',
                           '{0}_Lwhite_remeshed_hiphop.gii'.format(subject))
    fname_surf_R = op.join(db_bv, project, subject, 't1mri',
                           'default_acquisition', 'default_analysis',
                           'segmentation', 'mesh', 'surface_analysis',
                           '{0}_Rwhite_remeshed_hiphop.gii'.format(subject))

    # BV texture (MarsAtlas labels) for decimated white meshes
    # (cortical sources)
    fname_tex_L = op.join(db_bv, 'hiphop138-multiscale', 'Decimated', '4K',
                          'hiphop138_Lwhite_dec_4K_parcels_marsAtlas.gii')
    fname_tex_R = op.join(db_bv, 'hiphop138-multiscale', 'Decimated', '4K',
                          'hiphop138_Rwhite_dec_4K_parcels_marsAtlas.gii')

    # Labelling xls file
    fname_atlas = op.join(db_mne, project, 'marsatlas',
                          'MarsAtlas_BV_2015.xls')

    # Color palette (still used???)
    fname_color = op.join(db_mne, project, 'marsatlas', 'MarsAtlas.ima')

    # MarsAtlas volume parcellation
    fname_vol = op.join(db_bv, project, subject, 't1mri',
                        'default_acquisition', 'default_analysis',
                        'segmentation', 'mesh', 'surface_analysis',
                        '{0}_parcellation.nii.gz'.format(subject))

    # -------------------------------------------------------------------------
    # Transformation files BV to FS
    # -------------------------------------------------------------------------
    # Referential file list
    # (3 transformation files to transform BV meshes to FS space)
    fname_trans_ref = op.join(db_mne, project, 'referential',
                              'referential.txt')

    # This file contains the transformations for subject_01
    fname_trans_out = op.join(db_mne, project, subject, 'ref',
                              '{0}-trans.trm'.format(subject))

    name_lobe_vol = ['Subcortical']

    # ---------------------------------------------------------------------
    # Setting up the source space from BrainVISA results
    # ---------------------------------------------------------------------
    # http://martinos.org/mne/stable/manual/cookbook.html#source-localization

    # Create .trm file transformation from BrainVisa to FreeSurfer needed
    # for brain.py function for surface only
    create_trans(subject, database, fname_trans_ref, fname_trans_out)

    # Calculate cortical sources and MarsAtlas labels
    print('\n---------- Cortical sources ----------\n')
    surf_src, surf_labels = get_brain_surf_sources(
        subject, fname_surf_L, fname_surf_R, fname_tex_L, fname_tex_R,
        fname_trans_out, fname_atlas, fname_color)

    if save:
        print('\nSaving surface source space and labels.....')
        mne.write_source_spaces(op.join(src_dir.format(subject),
                                        '{0}_surf-src.fif'.format(subject)),
                                surf_src, overwrite=True)
        for sl in surf_labels:
            mne.write_label(
                op.join(src_dir.format(subject),
                        '{0}_surf-lab'.format(subject)), sl)
        print('[done]')

    # Create BEM model if needed
    print('\nBEM model is needed for volume source space\n')
    if not check_bem(json_fname, subject):
        create_bem(json_fname, subject)

    print('\n---------- Subcortical sources ----------\n')
    vol_src, vol_labels = get_brain_vol_sources(
        subject, fname_vol, json_fname, name_lobe_vol, fname_trans_out,
        fname_atlas, space=5.)

    if save:
        print('Saving volume source space and labels.....')
        mne.write_source_spaces(op.join(src_dir.format(subject),
                                        '{0}_vol-src.fif'.format(subject)),
                                vol_src, overwrite=True)
        for vl in vol_labels:
            mne.write_label(
                op.join(src_dir.format(subject),
                        '{0}_vol-lab'.format(subject)), vl)
        print('[done]')

    print('\n---------- Sources Completed ----------\n')
    return surf_src, surf_labels, vol_src, vol_labels