def morph_labels_from_fsaverage(subject, subjects_dir, mmvt_dir, aparc_name='aparc250', fs_labels_fol='', sub_labels_fol='', n_jobs=6, fsaverage='fsaverage', overwrite=False):
    """Morph the template-brain labels of `aparc_name` onto `subject`.

    Resolves which template brain actually has the annotation file, splits the
    template labels into `n_jobs` chunks, and morphs each chunk in parallel.
    Returns True when every parallel chunk succeeded, False when no suitable
    template brain was found.
    """
    fsaverage = find_template_brain_with_annot_file(aparc_name, fsaverage, subjects_dir)
    if fsaverage == '':
        # No template brain with this annotation — nothing to morph from.
        return False
    if subject == fsaverage:
        # The subject IS the template; its labels are already in place.
        return True
    subject_dir = op.join(subjects_dir, subject)
    if fs_labels_fol == '':
        fs_labels_fol = op.join(subjects_dir, fsaverage, 'label', aparc_name)
    if sub_labels_fol == '':
        sub_labels_fol = op.join(subject_dir, 'label', aparc_name)
    if not op.isdir(sub_labels_fol):
        os.makedirs(sub_labels_fol)
    template_labels = read_labels(fsaverage, subjects_dir, aparc_name, n_jobs=n_jobs)
    subject_verts = utils.load_surf(subject, mmvt_dir, subjects_dir)
    # Make sure we have a morph map, and if not, create it here, and not in
    # the parallel workers (avoids every worker racing to build it).
    mne.surface.read_morph_map(subject, fsaverage, subjects_dir=subjects_dir)
    chunks = []
    for chunk_indices in np.array_split(np.arange(len(template_labels)), n_jobs):
        chunk_labels = [template_labels[ind] for ind in chunk_indices]
        chunks.append((chunk_labels, subject, fsaverage, fs_labels_fol, sub_labels_fol,
                       subject_verts, subjects_dir, overwrite))
    results = utils.run_parallel(_morph_labels_parallel, chunks, n_jobs)
    return all(results)
def save_labels_from_vertices_lookup(subject, atlas, subjects_dir, mmvt_dir, surf_type='pial', read_labels_from_fol='', overwrite_vertices_labels_lookup=False, n_jobs=6):
    """Rebuild the per-label files of `atlas` from the vertex->label lookup.

    For each hemisphere, inverts the vertex->label mapping into
    label->vertices groups and writes the label files in parallel, after
    clearing the atlas label folder. Returns True only if every parallel
    chunk of every hemisphere succeeded.
    """
    lookup = create_vertices_labels_lookup(
        subject, atlas, read_labels_from_fol=read_labels_from_fol,
        overwrite=overwrite_vertices_labels_lookup)
    labels_fol = op.join(subjects_dir, subject, 'label', atlas)
    surf = utils.load_surf(subject, mmvt_dir, subjects_dir)
    utils.delete_folder_files(labels_fol)
    ok = True
    for hemi in utils.HEMIS:
        # Invert the lookup: group vertex indices under their label name.
        labels_vertices = defaultdict(list)
        for vert_idx, label_name in lookup[hemi].items():
            labels_vertices[label_name].append(vert_idx)
        items = list(labels_vertices.items())
        chunks = []
        for chunk_indices in np.array_split(np.arange(len(items)), n_jobs):
            chunk_items = [items[ind] for ind in chunk_indices]
            chunks.append((chunk_items, subject, labels_vertices, surf, hemi, labels_fol))
        results = utils.run_parallel(_save_labels_from_vertices_lookup_hemi, chunks, n_jobs)
        ok = ok and all(results)
    return ok
def get_t1_vertices_data(subject):
    """Sample the subject's T1 (brainmask) intensity at every pial vertex.

    Loads the tkrRAS->voxel transform, maps each hemisphere's pial vertices
    into voxel space, reads the brainmask value at each voxel, and saves one
    per-vertex array per hemisphere to MMVT_DIR/<subject>/surf/T1-<hemi>.npy.
    """
    trans_fname = op.join(MMVT_DIR, subject, 't1_trans.npz')
    trans_dict = utils.Bag(np.load(trans_fname))
    # vox2ras_tkr maps voxels -> surface (tkr) RAS; invert to go the other way.
    ras_tkr2vox = np.linalg.inv(trans_dict.vox2ras_tkr)
    pial_verts = utils.load_surf(subject, MMVT_DIR, SUBJECTS_DIR)
    t1_data, _t1_header = anat.get_data_and_header(subject, 'brainmask.mgz')
    for hemi in utils.HEMIS:
        output_fname = op.join(MMVT_DIR, subject, 'surf', 'T1-{}.npy'.format(hemi))
        verts = pial_verts[hemi]
        t1_surf_hemi = np.zeros(len(verts))
        # Round to the nearest voxel before indexing the volume.
        hemi_pial_voxels = np.rint(utils.apply_trans(ras_tkr2vox, verts)).astype(int)
        # enumerate instead of zip(range(len(...)), ...) — same pairing, idiomatic.
        for vert_ind, t1_vox in enumerate(hemi_pial_voxels):
            t1_surf_hemi[vert_ind] = t1_data[tuple(t1_vox)]
        np.save(output_fname, t1_surf_hemi)
def project_cbf_on_cortex(subject, site, scan_rescan, overwrite=False, print_only=False):
    """Project a subject's CBF volume onto both cortical hemispheres.

    Links the CICS CBF volume into the fMRI folder, projects it to the
    surface with the control->T1 registration, and saves per-hemisphere
    .npy files under MMVT_DIR/<subject>/fmri. With `print_only`, the
    projection commands are printed instead of run.

    Returns False when the CBF link or registration file is missing,
    True otherwise.
    """
    mmvt_subject = subject if scan_rescan == SCAN else '{}_rescan'.format(subject)
    fmri_fol = utils.make_dir(op.join(FMRI_DIR, mmvt_subject))
    mmvt_cbf_fname = op.join(fmri_fol, 'CBF_{}.nii'.format(scan_rescan))
    cics_cbf_fname = op.join(HOME_FOL, site, subject, scan_rescan, 'CBF.nii')
    reg_file = op.join(HOME_FOL, site, subject, scan_rescan, 'control_to_T1.lta')
    if not op.islink(mmvt_cbf_fname) and op.isfile(cics_cbf_fname):
        utils.make_link(cics_cbf_fname, mmvt_cbf_fname)
    if not op.islink(mmvt_cbf_fname):
        # Fixed message typo: "Cannot file" -> "Cannot find".
        print('Cannot find the link to CBF! ({}-{})'.format(cics_cbf_fname, mmvt_cbf_fname))
        return False
    if not op.isfile(reg_file):
        print('Cannot find the registration file! ({})'.format(reg_file))
        return False
    verts = utils.load_surf(subject, MMVT_DIR, SUBJECTS_DIR)
    for hemi in utils.HEMIS:
        surf_output_fname = op.join(FMRI_DIR, subject, 'CBF_{}_{}.mgz'.format(scan_rescan, hemi))
        npy_output_fname = op.join(
            MMVT_DIR, subject, 'fmri', 'fmri_CBF_{}_{}.npy'.format(scan_rescan, hemi))
        if op.isfile(npy_output_fname) and not overwrite:
            continue
        fu.project_volume_data(
            cics_cbf_fname, hemi, reg_file=reg_file, smooth_fwhm=0,
            output_fname=surf_output_fname, print_only=print_only)
        if print_only:
            continue
        # NOTE(review): nibabel's get_data() is deprecated in favor of
        # get_fdata(), which always returns float64 — confirm dtype
        # expectations before switching.
        surf_data = np.squeeze(nib.load(surf_output_fname).get_data())
        np.save(npy_output_fname, surf_data)
        if len(verts[hemi]) != len(surf_data):
            print('*** Wrong number of vertices in {} data! surf vertices ({}) != data vertices ({})'
                  .format(hemi, len(verts[hemi]), len(surf_data)))
    # Explicit success value, consistent with the other boolean-returning
    # pipeline steps (was an implicit None before).
    return True
def check_labels(subject, atlas, subjects_dir, mmvt_dir):
    """Sanity-check that every atlas label's vertices exist on the surface.

    Prints, per hemisphere, how many distinct vertices the labels cover
    versus how many vertices the surface has, and flags any label whose
    vertices fall outside its hemisphere's surface. Returns True when all
    labels pass.
    """
    labels = read_labels(subject, subjects_dir, atlas)
    surfs = utils.load_surf(subject, mmvt_dir, subjects_dir)
    hemi_verts = {hemi: range(len(surfs[hemi])) for hemi in utils.HEMIS}
    ok = True
    for hemi in utils.HEMIS:
        # Collect the union of vertex indices covered by this hemi's labels.
        covered = set()
        for label in labels:
            if label.hemi == hemi:
                covered.update(label.vertices.tolist())
        print('{}: labels vertices len: {}, verts len: {}'.format(
            hemi, len(covered), len(hemi_verts[hemi])))
    for label in labels:
        inside = np.in1d(label.vertices, hemi_verts[label.hemi])
        if not inside.all():
            print('Not all {} vertices are in {} verts!'.format(label.name, label.hemi))
            ok = False
    return ok