def compute_noise_cov(subject, hcp_path, noise_cov_fname=''):
    """Compute (or load a cached) noise covariance from the HCP empty-room recording.

    If the covariance file already exists it is read and returned; otherwise the
    empty-room raw is loaded, reference-corrected, band-pass filtered and an
    empirical covariance is computed, saved and returned.

    :param subject: HCP subject identifier.
    :param hcp_path: Root path of the HCP data.
    :param noise_cov_fname: Output file; defaults to meg.NOISE_COV for 'empty_room'.
    :return: mne.Covariance instance.
    """
    if noise_cov_fname == '':
        noise_cov_fname = meg.NOISE_COV.format(cond='empty_room')
    if op.isfile(noise_cov_fname):
        noise_cov = mne.read_cov(noise_cov_fname)
        return noise_cov
    utils.make_dir(utils.get_parent_fol(noise_cov_fname))
    raw_noise = hcp.read_raw(subject=subject, hcp_path=hcp_path, data_type='noise_empty_room')
    raw_noise.load_data()
    # Apply reference-channel correction and drop the reference channels.
    preproc.apply_ref_correction(raw_noise)
    # High-pass then low-pass (0.5-60 Hz) with a 4th-order Butterworth IIR filter,
    # mirroring the filtering applied to the task data.
    raw_noise.filter(0.50, None, method='iir', iir_params=dict(order=4, ftype='butter'), n_jobs=1)
    raw_noise.filter(None, 60, method='iir', iir_params=dict(order=4, ftype='butter'), n_jobs=1)
    ##############################################################################
    # Note that using the empty room noise covariance will inflate the SNR of the
    # evoked and renders comparisons to `baseline` rather uninformative.
    noise_cov = mne.compute_raw_covariance(raw_noise, method='empirical')
    noise_cov.save(noise_cov_fname)
    return noise_cov
def save_evoked_to_blender(mri_subject, events, evoked=None, norm_by_percentile=True, norm_percs=(1, 99)):
    """Export normalized evoked EEG data for the mmvt/Blender add-on.

    Writes 'eeg_data.npy' (channels x times x conditions, normalized by the
    percentile-based max abs value) and 'eeg_data_meta.npz' (channel names and
    condition names) into the subject's mmvt eeg folder.

    :param mri_subject: MRI subject name (mmvt folder name).
    :param events: dict of condition name -> event id.
    :param evoked: optional dict of condition -> evoked list; read from disk if None.
    :param norm_by_percentile: normalize using percentiles instead of raw min/max.
    :param norm_percs: (low, high) percentiles used for normalization.
    :return: True on completion.
    """
    fol = op.join(MMVT_DIR, mri_subject, 'eeg')
    utils.make_dir(fol)
    if '{cond}' in meg.EVO:
        # One evoked file per condition.
        for event_ind, event_id in enumerate(events.keys()):
            if evoked is None:
                evo = mne.read_evokeds(meg.get_cond_fname(meg.EVO, event_id))
            else:
                evo = evoked[event_id]
            if event_ind == 0:
                ch_names = evo[0].ch_names
                # One slice per condition. (Was hard-coded to 2, which broke
                # for any other number of events.)
                data = np.zeros((evo[0].data.shape[0], evo[0].data.shape[1], len(events)))
            data[:, :, event_ind] = evo[0].data
    else:
        if evoked is None:
            evoked = mne.read_evokeds(meg.EVO)
        # BUG FIX: mne.read_evokeds returns a list, so `.data` must be taken
        # from its first element; ch_names was also undefined in this branch.
        data = evoked[0].data
        ch_names = evoked[0].ch_names
    data_max, data_min = utils.get_data_max_min(data, norm_by_percentile, norm_percs)
    max_abs = utils.get_max_abs(data_max, data_min)
    data = data / max_abs
    np.save(op.join(fol, 'eeg_data.npy'), data)
    np.savez(op.join(fol, 'eeg_data_meta.npz'), names=ch_names, conditions=list(events.keys()))
    return True
def create_annotation_from_fsaverage(subject, aparc_name='aparc250', fsaverage='fsaverage', overwrite_annotation=False,
                                     overwrite_morphing=False, do_solve_labels_collisions=False,
                                     morph_labels_from_fsaverage=True, fs_labels_fol='', n_jobs=6):
    """Create per-hemisphere annotation files for `subject` from a template atlas.

    Tries, in order: reuse existing annot files; create them via FreeSurfer;
    morph labels from the fsaverage template; optionally solve label collisions;
    finally write the labels back to an annot file (retrying after collision
    solving if the first write fails).

    :return: True if both hemis' annot files exist at the end.
    """
    annotations_exist = np.all([op.isfile(op.join(SUBJECTS_DIR, subject, 'label',
                                                  '{}.{}.annot'.format(hemi, aparc_name))) for hemi in HEMIS])
    existing_freesurfer_annotations = ['aparc.DKTatlas40.annot', 'aparc.annot', 'aparc.a2009s.annot']
    if '{}.annot'.format(aparc_name) in existing_freesurfer_annotations:
        # Stock FreeSurfer atlases: no morphing or collision solving needed.
        morph_labels_from_fsaverage = False
        do_solve_labels_collisions = False
        if not annotations_exist:
            utils.make_dir(op.join(SUBJECTS_DIR, subject, 'label'))
            annotations_exist = fu.create_annotation_file(
                subject, aparc_name, subjects_dir=SUBJECTS_DIR, freesurfer_home=FREE_SURFER_HOME)
    if morph_labels_from_fsaverage:
        utils.morph_labels_from_fsaverage(subject, SUBJECTS_DIR, aparc_name, n_jobs=n_jobs, fsaverage=fsaverage,
                                          overwrite=overwrite_morphing, fs_labels_fol=fs_labels_fol)
    if do_solve_labels_collisions:
        solve_labels_collisions(subject, aparc_name, fsaverage, n_jobs)
    # Note that using the current mne version this code won't work, because of collissions between hemis
    # You need to change the mne.write_labels_to_annot code for that.
    if overwrite_annotation or not annotations_exist:
        try:
            utils.labels_to_annot(subject, SUBJECTS_DIR, aparc_name, overwrite=overwrite_annotation)
        except:
            # First write failed (usually label collisions); solve and retry once.
            print("Can't write labels to annotation! Trying to solve labels collision")
            solve_labels_collisions(subject, aparc_name, fsaverage, n_jobs)
            try:
                utils.labels_to_annot(subject, SUBJECTS_DIR, aparc_name, overwrite=overwrite_annotation)
            except:
                print("Can't write labels to annotation! Solving the labels collision didn't help...")
                print(traceback.format_exc())
    return utils.both_hemi_files_exist(
        op.join(SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format('{hemi}', aparc_name)))
def freesurfer_surface_to_blender_surface(subject, hemi='both', overwrite=False):
    """Convert FreeSurfer pial/inflated surfaces to ply/npz files for mmvt.

    For each hemisphere and surface type the pipeline is:
    mris_convert (srf -> asc), rename to .srf, convert to .ply, copy the ply
    into the mmvt surf folder, then cache vertices/faces as an .npz.

    :return: True if all four ply and npz files exist for both hemis.
    """
    for hemi in utils.get_hemis(hemi):
        utils.make_dir(op.join(MMVT_DIR, subject, 'surf'))
        for surf_type in ['inflated', 'pial']:
            surf_name = op.join(SUBJECTS_DIR, subject, 'surf', '{}.{}'.format(hemi, surf_type))
            surf_wavefront_name = '{}.asc'.format(surf_name)
            surf_new_name = '{}.srf'.format(surf_name)
            hemi_ply_fname = '{}.ply'.format(surf_name)
            mmvt_hemi_ply_fname = op.join(MMVT_DIR, subject, 'surf', '{}.{}.ply'.format(hemi, surf_type))
            mmvt_hemi_npz_fname = op.join(MMVT_DIR, subject, 'surf', '{}.{}.npz'.format(hemi, surf_type))
            # Precedence: overwrite OR (ply missing AND npz missing) -- i.e. the
            # conversion is skipped if either cached file already exists.
            if overwrite or not op.isfile(mmvt_hemi_ply_fname) and not op.isfile(mmvt_hemi_npz_fname):
                print('{} {}: convert srf to asc'.format(hemi, surf_type))
                utils.run_script('mris_convert {} {}'.format(surf_name, surf_wavefront_name))
                os.rename(surf_wavefront_name, surf_new_name)
                print('{} {}: convert asc to ply'.format(hemi, surf_type))
                convert_hemis_srf_to_ply(subject, hemi, surf_type)
                # Replace any stale copy in the mmvt folder.
                if op.isfile(mmvt_hemi_ply_fname):
                    os.remove(mmvt_hemi_ply_fname)
                shutil.copy(hemi_ply_fname, mmvt_hemi_ply_fname)
            ply_fname = op.join(MMVT_DIR, subject, 'surf', '{}.{}.ply'.format(hemi, surf_type))
            if not op.isfile(mmvt_hemi_npz_fname):
                verts, faces = utils.read_ply_file(ply_fname)
                np.savez(mmvt_hemi_npz_fname, verts=verts, faces=faces)
    return utils.both_hemi_files_exist(op.join(MMVT_DIR, subject, 'surf', '{hemi}.pial.ply')) and \
        utils.both_hemi_files_exist(op.join(MMVT_DIR, subject, 'surf', '{hemi}.pial.npz')) and \
        utils.both_hemi_files_exist(op.join(MMVT_DIR, subject, 'surf', '{hemi}.inflated.ply')) and \
        utils.both_hemi_files_exist(op.join(MMVT_DIR, subject, 'surf', '{hemi}.inflated.npz'))
def get_output_using_sftp(subjects, subject_to):
    """Fetch each subject's cvs-registration tm3d file from the remote server over SFTP."""
    domain = 'door.nmr.mgh.harvard.edu'
    username = '******'
    remote_dir_template = '/space/thibault/1/users/npeled/subjects/{subject}'
    files_to_fetch = {
        'mri_cvs_register_to_{}'.format(subject_to):
            ['combined_to{}_elreg_afteraseg-norm.tm3d'.format(subject_to)],
    }
    sftp_password = ''
    for subj in subjects:
        print('Getting tm3d file for {}'.format(subj))
        utils.make_dir(op.join(SUBJECTS_DIR, subj, 'electrodes'))
        ok, entered_password = utils.prepare_subject_folder(
            files_to_fetch, subj, remote_dir_template.format(subject=subj.lower()),
            SUBJECTS_DIR, True, username, domain, sftp_password=sftp_password)
        if not ok:
            print('Error in copying the file!')
        # Remember the password typed for the first subject so later ones don't prompt again.
        if entered_password != '':
            sftp_password = entered_password
def split_baseline(fif_fnames, clips_length=6, shift=6, overwrite=False):
    """Split baseline evoked files into fixed-length clips.

    Each input fif is cut into clips of `clips_length` seconds, advancing by
    `shift` seconds, and each clip is written to a 'new_baselines' subfolder.

    :param fif_fnames: list of evoked fif file names (all in the same folder).
    :param clips_length: clip length in seconds.
    :param shift: step between consecutive clip starts, in seconds.
    :param overwrite: re-split even if output clips already exist.
    :return: list of the generated fif file names.
    """
    output_fol = op.join(utils.get_parent_fol(fif_fnames[0]), 'new_baselines')
    # Reuse previously generated clips unless asked to overwrite.
    if not overwrite and op.isdir(output_fol) and len(glob.glob(op.join(output_fol, '*.fif'))) > 0:
        return glob.glob(op.join(output_fol, '*.fif'))
    utils.make_dir(output_fol)
    data = []
    for fif_fname in fif_fnames:
        clip = mne.read_evokeds(fif_fname)[0]
        freq = clip.info['sfreq']
        step = int(freq * clips_length)  # clip length in samples
        start_t, end_t = 0, len(clip.times)
        while start_t + clips_length * freq < end_t:
            new_clip = mne.EvokedArray(clip.data[:, start_t:start_t + step], clip.info, comment='baseline')
            # NOTE(review): `data` collects a 10-sample snippet per clip but is
            # only converted to an array below and never used -- debug leftover?
            data.append(new_clip.data[0, :10])
            mne.write_evokeds(
                op.join(output_fol, '{}_{}.fif'.format(utils.namebase(fif_fname), int(start_t / freq))), new_clip)
            start_t += int(freq * shift)
    data = np.array(data)
    return glob.glob(op.join(output_fol, '*.fif'))
def create_lut_file_for_atlas(subject, atlas):
    """Build a FreeSurfer-style color lookup table for `atlas` and copy it to freeview.

    Combines the stock subcortical LUT entries (codes < 1000 and >= 3000) with
    per-hemisphere cortical entries read from the subject's annot files
    (lh offset 1000, rh offset 2000), then writes a tab-separated
    '<atlas>ColorLUT.txt' in the subject's label folder and mmvt freeview folder.
    """
    # Read the subcortical segmentation entries from the FreeSurfer LUT.
    lut = utils.read_freesurfer_lookup_table(FREE_SURFER_HOME, get_colors=True)
    lut_new = [list(l) for l in lut if l[0] < 1000]
    for hemi, offset in zip(['lh', 'rh'], [1000, 2000]):
        if hemi == 'lh':
            lut_new.append([1000, 'ctx-lh-unknown', 25, 5, 25, 0])
        else:
            lut_new.append([2000, 'ctx-rh-unknown', 25, 5, 25, 0])
        _, ctab, names = _read_annot(op.join(SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format(hemi, atlas)))
        names = [name.astype(str) for name in names]
        for index, (label, cval) in enumerate(zip(names, ctab)):
            r, g, b, a, _ = cval
            lut_new.append([index + offset + 1, label, r, g, b, a])
    lut_new.sort(key=lambda x: x[0])
    # Add back the stock entries with codes >= 3000.
    for l in [l for l in lut if l[0] >= 3000]:
        lut_new.append(l)
    new_lut_fname = op.join(SUBJECTS_DIR, subject, 'label', '{}ColorLUT.txt'.format(atlas))
    with open(new_lut_fname, 'w') as fp:
        csv_writer = csv.writer(fp, delimiter='\t')
        csv_writer.writerows(lut_new)
    utils.make_dir(op.join(MMVT_DIR, subject, 'freeview'))
    shutil.copyfile(new_lut_fname, op.join(MMVT_DIR, subject, 'freeview', '{}ColorLUT.txt'.format(atlas)))
def save_labels_coloring(subject, atlas, n_jobs=2):
    """Assign a distinct color to every hemisphere-invariant label name and save
    two csv files (label -> rgb, label -> color name) in the coloring folder.

    Returns True if the coloring file was written.
    """
    succeeded = False
    coloring_dir = op.join(MMVT_DIR, subject, 'coloring')
    utils.make_dir(coloring_dir)
    coloring_fname = op.join(coloring_dir, 'labels_{}_coloring.csv'.format(atlas))
    coloring_names_fname = op.join(coloring_dir, 'labels_{}_colors_names.csv'.format(atlas))
    try:
        labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, n_jobs=n_jobs)
        color_generator = cu.get_distinct_colors_and_names()
        # Both hemis of the same anatomical label share one color, keyed by the
        # hemi-invariant name.
        rgb_by_name, color_name_by_name = {}, {}
        for label in labels:
            inv_name = lu.get_label_hemi_invariant_name(label.name)
            if inv_name not in rgb_by_name:
                rgb_by_name[inv_name], color_name_by_name[inv_name] = next(color_generator)
        with open(coloring_fname, 'w') as colors_file, open(coloring_names_fname, 'w') as col_names_file:
            for label in labels:
                inv_name = lu.get_label_hemi_invariant_name(label.name)
                colors_file.write('{},{},{},{}\n'.format(label.name, *rgb_by_name[inv_name]))
                col_names_file.write('{},{}\n'.format(label.name, color_name_by_name[inv_name]))
        succeeded = op.isfile(coloring_fname)
    except:
        print('Error in save_labels_coloring!')
        print(traceback.format_exc())
    return succeeded
def save_cerebellum_coloring(subject):
    """Write cerebellum_coloring.csv from the Buckner2011 17-networks LUT.

    The LUT is looked up in the mmvt templates folder and copied there from the
    resources folder if missing. Returns True if the coloring file was written.
    """
    succeeded = False
    coloring_dir = op.join(MMVT_DIR, subject, 'coloring')
    utils.make_dir(coloring_dir)
    coloring_fname = op.join(coloring_dir, 'cerebellum_coloring.csv')
    lut_name = 'Buckner2011_17Networks_ColorLUT_new.txt'
    lut_fname = op.join(MMVT_DIR, 'templates', lut_name)
    if not op.isfile(lut_fname):
        # Try to seed the templates folder from the bundled resources.
        lut_resources_fname = op.join(utils.get_resources_fol(), lut_name)
        if op.isfile(lut_resources_fname):
            shutil.copy(lut_resources_fname, lut_fname)
        else:
            print("The Buckner2011 17Networks Color LUT is missing! ({})".format(lut_fname))
            return False
    try:
        with open(coloring_fname, 'w') as colors_file, open(lut_fname, 'r') as lut_file:
            lut_lines = lut_file.readlines()
            # Skip the LUT header line; rows 1..33 map to cerebellum_1..33.
            for ind, lut_line in zip(range(1, 34), lut_lines[1:]):
                fields = ' '.join(lut_line.split()).split(' ')
                rgb = [float(x) / 255 for x in fields[2:-1]]
                colors_file.write('{},{},{},{}\n'.format('cerebellum_{}'.format(ind), *rgb))
        succeeded = op.isfile(coloring_fname)
    except:
        print('Error in save_cerebellum_coloring!')
        print(traceback.format_exc())
    return succeeded
def create_lut_file_for_atlas(subject, atlas):
    """Build a FreeSurfer-style color LUT for `atlas` and copy it to freeview.

    NOTE(review): near-duplicate of another create_lut_file_for_atlas in this
    file that writes to MMVT_DIR; this variant writes the freeview copy under
    BLENDER_ROOT_DIR. Consider consolidating.
    """
    # Read the subcortical segmentation entries from the FreeSurfer LUT.
    lut = utils.read_freesurfer_lookup_table(FREE_SURFER_HOME, get_colors=True)
    lut_new = [list(l) for l in lut if l[0] < 1000]
    for hemi, offset in zip(['lh', 'rh'], [1000, 2000]):
        if hemi == 'lh':
            lut_new.append([1000, 'ctx-lh-unknown', 25, 5, 25, 0])
        else:
            lut_new.append([2000, 'ctx-rh-unknown', 25, 5, 25, 0])
        _, ctab, names = _read_annot(
            op.join(SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format(hemi, atlas)))
        names = [name.astype(str) for name in names]
        for index, (label, cval) in enumerate(zip(names, ctab)):
            r, g, b, a, _ = cval
            lut_new.append([index + offset + 1, label, r, g, b, a])
    lut_new.sort(key=lambda x: x[0])
    # Add back the stock entries with codes >= 3000.
    for l in [l for l in lut if l[0] >= 3000]:
        lut_new.append(l)
    new_lut_fname = op.join(SUBJECTS_DIR, subject, 'label', '{}ColorLUT.txt'.format(atlas))
    with open(new_lut_fname, 'w') as fp:
        csv_writer = csv.writer(fp, delimiter='\t')
        csv_writer.writerows(lut_new)
    utils.make_dir(op.join(BLENDER_ROOT_DIR, subject, 'freeview'))
    shutil.copyfile(
        new_lut_fname, op.join(BLENDER_ROOT_DIR, subject, 'freeview', '{}ColorLUT.txt'.format(atlas)))
def export_into_csv(template_system, mmvt_dir, bipolar=False, prefix='', input_fname=''):
    """Export template-space electrode positions to a RAS csv file.

    Reads the electrode positions npz for the resolved template brain and
    writes one row per electrode: name, R, A, S (two decimal places).
    Returns the csv file name.
    """
    # Resolve the template brain name from the coordinate-system identifier.
    if template_system == 'ras':
        template = 'fsaverage'
    elif template_system == 'mni':
        template = 'colin27'
    else:
        template = template_system
    if input_fname == '':
        input_name = '{}electrodes{}_positions.npz'.format(prefix, '_bipolar' if bipolar else '')
        input_fname = op.join(mmvt_dir, template, 'electrodes', input_name)
    electrodes_dict = utils.Bag(np.load(input_fname))
    fol = utils.make_dir(op.join(MMVT_DIR, template, 'electrodes'))
    csv_fname = op.join(fol, '{}{}_{}RAS.csv'.format(prefix, template, 'bipolar_' if bipolar else ''))
    print('Writing csv file to {}'.format(csv_fname))
    with open(csv_fname, 'w') as csv_file:
        writer = csv.writer(csv_file, quoting=csv.QUOTE_NONE)
        writer.writerow(['Electrode Name', 'R', 'A', 'S'])
        for name, coords in zip(electrodes_dict.names, electrodes_dict.pos):
            formatted = ['{:.2f}'.format(x) for x in coords.squeeze()]
            writer.writerow([name, *formatted])
    # Mirror the csv into the mmvt electrodes folder (no-op when identical).
    fol = utils.make_dir(op.join(MMVT_DIR, template, 'electrodes'))
    csv_fname2 = op.join(fol, utils.namebase_with_ext(csv_fname))
    if csv_fname != csv_fname2:
        utils.copy_file(csv_fname, csv_fname2)
    print('export_into_csv: {}'.format(op.isfile(csv_fname) and op.isfile(csv_fname2)))
    return csv_fname
def save_subject_ct_trans(subject, ct_name='ct_reg_to_mr.mgz', overwrite=False):
    """Cache the CT volume's coordinate transformation matrices as ct_trans.npz.

    Locates the subject's CT (first under the mmvt ct folder, then under the
    FreeSurfer mri folder -- copying it into mmvt if found there), reads the
    NIfTI/MGZ header and saves ras_tkr2vox / vox2ras_tkr / vox2ras / ras2vox.

    :return: True if the output file exists, False if no CT was found.
    """
    output_fname = op.join(MMVT_DIR, subject, 'ct', 'ct_trans.npz')
    if op.isfile(output_fname) and not overwrite:
        return True
    ct_fname, ct_exist = utils.locating_file(ct_name, ['*.mgz', '*.nii', '*.nii.gz'],
                                             op.join(MMVT_DIR, subject, 'ct'))
    if not ct_exist:
        # Fall back to the FreeSurfer subject's mri folder and copy it into mmvt.
        ct_fname, ct_exist = utils.locating_file(
            ct_name, ['*.mgz', '*.nii', '*.nii.gz'], op.join(SUBJECTS_DIR, subject, 'mri'))
        if ct_exist:
            utils.make_dir(op.join(MMVT_DIR, subject, 'ct'))
            ct_fname = utils.copy(ct_fname, op.join(MMVT_DIR, subject, 'ct'))
        else:
            print("Can't find subject's CT! ({})".format(ct_fname))
            return False
    # Make the CT reachable under its canonical name via a link.
    if ct_fname != op.join(MMVT_DIR, subject, 'ct', ct_name):
        utils.make_link(ct_fname, op.join(MMVT_DIR, subject, 'ct', ct_name))
    print('save_subject_ct_trans: loading {}'.format(ct_fname))
    header = nib.load(ct_fname).header
    ras_tkr2vox, vox2ras_tkr, vox2ras, ras2vox = anat.get_trans_functions(header)
    print('save_subject_ct_trans: Saving {}'.format(output_fname))
    np.savez(output_fname, ras_tkr2vox=ras_tkr2vox, vox2ras_tkr=vox2ras_tkr, vox2ras=vox2ras, ras2vox=ras2vox)
    return op.isfile(output_fname)
def save_evoked_to_blender(mri_subject, events, args, evoked=None):
    """Export evoked EEG data (optionally normalized) for the mmvt add-on.

    Writes 'eeg_data.npy' and 'eeg_data_meta.npz' (names, conditions, dt) into
    the subject's mmvt eeg folder. A stim channel named 'Event' is stripped.

    NOTE(review): the per-condition branch hard-codes 2 conditions in the data
    array and leaves ch_names/dt unset for event_ind > 0 -- confirm events
    always has exactly 2 entries here.
    """
    fol = op.join(MMVT_DIR, mri_subject, 'eeg')
    utils.make_dir(fol)
    if '{cond}' in meg.EVO:
        # One evoked file per condition.
        for event_ind, event_id in enumerate(events.keys()):
            if evoked is None:
                evo = mne.read_evokeds(meg.get_cond_fname(meg.EVO, event_id))
            else:
                evo = evoked[event_id]
            if event_ind == 0:
                ch_names = np.array(evo[0].ch_names)
                dt = np.diff(evo[0].times[:2])[0]  # sample interval in seconds
                data = np.zeros((evo[0].data.shape[0], evo[0].data.shape[1], 2))
            data[:, :, event_ind] = evo[0].data
    else:
        if evoked is None:
            evoked = mne.read_evokeds(meg.EVO)
        data = evoked[0].data
        data = data[..., np.newaxis]  # add a singleton condition axis
        ch_names = np.array(evoked[0].ch_names)
        dt = np.diff(evoked[0].times[:2])[0]
    # Drop the stim channel, if present.
    if 'Event' in ch_names:
        event_ind = np.where(ch_names == 'Event')[0]
        ch_names = np.delete(ch_names, event_ind)
        data = np.delete(data, event_ind, 0)
    if args.normalize_evoked:
        data_max, data_min = utils.get_data_max_min(data, args.norm_by_percentile, args.norm_percs)
        max_abs = utils.get_max_abs(data_max, data_min)
        data = data / max_abs
    np.save(op.join(fol, 'eeg_data.npy'), data)
    np.savez(op.join(fol, 'eeg_data_meta.npz'), names=ch_names, conditions=list(events.keys()), dt=dt)
    return True
def project_on_surface(subject, volume_file, surf_output_fname, target_subject=None, overwrite_surf_data=False,
                       modality='fmri', subjects_dir='', mmvt_dir='', **kargs):
    """Project a volume (fMRI/PET) onto both hemisphere pial surfaces and cache as npy.

    :param subject: source subject.
    :param volume_file: volume to project.
    :param surf_output_fname: per-hemi output template (must contain '{hemi}').
    :param target_subject: surface-space subject; defaults to `subject`.
    :param modality: 'fmri' uses project_volume_data, anything else ('pet') uses
        project_pet_volume_data.
    """
    if target_subject is None:
        target_subject = subject
    if subjects_dir == '':
        subjects_dir = utils.get_link_dir(utils.get_links_dir(), 'subjects', 'SUBJECTS_DIR')
    if mmvt_dir == '':
        mmvt_dir = utils.get_link_dir(utils.get_links_dir(), 'mmvt')
    utils.make_dir(op.join(mmvt_dir, subject, 'fmri'))
    # FreeSurfer projection tools read the subject from the environment.
    os.environ['SUBJECTS_DIR'] = subjects_dir
    os.environ['SUBJECT'] = subject
    for hemi in utils.HEMIS:
        if not op.isfile(surf_output_fname.format(hemi=hemi)) or overwrite_surf_data:
            print('project {} to {}'.format(volume_file, hemi))
            if modality != 'pet':
                surf_data = project_volume_data(volume_file, hemi, subject_id=subject, surf="pial", smooth_fwhm=3,
                                                target_subject=target_subject,
                                                output_fname=surf_output_fname.format(hemi=hemi))
            else:
                surf_data = project_pet_volume_data(subject, volume_file, hemi, surf_output_fname.format(hemi=hemi))
            nans = np.sum(np.isnan(surf_data))
            if nans > 0:
                print('there are {} nans in {} surf data!'.format(nans, hemi))
        # Re-read from disk so cached projections are handled the same way.
        # NOTE(review): nibabel's get_data() is deprecated in newer releases --
        # consider get_fdata() when upgrading.
        surf_data = np.squeeze(nib.load(surf_output_fname.format(hemi=hemi)).get_data())
        output_fname = op.join(mmvt_dir, subject, modality,
                               '{}_{}'.format(modality, op.basename(surf_output_fname.format(hemi=hemi))))
        npy_output_fname = op.splitext(output_fname)[0]
        if not op.isfile('{}.npy'.format(npy_output_fname)) or overwrite_surf_data:
            print('Saving surf data in {}.npy'.format(npy_output_fname))
            utils.make_dir(utils.get_parent_fol(npy_output_fname))
            np.save(npy_output_fname, surf_data)
def aseg_to_srf(subject, subjects_dir, output_fol, region_id, mask_fname, norm_fname,
                overwrite_subcortical_objs=False):
    """Extract one subcortical region from the aseg volume as an .srf surface.

    Runs the FreeSurfer chain mri_pretess -> mri_tessellate -> mris_smooth ->
    mris_convert in a temporary folder, then moves the result to
    `output_fol`/<region_id>.srf. Returns True on success.

    NOTE: `rs` is bound to the current locals(), so the command templates
    (mri_pretess etc., defined at module level) are formatted with this
    function's arguments (subject, tmp_fol, region_id, mask_fname, norm_fname).
    """
    ret = True
    tmp_fol = op.join(subjects_dir, subject, 'tmp', utils.rand_letters(6))
    utils.make_dir(tmp_fol)
    rs = utils.partial_run_script(locals())
    output_fname = op.join(output_fol, '{}.srf'.format(region_id))
    tmp_output_fname = op.join(tmp_fol, '{}.asc'.format(region_id))
    if overwrite_subcortical_objs:
        utils.remove_file(output_fname)
    try:
        rs(mri_pretess)
        rs(mri_tessellate)
        rs(mris_smooth)
        rs(mris_convert)
        if op.isfile(tmp_output_fname):
            shutil.move(tmp_output_fname, output_fname)
            shutil.rmtree(tmp_fol)
        else:
            ret = False
    except:
        print('Error in aseg_to_srf! subject: {}'.format(subject))
        print(traceback.format_exc())
        ret = False
    return ret
def create_unknown_labels(subject, atlas):
    """Return (and cache on disk) an 'unknown' label per hemisphere for `atlas`.

    The unknown label covers every pial vertex not claimed by any atlas label.
    If both cached label files already exist they are simply read back.

    :return: dict hemi -> mne.Label
    """
    labels_fol = op.join(SUBJECTS_DIR, subject, 'label', atlas)
    utils.make_dir(labels_fol)
    template = op.join(labels_fol, 'unknown-{}.label'.format('{hemi}'))
    # Fast path: both cached files exist.
    if utils.both_hemi_files_exist(template):
        return {hemi: mne.read_label(template.format(hemi=hemi), subject)
                for hemi in utils.HEMIS}
    result = {}
    for hemi in utils.HEMIS:
        hemi_labels = read_labels(subject, SUBJECTS_DIR, atlas, hemi=hemi)
        unknown_name = 'unknown-{}'.format(hemi)
        existing_names = [lbl.name for lbl in hemi_labels]
        if unknown_name in existing_names:
            # The atlas already defines an unknown label -- reuse it.
            hemi_unknown = hemi_labels[existing_names.index(unknown_name)]
        else:
            # Unknown = all pial vertices minus every labeled vertex.
            verts, _ = utils.read_pial(subject, MMVT_DIR, hemi)
            leftover = set(range(verts.shape[0]))
            for lbl in hemi_labels:
                leftover -= set(lbl.vertices)
            leftover = np.array(sorted(list(leftover)))
            hemi_unknown = mne.Label(leftover, hemi=hemi, name=unknown_name, subject=subject)
        result[hemi] = hemi_unknown
        if not op.isfile(template.format(hemi=hemi)):
            hemi_unknown.save(template.format(hemi=hemi))
    return result
def run_on_subjects(args):
    """Run main() for every subject in args.subject and summarize per-flag failures."""
    subjects_flags, subjects_errors = {}, {}
    # Ask for the sftp password once, up front, if sftp is enabled.
    if args.sftp:
        args.sftp_password = utils.get_sftp_password(
            args.subject, SUBJECTS_DIR, args.necessary_files, args.sftp_username, args.overwrite_fs_files)
    else:
        args.sftp_password = ''
    for subject in args.subject:
        utils.make_dir(op.join(MMVT_DIR, subject, 'mmvt'))
        try:
            print('*******************************************')
            print('subject: {}, atlas: {}'.format(subject, args.atlas))
            print('*******************************************')
            subjects_flags[subject] = main(subject, args)
        except:
            # Record the failure and keep going with the remaining subjects.
            subjects_errors[subject] = traceback.format_exc()
            print('Error in subject {}'.format(subject))
            print(traceback.format_exc())
    # Report each subject's flags and collect the failing ones.
    errors = defaultdict(list)
    for subject, flags in subjects_flags.items():
        print('subject {}:'.format(subject))
        for flag_type, val in flags.items():
            print('{}: {}'.format(flag_type, val))
            if not val:
                errors[subject].append(flag_type)
    print('Errors:')
    for subject, error in errors.items():
        print('{}: {}'.format(subject, error))
def solve_labels_collision(subject, subjects_dir, atlas, backup_atlas, n_jobs=1):
    """Reassign every pial vertex to its nearest label centroid and rewrite the labels.

    The original atlas label folder is moved aside to `backup_atlas`, then each
    hemisphere's vertices are relabeled by nearest-centroid (cdist/argmin) and
    the new labels are written in parallel.
    """
    now = time.time()
    print('Read labels')
    labels = read_labels(subject, subjects_dir, atlas, n_jobs=n_jobs)
    backup_labels_fol = op.join(subjects_dir, subject, 'label', backup_atlas)
    labels_fol = op.join(subjects_dir, subject, 'label', atlas)
    # Move the current labels aside; a stale backup is discarded first.
    if op.isdir(backup_labels_fol):
        shutil.rmtree(backup_labels_fol)
    os.rename(labels_fol, backup_labels_fol)
    utils.make_dir(labels_fol)
    hemis_verts, labels_hemi, pia_verts = {}, {}, {}
    print('Read surface ({:.2f}s)'.format(time.time() - now))
    for hemi in HEMIS:
        surf_fname = op.join(subjects_dir, subject, 'surf', '{}.pial'.format(hemi))
        hemis_verts[hemi], _ = mne.surface.read_surface(surf_fname)
        labels_hemi[hemi] = [l for l in labels if l.hemi == hemi]
    print('Calc centroids ({:.2f}s)'.format(time.time() - now))
    centroids = calc_labels_centroids(labels_hemi, hemis_verts)
    for hemi in HEMIS:
        print('Calc vertices labeling for {} ({:.2f}s)'.format(hemi, time.time() - now))
        # For every vertex, index of its nearest label centroid.
        hemi_centroids_dist = cdist(hemis_verts[hemi], centroids[hemi])
        vertices_labels_indices = np.argmin(hemi_centroids_dist, axis=1)
        # NOTE(review): len(...)/n_jobs is a float in Python 3 -- assumes
        # utils.chunks accepts a non-integer chunk size; confirm.
        labels_hemi_chunks = utils.chunks(list(enumerate(labels_hemi[hemi])), len(labels_hemi[hemi]) / n_jobs)
        params = [(labels_hemi_chunk, atlas, vertices_labels_indices, hemis_verts, labels_fol)
                  for labels_hemi_chunk in labels_hemi_chunks]
        print('Save labels for {} ({:.2f}s)'.format(hemi, time.time() - now))
        utils.run_parallel(_save_new_labels_parallel, params, n_jobs)
def get_empty_fnames(subject, tasks, args):
    """Link the subject's empty-room recording into each task's MEG folder.

    Reads the remote cfg.txt to find each task's recording day, then links the
    empty-room file recorded on the same day as '<subject>_empty_raw.fif'.

    :param subject: subject name.
    :param tasks: iterable of task names.
    :param args: needs args.remote_subject_dir (template with {subject}).
    :return: dict task -> linked empty-room fif path ('' for all tasks when no
        cfg file is found).
    :raises Exception: if a referenced empty-room file does not exist remotely.
    """
    utils.make_dir(op.join(MEG_DIR, subject))
    utils.make_link(op.join(args.remote_subject_dir.format(subject=subject), 'bem'),
                    op.join(MEG_DIR, subject, 'bem'))
    for task in tasks:
        utils.make_dir(op.join(MEG_DIR, task, subject))
        utils.make_link(op.join(MEG_DIR, subject, 'bem'), op.join(MEG_DIR, task, subject, 'bem'))
    utils.make_link(op.join(MEG_DIR, subject, 'bem'), op.join(SUBJECTS_DIR, subject, 'bem'))
    remote_meg_fol = '/autofs/space/lilli_003/users/DARPA-TRANSFER/meg/{}'.format(subject)
    csv_fname = op.join(remote_meg_fol, 'cfg.txt')
    if not op.isfile(csv_fname):
        print('No cfg file!')
        return {task: '' for task in tasks}
    # First pass: recording day per task.
    days, empty_fnames = {}, {}
    for line in utils.csv_file_reader(csv_fname, ' '):
        for task in tasks:
            if line[4].lower() == task.lower():
                days[task] = line[2]
    print(days)
    # Second pass: link the empty-room recording from each task's day.
    for line in utils.csv_file_reader(csv_fname, ' '):
        if line[4] == 'empty':
            for task in tasks:
                empty_fnames[task] = op.join(MEG_DIR, task, subject, '{}_empty_raw.fif'.format(subject))
                if op.isfile(empty_fnames[task]):
                    continue
                if task not in days:
                    # No session day recorded for this task in cfg.txt; skip it
                    # instead of raising KeyError.
                    continue
                if line[2] == days[task]:
                    empty_fname = op.join(remote_meg_fol, line[0].zfill(3), line[-1])
                    if not op.isfile(empty_fname):
                        # BUG FIX: was `empty_fname[task]` -- indexing a str
                        # with a str raised TypeError instead of this message.
                        raise Exception('empty file does not exist! {}'.format(empty_fname))
                    utils.make_link(empty_fname, empty_fnames[task])
    return empty_fnames
def create_labels_coloring(subject, labels_names, labels_values, coloring_name, norm_percs=(3, 99),
                           norm_by_percentile=True, colors_map='jet'):
    """Map label values to colormap colors and save them as a coloring csv.

    Each output row is: label name, r, g, b, value.
    Returns True if the coloring file was written.
    """
    coloring_dir = op.join(MMVT_DIR, subject, 'coloring')
    utils.make_dir(coloring_dir)
    coloring_fname = op.join(coloring_dir, '{}.csv'.format(coloring_name))
    succeeded = False
    try:
        # Note: norm_by_percentile is currently not forwarded to arr_to_colors.
        colors = utils.arr_to_colors(labels_values, norm_percs=norm_percs, colors_map=colors_map)
        print('Saving coloring to {}'.format(coloring_fname))
        with open(coloring_fname, 'w') as colors_file:
            rows = zip(labels_names, colors, labels_values)
            for name, color, value in rows:
                colors_file.write('{},{},{},{},{}\n'.format(name, *color[:3], value))
        succeeded = op.isfile(coloring_fname)
    except:
        print('Error in create_labels_coloring!')
        print(traceback.format_exc())
    return succeeded
def init_meg(subject):
    """Create the subject's MEG folder and link the remote BEM folder into MEG and SUBJECTS dirs.

    NOTE(review): relies on a module-level `args` (for remote_subject_dir)
    instead of receiving it as a parameter -- confirm `args` is set globally
    before this is called.
    """
    utils.make_dir(op.join(MEG_DIR, subject))
    utils.make_link(
        op.join(args.remote_subject_dir.format(subject=subject), 'bem'),
        op.join(MEG_DIR, subject, 'bem'))
    utils.make_link(op.join(MEG_DIR, subject, 'bem'), op.join(SUBJECTS_DIR, subject, 'bem'))
def change_frames_names(fol, images_prefix, images_type, images_format_len, new_fol_name='new_images'):
    """Copy frame images into a fresh folder, renumbering them sequentially
    with zero-padded indices of width `images_format_len` (1..5).

    Returns the new folder path.
    """
    import shutil
    pattern = op.join(fol, '{}*.{}'.format(images_prefix, images_type))
    frames = glob.glob(pattern)
    # Sort numerically by the index embedded after the prefix.
    frames.sort(key=lambda fname: int(utils.namebase(fname)[len(images_prefix):]))
    pad_formats = {width: '{{0:0>{}}}'.format(width) for width in range(1, 6)}
    parent = op.join(op.sep.join(frames[0].split(op.sep)[:-1]))
    new_fol = op.join(parent, new_fol_name)
    # Start from an empty destination folder.
    utils.delete_folder_files(new_fol)
    utils.make_dir(new_fol)
    for index, src_fname in enumerate(frames):
        padded = pad_formats[images_format_len].format(index + 1)
        dst_fname = op.join(new_fol, '{}{}.{}'.format(images_prefix, padded, images_type))
        print('{} -> {}'.format(src_fname, dst_fname))
        utils.copy_file(src_fname, dst_fname)
    return new_fol
def save_evoked_to_blender(mri_subject, events, args, evoked=None):
    """Export evoked EEG data for the mmvt add-on, storing the normalization range.

    Like the other save_evoked_to_blender variant in this file, but max_abs is
    always computed and saved as minmax=(-max_abs, max_abs) in the meta file.

    NOTE(review): the per-condition branch hard-codes 2 conditions in the data
    array -- confirm events always has exactly 2 entries here.
    """
    fol = op.join(MMVT_DIR, mri_subject, 'eeg')
    utils.make_dir(fol)
    if '{cond}' in meg.EVO:
        # One evoked file per condition.
        for event_ind, event_id in enumerate(events.keys()):
            if evoked is None:
                evo = mne.read_evokeds(meg.get_cond_fname(meg.EVO, event_id))
            else:
                evo = evoked[event_id]
            if event_ind == 0:
                ch_names = np.array(evo[0].ch_names)
                dt = np.diff(evo[0].times[:2])[0]  # sample interval in seconds
                data = np.zeros((evo[0].data.shape[0], evo[0].data.shape[1], 2))
            data[:, :, event_ind] = evo[0].data
    else:
        if evoked is None:
            evoked = mne.read_evokeds(meg.EVO)
        data = evoked[0].data
        data = data[..., np.newaxis]  # add a singleton condition axis
        ch_names = np.array(evoked[0].ch_names)
        dt = np.diff(evoked[0].times[:2])[0]
    # Drop the stim channel, if present.
    if 'Event' in ch_names:
        event_ind = np.where(ch_names == 'Event')[0]
        ch_names = np.delete(ch_names, event_ind)
        data = np.delete(data, event_ind, 0)
    # max_abs is computed unconditionally so minmax can be stored even when
    # the data itself is left unnormalized.
    data_max, data_min = utils.get_data_max_min(data, args.norm_by_percentile, args.norm_percs)
    max_abs = utils.get_max_abs(data_max, data_min)
    if args.normalize_evoked:
        data = data / max_abs
    np.save(op.join(fol, 'eeg_data.npy'), data)
    np.savez(op.join(fol, 'eeg_data_meta.npz'), names=ch_names, conditions=list(events.keys()), dt=dt,
             minmax=(-max_abs, max_abs))
    return True
def create_aparc_aseg_file(subject, atlas, overwrite_aseg_file=False, print_only=False, args={}):
    """Create an aparc+aseg volume for `atlas` and copy it (plus an npy matrix)
    into the subject's mmvt freeview folder.

    NOTE(review): `args={}` is a mutable default argument; it appears to be
    passed through unmodified to fu.create_aparc_aseg_file, but verify.

    :return: True when both the copied volume and its .npy matrix exist.
    """
    if not utils.both_hemi_files_exist(
            op.join(SUBJECTS_DIR, subject, 'label', '{}.{}.annot'.format('{hemi}', atlas))):
        print('No annot file was found for {}!'.format(atlas))
        print('Run python -m src.preproc.anatomy -s {} -a {} -f create_surfaces,create_annotation'.format(subject, atlas))
        return False
    # fu.create_aparc_aseg_file may return either a bare flag or (flag, fname).
    ret = fu.create_aparc_aseg_file(
        subject, atlas, SUBJECTS_DIR, overwrite_aseg_file, print_only, mmvt_args=args)
    if isinstance(ret, Iterable):
        ret, aparc_aseg_fname = ret
    if not ret:
        return False
    aparc_aseg_file = utils.namebase_with_ext(aparc_aseg_fname)
    utils.make_dir(op.join(MMVT_DIR, subject, 'freeview'))
    blender_file = op.join(MMVT_DIR, subject, 'freeview', aparc_aseg_file)
    # Replace any stale copy before copying the fresh volume.
    utils.remove_file(blender_file)
    shutil.copyfile(aparc_aseg_fname, blender_file)
    atlas_mat_fname = utils.change_fname_extension(blender_file, 'npy')
    if not op.isfile(atlas_mat_fname) or overwrite_aseg_file:
        d = nib.load(blender_file)
        x = d.get_data()
        np.save(atlas_mat_fname, x)
    return op.isfile(blender_file) and op.isfile(atlas_mat_fname)
def prepare_darpa_csv(subject, bipolar, atlas, good_channels=None, groups_ordering=None, error_radius=3,
                      elec_length=4, p_threshold=0.05):
    """Write per-electrode MNI coordinates, most-probable ROI and ROI color to csv.

    Produces '<subject>_electrodes_info.csv' in OUTPUT_DIR and an electrodes
    coloring csv under the colin27 coloring folder. Electrodes are grouped and
    emitted in `groups_ordering` order, naturally sorted within each group.

    :param good_channels: optional whitelist of electrode names.
    :param p_threshold: probability threshold for picking the most probable ROI.
    """
    elecs_names, elecs_coords = elec_pre.read_electrodes_file(subject, bipolar)
    elecs_coords_mni = fu.transform_subject_to_mni_coordinates(subject, elecs_coords, SUBJECTS_DIR)
    elec_pre.save_electrodes_coords(elecs_names, elecs_coords_mni, good_channels)
    elecs_coords_mni_dic = {elec_name: elec_coord for (elec_name, elec_coord)
                            in zip(elecs_names, elecs_coords_mni)}
    elecs_probs, _ = utils.get_electrodes_labeling(subject, BLENDER_ROOT_DIR, atlas, bipolar,
                                                   error_radius, elec_length)
    assert(len(elecs_names) == len(elecs_coords_mni) == len(elecs_probs))
    most_probable_rois = elec_pre.get_most_probable_rois(elecs_probs, p_threshold, good_channels)
    rois_colors = elec_pre.get_rois_colors(most_probable_rois)
    elec_pre.save_rois_colors_legend(subject, rois_colors, bipolar)
    utils.make_dir(op.join(BLENDER_ROOT_DIR, 'colin27', 'coloring'))
    results = defaultdict(list)
    for elec_name, elec_probs in zip(elecs_names, elecs_probs):
        # Sanity check: labeling entries must be aligned with the names file.
        assert(elec_name == elec_probs['name'])
        if not good_channels is None and elec_name not in good_channels:
            continue
        group = get_elec_group(elec_name, bipolar)
        # Pick the most probable ROI over cortical and subcortical candidates.
        roi = elec_pre.get_most_probable_roi(
            [*elec_probs['cortical_probs'], *elec_probs['subcortical_probs']],
            [*elec_probs['cortical_rois'], *elec_probs['subcortical_rois']], p_threshold)
        color = rois_colors[utils.get_hemi_indifferent_roi(roi)]
        results[group].append(dict(name=elec_name, roi=roi, color=color))
    coloring_fname = 'electrodes{}_coloring.csv'.format('_bipolar' if bipolar else '')
    with open(op.join(OUTPUT_DIR, '{}_electrodes_info.csv'.format(subject)), 'w') as csv_file, \
            open(op.join(BLENDER_ROOT_DIR, 'colin27', 'coloring', coloring_fname), 'w') as colors_csv_file:
        csv_writer = csv.writer(csv_file, delimiter=',')
        colors_csv_writer = csv.writer(colors_csv_file, delimiter=',')
        elec_ind = 0
        for group in groups_ordering:
            group_res = sorted(results[group], key=lambda x: natural_keys(x['name']))
            for res in group_res:
                csv_writer.writerow([elec_ind, res['name'], *elecs_coords_mni_dic[res['name']],
                                     res['roi'], *res['color']])
                colors_csv_writer.writerow([res['name'], *res['color']])
                elec_ind += 1
def solve_labels_collision(subject, subjects_dir, atlas, backup_atlas, n_jobs=1):
    """Reassign every pial vertex to its nearest label centroid and rewrite the labels.

    NOTE(review): near-duplicate of another solve_labels_collision in this file;
    this variant reads labels with utils.read_labels_parallel.
    """
    now = time.time()
    print('Read labels')
    labels = utils.read_labels_parallel(subject, subjects_dir, atlas, n_jobs)
    backup_labels_fol = op.join(subjects_dir, subject, 'label', backup_atlas)
    labels_fol = op.join(subjects_dir, subject, 'label', atlas)
    # Move the current labels aside; a stale backup is discarded first.
    if op.isdir(backup_labels_fol):
        shutil.rmtree(backup_labels_fol)
    os.rename(labels_fol, backup_labels_fol)
    utils.make_dir(labels_fol)
    hemis_verts, labels_hemi, pia_verts = {}, {}, {}
    print('Read surface ({:.2f}s)'.format(time.time() - now))
    for hemi in HEMIS:
        surf_fname = op.join(subjects_dir, subject, 'surf', '{}.pial'.format(hemi))
        hemis_verts[hemi], _ = mne.surface.read_surface(surf_fname)
        labels_hemi[hemi] = [l for l in labels if l.hemi == hemi]
    print('Calc centroids ({:.2f}s)'.format(time.time() - now))
    centroids = calc_labels_centroids(labels_hemi, hemis_verts)
    for hemi in HEMIS:
        print('Calc vertices labeling for {} ({:.2f}s)'.format(hemi, time.time() - now))
        # For every vertex, index of its nearest label centroid.
        hemi_centroids_dist = cdist(hemis_verts[hemi], centroids[hemi])
        vertices_labels_indices = np.argmin(hemi_centroids_dist, axis=1)
        # NOTE(review): len(...)/n_jobs is a float in Python 3 -- assumes
        # utils.chunks accepts a non-integer chunk size; confirm.
        labels_hemi_chunks = utils.chunks(list(enumerate(labels_hemi[hemi])), len(labels_hemi[hemi]) / n_jobs)
        params = [(labels_hemi_chunk, atlas, vertices_labels_indices, hemis_verts, labels_fol)
                  for labels_hemi_chunk in labels_hemi_chunks]
        print('Save labels for {} ({:.2f}s)'.format(hemi, time.time() - now))
        utils.run_parallel(_save_new_labels_parallel, params, n_jobs)
def calc_time_series_per_label(x, labels, measure, excludes=(), figures_dir='', do_plot=False,
                               do_plot_all_vertices=False):
    """Reduce per-vertex time series to one series per label.

    :param x: data array indexed as x[vertices, 0, 0, time] -- assumed 4-D with
        singleton middle axes (TODO confirm with callers).
    :param labels: list of labels with .vertices and .name.
    :param measure: 'mean', 'cv' (coefficient of variation) or 'pca'/'pca_<k>'
        for k principal components.
    :param excludes: label-name patterns to exclude.
    :return: (labels_data, labels_names); labels_data is
        (n_labels, n_times[, n_comps]).
    """
    import sklearn.decomposition as deco
    import matplotlib.pyplot as plt
    labels, _ = remove_exclude_labels(labels, excludes)
    if measure.startswith('pca'):
        # 'pca' -> 1 component, 'pca_<k>' -> k components.
        comps_num = 1 if '_' not in measure else int(measure.split('_')[1])
        labels_data = np.zeros((len(labels), x.shape[-1], comps_num))
    else:
        labels_data = np.zeros((len(labels), x.shape[-1]))
    labels_names = []
    if do_plot_all_vertices:
        all_vertices_plots_dir = op.join(figures_dir, 'all_vertices')
        utils.make_dir(all_vertices_plots_dir)
    if do_plot:
        measure_plots_dir = op.join(figures_dir, measure)
        utils.make_dir(measure_plots_dir)
    for ind, label in enumerate(labels):
        if measure == 'mean':
            labels_data[ind, :] = np.mean(x[label.vertices, 0, 0, :], 0)
        elif measure.startswith('pca'):
            print(label)
            _x = x[label.vertices, 0, 0, :].T
            # Drop constant columns (vertices), then z-score before PCA.
            remove_cols = np.where(np.all(_x == np.mean(_x, 0), 0))[0]
            _x = np.delete(_x, remove_cols, 1)
            _x = (_x - np.mean(_x, 0)) / np.std(_x, 0)
            comps = 1 if '_' not in measure else int(measure.split('_')[1])
            pca = deco.PCA(comps)
            x_r = pca.fit(_x).transform(_x)
            labels_data[ind, :] = x_r
        elif measure == 'cv':
            # Coefficient of variation: std / mean over the label's vertices.
            label_mean = np.mean(x[label.vertices, 0, 0, :], 0)
            label_std = np.std(x[label.vertices, 0, 0, :], 0)
            labels_data[ind, :] = label_std / label_mean
        labels_names.append(label.name)
        if do_plot_all_vertices:
            plt.figure()
            plt.plot(x[label.vertices, 0, 0, :].T)
            plt.savefig(op.join(all_vertices_plots_dir, '{}.jpg'.format(label.name)))
            plt.close()
        if do_plot:
            plt.figure()
            plt.plot(labels_data[ind, :])
            plt.savefig(op.join(measure_plots_dir, '{}_{}.jpg'.format(measure, label.name)))
            plt.close()
    return labels_data, labels_names
def subcortical_segmentation(subject, overwrite_subcorticals=False, model='subcortical',
                             lookup=None, mask_name='aseg.mgz',
                             mmvt_subcorticals_fol_name='subcortical', template_subject='',
                             norm_name='norm.mgz', overwrite=True):
    """Create subcortical surface meshes from a FreeSurfer segmentation and copy
    them into the subject's MMVT folder.

    Pipeline per region (inside fu.aseg_to_srf):
    1) mri_pretess: changes region segmentation so that the neighbors of all
       voxels have a face in common
    2) mri_tessellate: creates a surface by tessellating
    3) mris_smooth: smooths the new surface
    4) mris_convert: converts the new surface into srf format

    Returns True when every region in the lookup has both a .ply and a .npz
    file in the MMVT folder.
    """
    template_subject = subject if template_subject == '' else template_subject
    norm_fname = op.join(SUBJECTS_DIR, template_subject, 'mri', norm_name)
    if not op.isfile(norm_fname):
        print('norm file does not exist! {}'.format(norm_fname))
        return False
    mask_fname = op.join(SUBJECTS_DIR, template_subject, 'mri', mask_name)
    if not op.isfile(mask_fname):
        print('mask file does not exist! {}'.format(mask_fname))
        return False
    codes_file = op.join(MMVT_DIR, 'sub_cortical_codes.txt')
    if not op.isfile(codes_file):
        print('subcortical codes file does not exist!\n{}'.format(codes_file))
        return False
    function_output_fol = op.join(SUBJECTS_DIR, subject, 'mmvt', '{}_objs'.format(model))
    utils.make_dir(function_output_fol)
    renamed_output_fol = op.join(SUBJECTS_DIR, subject, 'mmvt', model)
    utils.make_dir(renamed_output_fol)
    if lookup is None:
        lookup = load_subcortical_lookup_table()
    obj_files = glob.glob(op.join(function_output_fol, '*.srf'))
    errors = []
    # Regenerate srf meshes only when some are missing or overwrite is requested.
    if len(obj_files) < len(lookup) or overwrite_subcorticals:
        if overwrite:
            utils.delete_folder_files(function_output_fol)
            utils.delete_folder_files(renamed_output_fol)
        print('Trying to write into {}'.format(function_output_fol))
        for region_id in lookup.keys():
            if op.isfile(op.join(function_output_fol, '{}.srf'.format(region_id))):
                continue
            ret = fu.aseg_to_srf(subject, SUBJECTS_DIR, function_output_fol, region_id,
                                 mask_fname, norm_fname, overwrite_subcorticals)
            if not ret:
                errors.append(lookup[region_id])
    if len(errors) > 0:
        print('Errors: {}'.format(','.join(errors)))
    ply_files = glob.glob(op.join(renamed_output_fol, '*.ply'))
    if len(ply_files) < len(lookup) or overwrite_subcorticals:
        convert_and_rename_subcortical_files(subject, function_output_fol, renamed_output_fol,
                                             lookup, mmvt_subcorticals_fol_name)
    blender_dir = op.join(MMVT_DIR, subject, mmvt_subcorticals_fol_name)
    # Sync the MMVT (blender) folder when it is missing files or overwriting.
    if not op.isdir(blender_dir) or len(glob.glob(op.join(blender_dir, '*.ply'))) < len(ply_files) \
            or overwrite_subcorticals:
        utils.delete_folder_files(blender_dir)
        copy_subcorticals_to_mmvt(renamed_output_fol, subject, mmvt_subcorticals_fol_name)
    flag_ok = len(glob.glob(op.join(blender_dir, '*.ply'))) >= len(lookup) and \
        len(glob.glob(op.join(blender_dir, '*.npz'))) >= len(lookup)
    return flag_ok
def create_real_folder(real_fol):
    """Best-effort creation of the given folder.

    An empty string falls back to the project's resources folder. Errors are
    printed (with traceback) but never raised, so a failed folder creation
    does not abort the caller's setup loop.
    """
    try:
        if real_fol == '':
            real_fol = utils.get_resources_fol()
        utils.make_dir(real_fol)
    except Exception:
        # Bug fix: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit. `except Exception` keeps the
        # best-effort behavior without hiding interpreter-level exits.
        print('Error with creating the folder "{}"'.format(real_fol))
        print(traceback.format_exc())
def plot_all_windows(edf_raw, live_channels, T, hz, window, edf_fname, ylim):
    """Plot every consecutive time window of an EDF recording and save each
    figure as a jpg in a 'pics' folder next to the EDF file."""
    pics_fol = op.join(op.split(edf_fname)[0], 'pics')
    utils.make_dir(pics_fol)
    window_starts = np.arange(0, T - window, window)
    for t_start in window_starts:
        t_end = t_start + window
        plot_window(edf_raw, live_channels, t_start, window, hz, ylim)
        print('plotting {}-{}'.format(t_start, t_end))
        plt.savefig(op.join(pics_fol, '{}-{}.jpg'.format(t_start, t_end)))
        plt.close()
def robust_register(subject, subjects_dir, source_fname, target_fname, output_fname, lta_name,
                    cost_function='nmi', print_only=False, **kargs):
    """Run FreeSurfer's mri_robust_register via the project's script runner.

    Returns True in print_only mode, otherwise whether the expected .lta
    transform file was produced.

    NOTE: utils.partial_run_script(locals()) substitutes the *local variable
    names* of this function into the mri_robust_register command template —
    do not rename any local variable here.
    """
    xfms_dir = op.join(subjects_dir, subject, 'mri', 'transforms')
    utils.make_dir(xfms_dir)
    lta_fname = op.join(xfms_dir, lta_name)
    rs = utils.partial_run_script(locals(), print_only=print_only)
    rs(mri_robust_register)
    return True if print_only else op.isfile(lta_fname)
def create_dup_frames_links(subject, dup_frames, fol):
    """Create numbered 'dup_<i>' links to the given frame files inside the
    subject's figures folder, wiping any previous contents. Returns the
    links folder path."""
    links_fol = op.join(MMVT_DIR, subject, 'figures', fol)
    utils.delete_folder_files(links_fol)
    utils.make_dir(links_fol)
    for ind, frame in enumerate(dup_frames):
        link_name = 'dup_{}.{}'.format(ind, utils.file_type(frame))
        utils.make_link(frame, op.join(links_fol, link_name))
    return links_fol
def morph_fmri(args):
    """Morph a hard-coded fMRI contrast ('non-interference-v-interference')
    from subject mg78 to colin27, surface to surface, per hemisphere.

    NOTE(review): `args` is unused, and a second `morph_fmri(morph_from,
    morph_to, nii_template)` definition appears later in this file and will
    shadow this one at import time.
    """
    morph_from, morph_to = 'mg78', 'colin27'
    nii_template = 'non-interference-v-interference_{hemi}.mgz'
    from src.utils import freesurfer_utils as fu
    utils.make_dir(op.join(MMVT_DIR, morph_to, 'fmri'))
    for hemi in utils.HEMIS:
        fu.surf2surf(
            morph_from, morph_to, hemi,
            op.join(MMVT_DIR, morph_from, 'fmri', nii_template.format(hemi=hemi)),
            op.join(MMVT_DIR, morph_to, 'fmri', nii_template.format(hemi=hemi)))
def morph_fmri(morph_from, morph_to, nii_template):
    """Morph per-hemisphere fMRI surface files from one subject's MMVT fmri
    folder to another's, creating the target folder if needed."""
    source_fol = op.join(MMVT_DIR, morph_from, 'fmri')
    target_fol = op.join(MMVT_DIR, morph_to, 'fmri')
    utils.make_dir(target_fol)
    for hemi in utils.HEMIS:
        fname = nii_template.format(hemi=hemi)
        fu.surf2surf(morph_from, morph_to, hemi,
                     op.join(source_fol, fname), op.join(target_fol, fname))
def get_empty_fnames(subject, tasks, args, overwrite=False):
    """Locate per-task empty-room recordings and COR (MRI<->head) files.

    Reads the remote cfg.txt, figures out which recording day each task was
    acquired on, links the matching empty-room file into the local MEG tree,
    and finds the COR file per task.

    Returns (empty_fnames, cors, days) dicts keyed by task, or ('', '', '')
    when the remote cfg file is missing.
    """
    utils.make_dir(op.join(MEG_DIR, subject))
    utils.make_link(op.join(args.remote_subject_dir.format(subject=subject), 'bem'),
                    op.join(MEG_DIR, subject, 'bem'), overwrite=overwrite)
    for task in tasks:
        utils.make_dir(op.join(MEG_DIR, task, subject))
        utils.make_link(op.join(MEG_DIR, subject, 'bem'),
                        op.join(MEG_DIR, task, subject, 'bem'), overwrite=overwrite)
    utils.make_link(op.join(MEG_DIR, subject, 'bem'), op.join(SUBJECTS_DIR, subject, 'bem'),
                    overwrite=overwrite)
    remote_meg_fol = op.join(args.remote_meg_dir, subject)
    csv_fname = op.join(remote_meg_fol, 'cfg.txt')
    if not op.isfile(csv_fname):
        print('No cfg file!')
        return '', '', ''
    days, empty_fnames, cors = {}, {}, {}
    # First pass: find which recording day each task was acquired on.
    for line in utils.csv_file_reader(csv_fname, ' '):
        for task in tasks:
            if line[4].lower() == task.lower():
                days[task] = line[2]
    # Second pass: link the empty-room file recorded on the same day as each task.
    for line in utils.csv_file_reader(csv_fname, ' '):
        if line[4] == 'empty':
            for task in tasks:
                empty_fnames[task] = op.join(
                    MEG_DIR, task, subject, '{}_empty_raw.fif'.format(subject))
                if op.isfile(empty_fnames[task]):
                    continue
                task_day = days[task]
                if line[2] == task_day:
                    empty_fname = op.join(remote_meg_fol, line[0].zfill(3), line[-1])
                    if not op.isfile(empty_fname):
                        # Bug fix: was `empty_fname[task]`, which indexes a string
                        # with a str key and raises TypeError instead of the
                        # intended error message.
                        raise Exception('empty file does not exist! {}'.format(empty_fname))
                    utils.make_link(empty_fname, empty_fnames[task])
    cor_dir = op.join(args.remote_subject_dir.format(subject=subject),
                      'mri', 'T1-neuromag', 'sets')
    for task in tasks:
        # Prefer a task-specific COR file; fall back to the day-specific one.
        if op.isfile(op.join(cor_dir, 'COR-{}-{}.fif'.format(subject, task.lower()))):
            cors[task] = op.join(cor_dir, 'COR-{}-{}.fif'.format('{subject}', task.lower()))
        elif op.isfile(op.join(cor_dir, 'COR-{}-day{}.fif'.format(subject, days[task]))):
            cors[task] = op.join(cor_dir, 'COR-{}-day{}.fif'.format('{subject}', days[task]))
    return empty_fnames, cors, days
def main(subject, args):
    """Run the stimulation pipeline steps selected in args.function."""
    utils.make_dir(op.join(BLENDER_ROOT_DIR, subject, 'electrodes'))
    stim_data = None
    run_all = 'all' in args.function
    if run_all or 'load_stim_file' in args.function:
        stim_data = load_stim_file(subject, args)
    if run_all or 'create_stim_electrodes_positions' in args.function:
        labels = stim_data['labels'] if stim_data else None
        create_stim_electrodes_positions(subject, args, labels)
    if run_all or 'set_labels_colors' in args.function:
        set_labels_colors(subject, args, stim_data)
def prepare_darpa_csv(subject, bipolar, atlas, good_channels=None, groups_ordering=None,
                      error_radius=3, elec_length=4, p_threshold=0.05):
    """Write per-electrode info (MNI coords, most-probable ROI, color) into two
    csv files: a subject info csv in OUTPUT_DIR and a coloring csv used by
    the colin27 MMVT template.

    Electrodes not in `good_channels` (when given) are skipped; rows are
    emitted group by group in `groups_ordering`, sorted by natural name order.
    """
    elecs_names, elecs_coords = elec_pre.read_electrodes_file(subject, bipolar)
    elecs_coords_mni = fu.transform_subject_to_mni_coordinates(
        subject, elecs_coords, SUBJECTS_DIR)
    elec_pre.save_electrodes_coords(elecs_names, elecs_coords_mni, good_channels)
    elecs_coords_mni_dic = {
        elec_name: elec_coord
        for (elec_name, elec_coord) in zip(elecs_names, elecs_coords_mni)
    }
    elecs_probs, _ = utils.get_electrodes_labeling(subject, BLENDER_ROOT_DIR, atlas, bipolar,
                                                   error_radius, elec_length)
    assert (len(elecs_names) == len(elecs_coords_mni) == len(elecs_probs))
    most_probable_rois = elec_pre.get_most_probable_rois(
        elecs_probs, p_threshold, good_channels)
    rois_colors = elec_pre.get_rois_colors(most_probable_rois)
    elec_pre.save_rois_colors_legend(subject, rois_colors, bipolar)
    utils.make_dir(op.join(BLENDER_ROOT_DIR, 'colin27', 'coloring'))
    results = defaultdict(list)
    for elec_name, elec_probs in zip(elecs_names, elecs_probs):
        assert (elec_name == elec_probs['name'])
        if not good_channels is None and elec_name not in good_channels:
            continue
        group = get_elec_group(elec_name, bipolar)
        # Pick the single most probable ROI over both cortical and subcortical
        # candidates for this electrode.
        roi = elec_pre.get_most_probable_roi(
            [*elec_probs['cortical_probs'], *elec_probs['subcortical_probs']],
            [*elec_probs['cortical_rois'], *elec_probs['subcortical_rois']],
            p_threshold)
        color = rois_colors[utils.get_hemi_indifferent_roi(roi)]
        results[group].append(dict(name=elec_name, roi=roi, color=color))
    coloring_fname = 'electrodes{}_coloring.csv'.format('_bipolar' if bipolar else '')
    with open(op.join(OUTPUT_DIR, '{}_electrodes_info.csv'.format(subject)), 'w') as csv_file, \
            open(op.join(BLENDER_ROOT_DIR, 'colin27', 'coloring', coloring_fname), 'w') as colors_csv_file:
        csv_writer = csv.writer(csv_file, delimiter=',')
        colors_csv_writer = csv.writer(colors_csv_file, delimiter=',')
        elec_ind = 0
        for group in groups_ordering:
            group_res = sorted(results[group], key=lambda x: natural_keys(x['name']))
            for res in group_res:
                csv_writer.writerow([
                    elec_ind, res['name'], *elecs_coords_mni_dic[res['name']],
                    res['roi'], *res['color']
                ])
                colors_csv_writer.writerow([res['name'], *res['color']])
                elec_ind += 1
def save_fmri_colors(subject, hemi, contrast_name, fmri_file, surf_name='pial', threshold=2,
                     output_fol=''):
    """Load a per-hemisphere fMRI contrast volume and save its activation
    colors for MMVT; defaults to the subject's blender fmri folder."""
    fmri_fname = fmri_file.format(hemi)
    if not op.isfile(fmri_fname):
        print('No such file {}!'.format(fmri_fname))
        return
    contrast_values = nib.load(fmri_fname).get_data().ravel()
    if output_fol == '':
        output_fol = op.join(BLENDER_ROOT_DIR, subject, 'fmri')
        utils.make_dir(output_fol)
    output_name = op.join(output_fol, 'fmri_{}_{}'.format(contrast_name, hemi))
    _save_fmri_colors(subject, hemi, contrast_values, threshold, output_name,
                      surf_name=surf_name)
def plot_all_files_graph_max(subject, baseline_fnames, event_fname, func_name, bands_names,
                             modality, input_template, sz_name='', sfreq=None, do_plot=False,
                             overwrite=False):
    """Score an event (seizure) file against baseline files per frequency band.

    Returns a dict {band_name: score} produced by calc_score.
    NOTE(review): for modality == 'ieeg' the caller must supply `sfreq`; for
    other modalities it is read from the evoked file.
    """
    if sz_name == '':
        sz_name = utils.namebase(event_fname)
    output_fol = utils.make_dir(
        op.join(MMVT_DIR, subject, 'connectivity',
                '{}_{}'.format(modality, func_name), 'runs', sz_name))
    if modality == 'ieeg':
        # clip = np.load(event_fname)
        t_start, t_end = -5, 5
    else:
        clip = mne.read_evokeds(event_fname)[0]
        sfreq = clip.info['sfreq']
        t_start, t_end = clip.times[0], clip.times[-1]
    windows_length = 500
    half_window = (1 / sfreq) * (windows_length / 2)  # In seconds
    scores = {}
    for band_name in bands_names:
        band_fol = utils.make_dir(
            op.join(MMVT_DIR, subject, 'connectivity',
                    '{}_{}'.format(modality, func_name), band_name))
        con_name = '{}_{}_mi'.format(modality, band_name)
        figure_name = '{}_{}_{}.jpg'.format(con_name, func_name, sz_name)
        output_fname = op.join(output_fol, figure_name)
        # if op.isfile(output_fname) and not overwrite:
        #     print('{} already exist'.format(figure_name))
        #     continue
        # Skip this band entirely when any baseline/event input file is missing.
        all_files_found = True
        for fname in baseline_fnames + [event_fname]:
            if input_template != '':
                file_name = utils.namebase(fname)
                input_fname = input_template.format(file_name=file_name, band_name=band_name)
                if not op.isfile(input_fname):
                    print('{} does not exist!!!'.format(input_fname))
                    all_files_found = False
                    break
        if not all_files_found:
            continue
        scores[band_name] = calc_score(event_fname, baseline_fnames, input_template, band_name,
                                       t_start, t_end, half_window, output_fname, band_fol,
                                       figure_name, do_plot)
    return scores
def transform_mni_to_subject(subject, subjects_dir, volue_fol, volume_fname='sig.mgz',
                             subject_contrast_file_name='sig_subject.mgz', print_only=False):
    """Transform an MNI305 volume into the subject's own space using the
    FreeSurfer mni305-to-subject command templates, then move the resulting
    registration .dat file into the subject's mmvt folder.

    NOTE: utils.partial_run_script(locals()) substitutes the *local variable
    names* into the command templates — do not rename locals here.
    """
    mni305_sig_file = os.path.join(volue_fol, volume_fname)
    subject_sig_file = os.path.join(volue_fol, subject_contrast_file_name)
    rs = utils.partial_run_script(locals(), print_only=print_only)
    rs(mni305_to_subject_reg)
    rs(mni305_to_subject)
    subject_fol = op.join(subjects_dir, subject, 'mmvt')
    utils.make_dir(subject_fol)
    # NOTE(review): get_parent_fol() is called with no arguments — presumably
    # it returns the parent of the current module's folder; confirm that the
    # registration file really lands there.
    shutil.move(op.join(utils.get_parent_fol(), 'mn305_to_{}.dat'.format(subject)),
                op.join(subject_fol, 'mn305_to_{}.dat'.format(subject)))
def save_electrodes_coords(elecs_names, elecs_coords_mni, good_channels=None, bad_channels=None):
    """Filter electrodes by the good/bad channel lists and save the remaining
    MNI coordinates, copying the result into the colin27 MMVT electrodes folder.

    NOTE(review): `subject` and `args` are free variables here (module globals
    or an enclosing scope) — this function raises NameError if they are not
    defined at call time; consider passing them as parameters.
    """
    good_elecs_names, good_elecs_coords_mni = [], []
    for elec_name, elec_coord_min in zip(elecs_names, elecs_coords_mni):
        # Keep the electrode when it passes both filters (each filter is a
        # no-op when its list is None/empty).
        if (not good_channels or elec_name in good_channels) and \
                (not bad_channels or elec_name not in bad_channels):
            good_elecs_names.append(elec_name)
            good_elecs_coords_mni.append(elec_coord_min)
    good_elecs_coords_mni = np.array(good_elecs_coords_mni)
    electrodes_mni_fname = save_electrodes_file(subject, args.bipolar, good_elecs_names,
                                                good_elecs_coords_mni, '_mni')
    output_file_name = op.split(electrodes_mni_fname)[1]
    utils.make_dir(op.join(MMVT_DIR, 'colin27', 'electrodes'))
    blender_file = op.join(MMVT_DIR, 'colin27', 'electrodes',
                           output_file_name.replace('_mni', ''))
    shutil.copyfile(electrodes_mni_fname, blender_file)
def post_meg_preproc(args):
    """Post-process MEG label data per subject/task and copy the results into
    the shared msit-ecr results folder.

    For each subject and task: verify the labels-data files exist for both
    hemispheres, compute label functionals and mean power bands, then copy
    the produced labels_data files to the results folder.
    """
    inv_method, em = 'MNE', 'mean_flip'
    atlas = 'darpa_atlas'
    # NOTE(review): `bands` is unused in this function body.
    bands = dict(theta=[4, 8], alpha=[8, 15], beta=[15, 30], gamma=[30, 55],
                 high_gamma=[65, 200])
    times = (-2, 4)
    subjects = args.subject
    res_fol = utils.make_dir(op.join(utils.get_parent_fol(MMVT_DIR), 'msit-ecr'))
    for subject in subjects:
        args.subject = subject
        for task in args.tasks:
            task = task.lower()
            # Bug fix: the template hard-coded '_lh' and passed '{hemi}' as an
            # extra format argument (silently ignored by str.format), so only
            # the lh file was ever checked. Use a {hemi} placeholder so
            # both_hemi_files_exist can test both hemispheres.
            labels_data_template = op.join(
                MMVT_DIR, subject, 'meg',
                'labels_data_{}_{}_{}_{}_{}.npz'.format(task, atlas, inv_method, em, '{hemi}'))
            if not utils.both_hemi_files_exist(labels_data_template):
                print('label data can\'t be found for {} {}'.format(subject, task))
                continue
            utils.make_dir(op.join(res_fol, subject))
            meg.calc_labels_func(subject, task, atlas, inv_method, em, tmin=0, tmax=0.5,
                                 times=times, norm_data=False)
            meg.calc_labels_mean_power_bands(subject, task, atlas, inv_method, em,
                                             tmin=times[0], tmax=times[1], overwrite=True)
        for fname in [f for f in glob.glob(op.join(MMVT_DIR, subject, 'labels',
                                                   'labels_data', '*')) if op.isfile(f)]:
            shutil.copyfile(fname, op.join(res_fol, subject, utils.namebase_with_ext(fname)))
def get_digitization_points(subject, raw_fname):
    """Extract the digitization points from a raw fif file and save them into
    the subject's MMVT meg folder as an npz archive."""
    info = mne.io.read_raw_fif(raw_fname).info
    dig = info['dig']
    arrays = dict(
        pos=np.array([p['r'] for p in dig]),
        kind=np.array([p['kind'] for p in dig]),
        ident=np.array([p['ident'] for p in dig]),
        coord_frame=np.array([p['coord_frame'] for p in dig]))
    meg_fol = op.join(MMVT_DIR, subject, 'meg')
    utils.make_dir(meg_fol)
    np.savez(op.join(meg_fol, 'digitization_points.npz'), **arrays)
def run_on_subjects(args, main_func, subjects_itr=None, subject_func=None):
    """Run `main_func` over every subject, collecting per-step success flags,
    and print a per-subject summary (including which steps failed) at the end.

    subjects_itr defaults to args.subject; subject_func (when given) extracts
    the subject name from each iterated item.
    """
    if subjects_itr is None:
        subjects_itr = args.subject
    subjects_flags, subjects_errors = {}, {}
    args.n_jobs = utils.get_n_jobs(args.n_jobs)
    # Ask for the sftp password once, up front, only when sftp fetching is on.
    args.sftp_password = utils.get_sftp_password(
        args.subject, SUBJECTS_DIR, args.necessary_files, args.sftp_username,
        args.overwrite_fs_files) \
        if args.sftp else ''
    if '*' in args.subject:
        # Expand wildcard subject patterns against the local subjects folder.
        args.subject = [utils.namebase(fol) for fol in glob.glob(op.join(SUBJECTS_DIR, args.subject))]
    os.environ['SUBJECTS_DIR'] = SUBJECTS_DIR
    for tup in subjects_itr:
        subject = get_subject(tup, subject_func)
        utils.make_dir(op.join(MMVT_DIR, subject, 'mmvt'))
        remote_subject_dir = utils.build_remote_subject_dir(args.remote_subject_dir, subject)
        print('****************************************************************')
        print('subject: {}, atlas: {}'.format(subject, args.atlas))
        print('remote dir: {}'.format(remote_subject_dir))
        print('****************************************************************')
        os.environ['SUBJECT'] = subject
        flags = dict()
        if args.necessary_files == '':
            args.necessary_files = dict()
        try:
            if utils.should_run(args, 'prepare_local_subjects_folder'):
                # *) Prepare the local subject's folder
                flags['prepare_local_subjects_folder'] = prepare_local_subjects_folder(
                    subject, remote_subject_dir, args)
                if not flags['prepare_local_subjects_folder'] and not args.ignore_missing:
                    ans = input('Do you which to continue (y/n)? \n')
                    if not au.is_true(ans):
                        continue
            flags = main_func(tup, remote_subject_dir, args, flags)
            subjects_flags[subject] = flags
        # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit.
        except:
            subjects_errors[subject] = traceback.format_exc()
            print('Error in subject {}'.format(subject))
            print(traceback.format_exc())
    # Summarize: any step whose flag is falsy counts as an error for its subject.
    errors = defaultdict(list)
    for subject, flags in subjects_flags.items():
        print('subject {}:'.format(subject))
        for flag_type, val in flags.items():
            print('{}: {}'.format(flag_type, val))
            if not val:
                errors[subject].append(flag_type)
    if len(errors) > 0:
        print('Errors:')
        for subject, error in errors.items():
            print('{}: {}'.format(subject, error))
def create_freeview_cmd(subject, args):
    """Compose the freeview launch command (T1 + atlas overlay, plus one
    way-point/control-point file per electrode group) and write it to
    run_freeview.sh in the subject's freeview folder."""
    freeview_fol = op.join(MMVT_DIR, subject, 'freeview')
    cmd = 'freeview -v T1.mgz:opacity=0.3 ' + \
        '{0}+aseg.mgz:opacity=0.05:colormap=lut:lut={0}ColorLUT.txt '.format(args.atlas)
    if args.elecs_names:
        groups = set([utils.elec_group(name, args.bipolar) for name in args.elecs_names])
        cmd += '-w ' if args.way_points else '-c '
        postfix = '.label' if args.way_points else '.dat'
        for group in groups:
            cmd += '{}{} '.format(group, postfix)
    utils.make_dir(freeview_fol)
    with open(op.join(freeview_fol, 'run_freeview.sh'), 'w') as sh_file:
        sh_file.write(cmd)
    print(cmd)
def read_psd_mat_file(subject, psd_fname, stim_channel):
    """Read a PSD .mat file and save its labels, psd matrix, time axis and
    frequency bands into the subject's electrodes folder as an npz."""
    mat = utils.read_mat_file_into_bag(psd_fname)
    labels = get_labels(mat)
    # The field name varies between files ('psd' vs 'Psd').
    data = mat.psd if 'psd' in mat else mat.Psd
    time = mat.time.reshape((mat.time.shape[1])) if 'time' in mat else None
    freqs = [(0, 4), (4, 8), (8, 15), (15, 30), (30, 55), (65, 100)]
    out_fol = op.join(BLENDER_ROOT_DIR, subject, 'electrodes')
    utils.make_dir(out_fol)
    np.savez(op.join(out_fol, 'psd_{}'.format(stim_channel)),
             labels=labels, psd=data, time=time, freqs=freqs)
def load_tracula_trk(subject):
    """Convert each TRACULA tract (.trk) under the subject's dpath folder into
    a pickled tracks file in the MMVT dti/tracula folder."""
    tracks_fols = utils.get_subfolders(op.join(DTI_DIR, subject, 'dpath'))
    output_fol = op.join(BLENDER_ROOT_DIR, subject, 'dti', 'tracula')
    utils.make_dir(output_fol)
    for track_fol in tracks_fols:
        track_fol_name = os.path.basename(track_fol)
        print('Reading {}'.format(track_fol_name))
        # Stream the trackvis file lazily, with points in RAS-mm space.
        track_gen, hdr = nib.trackvis.read(op.join(track_fol, 'path.pd.trk'),
                                           as_generator=True, points_space='rasmm')
        hdr = convert_header(hdr)
        vox2ras_trans = get_vox2ras_trans(subject)
        tracks = read_tracks(track_gen, hdr, vox2ras_trans)
        output_fname = op.join(output_fol, '{}.pkl'.format(track_fol_name))
        utils.save(tracks, output_fname)
        print('Save in {}'.format(output_fname))
def main(subject, remote_subject_dir, args, flags):
    """Run the stimulation pipeline steps selected via utils.should_run,
    recording each step's success in `flags`. Returns the updated flags."""
    utils.make_dir(op.join(MMVT_DIR, subject, 'electrodes'))
    stim_data = None
    if utils.should_run(args, 'load_stim_file'):
        stim_data = load_stim_file(subject, args)
        flags['load_stim_file'] = stim_data is not None
    if utils.should_run(args, 'create_stim_electrodes_positions'):
        labels = stim_data['labels'] if stim_data else None
        flags['create_stim_electrodes_positions'] = \
            create_stim_electrodes_positions(subject, args, labels)
    if utils.should_run(args, 'set_labels_colors'):
        flags['set_labels_colors'] = set_labels_colors(subject, args, stim_data)
    return flags
def create_links(links_fol_name='links', gui=True):
    """Interactively create the symlink folder pointing at the user's mmvt,
    subjects, blender, meg, fMRI, electrodes and freesurfer folders.

    #todo: Work only on linux (maybe mac also)
    Folder locations are asked via a GUI directory picker (gui=True) or
    stdin. Returns True when all links exist, None when FreeSurfer is not
    sourced.
    """
    if gui:
        from tkinter.filedialog import askdirectory
    links_fol = utils.get_links_dir(links_fol_name)
    utils.make_dir(links_fol)
    links_names = ['mmvt', 'subjects', 'blender', 'meg', 'fMRI', 'electrodes', 'freesurfer']
    all_links_exist = np.all([op.islink(op.join(links_fol, link_name))
                              for link_name in links_names])
    if all_links_exist:
        return True
    if os.environ.get('FREESURFER_HOME', '') == '':
        print('Please source FreeSurfer and rerun')
        return
    print('Where do you want to put the blend files? ')
    mmvt_fol = askdirectory() if gui else input()
    create_real_folder(mmvt_fol)
    print('Where do you want to store the FreeSurfer recon-all files neccessary for MMVT?\n' +
          'It prefered to create a local folder, because MMVT is going to save files to this directory: ')
    subjects_fol = askdirectory() if gui else input()
    create_real_folder(subjects_fol)
    freesurfer_fol = os.environ['FREESURFER_HOME']
    create_real_folder(freesurfer_fol)
    print('Where did you install Blender? \n')
    blender_fol = askdirectory() if gui else input()
    create_real_folder(blender_fol)
    print('Where do you want to put the MEG files (Enter/Cancel if you are not going to use MEG data): ')
    meg_fol = askdirectory() if gui else input()
    create_real_folder(meg_fol)
    print('Where do you want to put the fMRI files (Enter/Cancel if you are not going to use fMRI data): ')
    fmri_fol = askdirectory() if gui else input()
    create_real_folder(fmri_fol)
    print('Where do you want to put the electrodes files (Enter/Cancel if you are not going to use electrodes data): ')
    electrodes_fol = askdirectory() if gui else input()
    create_real_folder(electrodes_fol)
    # The folder list below must stay aligned with links_names.
    for real_fol, link_name in zip([mmvt_fol, subjects_fol, blender_fol, meg_fol, fmri_fol,
                                    electrodes_fol, freesurfer_fol], links_names):
        try:
            if not op.islink(op.join(links_fol, link_name)):
                os.symlink(real_fol, op.join(links_fol, link_name))
            # Add the default task in meg folder
            if link_name == 'meg' and real_fol != utils.get_resources_fol():
                utils.make_dir(op.join(real_fol, 'default'))
        # NOTE(review): bare except — also catches KeyboardInterrupt/SystemExit.
        except:
            print('Error with folder {} and link {}'.format(real_fol, link_name))
            print(traceback.format_exc())
    return np.all([op.islink(op.join(links_fol, link_name)) for link_name in links_names])
def compare_coh_windows(subject, task, conditions, electrodes,
                        freqs=((8, 12), (12, 25), (25,55), (55,110)), do_plot=False):
    """Compare MEG-predicted vs recorded electrode coherence per window, band
    and condition; write per-pair rms/correlation into a results csv.

    NOTE(review): `bipolar` is a free variable (module global or enclosing
    scope) — this function raises NameError if it is not defined at call time.
    """
    electrodes_coh = np.load(op.join(ELECTRODES_DIR, subject, task,
                                     'electrodes_coh_windows.npy'))
    meg_electrodes_coh = np.load(op.join(ELECTRODES_DIR, subject, task,
                                         'meg_electrodes_ts_coh_windows.npy'))
    figs_fol = op.join(MMVT_DIR, subject, 'figs', 'coh_windows')
    utils.make_dir(figs_fol)
    results = []
    for cond_id, cond in enumerate(conditions):
        now = time.time()
        for freq_id, freq in enumerate(freqs):
            freq = '{}-{}'.format(*freq)
            # Iterate the lower triangle of the electrode-pair matrix.
            indices = list(utils.lower_rec_indices(electrodes_coh.shape[0]))
            for ind, (i, j) in enumerate(indices):
                utils.time_to_go(now, ind, len(indices))
                meg = meg_electrodes_coh[i, j, :, freq_id, cond_id][:22]
                elc = electrodes_coh[i, j, :, freq_id, cond_id][:22]
                # Rescale the MEG trace to the electrode trace's range and mean
                # before comparing.
                elc_diff = np.max(elc) - np.min(elc)
                meg *= elc_diff / (np.max(meg) - np.min(meg))
                meg += np.mean(elc) - np.mean(meg)
                if sum(meg) > len(meg) * 0.99:
                    continue
                data_diff = meg - elc
                # data_diff = data_diff / max(data_diff)
                rms = np.sqrt(np.mean(np.power(data_diff, 2)))
                corr = np.corrcoef(meg, elc)[0, 1]
                results.append(dict(elc1=electrodes[i], elc2=electrodes[j], cond=cond,
                                    freq=freq, rms=rms, corr=corr))
                # Plotting of selected pairs is disabled (guarded by `if False`).
                if False: #do_plot and electrodes[i]=='RPT7' and electrodes[j] == 'RPT5': #corr > 10 and rms < 3:
                    plt.figure()
                    plt.plot(meg, label='pred')
                    plt.plot(elc, label='elec')
                    plt.legend()
                    # plt.title('{}-{} {} {}'.format(electrodes[i], electrodes[j], freq, cond)) # (rms:{:.2f})
                    plt.savefig(op.join(figs_fol, '{:.2f}-{}-{}-{}-{}.jpg'.format(
                        rms, electrodes[i], electrodes[j], freq, cond)))
                    plt.close()
    results_fname = op.join(figs_fol, 'results{}.csv'.format('_bipolar' if bipolar else ''))
    rmss, corrs = [], []
    with open(results_fname, 'w') as output_file:
        for res in results:
            output_file.write('{},{},{},{},{},{}\n'.format(
                res['elc1'], res['elc2'], res['cond'], res['freq'], res['rms'], res['corr']))
            rmss.append(res['rms'])
            corrs.append(res['corr'])
    # The arrays below are built but not returned or used further.
    rmss = np.array(rmss)
    corrs = np.array(corrs)
    pass
def read_new_psd_mat_file(subject, psd_fname, stim_channel, labels):
    """Read a new-format PSD .mat file (one field P1..PF per band) and save a
    stacked (F, T, L) psd array into the subject's electrodes folder."""
    mat = utils.read_mat_file_into_bag(psd_fname)
    freqs = [(0, 4), (4, 8), (8, 15), (15, 30), (30, 55), (65, 100)]
    F = len(freqs)
    psd = None
    for band_ind in range(F):
        band_data = mat['P{}'.format(band_ind + 1)]
        if psd is None:
            # Allocate once, using the first band's (T, L) shape.
            T, L = band_data.shape
            psd = np.zeros((F, T, L))
        psd[band_ind, :, :] = band_data
    del mat
    time = range(psd.shape[1])
    out_fol = op.join(BLENDER_ROOT_DIR, subject, 'electrodes')
    utils.make_dir(out_fol)
    np.savez(op.join(out_fol, 'psd_{}'.format(stim_channel)),
             labels=labels, psd=psd, time=time, freqs=freqs)
def main(subject, remote_subject_dir, args, flags):
    """Run the freeview-bridge pipeline steps selected via utils.should_run,
    recording each step's success in `flags`. Returns the updated flags."""
    utils.make_dir(op.join(MMVT_DIR, subject, 'freeview'))
    args.elecs_pos, args.elecs_names = read_electrodes_pos(subject, args)
    steps = (
        ('copy_T1', lambda: copy_T1(subject)),
        ('create_freeview_cmd', lambda: create_freeview_cmd(subject, args)),
        ('create_electrodes_points', lambda: create_electrodes_points(subject, args)),
        ('create_aparc_aseg_file', lambda: create_aparc_aseg_file(subject, args)),
        ('create_lut_file_for_atlas', lambda: create_lut_file_for_atlas(subject, args.atlas)),
    )
    for step_name, step_func in steps:
        if utils.should_run(args, step_name):
            flags[step_name] = step_func()
    return flags
def run_on_subjects(subject, args):
    """Fetch the subject's FreeSurfer files (optionally via sftp) and run the
    freeview-bridge steps selected in args.function.

    NOTE(review): another run_on_subjects(args, main_func, ...) with a
    different signature exists in this file — at import time only the later
    definition survives.
    """
    args.sftp_password = utils.get_sftp_password(
        args.subject, SUBJECTS_DIR, args.necessary_files, args.sftp_username,
        args.overwrite_fs_files) \
        if args.sftp else ''
    utils.prepare_local_subjects_folder(
        args.necessary_files, subject, args.remote_subject_dir, SUBJECTS_DIR, args.sftp,
        args.sftp_username, args.sftp_domain, args.sftp_password, args.overwrite_fs_files,
        args.print_traceback)
    # Create the files for freeview bridge
    utils.make_dir(op.join(BLENDER_ROOT_DIR, subject, 'freeview'))
    args.elecs_pos, args.elecs_names = read_electrodes_pos(subject, args)
    if 'all' in args.function or 'copy_T1' in args.function:
        copy_T1(subject)
    if 'all' in args.function or 'create_freeview_cmd' in args.function:
        create_freeview_cmd(subject, args)
    if 'all' in args.function or 'create_electrodes_points' in args.function:
        create_electrodes_points(subject, args)
    if 'all' in args.function or 'create_aparc_aseg_file' in args.function:
        create_aparc_aseg_file(subject, args)
    if 'all' in args.function or 'create_lut_file_for_atlas' in args.function:
        create_lut_file_for_atlas(subject, args.atlas)
def save_hemis_curv(subject, atlas):
    """Save binarized curvature (sulci vs gyri) per hemisphere and per atlas
    label into the subject's MMVT surf folder.

    Per hemisphere: read (or load the cached) ?h.curv, binarize (curv > 0),
    save it, then save the per-label slices of the binarized curvature.
    Returns True when both hemisphere curvature files exist.
    """
    out_curv_file = op.join(MMVT_DIR, subject, 'surf', '{hemi}.curv.npy')
    # out_border_file = op.join(MMVT_DIR, subject, 'surf', '{hemi}.curv.borders.npy')
    for hemi in utils.HEMIS:
        # Load in curvature values from the ?h.curv file, caching the
        # binarized result.
        if not op.isfile(out_curv_file.format(hemi=hemi)):
            curv_path = op.join(SUBJECTS_DIR, subject, 'surf', '{}.curv'.format(hemi))
            curv = nib.freesurfer.read_morph_data(curv_path)
            # Bug fix: `np.int` was deprecated in NumPy 1.20 and removed in
            # 1.24; the builtin `int` is what it aliased.
            bin_curv = np.array(curv > 0, int)
            np.save(out_curv_file.format(hemi=hemi), bin_curv)
        else:
            bin_curv = np.load(out_curv_file.format(hemi=hemi))
        labels_fol = op.join(MMVT_DIR, subject, 'surf', '{}_{}_curves'.format(atlas, hemi))
        utils.make_dir(labels_fol)
        labels = lu.read_labels(subject, SUBJECTS_DIR, atlas, hemi=hemi)
        for label in labels:
            labels_curv = bin_curv[label.vertices]
            np.save(op.join(labels_fol, '{}_curv.npy'.format(label.name)), labels_curv)
    return utils.both_hemi_files_exist(out_curv_file)  # and utils.both_hemi_files_exist(out_border_file)
def project_on_surface(subject, volume_file, colors_output_fname, surf_output_fname,
                       target_subject=None, threshold=2, overwrite_surf_data=False,
                       overwrite_colors_file=True):
    """Project a volumetric fMRI file onto both pial surfaces, compute
    activation colors, and copy the color files into the subject's blender
    fmri folder.

    surf/colors output filenames are templates containing '{hemi}'.
    """
    if target_subject is None:
        target_subject = subject
    utils.make_dir(op.join(BLENDER_ROOT_DIR, subject, 'fmri'))
    for hemi in ['rh', 'lh']:
        print('project {} to {}'.format(volume_file, hemi))
        # Reuse a previously projected surface file unless overwriting.
        if not op.isfile(surf_output_fname.format(hemi=hemi)) or overwrite_surf_data:
            surf_data = fu.project_volume_data(volume_file, hemi, subject_id=subject,
                                               surf="pial", smooth_fwhm=3,
                                               target_subject=target_subject,
                                               output_fname=surf_output_fname.format(hemi=hemi))
            nans = np.sum(np.isnan(surf_data))
            if nans > 0:
                print('there are {} nans in {} surf data!'.format(nans, hemi))
            # np.save(surf_output_fname.format(hemi=hemi), surf_data)
        else:
            surf_data = np.load(surf_output_fname.format(hemi=hemi))
        if not op.isfile(colors_output_fname.format(hemi=hemi)) or overwrite_colors_file:
            print('Calulating the activaton colors for {}'.format(surf_output_fname))
            _save_fmri_colors(target_subject, hemi, surf_data, threshold,
                              colors_output_fname.format(hemi=hemi))
        shutil.copyfile(colors_output_fname.format(hemi=hemi),
                        op.join(BLENDER_ROOT_DIR, subject, 'fmri',
                                op.basename(colors_output_fname.format(hemi=hemi))))
def calc_subject_evoked_response(subject, root_fol, task, atlas, events_id, fname_format, fwd_fol,
                                 neccesary_files, remote_subjects_dir, fsaverage,
                                 raw_cleaning_method, inverse_method, indices=None,
                                 overwrite_epochs=False, overwrite_evoked=False, positive=True,
                                 moving_average_win_size=100):
    """Prepare a subject's MEG folders and compute per-label average responses
    per condition (both hemispheres), copying the forward solution if needed.

    Several pipeline stages (folder sync, anatomy preprocessing, evoked /
    inverse / stc computation) are currently commented out in the original
    and are expected to have been run beforehand.
    """
    meg.init_globals(subject, fname_format=fname_format,
                     raw_cleaning_method=raw_cleaning_method,
                     subjects_meg_dir=SUBJECTS_MEG_DIR, task=task,
                     subjects_mri_dir=SUBJECTS_DIR, BLENDER_ROOT_DIR=BLENDER_ROOT_DIR,
                     files_includes_cond=True, fwd_no_cond=True)
    epochs_fname = '{}_arc_rer_{}-epo.fif'.format(subject, raw_cleaning_method)
    events_fname = '{}_arc_rer_{}-epo.csv'.format(subject, raw_cleaning_method)
    if indices is None:
        indices = find_events_indices(op.join(root_fol, events_fname))
    if not indices is None:
        utils.make_dir(op.join(SUBJECTS_MEG_DIR, task, subject))
        utils.make_dir(op.join(SUBJECTS_DIR, subject, 'mmvt'))
        utils.make_dir(op.join(BLENDER_ROOT_DIR, subject))
        # utils.prepare_local_subjects_folder(
        #     neccesary_files, subject, remote_subjects_dir, SUBJECTS_DIR, print_traceback=False)
        # anatomy_preproc.freesurfer_surface_to_blender_surface(subject, overwrite=False)
        # anatomy_preproc.create_annotation_file_from_fsaverage(subject, atlas, fsaverage, False, False, False, True)
        # calc_evoked(indices, op.join(root_fol, epochs_fname), overwrite_epochs, overwrite_evoked)
        # Copy the forward solution locally when it is not already there.
        fwd_fname = '{}_arc_rer_tsss-fwd.fif'.format(subject)
        if not op.isfile(op.join(SUBJECTS_MEG_DIR, task, subject, fwd_fname)):
            shutil.copy(op.join(fwd_fol, fwd_fname),
                        op.join(SUBJECTS_MEG_DIR, task, subject, fwd_fname))
        # meg_preproc.calc_inverse_operator(events_id, calc_for_cortical_fwd=True, calc_for_sub_cortical_fwd=False)
        # stcs = meg_preproc.calc_stc_per_condition(events_id, inverse_method)
        stcs = None
        for hemi in utils.HEMIS:
            meg.calc_labels_avg_per_condition(
                atlas, hemi, 'pial', events_id, labels_from_annot=False, labels_fol='',
                stcs=stcs, inverse_method=inverse_method, positive=positive,
                moving_average_win_size=moving_average_win_size, do_plot=True)