def calc_diff(subject, fmri_file_template='*linda_{hemi}*,*hesheng_{hemi}'):
    """Run the fMRI ``calc_files_diff`` step for *subject*.

    ``fmri_file_template`` is a comma-separated pair of glob patterns
    selecting the two file sets to diff.
    """
    # NOTE(review): the second default pattern has no trailing '*'
    # ('*hesheng_{hemi}' vs '*linda_{hemi}*') -- confirm this is intentional.
    diff_cmd = dict(
        subject=subject,
        function='calc_files_diff',
        fmri_file_template=fmri_file_template,
    )
    pu.run_on_subjects(fmri.read_cmd_args(diff_cmd), fmri.main)
def calc_linda_surf(subject, atlas):
    """Project Linda's volumetric fMRI files onto the surface and normalize
    their names to ``fmri_linda_{hemi}.npy`` / ``linda_minmax.pkl``.

    Skips all work when both per-hemi npy outputs and the minmax pkl already
    exist.  Relies on the module-level ``linda_fol``, ``linda_vol_template``
    and ``linda_template_npy`` templates (defined elsewhere in this file).
    """
    # Check for Linda's output fname -- if both hemis' npy files or the
    # minmax pkl are already in place, there is nothing to do.
    if not utils.both_hemi_files_exist(op.join(fmri.MMVT_DIR, subject, 'fmri',
            'fmri_linda_{}.npy'.format('{hemi}'))) \
            and not op.isfile(op.join(fmri.MMVT_DIR, subject, 'fmri', 'linda_minmax.pkl')):
        # Find Linda's files
        linda_volume_fnames = glob.glob(op.join(
            linda_fol.format(subject=subject), linda_vol_template.format(subject=subject)))
        # All matches live in the same folder; take it from the first match.
        # NOTE(review): raises IndexError when no volume matches -- confirm
        # callers guarantee the files exist.
        linda_volume_folder = utils.get_parent_fol(linda_volume_fnames[0])
        # project linda files on the surface
        args = fmri.read_cmd_args(dict(
            subject=subject, function='project_volume_to_surface',
            remote_fmri_dir=linda_volume_folder,
            fmri_file_template=linda_vol_template.format(subject=subject),
            overwrite_surf_data=True))
        pu.run_on_subjects(args, fmri.main)
        # rename Linda's files to the canonical fmri_linda_{hemi}.npy names
        linda_fnames = glob.glob(op.join(fmri.MMVT_DIR, subject, 'fmri', 'fmri_{}'.format(
            linda_template_npy.format(subject=subject))))
        for fname in linda_fnames:
            hemi = lu.get_label_hemi(utils.namebase(fname))
            target_file = op.join(fmri.MMVT_DIR, subject, 'fmri', 'fmri_linda_{}.npy'.format(hemi))
            if not op.isfile(target_file):
                os.rename(fname, target_file)
        # rename minmax file to the canonical linda_minmax.pkl
        linda_minmax_name = '{}.pkl'.format(utils.namebase(glob.glob(op.join(
            fmri.MMVT_DIR, subject, 'fmri', '{}_minmax.pkl'.format(
                utils.namebase(linda_vol_template.format(subject=subject)))))[0]))
        os.rename(op.join(fmri.MMVT_DIR, subject, 'fmri', linda_minmax_name),
                  op.join(fmri.MMVT_DIR, subject, 'fmri', 'linda_minmax.pkl'))
        # delete the intermediate mgz files produced by the projection step
        mgz_files = glob.glob(op.join(fmri.MMVT_DIR, subject, 'fmri', 'fmri_{}_?h.mgz'.format(
            utils.namebase(linda_vol_template.format(subject=subject)))))
        for mgz_file in mgz_files:
            os.remove(mgz_file)
def calc_freesurfer_surf(subject, atlas):
    """Clean the resting-state 4D data, load the per-hemi surf files, and
    rename the outputs to the ``freesurfer_*`` naming scheme."""
    preproc_steps = (
        # Clean the raw 4D resting-state data.
        dict(subject=subject, atlas=atlas, function='clean_4d_data',
             fmri_file_template='rest.nii*', fsd='rest_linda',
             overwrite_4d_preproc=False),
        # Save the surf files.
        dict(subject=subject, atlas=atlas, function='load_surf_files',
             overwrite_surf_data=True,
             fmri_file_template=fs_surf_template.format(subject=subject, hemi='{hemi}')),
    )
    for step in preproc_steps:
        pu.run_on_subjects(fmri.read_cmd_args(step), fmri.main)
    # Rename the preproc outputs into the freesurfer_* scheme.
    fol = op.join(fmri.MMVT_DIR, subject, 'fmri')
    for hemi in utils.HEMIS:
        os.rename(
            op.join(fol, 'fmri_rest_linda.sm6.{}.{}.npy'.format(subject, hemi)),
            op.join(fol, 'fmri_freesurfer_{}.npy'.format(hemi)))
    os.rename(
        op.join(fol, 'rest_linda.sm6.{}_minmax.pkl'.format(subject)),
        op.join(fol, 'freesurfer_minmax.pkl'))
def create_annot_from_mad(args):
    """Create DKT annot files for each subject from the MAD remote
    SurferOutput tree, then copy missing ones into a local annot folder.

    Fix: the anatomy args returned by ``anat.read_cmd_args`` are bound to
    ``anat_args`` instead of rebinding ``args``; the old code clobbered the
    function parameter, so ``args.atlas`` was read from the wrong object from
    the second subject on.
    """
    remote_subject_dir_template = '/mnt/cashlab/Original Data/MG/{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput'
    for subject in args.subject:
        remote_subject_dir = remote_subject_dir_template.format(subject=subject)
        # Skip subjects that already have both hemis' annot files remotely.
        if utils.both_hemi_files_exist(op.join(remote_subject_dir, 'label', '{hemi}.aparc.DKTatlas.annot')):
            print('{} has already both annot files!'.format(subject))
            continue
        anat_args = anat.read_cmd_args(dict(
            subject=subject.lower(),
            atlas=args.atlas,
            remote_subject_dir=remote_subject_dir_template,
            function='create_annotation',
            ignore_missing=True,
        ))
        pu.run_on_subjects(anat_args, anat.main)
        if not utils.both_hemi_files_exist(op.join(SUBJECTS_DIR, subject.lower(), 'label', '{hemi}.aparc.DKTatlas.annot')):
            print('Couldn\'t create annot files for {}!'.format(subject))
            continue
        local_annot_fol = utils.make_dir(op.join(SUBJECTS_DIR, 'annot_files', subject.lower()))
        for hemi in utils.HEMIS:
            local_annot_fname = op.join(SUBJECTS_DIR, subject.lower(), 'label', '{}.aparc.DKTatlas.annot'.format(hemi))
            remote_annot_fname = op.join(remote_subject_dir, 'label', '{}.aparc.DKTatlas.annot'.format(hemi))
            local_temp_annot_fname = op.join(local_annot_fol, '{}.aparc.DKTatlas.annot'.format(hemi))
            # Only stage a local copy when the remote tree lacks the annot.
            if not op.isfile(remote_annot_fname):
                if op.isfile(local_annot_fname):
                    utils.copy_file(local_annot_fname, local_temp_annot_fname)
                else:
                    print('Can\'t copy {} for {}, it doesn\'t exist!'.format(local_annot_fname, subject))
def compare_connectivity(subject, atlas, n_jobs=6):
    """Compute laus125 label connectivity for the hesheng / linda / freesurfer
    surface data sets and rename each run's outputs per data-set name.

    Fix: the ``fmri_corr_cv_mean.npz`` rename target was
    ``'mri_corr_cv_mean_{}.npz'`` (missing the leading 'f'), inconsistent with
    every sibling rename in this function.
    """
    for name, fol, template in zip(['hesheng', 'linda', 'freesurfer'],
                                   [hesheng_surf_fol, linda_surf_fol, fs_surf_fol],
                                   [hesheng_template, linda_hemi_template, fs_surf_template]):
        output_fname_template = op.join(
            fmri.MMVT_DIR, subject, 'fmri',
            '{}_labels_data_laus125_mean_{}.npz'.format(name, '{hemi}'))
        # Extract per-label mean time series only if not already done.
        if not utils.both_hemi_files_exist(output_fname_template):
            args = fmri.read_cmd_args(dict(
                subject=subject, atlas=atlas, function='analyze_4d_data',
                fmri_file_template=template, remote_fmri_dir=fol,
                labels_extract_mode='mean', overwrite_labels_data=False))
            pu.run_on_subjects(args, fmri.main)
            for hemi in utils.HEMIS:
                os.rename(
                    op.join(fmri.MMVT_DIR, subject, 'fmri',
                            'labels_data_laus125_mean_{}.npz'.format(hemi)),
                    output_fname_template.format(hemi=hemi))
        # Windowed correlation / coefficient-of-variation connectivity.
        args = con.read_cmd_args(dict(
            subject=subject, atlas='laus125', function='calc_lables_connectivity',
            connectivity_modality='fmri', connectivity_method='corr,cv',
            labels_extract_mode='mean', windows_length=34, windows_shift=4,
            save_mmvt_connectivity=False, calc_subs_connectivity=False,
            labels_name=name, recalc_connectivity=True, n_jobs=n_jobs))
        pu.run_on_subjects(args, con.main)
        # Tag the generic output names with the data-set name.
        conn_fol = op.join(con.MMVT_DIR, subject, 'connectivity')
        coloring_fol = op.join(con.MMVT_DIR, subject, 'coloring')
        os.rename(op.join(conn_fol, 'fmri_corr.npy'),
                  op.join(conn_fol, 'fmri_corr_{}.npy'.format(name)))
        os.rename(op.join(conn_fol, 'fmri_corr_cv_mean.npz'),
                  op.join(conn_fol, 'fmri_corr_cv_mean_{}.npz'.format(name)))
        os.rename(op.join(conn_fol, 'fmri_corr_cv_mean_mean.npz'),
                  op.join(conn_fol, 'fmri_corr_cv_mean_mean_{}.npz'.format(name)))
        os.rename(op.join(coloring_fol, 'fmri_corr_cv_mean.csv'),
                  op.join(coloring_fol, 'fmri_corr_cv_mean_{}.csv'.format(name)))
def prepare_subject_folder_from_franklin(subject, args):
    """Prepare *subject*'s anatomy folder from the old franklin subjects dir."""
    anat_args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    anat_args.function = 'prepare_subject_folder'
    anat_args.remote_subject_dir = op.join(
        '/autofs/space/franklin_003/users/npeled/subjects_old/{}'.format(subject))
    pu.run_on_subjects(anat_args, anat.main)
def load_edf_data_seizure_2(args):
    """Build Blender raw data from the DMphaseIISz_TG seizure EDF recording."""
    edf_fname = op.join(ELECTRODES_DIR, args.subject[0], 'DMphaseIISz_TG.edf')
    elecs_args = elecs.read_cmd_args(utils.Bag(
        subject=args.subject,
        atlas='laus125',
        function='create_raw_data_for_blender',
        task='seizure',
        bipolar=args.bipolar,
        raw_fname=edf_fname,
        start_time='00:00:00',
        seizure_onset='00:01:20',
        seizure_end='00:02:00',
        baseline_onset='00:00:00',
        baseline_end='00:01:00',
        lower_freq_filter=2,
        upper_freq_filter=70,
        power_line_notch_widths=5,
        ref_elec='PST1',
        normalize_data=False,
        calc_zscore=False,
        factor=1000,
        channels_names_mismatches='LFO=LIF'))
    pu.run_on_subjects(elecs_args, elecs.main)
def darpa_prep_huygens(subject, args):
    """Prepare the subject folder from kara's huygens SurferOutput tree.

    The remote folder name capitalizes the first two characters of the
    subject code.
    """
    anat_args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    darpa_subject = subject[:2].upper() + subject[2:]
    anat_args.function = 'prepare_subject_folder'
    anat_args.remote_subject_dir = op.join(
        '/space/huygens/1/users/kara/{}_SurferOutput/'.format(darpa_subject))
    pu.run_on_subjects(anat_args, anat.main)
def load_edf_data_seizure(args):
    """Create raw seizure data from the EDF file selected by ``args.edf``."""
    edf_fname = op.join(ELECTRODES_DIR, args.subject[0], '{}.edf'.format(args.edf))
    elecs_args = elecs.read_cmd_args(utils.Bag(
        subject=args.subject,
        atlas='laus125',
        function='create_raw_data_from_edf',
        task='seizure',
        bipolar=args.bipolar,
        raw_fname=edf_fname,
        start_time='00:00:00',
        seizure_onset='00:01:20',
        seizure_end='00:02:00',
        baseline_onset='00:00:00',
        baseline_end='00:01:00',
        lower_freq_filter=1,
        upper_freq_filter=150,
        power_line_notch_widths=5,
        ref_elec='PST1',
        normalize_data=False,
        calc_zscore=False,
        factor=1000))
    pu.run_on_subjects(elecs_args, elecs.main)
def calc_hesheng_surf(subject, atlas):
    """Copy Hesheng's per-hemi nii.gz files, convert them to mgz, and load
    them as surface files named ``hesheng_{hemi}``.

    Skips all work when both hemis' npy outputs and the minmax pkl already
    exist.  Relies on the module-level ``hesheng_surf_fol`` and
    ``hesheng_template`` templates (defined elsewhere in this file).
    """
    subject_fol = op.join(fmri.MMVT_DIR, subject, 'fmri')
    # Only run if the final outputs are not already in place.
    if not (utils.both_hemi_files_exist(
            op.join(subject_fol, 'fmri_hesheng_{hemi}.npy')) and
            op.isfile(op.join(subject_fol, 'hesheng_minmax.pkl'))):
        # Copy and rename Hesheng's files
        hesheng_fnames = glob.glob(op.join(
            hesheng_surf_fol.format(subject=subject), hesheng_template.format(subject=subject)))
        for fname in hesheng_fnames:
            hemi = lu.get_label_hemi_invariant_name(utils.namebase(fname))
            target_file = op.join(fmri.FMRI_DIR, subject, 'hesheng_{}.nii.gz'.format(hemi))
            mgz_target_file = utils.change_fname_extension(target_file, 'mgz')
            # Convert to mgz via a temporary nii.gz copy, then drop the copy.
            if not op.isfile(mgz_target_file):
                shutil.copy(fname, target_file)
                fu.nii_gz_to_mgz(target_file)
                os.remove(target_file)
        # Load Hesheng's files
        args = fmri.read_cmd_args(
            dict(subject=subject, atlas=atlas, function='load_surf_files',
                 overwrite_surf_data=True, fmri_file_template='hesheng_{hemi}.mgz'))
        pu.run_on_subjects(args, fmri.main)
def project_and_calc_clusters(args):
    """Convert Analyze (.img) volumes under ``args.root_fol`` to mgz, sanitize
    their names, then project every volume onto the surface and find clusters.

    NOTE(review): a second function with this same name appears later in the
    module and shadows this one at import time.
    """
    if not op.isdir(args.root_fol):
        print('You should first set args.root_fol!')
        return False
    # Convert every .img volume to mgz; strip spaces/commas from the new name.
    for img_fname in (f for f in glob.glob(op.join(args.root_fol, '*.img')) if op.isfile(f)):
        converted = fu.mri_convert_to(img_fname, 'mgz', overwrite=False)
        if ' ' in utils.namebase(converted):
            sanitized = utils.namebase_with_ext(converted).replace(' ', '_').replace(',', '').lower()
            os.rename(converted, op.join(utils.get_parent_fol(converted), sanitized))
    # Project each nii/nii.gz/mgz volume and run the cluster search on it.
    volume_fnames = [
        f for f in glob.glob(op.join(args.root_fol, '*'))
        if op.isfile(f) and utils.file_type(f) in ('nii', 'nii.gz', 'mgz')
    ]
    for volume_fname in volume_fnames:
        fmri_args = fmri.read_cmd_args(dict(
            subject=args.subject,
            function='project_volume_to_surface,find_clusters',
            fmri_file_template=volume_fname,
            threshold=args.cluster_threshold))
        pu.run_on_subjects(fmri_args, fmri.main)
def darpa(args):
    """Prepare each DARPA subject's folder from kara's huygens SurferOutput."""
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        remote_dir = op.join('/space/huygens/1/users/kara/{}_SurferOutput/'.format(darpa_subject))
        anat_args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            remote_subject_dir=remote_dir,
        ))
        pu.run_on_subjects(anat_args, anat.main)
def save_dynamic_activity_map(args):
    """Save dynamic activity maps from the smoothed (sm6) upsampled surfaces."""
    cmd = dict(
        subject=args.subject,
        atlas=args.atlas,
        function='save_dynamic_activity_map',
        fmri_file_template='fmcpr.up.sm6.{subject}.{hemi}.*',
        overwrite_activity_data=True)
    pu.run_on_subjects(fmri.read_cmd_args(cmd), fmri.main)
def calc_electrodes_con(args):
    """Save electrode coherence for interference vs non-interference.

    CLI equivalent:
    -s mg78 -a laus250 -f save_electrodes_coh --threshold_percentile 95 -c interference,non-interference
    """
    con_args = con.read_cmd_args(utils.Bag(
        subject=args.subject,
        atlas='laus250',
        function='save_electrodes_coh',
        threshold_percentile=95,
        conditions='interference,non-interference'))
    pu.run_on_subjects(con_args, con.main)
def get_ras_from_mad(args):
    """Fetch the electrodes RAS coordinates file from the MAD remote folder."""
    remote_fol = '/mnt/cashlab/Original Data/MG/{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput'
    elecs_args = elecs.read_cmd_args(utils.Bag(
        subject=args.subject,
        function='get_ras_file',
        remote_ras_fol=remote_fol))
    pu.run_on_subjects(elecs_args, elecs.main)
def get_subject_files_from_server(args):
    """Prepare the subject folder over SFTP from the door.nmr MGH server."""
    cmd = dict(
        subject=args.subject,
        atlas=args.atlas,
        function='prepare_subject_folder',
        sftp=True,
        sftp_username='******',
        sftp_domain='door.nmr.mgh.harvard.edu',
        remote_subject_dir='/space/thibault/1/users/npeled/subjects/{subject}')
    pu.run_on_subjects(anat.read_cmd_args(cmd), anat.main)
def get_subject_files_using_sftp_from_ohad(subject, args):
    """Prepare the subject folder from Ohad's machine over a (tunneled) SFTP
    connection on 127.0.0.1:4444."""
    subject_dir = '/media/ohadfel/New_Volume/subs/{}'.format(subject)
    sftp_args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    sftp_args.function = 'prepare_subject_folder'
    sftp_args.sftp = True
    sftp_args.sftp_username = '******'
    sftp_args.sftp_domain = '127.0.0.1'
    sftp_args.sftp_port = 4444
    sftp_args.sftp_subject_dir = subject_dir
    sftp_args.remote_subject_dir = subject_dir
    pu.run_on_subjects(sftp_args, anat.main)
def darpa_prep_lili(subject, args):
    """Prepare the subject folder from the DARPA-Recons tree on lilli via SFTP."""
    anat_args = anat.read_cmd_args([
        '-s', subject, '-a', args.atlas,
        '--sftp_username', args.sftp_username,
        '--sftp_domain', args.sftp_domain,
    ])
    anat_args.sftp = True
    anat_args.function = 'prepare_subject_folder'
    anat_args.remote_subject_dir = op.join(
        '/autofs/space/lilli_001/users/DARPA-Recons', subject)
    pu.run_on_subjects(anat_args, anat.main)
def get_subject_files_from_mad(args):
    """Prepare each subject's folder from the MAD remote SurferOutput tree.

    Fix: the per-subject anatomy args are bound to ``anat_args`` instead of
    rebinding ``args``; the old code clobbered the function parameter, so
    ``args.atlas`` was read from the wrong object from the second subject on.

    NOTE(review): this module defines ``get_subject_files_from_mad`` more than
    once; the last definition wins at import time.
    """
    for subject in args.subject:
        anat_args = anat.read_cmd_args(dict(
            subject=subject,
            atlas=args.atlas,
            remote_subject_dir='/mnt/cashlab/Original Data/MG/{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput',
            function='prepare_subject_folder'))
        pu.run_on_subjects(anat_args, anat.main)
def fmri_msit_pipeline(args):
    """Run the MSIT fMRI pipeline for each subject.

    CLI equivalent:
    -s pp009 -a laus250 -f fmri_pipeline -t MSIT --contrast_template "*Interference*"

    Fix: the per-subject fMRI args are bound to ``fmri_args`` instead of
    rebinding ``args``; the old code clobbered the function parameter, so
    ``args.atlas`` was read from the wrong object from the second subject on.
    """
    for subject in args.subject:
        fmri_args = fmri.read_cmd_args(dict(
            subject=subject,
            atlas=args.atlas,
            function='fmri_pipeline',
            task='MSIT',
            contrast_template='*Interference*'))
        pu.run_on_subjects(fmri_args, fmri.main)
def calc_subcorticals_activity(args):
    """Extract mean subcortical activity from the mni305 resting-state volumes."""
    fmri_args = fmri.read_cmd_args(dict(
        subject=args.subject,
        function='calc_subcorticals_activity',
        # Alternative input: fmri_file_template='rest*'
        fmri_file_template='fmcpr.sm6.mni305.2mm.*',
        labels_extract_mode='mean',  # other modes: pca, pca_2, pca_4, pca_8
        overwrite_subs_data=True))
    pu.run_on_subjects(fmri_args, fmri.main)
def anat_preproc_clin(args):
    """Full anatomy preproc (plus outer-skin surface and BEM check) per subject.

    CLI equivalent:
    python -m src.preproc.examples.anatomy -s nmr01426 -f anat_preproc_clin

    Fix: the per-subject args are bound to ``anat_args`` instead of rebinding
    ``args``; the old code clobbered the function parameter, so
    ``args.fs_root`` and ``args.n_jobs`` raised AttributeError (or read the
    wrong values) from the second subject on.
    """
    for subject in args.subject:
        anat_args = anat.read_cmd_args(dict(
            subject=subject,
            function='all,create_outer_skin_surface,check_bem',
            remote_subject_dir=op.join(args.fs_root, subject),
            n_jobs=args.n_jobs,
        ))
        pu.run_on_subjects(anat_args, anat.main)
def recon_all(args):
    """Run FreeSurfer recon-all from per-subject mprage.nii files.

    CLI equivalent:
    python -m src.preoroc.anatomy -f recon-all --ignore_missing 1 --n_jobs 1
        --nifti_fname "/autofs/space/thibault_001/users/npeled/T1/{subject}/mprage.nii"
        -s "wake5,wake6,wake7,wake8"
    """
    cmd = dict(
        subject=args.subject,
        function='recon-all',
        nifti_fname='/autofs/space/thibault_001/users/npeled/T1/{subject}/mprage.nii',
        ignore_missing=True,
        n_jobs=1,
    )
    pu.run_on_subjects(anat.read_cmd_args(cmd), anat.main)
def darpa_sftp(args):
    """Prepare each DARPA subject's folder from huygens over SFTP."""
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        remote_dir = op.join('/space/huygens/1/users/kara/{}_SurferOutput/'.format(darpa_subject))
        anat_args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            remote_subject_dir=remote_dir,
            sftp=True,
            sftp_username='******',
            sftp_domain='door.nmr.mgh.harvard.edu',
        ))
        pu.run_on_subjects(anat_args, anat.main)
def get_subject_files_using_sftp(args):
    """Prepare each subject's folder over SFTP using the caller-supplied
    credentials, atlas and remote dir.

    Fix: the per-subject args are bound to ``anat_args`` instead of rebinding
    ``args``; the old code clobbered the function parameter, so the SFTP
    credentials, atlas and remote dir were read from the wrong object from the
    second subject on.
    """
    for subject in args.subject:
        anat_args = anat.read_cmd_args(dict(
            subject=subject,
            atlas=args.atlas,
            sftp_username=args.sftp_username,
            sftp_domain=args.sftp_domain,
            sftp=True,
            remote_subject_dir=args.remote_subject_dir,
            function='prepare_subject_folder'))
        pu.run_on_subjects(anat_args, anat.main)
def project_volume_to_surface(args):
    """Project f*.gz fMRI volumes onto the surface, fetching inputs over SFTP."""
    fmri_args = fmri.read_cmd_args(dict(
        subject=args.subject,
        function='project_volume_to_surface',
        fmri_file_template='f*.gz',
        sftp_username=args.sftp_username,
        sftp_domain=args.sftp_domain,
        sftp=True,
        remote_subject_dir=args.remote_subject_dir,
    ))
    pu.run_on_subjects(fmri_args, fmri.main)
def get_subject_files_from_mad(args=None, subjects=None, necessary_files=None):
    """Prepare subject folders from the MAD remote tree.

    Callable either with parsed ``args`` (subjects and atlas taken from it) or
    with an explicit ``subjects`` list.

    Fixes: (1) calling with ``args=None`` used to crash on ``args.atlas``; the
    atlas is now only forwarded when ``args`` is given. (2) the per-subject
    anatomy args no longer rebind ``args``, which clobbered the parameter.

    NOTE(review): this module defines ``get_subject_files_from_mad`` more than
    once; the last definition wins at import time.
    """
    subjects = args.subject if args is not None else subjects
    for subject in subjects:
        cmd = dict(
            subject=subject,
            remote_subject_dir='/mnt/cashlab/Original Data/MG/{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput',
            function='prepare_subject_folder')
        if args is not None:
            cmd['atlas'] = args.atlas
        anat_args = anat.read_cmd_args(cmd)
        if necessary_files is not None:
            anat_args.necessary_files = necessary_files
        pu.run_on_subjects(anat_args, anat.main)
def get_subject_files_from_mad(org_args=None, subjects=None, necessary_files=None):
    """Prepare subject folders from the MAD remote tree, deriving the top-level
    folder from the subject code's first two letters.

    Fix: calling with ``org_args=None`` and an explicit ``subjects`` list used
    to crash on ``org_args.atlas``; the atlas is now only forwarded when
    ``org_args`` is given.

    NOTE(review): this module defines ``get_subject_files_from_mad`` more than
    once; the last definition wins at import time.
    """
    subjects = org_args.subject if org_args is not None else subjects
    for subject in subjects:
        root_fol = '/mnt/cashlab/Original Data/{}'.format(subject[:2].upper())
        cmd = dict(
            subject=subject,
            remote_subject_dir=op.join(root_fol, '{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput'),
            function='prepare_subject_folder')
        if org_args is not None:
            cmd['atlas'] = org_args.atlas
        anat_args = anat.read_cmd_args(cmd)
        if necessary_files is not None:
            anat_args.necessary_files = necessary_files
        pu.run_on_subjects(anat_args, anat.main)
def load_edf_data_rest(args):
    """Create Blender raw data from a resting-state EDF recording."""
    elecs_args = elecs.read_cmd_args(utils.Bag(
        subject=args.subject,
        function='create_raw_data_for_blender',
        task='rest',
        bipolar=False,
        remove_power_line_noise=True,
        raw_fname='MG102_d3_Fri.edf',
        # rest_onset_time='6:50:00',
        # end_time='7:05:00',
        normalize_data=False,
        preload=False))
    pu.run_on_subjects(elecs_args, elecs.main)
def project_and_calc_clusters(args):
    """Project every nii/nii.gz/mgz volume under ``args.root_fol`` onto the
    surface and run the cluster search.

    NOTE(review): shadows the earlier function of the same name in this
    module, and unlike it passes no ``threshold`` -- confirm the default
    cluster threshold is intended here.
    """
    if not op.isdir(args.root_fol):
        print('You should first set args.root_fol!')
        return False
    volume_fnames = [
        f for f in glob.glob(op.join(args.root_fol, '*'))
        if op.isfile(f) and utils.file_type(f) in ('nii', 'nii.gz', 'mgz')
    ]
    for volume_fname in volume_fnames:
        fmri_args = fmri.read_cmd_args(dict(
            subject=args.subject,
            function='project_volume_to_surface,find_clusters',
            fmri_file_template=volume_fname))
        pu.run_on_subjects(fmri_args, fmri.main)
# NOTE(review): this chunk opens mid-function -- the two flag assignments and
# the `return flags` below are the tail of an enclosing main(subject, args)
# whose `def` header lies outside this view; indentation restored best-effort.
    if utils.should_run(args, 'create_aparc_aseg_file'):
        flags['create_aparc_aseg_file'] = create_aparc_aseg_file(subject, args)

    if utils.should_run(args, 'create_lut_file_for_atlas'):
        flags['create_lut_file_for_atlas'] = create_lut_file_for_atlas(subject, args.atlas)

    return flags


def read_cmd_args(argv):
    """Parse the freeview-preprocessing command-line arguments.

    Returns a ``utils.Bag`` of the parsed options, with the anatomy files this
    preproc needs ('T1.mgz', 'orig.mgz') attached as ``necessary_files``.
    """
    import argparse
    from src.utils import args_utils as au
    parser = argparse.ArgumentParser(description='MMVT freeview preprocessing')
    parser.add_argument('-b', '--bipolar', help='bipolar', required=False, default=0, type=au.is_true)
    parser.add_argument('--overwrite_aseg_file', help='overwrite_aseg_file', required=False, default=0, type=au.is_true)
    parser.add_argument('--create_volume_file', help='create_volume_file', required=False, default=1, type=au.is_true)
    parser.add_argument('--electrodes_pos_fname', help='electrodes_pos_fname', required=False, default='')
    parser.add_argument('--way_points', help='way_points', required=False, default=0, type=au.is_true)
    pu.add_common_args(parser)
    args = utils.Bag(au.parse_parser(parser, argv))
    # Files the preproc must fetch from the subject's FreeSurfer folder.
    args.necessary_files = {'mri': ['T1.mgz', 'orig.mgz']}
    print(args)
    return args


if __name__ == '__main__':
    args = read_cmd_args(None)
    pu.check_freesurfer()
    pu.run_on_subjects(args, main)
    print('finish!')
# NOTE(review): this chunk opens mid-function -- the statements down to
# `return flags` are the tail of an enclosing EEG main(...) whose `def`
# header lies outside this view; indentation restored best-effort.
    flags['save_evoked_to_blender'] = save_evoked_to_blender(mri_subject, conditions, args, evoked)
    # Fall back to the EEG-specific head-MRI transformation when the default
    # COR file is missing; fail loudly if neither exists.
    if not op.isfile(meg.COR):
        eeg_cor = op.join(meg.SUBJECT_MEG_FOLDER, '{}-cor-trans.fif'.format(subject))
        if not op.isfile(eeg_cor):
            raise Exception("Can't find head-MRI transformation matrix. Should be in {} or in {}".format(meg.COR, eeg_cor))
        meg.COR = eeg_cor
    flags = meg.calc_fwd_inv_wrapper(subject, mri_subject, conditions, args, flags)
    flags = meg.calc_stc_per_condition_wrapper(subject, conditions, inverse_method, args, flags)
    return flags


def read_cmd_args(argv=None):
    """Return the MEG command-line args adjusted for an EEG-only pipeline:
    pick/forward EEG channels only, drop MEG, disable rejection."""
    args = meg.read_cmd_args(argv)
    args.pick_meg = False
    args.pick_eeg = True
    args.reject = False
    args.fwd_usingMEG = False
    args.fwd_usingEEG = True
    return args


if __name__ == '__main__':
    from src.utils import preproc_utils as pu
    from itertools import product
    args = read_cmd_args()
    # Pair each subject with its MRI subject, crossed with every inverse method.
    subjects_itr = product(zip(args.subject, args.mri_subject), args.inverse_method)
    # Extract the MRI subject from a ((subject, mri_subject), inverse_method) item.
    subject_func = lambda x: x[0][1]
    pu.run_on_subjects(args, main, subjects_itr, subject_func)
    print('finish!')