import glob
import os.path as op

from src.preproc import anatomy as anat

# The helpers below follow the project's usual layout; these import paths are an
# assumption (only src.preproc.anatomy is confirmed by this file itself):
from src.preproc import fMRI as fmri
from src.utils import utils
from src.utils import args_utils as au
from src.utils import freesurfer_utils as fu
from src.utils import preproc_utils as pu

# SUBJECTS_DIR and MMVT_DIR are assumed to be resolved from the project's links
# folder (the exact helper names are an assumption):
SUBJECTS_DIR = utils.get_link_dir(utils.get_links_dir(), 'subjects', 'SUBJECTS_DIR')
MMVT_DIR = utils.get_link_dir(utils.get_links_dir(), 'mmvt')


def anatomy_preproc(args):
    args = anat.read_cmd_args(dict(
        subject=args.subject,
        remote_subject_dir='/autofs/space/lilli_001/users/DARPA-Recons/{subject}',
        high_level_atlas_name='darpa_atlas'
    ))
    anat.call_main(args)

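# Hedged usage sketch, assuming the CLI convention shown in the other examples in this
# module (e.g. anat_preproc_clin below); 'subject1' is a placeholder subject code:
#   python -m src.preproc.examples.anatomy -s subject1 -f anatomy_preproc
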
def prepare_subject_folder_from_franklin(subject, args):
    args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    args.remote_subject_dir = op.join(
        '/autofs/space/franklin_003/users/npeled/subjects_old/{}'.format(subject))
    args.function = 'prepare_subject_folder'
    pu.run_on_subjects(args, anat.main)

def init_anatomy(args):
    args = anat.read_cmd_args(dict(
        subject=args.subject,
        remote_subject_dir=args.remote_subject_dir,
        exclude='create_new_subject_blend_file',
        ignore_missing=True))
    anat.call_main(args)

def darpa_prep_huygens(subject, args):
    args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    subject = subject[:2].upper() + subject[2:]
    args.remote_subject_dir = op.join(
        '/space/huygens/1/users/kara/{}_SurferOutput/'.format(subject))
    args.function = 'prepare_subject_folder'
    pu.run_on_subjects(args, anat.main)

def create_annot_from_mad(args):
    remote_subject_dir_template = '/mnt/cashlab/Original Data/MG/{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput'
    for subject in args.subject:
        remote_subject_dir = remote_subject_dir_template.format(subject=subject)
        if utils.both_hemi_files_exist(op.join(
                remote_subject_dir, 'label', '{hemi}.aparc.DKTatlas.annot')):
            print('{} already has both annot files!'.format(subject))
            continue
        args = anat.read_cmd_args(dict(
            subject=subject.lower(),
            atlas=args.atlas,
            remote_subject_dir=remote_subject_dir_template,
            function='create_annotation',
            ignore_missing=True,
        ))
        pu.run_on_subjects(args, anat.main)
        if not utils.both_hemi_files_exist(op.join(
                SUBJECTS_DIR, subject.lower(), 'label', '{hemi}.aparc.DKTatlas.annot')):
            print("Couldn't create annot files for {}!".format(subject))
            continue
        local_annot_fol = utils.make_dir(op.join(SUBJECTS_DIR, 'annot_files', subject.lower()))
        for hemi in utils.HEMIS:
            local_annot_fname = op.join(
                SUBJECTS_DIR, subject.lower(), 'label', '{}.aparc.DKTatlas.annot'.format(hemi))
            remote_annot_fname = op.join(
                remote_subject_dir, 'label', '{}.aparc.DKTatlas.annot'.format(hemi))
            local_temp_annot_fname = op.join(local_annot_fol, '{}.aparc.DKTatlas.annot'.format(hemi))
            if not op.isfile(remote_annot_fname):
                if op.isfile(local_annot_fname):
                    utils.copy_file(local_annot_fname, local_temp_annot_fname)
                else:
                    print("Can't copy {} for {}, it doesn't exist!".format(local_annot_fname, subject))

def darpa(args):
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            remote_subject_dir=op.join(
                '/space/huygens/1/users/kara/{}_SurferOutput/'.format(darpa_subject))
        ))
        pu.run_on_subjects(args, anat.main)

def anatomy_preproc(args, subject=''):
    args = anat.read_cmd_args(dict(
        subject=args.subject if subject == '' else subject,
        remote_subject_dir='/autofs/space/lilli_001/users/DARPA-Recons/{subject}',
        high_level_atlas_name='darpa_atlas',
        function='create_annotation,create_high_level_atlas',
        ignore_missing=True))
    anat.call_main(args)

def calc_anatomy(subject, atlas, remote_subject_dir, n_jobs):
    from src.preproc import anatomy as anat
    args = anat.read_cmd_args(dict(
        subject=subject,
        atlas=atlas,
        function='all,check_bem',
        remote_subject_dir=remote_subject_dir,
        exclude='create_new_subject_blend_file',
        n_jobs=n_jobs))
    anat.call_main(args)

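# Hedged usage sketch: a direct call to calc_anatomy with placeholder values; the
# subject code and remote path below are illustrative only (the atlas name appears
# elsewhere in this file):
# calc_anatomy('subject1', 'aparc.DKTatlas', '/remote/recons/subject1', n_jobs=4)
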
def darpa_prep_lili(subject, args):
    args = anat.read_cmd_args([
        '-s', subject, '-a', args.atlas,
        '--sftp_username', args.sftp_username,
        '--sftp_domain', args.sftp_domain])
    args.remote_subject_dir = op.join('/autofs/space/lilli_001/users/DARPA-Recons', subject)
    args.sftp = True
    args.function = 'prepare_subject_folder'
    pu.run_on_subjects(args, anat.main)

def get_subject_files_using_sftp_from_ohad(subject, args):
    args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    args.sftp = True
    args.sftp_username = '******'
    args.sftp_domain = '127.0.0.1'
    args.sftp_port = 4444
    args.sftp_subject_dir = '/media/ohadfel/New_Volume/subs/{}'.format(subject)
    args.remote_subject_dir = '/media/ohadfel/New_Volume/subs/{}'.format(subject)
    args.function = 'prepare_subject_folder'
    pu.run_on_subjects(args, anat.main)

def get_subject_files_using_sftp_from_ohad(subject, args):
    args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    args.sftp = True
    args.sftp_username = '******'
    args.sftp_domain = '127.0.0.1'
    args.sftp_port = 4444
    args.sftp_subject_dir = '/media/ohadfel/New_Volume/subs/{}'.format(subject)
    args.remote_subject_dir = '/media/ohadfel/New_Volume/subs/{}'.format(subject)
    args.function = 'prepare_local_subjects_folder'
    anat.run_on_subjects(args)

def get_subject_files_from_server(args):
    args = anat.read_cmd_args(dict(
        subject=args.subject,
        atlas=args.atlas,
        function='prepare_subject_folder',
        sftp=True,
        sftp_username='******',
        sftp_domain='door.nmr.mgh.harvard.edu',
        remote_subject_dir='/space/thibault/1/users/npeled/subjects/{subject}'))
    pu.run_on_subjects(args, anat.main)

def get_subject_files_from_mad(args):
    for subject in args.subject:
        args = anat.read_cmd_args(dict(
            subject=subject,
            atlas=args.atlas,
            remote_subject_dir='/mnt/cashlab/Original Data/MG/{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput',
            function='prepare_subject_folder'))
        pu.run_on_subjects(args, anat.main)

def get_subject_files_using_sftp(args):
    for subject in args.subject:
        args = anat.read_cmd_args(dict(
            subject=subject,
            atlas=args.atlas,
            sftp_username=args.sftp_username,
            sftp_domain=args.sftp_domain,
            sftp=True,
            remote_subject_dir=args.remote_subject_dir,
            function='prepare_subject_folder'))
        pu.run_on_subjects(args, anat.main)

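# Hedged usage sketch, assuming this module is run like the other examples here and
# that the sftp and remote-dir options are exposed as command-line flags (as they are
# for the anatomy preprocessing itself in darpa_prep_lili); host and paths are
# placeholders:
#   python -m src.preproc.examples.anatomy -s subject1 -f get_subject_files_using_sftp \
#       --sftp_username myuser --sftp_domain some.host.edu \
#       --remote_subject_dir "/remote/subjects/{subject}"
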
def darpa_sftp(args):
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            remote_subject_dir=op.join(
                '/space/huygens/1/users/kara/{}_SurferOutput/'.format(darpa_subject)),
            sftp=True,
            sftp_username='******',
            sftp_domain='door.nmr.mgh.harvard.edu',
        ))
        pu.run_on_subjects(args, anat.main)

def recon_all(args):
    # python -m src.preproc.anatomy -f recon-all --ignore_missing 1 --n_jobs 1
    #   --nifti_fname "/autofs/space/thibault_001/users/npeled/T1/{subject}/mprage.nii" -s "wake5,wake6,wake7,wake8"
    args = anat.read_cmd_args(dict(
        subject=args.subject,
        function='recon-all',
        nifti_fname='/autofs/space/thibault_001/users/npeled/T1/{subject}/mprage.nii',
        ignore_missing=True,
        n_jobs=1,
    ))
    pu.run_on_subjects(args, anat.main)

def anat_preproc_clin(args):
    # python -m src.preproc.examples.anatomy -s nmr01426 -f anat_preproc_clin
    for subject in args.subject:
        # Use a separate variable so the outer command-line args aren't clobbered
        # when more than one subject is given.
        anat_args = anat.read_cmd_args(dict(
            subject=subject,
            function='all,create_outer_skin_surface,check_bem',
            remote_subject_dir=op.join(args.fs_root, subject),
            n_jobs=args.n_jobs,
        ))
        pu.run_on_subjects(anat_args, anat.main)

def get_subject_files_from_mad(args=None, subjects=None, necessary_files=None):
    # Note: an args object with an atlas attribute is still required, even when the
    # subjects are passed explicitly, because the atlas is read from it below.
    subjects = args.subject if args is not None else subjects
    for subject in subjects:
        args = anat.read_cmd_args(dict(
            subject=subject,
            atlas=args.atlas,
            remote_subject_dir='/mnt/cashlab/Original Data/MG/{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput',
            function='prepare_subject_folder'
        ))
        if necessary_files is not None:
            args.necessary_files = necessary_files
        pu.run_on_subjects(args, anat.main)

def get_subject_files_from_mad(org_args=None, subjects=None, necessary_files=None):
    subjects = org_args.subject if org_args is not None else subjects
    for subject in subjects:
        root_fol = '/mnt/cashlab/Original Data/{}'.format(subject[:2].upper())
        args = anat.read_cmd_args(dict(
            subject=subject,
            atlas=org_args.atlas,
            remote_subject_dir=op.join(
                root_fol, '{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput'),
            function='prepare_subject_folder'
        ))
        if necessary_files is not None:
            args.necessary_files = necessary_files
        pu.run_on_subjects(args, anat.main)

def darpa_prep_angelique(args):
    import glob
    for subject in args.subject:
        darpa_subject = subject[:2].upper() + subject[2:]
        root = op.join('/homes/5/npeled/space1/Angelique/recon-alls', darpa_subject)
        recon_all_dirs = glob.glob(op.join(root, '**', '*SurferOutput*'), recursive=True)
        if len(recon_all_dirs) == 0:
            print("Can't find the recon-all folder for {}!".format(subject))
            continue
        args = anat.read_cmd_args(utils.Bag(
            subject=subject,
            function='prepare_subject_folder',
            remote_subject_dir=recon_all_dirs[0]
        ))
        pu.run_on_subjects(args, anat.main)

def get_subject_files_from_mad(subjects, atlas):
    for subject in subjects:
        root_fol = '/mnt/cashlab/Original Data/{}'.format(subject[:2].upper())
        args = anat.read_cmd_args(dict(
            subject=subject,
            atlas=atlas,
            remote_subject_dir=op.join(
                root_fol, '{subject}/{subject}_Notes_and_Images/{subject}_SurferOutput'),
            function='prepare_subject_folder',
            ignore_missing=1,
        ))
        pu.run_on_subjects(args, anat.main)

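# Hedged usage sketch: calling the helper above directly from Python; the subject code
# is a placeholder (the atlas name appears elsewhere in this file):
# get_subject_files_from_mad(['mg99'], 'aparc.DKTatlas')
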
def recon_all_clin(args):
    # python -m src.preproc.examples.anatomy -s nmr01426 -f recon_all_clin
    #   --clin_fol clin_6966926 --dicoms_fol Prisma_fit-67026-20200618-141203-000586
    import os
    for subject, clin_fol, dicoms_fol in zip(args.subject, args.clin_fol, args.dicoms_fol):
        clin_full_fol = utils.make_dir(op.join(args.clin_root, clin_fol, 'mne_dicom'))
        memprage_fols = glob.glob(op.join(clin_full_fol, '*MEMPRAGE*'))
        print('mne_organize_dicom output fol: {}'.format(clin_full_fol))
        if len(memprage_fols) > 0:
            ret = au.is_true(input(
                'It seems like you already have memprage folders, are you sure you want to rerun?'))
            if not ret:
                continue
            utils.delete_folder_files(clin_full_fol)
        fs_dir = utils.make_dir(op.join(args.fs_root, subject))
        print('FreeSurfer output fol: {}'.format(fs_dir))
        dicoms_full_path = op.join(args.dicoms_root, dicoms_fol)
        if not op.isdir(dicoms_full_path):
            print('{} does not exist!'.format(dicoms_full_path))
            continue
        rs = utils.partial_run_script(locals(), print_only=args.print_only)
        os.chdir(clin_full_fol)
        rs('mne_organize_dicom {dicoms_full_path}')
        anat.recon_all(subject, clin_full_fol, overwrite=True, subjects_dir=args.fs_root,
                       print_only=False, n_jobs=args.n_jobs)
        # Use a separate variable so the outer command-line args aren't clobbered
        # when more than one subject is given.
        anat_args = anat.read_cmd_args(dict(
            subject=subject,
            function='all,create_skull_surfaces',
            remote_subject_dir=op.join(args.fs_root, subject),
            n_jobs=args.n_jobs,
        ))
        pu.run_on_subjects(anat_args, anat.main)

def get_subject_files_from_server(subject, args):
    args = anat.read_cmd_args(['-s', subject])
    args.remote_subject_dir = op.join('/autofs/cluster/neuromind/npeled/subjects', subject)
    anat.run_on_subjects(args)

def get_subject_files_using_sftp(subject, args):
    args = anat.read_cmd_args([
        '-s', subject,
        '--sftp_username', args.sftp_username,
        '--sftp_domain', args.sftp_domain])
    args.sftp = True
    anat.run_on_subjects(args)

def add_parcellation(subject, args):
    args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    args.function = 'create_annotation_from_template,parcelate_cortex,calc_faces_verts_dic,' + \
                    'save_labels_vertices,save_hemis_curv,calc_labels_center_of_mass,save_labels_coloring'
    anat.run_on_subjects(args)

def darpa(subject, args):
    args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    subject = subject[:2].upper() + subject[2:]
    args.remote_subject_dir = op.join('/space/huygens/1/users/kara/{}_SurferOutput/'.format(subject))
    anat.run_on_subjects(args)

def prepare_subject_folder_from_huygens(subject, args):
    args = anat.read_cmd_args(['-s', subject])
    subject = subject[:2].upper() + subject[2:]
    args.remote_subject_dir = op.join('/space/huygens/1/users/kara/{}_SurferOutput/'.format(subject))
    args.function = 'prepare_local_subjects_folder'
    anat.run_on_subjects(args)

def prepare_subject_folder_from_franklin(subject, args):
    args = anat.read_cmd_args(['-s', subject])
    args.remote_subject_dir = op.join('/autofs/space/franklin_003/users/npeled/subjects_old/{}'.format(subject))
    args.function = 'prepare_local_subjects_folder'
    anat.run_on_subjects(args)

def add_parcellation(subject, args):
    args = anat.read_cmd_args(['-s', subject, '-a', args.atlas])
    args.function = 'create_annotation_from_template,parcelate_cortex,calc_faces_verts_dic,' + \
                    'save_labels_vertices,save_hemis_curv,calc_labels_center_of_mass,save_labels_coloring'
    pu.run_on_subjects(args, anat.main)

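# Hedged usage sketch, following the CLI convention shown in the other examples in this
# module; the subject code and atlas value are placeholders:
#   python -m src.preproc.examples.anatomy -s subject1 -a aparc.DKTatlas -f add_parcellation
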
def language(args):
    # -f language -s nmr01361 --clinical_dir clin_4090354
    # -s nmr01353 -f clean_4d_data --fsd sycabs --remote_fmri_dir "/space/megraid/clinical/MEG-MRI/seder/freesurfer" --nconditions 4
    if args.clinical_dir == '':
        print('You should set the clinical_dir first. Example: clin_4090354')
        return
    clinical_root_dir = op.join(args.remote_fmri_dir, args.clinical_dir)
    if not op.isdir(clinical_root_dir):
        print('{} does not exist!'.format(clinical_root_dir))
        return
    task = 'sycabs'
    fwhm = 6
    subject = args.subject[0]
    remote_mri_dir = args.remote_clinical_subjects_dir
    subject_mri_dir = op.join(remote_mri_dir, subject)
    mri_subject_task_dir = utils.make_dir(op.join(subject_mri_dir, task))
    clinical_dirs = glob.glob(op.join(clinical_root_dir, '*'))
    clinical_dirs = [d for d in clinical_dirs if utils.namebase(d) != 'mne_dcm']
    remote_fmri_dir = utils.select_one_file(clinical_dirs)
    fmri_fols = sorted(glob.glob(op.join(remote_fmri_dir, '*_SyCAbs')))
    par_fol = utils.make_dir(op.join(remote_mri_dir, subject, 'par'))
    par_files = glob.glob(op.join(par_fol, '*.par'))
    sessions = sorted([utils.find_num_in_str(utils.namebase(d))[0] for d in fmri_fols])
    # Warning: you first need to put the original par files in the following folder:
    if len(par_files) == 0:
        print('\n *** Please put the original par files in {} and rerun ***'.format(
            op.join(remote_mri_dir, subject, 'par')))
        return
    par_files.sort(key=lambda x: int(utils.namebase(x).split('_')[-1]))
    ret = input('''
    Patient: {}
    MRI folder: {}
    fMRI root folder: {}
    fMRI sessions: {}
    Sessions and pars: {}
    Do you want to continue (y/n)? '''.format(
        subject, subject_mri_dir, remote_fmri_dir,
        [utils.namebase(d) for d in fmri_fols],
        list(zip([utils.namebase(f) for f in par_files], sessions))))
    if not au.is_true(ret):
        return
    # You first need to run src.preproc.anatomy
    if not op.isfile(anat.get_blend_fname(subject, args.atlas)):
        args = anat.read_cmd_args(dict(
            subject=subject,
            remote_subject_dir=subject_mri_dir,
            ignore_missing=True,
        ))
        pu.run_on_subjects(args, anat.main)
    # Convert the fMRI dicom files to nii
    for fmri_fol in fmri_fols:
        ses_num = utils.find_num_in_str(utils.namebase(fmri_fol))[0]
        ses_files = glob.glob(op.join(fmri_fol, '**', '*.*'), recursive=True)
        output_fname = op.join(utils.make_dir(op.join(mri_subject_task_dir, ses_num)), 'f.nii.gz')
        if not op.isfile(output_fname):
            fu.mri_convert(ses_files[0], output_fname)
    # Convert and arrange the par files
    from src.misc.fmri_scripts import convert_par
    for par_file, session in zip(par_files, sessions):
        fs_par_fname = op.join(mri_subject_task_dir, session, '{}.par'.format(task))
        # if not op.isfile(fs_par_fname):
        warnings = convert_par.sycabs(par_file, fs_par_fname)
        if warnings != '':
            print('\n *** Please fix the problems with the par conversion ({}) and rerun ***\n'.format(
                par_file))
            return
    for hemi in utils.HEMIS:
        utils.delete_folder_files(op.join(remote_mri_dir, '{}_sm{}_{}'.format(task, fwhm, hemi)))
    # Run the FreeSurfer analysis
    args = fmri.read_cmd_args(dict(
        subject=subject,
        atlas=args.atlas,
        function='clean_4d_data',
        fsd=task,
        fwhm=fwhm,
        remote_fmri_dir=remote_mri_dir,
        nconditions=4,
        ignore_missing=True,
        print_only=False,
        overwrite_4d_preproc=False))
    pu.run_on_subjects(args, fmri.main)
    # Load the fMRI results
    args = fmri.read_cmd_args(dict(
        subject=subject,
        atlas=args.atlas,
        function='load_surf_files',
        fmri_file_template=op.join(MMVT_DIR, subject, 'fmri', 'words_v_symbols_{hemi}.mgz'),
    ))
    pu.run_on_subjects(args, fmri.main)