def copypasta(subs=all_sub_args, feat=None):
    # dump the full path of each subject's first-level .feat directory for a
    # given analysis into copypasta.txt, one path per line
    os.system('rm copypasta.txt')
    for sub in subs:
        subj = bids_meta(sub)
        os.system('echo "%s" >> copypasta.txt' % (
            os.path.join(subj.feat_dir, subj.fsub + '_' + feat + '.feat')))
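# Hypothetical usage sketch (not a call taken from this script): the feat suffix
# below is illustrative; pass whichever first-level analysis name actually exists
# under each subject's feat_dir.
# copypasta(subs=all_sub_args, feat='memory_rsa_lvl2')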
def wrangle_first_level_rsa():
    mem_weights = {1: 'mem_baseline_CSp.nii.gz',
                   2: 'mem_baseline_CSm.nii.gz',
                   3: 'mem_acquisition_CSp.nii.gz',
                   4: 'mem_acquisition_CSm.nii.gz',
                   5: 'mem_extinction_CSp.nii.gz',
                   6: 'mem_extinction_CSm.nii.gz',
                   7: 'mem_foil_CSp.nii.gz',
                   8: 'mem_foil_CSm.nii.gz'}

    for sub in all_sub_args:
        subj = bids_meta(sub)
        print(sub)

        # for phase in ['baseline','acquisition','extinction']:
        #     csp_cope = os.path.join(subj.model_dir,phase,'rsa_model.feat','stats','cope1.nii.gz')
        #     csm_cope = os.path.join(subj.model_dir,phase,'rsa_model.feat','stats','cope2.nii.gz')
        #     os.system('cp %s %s'%(csp_cope,os.path.join(subj.weights,'%s_CSp.nii.gz'%(phase))))
        #     os.system('cp %s %s'%(csm_cope,os.path.join(subj.weights,'%s_CSm.nii.gz'%(phase))))

        for cope in mem_weights:
            in_file = os.path.join(subj.model_dir, 'feats', 'memory_rsa_lvl2.gfeat',
                                   f'cope{cope}.feat', 'stats', 'cope1.nii.gz')
            out_file = os.path.join(subj.weights, mem_weights[cope])
            os.system(f'cp {in_file} {out_file}')
def motion_outlier_count():
    import os
    import re
    import pandas as pd
    import matplotlib.pyplot as plt
    import seaborn as sns

    '''to build the output you need a list of all subjects "all_sub_args"
       and a list of all tasks "tasks"'''

    df = pd.DataFrame(index=pd.MultiIndex.from_product([all_sub_args, tasks]))
    df['censored'] = 0.

    for sub in all_sub_args:
        '''to make this work for CCX, comment out the next line and change
           subj.prep_dir so it is the path to each subject's fMRIPrep directory'''
        subj = bids_meta(sub)

        # walk the fMRIPrep output and count motion outlier regressors in each confounds file
        for folder in os.walk(subj.prep_dir):
            for file in folder[2]:
                if 'confounds' in file and '.tsv' in file:
                    C = pd.read_csv(os.path.join(subj.prep_dir, folder[0], file), sep='\t')
                    mo = [c for c in C.columns if 'motion_outlier' in c]
                    task = re.search('task-(.*)_desc', file)[1]
                    # store the fraction of censored TRs for this subject and task
                    df.loc[(sub, task), 'censored'] = len(mo) / C.shape[0]

    sns.boxplot(data=df, y='censored')
    sns.swarmplot(data=df, y='censored', color='black')
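# Hypothetical sketch of the CCX adaptation described in the docstring above
# (the fMRIPrep path is illustrative, not one defined in this project):
# inside the subject loop, skip bids_meta and point directly at fMRIPrep output, e.g.
#     # subj = bids_meta(sub)
#     prep_dir = os.path.join('/path/to/CCX/derivatives/fmriprep', 'sub-%s' % sub)
#     for folder in os.walk(prep_dir):
#         ...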
def __init__(self, sub, mask=None, phases='encode_mem'):
    self.subj = bids_meta(sub)

    # mask is expected to be a string ROI name; strip a trailing '_mask' suffix if present
    if '_mask' in mask:
        self.mask_name = mask[:-5]
    else:
        self.mask_name = mask

    # self.mask = self.load_mask(mask)
    # self.data = self.load_clean_data(phases=phases)
    # self.extract_timecourse()
    # self.interact()
    self._autofill_fsf()
def prep_lvl2():
    import os
    from fg_config import bids_meta, all_sub_args

    for sub in all_sub_args:
        print(sub)
        subj = bids_meta(sub)

        # for phase in ['memory_run-01','memory_run-02','memory_run-03']:
        for phase in ['baseline', 'acquisition', 'extinction']:
            regfolder = os.path.join(subj.model_dir, phase,
                                     '%s_%s_reg_gPPI.feat' % (subj.fsub, phase), 'reg')

            for roi in ['rh_hpc', 'hc_tail', 'hc_body', 'hc_head', 'amyg_bla', 'amyg_cem']:
                dest = os.path.join(subj.model_dir, phase, roi, 'source.feat', 'reg')
                os.system('cp -r %s %s' % (regfolder, dest))
def wrap_lss_jobs():
    # submit one LSS beta-estimation job per subject
    for sub in all_sub_args:
        subj = bids_meta(sub)
        os.system('launch -N 1 -n 24 -J lss_%s -s jobs/lss_betas/%s_job.txt -m [email protected] -p normal -r 6:00:00 -A LewPea_MRI_Analysis' % (sub, subj.fsub))

    for i, job in enumerate(os.listdir('jobs/lss_betas')):
        if '.txt' in job:
            os.system('launch -N 1 -n 12 -J lss_%s -s jobs/lss_betas/%s -m [email protected] -p normal -r 09:00:00 -A LewPea_MRI_Analysis' % (i, job))

    for job in ['acquisition', 'extinction', 'baseline']:
        os.system('launch -N 1 -n 12 -J %s -s jobs/%s_rsa_job.txt -m [email protected] -p normal -r 12:00:00' % (job, job))

    for job in [1, 2, 3]:
        # os.system('launch -N 1 -n 24 -J %s -s jobs/memory_run-0%s_rsa_job.txt -m [email protected] -p normal -r 2:00:00 -A LewPea_MRI_Analysis'%(job,job))
        os.system(f'launch -N 1 -n 24 -J m{job} -s jobs/gPPI/memory-{job}_gPPI_job.txt -m [email protected] -p normal -r 10:00:00 -A LewPea_MRI_Analysis')

    # submit a bunch of jobs at once
    for job in range(28):
        os.system('launch -N 1 -n 12 -J flss_%s -s jobs/final_lss_job_%s.txt -m [email protected] -p normal -r 12:00:00 -A LewPea_MRI_Analysis' % (job, job))

    # splitting up a bunch of jobs into different job scripts
    # ('bad' is assumed to be a table of failed LSS commands defined elsewhere;
    #  numpy is assumed to be imported as np at the module level)
    for i in range(bad.shape[0]):
        os.system('echo "%s" >> jobs/final_lss_job_%s.txt' % (bad[0][i], int(np.floor(i / 12))))

    for run in [1, 2, 3]:
        for roi in ['dACC', 'mOFC', 'rh_hpc', 'lh_hpc', 'lh_amyg', 'rh_amyg']:
            os.system('launch -N 1 -n 24 -J %s_%s -s jobs/%s_memory_run-0%s_gPPI_job.txt -m [email protected] -p normal -r 3:00:00 -A LewPea_MRI_Analysis' % (run, roi, roi, run))

    for run in [1, 2]:
        os.system(f'launch -N 2 -n 24 -J localizer-{run} -s jobs/localizer-{run}_job.txt -m [email protected] -p normal -r 1:00:00 -A LewPea_MRI_Analysis')

    os.system('launch -N 2 -n 24 -J localizer_lvl2 -s jobs/localizer_lvl2_job.txt -m [email protected] -p normal -r 1:00:00 -A LewPea_MRI_Analysis')

    # for roi in ['rh_hpc','hc_tail','hc_body','hc_head','amyg_bla','amyg_cem']:
    for roi in ['rh_hpc']:
        # os.system('launch -N 1 -n 24 -J %s_lvl2 -s jobs/%s_mem_encode_lvl2_gPPI_job.txt -m [email protected] -p normal -r 00:45:00 -A LewPea_MRI_Analysis'%(roi,roi))
        os.system('launch -N 1 -n 14 -J %s_lvl3 -s jobs/%s_group_cope_job.txt -m [email protected] -p normal -r 2:00:00 -A LewPea_MRI_Analysis -d 2897228' % (roi, roi))

    os.system('launch -N 1 -n 18 -J day1_lvl3 -s jobs/group_day1_gPPI_job.txt -m [email protected] -p normal -r 4:00:00 -A LewPea_MRI_Analysis')

    for phase in ['baseline', 'acquisition', 'extinction']:
        os.system('launch -N 1 -n 6 -J %s_lvl3 -s jobs/%s_group_gPPI_job.txt -m [email protected] -p normal -r 3:00:00 -A LewPea_MRI_Analysis' % (phase, phase))

    # launch -N 2 -n 34 -J sm -s jobs/source_memory_job.txt -m [email protected] -p normal -r 3:00:00 -A LewPea_MRI_Analysis
    # launch -N 2 -n 34 -J sm_reg -s jobs/source_memory_reg_job.txt -m [email protected] -p normal -r 0:30:00 -A LewPea_MRI_Analysis -d 3051447
    # launch -N 3 -n 30 -J sm_lvl3 -s jobs/source_memory_lvl3_job.txt -m [email protected] -p normal -r 3:00:00 -A LewPea_MRI_Analysis -d 3051447
    # launch -N 1 -n 64 -p largemem512GB -J 3dlme -s jobs/3dlme_job.txt -m [email protected] -r 48:00:00 -A LewPea_MRI_Analysis
    # launch -N 48 -n 48 -J smooth -s jobs/afni_fwhm_job.txt -m [email protected] -p normal -r 2:00:00 -A LewPea_MRI_Analysis
    # launch -N 1 -n 64 -p largemem512GB -J 3dmvm -s jobs/3dMVM_job.txt -m [email protected] -r 48:00:00 -A LewPea_MRI_Analysis

    os.system('launch -N 2 -n 48 -J univariate -s jobs/memory_univariate_lvl2_job.txt -m [email protected] -p normal -r 2:00:00 -A LewPea_MRI_Analysis')

    # for phase in ['baseline','acquisition','extinction','memory_run-01','memory_run-02','memory_run-03']:
    for phase in ['memory_run-01', 'memory_run-02', 'memory_run-03']:
        # for roi in ['rh_hpc']:
        # for roi in ['rACC','sgACC','rh_hpc','lh_hpc','lh_amyg','rh_amyg']:
        # for roi in ['rh_hc_tail','lh_hc_tail','rh_hc_body','lh_hc_body','rh_hc_head','lh_hc_head','rh_amyg_bla','lh_amyg_bla','rh_amyg_cem','lh_amyg_cem']:
        for roi in ['hc_tail', 'hc_body', 'hc_head', 'amyg_bla', 'amyg_cem']:
            # os.system('launch -N 1 -n 24 -J %s_gPPI -s jobs/%s_gPPI_job.txt -m [email protected] -p normal -r 2:00:00 -A LewPea_MRI_Analysis'%(phase,phase))
            os.system(f'launch -N 1 -n 24 -J {roi}_{phase} -s jobs/gPPI/{roi}_{phase}_gPPI_job.txt -m [email protected] -p normal -r 01:30:00 -A LewPea_MRI_Analysis')
def autofill_fsf(group=False, template='', ses=None, name=None, roi=None, run=None,
                 subjects=all_sub_args):
    # work out the string used to name the output .fsf files and job scripts
    if 'template' in template:
        outstr = re.search('template_(.*)', template)[1]
    elif roi is not None:
        outstr = roi + '_' + name
    else:
        outstr = name
    if run is not None:
        outstr = name

    if group:
        if roi is not None:
            replacements = {'ROI': roi}

        if ses == 'mem':
            # one group-level .fsf per memory cope
            for cope in range(1, 15):
                replacements['COPEID'] = 'cope%s' % (cope)
                outfeat = os.path.join(SCRATCH, 'group_gPPI', roi, '%s_%s.fsf' % (outstr, cope))

                with open(os.path.join(gPPI_codebase, 'feats', '%s.fsf' % (template))) as infile:
                    with open(outfeat, 'w') as outfile:
                        for line in infile:
                            for src, target in replacements.items():
                                line = line.replace(src, target)
                            outfile.write(line)

                # also go ahead and make the job script here
                os.system('echo "feat %s" >> jobs/%s_job.txt' % (outfeat, outstr))

        if ses == 1:
            # one group-level .fsf per day-1 phase
            for phase in ['baseline', 'acquisition', 'extinction']:
                outfeat = os.path.join(SCRATCH, 'group_gPPI', roi, '%s_%s_%s.fsf' % (roi, phase, name))
                replacements['RUNID'] = phase

                with open(os.path.join(gPPI_codebase, 'feats', '%s.fsf' % (template))) as infile:
                    with open(outfeat, 'w') as outfile:
                        for line in infile:
                            for src, target in replacements.items():
                                line = line.replace(src, target)
                            outfile.write(line)

                os.system('echo "feat %s" >> jobs/%s_job.txt' % (outfeat, name))

    else:
        # subject-level .fsf files
        for sub in subjects:
            subj = bids_meta(sub)
            replacements = {'SUBID': subj.fsub}
            if roi is not None:
                replacements['ROI'] = roi
            if run is not None:
                replacements['RUNID'] = run

            # need to handle the special cases where the TR is longer
            if ses == 1 and sub in [105, 106]:
                replacements['TR_length'] = '2.23'
            else:
                replacements['TR_length'] = '2'

            outfeat = os.path.join(subj.feat_dir, '%s_%s.fsf' % (subj.fsub, outstr))

            with open(os.path.join(gPPI_codebase, 'feats', '%s.fsf' % (template))) as infile:
                with open(outfeat, 'w') as outfile:
                    for line in infile:
                        for src, target in replacements.items():
                            line = line.replace(src, target)
                        outfile.write(line)

            # also go ahead and make the job script here
            os.system('echo "feat %s" >> jobs/%s_job.txt' % (outfeat, outstr))
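# Hypothetical usage sketches (not calls taken from this script); the template
# names below are illustrative and assume matching .fsf templates with
# SUBID/ROI/RUNID/TR_length placeholders exist under gPPI_codebase/feats:
# autofill_fsf(template='template_gPPI', ses=1, name='gPPI', roi='rh_hpc', run='baseline')
# autofill_fsf(group=True, template='template_group_gPPI', ses='mem', name='gPPI', roi='rh_hpc')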
def __init__(self, sub):
    self.subj = bids_meta(sub)