def subject_factory():
    """Generate a SubjectData object for every usable subject.

    Subjects listed in `exclusions`, or missing BOLD data for any of
    their sessions, are silently skipped.
    """
    for subject_id in subject_ids:
        if subject_id in exclusions:
            continue

        # build the subject data structure
        subject_data = nipype_preproc_spm_utils.SubjectData()
        subject_data.session_id = session_ids
        subject_data.subject_id = subject_id
        subject_data.func = []

        # collect exactly one BOLD image per session; the subject is
        # dropped as soon as one session lacks data
        usable = True
        for session_id in subject_data.session_id:
            bold_dir = os.path.join(
                data_dir, "%s/BOLD/%s" % (subject_id, session_id))

            # decompress .nii.gz into .nii
            unzip_nii_gz(bold_dir)

            # glob bold data for this session and sanity-check it
            func = glob.glob(os.path.join(bold_dir, "bold.nii"))
            if not func:
                usable = False
                break
            subject_data.func.append(func[0])

        if not usable:
            continue

        # anatomical data: decompress, then glob the brain image
        anat_dir = os.path.join(data_dir, "%s/anatomy" % subject_id)
        unzip_nii_gz(anat_dir)
        subject_data.anat = glob.glob(
            os.path.join(
                data_dir,
                "%s/anatomy/highres001_brain.nii" % subject_id))[0]

        # all computations for this subject land here
        subject_data.output_dir = os.path.join(output_dir, subject_id)

        yield subject_data
def subject_factory():
    """producer for subject (input) data"""
    for subject_id, sd in haxby_data.iteritems():
        # one SubjectData per subject, single "haxby2001" session
        subject_data = nipype_preproc_spm_utils.SubjectData()
        subject_data.subject_id = subject_id
        subject_data.session_id = "haxby2001"

        # decompress the images, then point at the plain .nii files
        unzip_nii_gz(sd.subject_dir)
        subject_data.anat = sd.anat.replace(".gz", "")
        subject_data.func = sd.bold.replace(".gz", "")

        # per-subject output directory
        subject_data.output_dir = os.path.join(
            OUTPUT_DIR, subject_data.subject_id)

        yield subject_data
def subject_factory(session_output_dir, session):
    """Produce SubjectData objects for a single NYU session."""
    # restrict the dataset listings to the requested session
    tag = "session%i" % session
    session_func = [x for x in nyu_data.func if tag in x]
    session_anat = [x for x in nyu_data.anat_skull if tag in x]

    # subject ids are the grand-parent directory names of the func images
    found_subjects = set(
        os.path.basename(os.path.dirname(os.path.dirname(x)))
        for x in session_func)

    for subject_id in found_subjects:
        # check that subject is not condemned
        if subject_id in BAD_SUBJECTS:
            continue

        # instantiate subject_data object
        subject_data = nipype_preproc_spm_utils.SubjectData()
        subject_data.subject_id = subject_id
        subject_data.session_id = session

        # functional image: exactly one match expected for this subject
        func_matches = [
            x.replace(".gz", "") for x in session_func if subject_id in x]
        assert len(func_matches) == 1
        subject_data.func = func_matches[0]
        unzip_nii_gz(os.path.dirname(subject_data.func))

        # anatomical image: exactly one match expected for this subject
        anat_matches = [
            x.replace(".gz", "") for x in session_anat if subject_id in x]
        assert len(anat_matches) == 1
        subject_data.anat = anat_matches[0]
        unzip_nii_gz(os.path.dirname(subject_data.anat))

        # set subject output directory
        subject_data.output_dir = os.path.join(
            session_output_dir, subject_data.subject_id)

        yield subject_data
DATASET_DESCRIPTION = "FSL FEADS example data (single-subject)"

# sanitize command line
if len(sys.argv) < 3:
    print ("\r\nUsage: python %s <path to FSL feeds data directory>"
           " <output_dir>\r\n") % sys.argv[0]
    print ("Example:\r\npython %s /usr/share/fsl-feeds/data/"
           " fsl_feeds_fmri_runs") % sys.argv[0]
    sys.exit(1)

# set data dir and output dir from the command line
data_dir = os.path.abspath(sys.argv[1])
output_dir = os.path.abspath(sys.argv[2])
unzip_nii_gz(data_dir)

# experimental setup: two boxcar conditions over 180 scans at TR = 3s
stats_start_time = time.ctime()
n_scans = 180
TR = 3.
EV1_epoch_duration = 2 * 30  # one full EV1 on/off cycle, in seconds
EV2_epoch_duration = 2 * 45  # one full EV2 on/off cycle, in seconds
TA = TR * n_scans  # total acquisition time, in seconds
# number of whole epochs fitting in the acquisition (truncated to int;
# the redundant float pre-assignment of EV1_epochs was removed)
EV1_epochs = int(TA / EV1_epoch_duration)
EV2_epochs = int(TA / EV2_epoch_duration)
# onset times (seconds) of each epoch, evenly spaced from 0
EV1_onset = np.linspace(0, EV1_epoch_duration * (EV1_epochs - 1), EV1_epochs)
EV2_onset = np.linspace(0, EV2_epoch_duration * (EV2_epochs - 1), EV2_epochs)
EV1_on = 30  # seconds the EV1 condition is 'on' within each epoch
EV2_on = 45  # seconds the EV2 condition is 'on' within each epoch