def save_maps(model_dir, doc, resample=False, target_affine=None,
              target_shape=None):
    """Save the statistical maps listed in *doc* under *model_dir*.

    Three kinds of entries are handled:

    - ``c_maps`` / ``t_maps``: dict mapping a contrast id to an image path;
    - ``beta_maps``: list of image paths;
    - ``mask``: a single image path.

    When *resample* is true, each image is resampled to *target_affine* /
    *target_shape* before being written as a compressed NIfTI file.
    """
    for map_kind in ('c_maps', 't_maps'):
        if map_kind not in doc:
            continue
        maps_dir = make_dir(model_dir, map_kind, strict=False)
        for contrast_id in doc[map_kind]:
            out_name = '%s.nii.gz' % safe_name(contrast_id.lower())
            image = nb.load(doc[map_kind][contrast_id])
            if resample:
                image = resample_img(image, target_affine, target_shape)
            nb.save(image, os.path.join(maps_dir, out_name))
    if 'beta_maps' in doc:
        maps_dir = make_dir(model_dir, 'beta_maps')
        for beta_path in doc['beta_maps']:
            # file name without directory or extension
            stem = os.path.split(beta_path)[1].lower().split('.')[0]
            out_name = '%s.nii.gz' % safe_name(stem)
            image = nb.load(beta_path)
            if resample:
                image = resample_img(image, target_affine, target_shape,
                                     copy=False)
            nb.save(image, os.path.join(maps_dir, out_name))
    if 'mask' in doc:
        image = nb.load(doc['mask'])
        if resample:
            # nearest-neighbour interpolation keeps the mask binary
            image = resample_img(image, target_affine, target_shape,
                                 interpolation='nearest', copy=False)
        nb.save(image, os.path.join(model_dir, 'mask.nii.gz'))
def save_maps(model_dir, doc, resample=False, target_affine=None,
              target_shape=None):
    """Write contrast, t, beta and mask images from *doc* into *model_dir*.

    ``c_maps``/``t_maps`` are dicts of contrast-id -> image path,
    ``beta_maps`` is a list of image paths, and ``mask`` a single path.
    Images are optionally resampled before saving.
    """
    for dtype in ['c_maps', 't_maps']:
        if dtype in doc:
            maps_dir = make_dir(model_dir, dtype, strict=False)
            for key, src_path in doc[dtype].items():
                img = nb.load(src_path)
                if resample:
                    img = resample_img(img, target_affine, target_shape)
                dest = os.path.join(
                    maps_dir, '%s.nii.gz' % safe_name(key.lower()))
                nb.save(img, dest)
    if 'beta_maps' in doc:
        maps_dir = make_dir(model_dir, 'beta_maps')
        for path in doc['beta_maps']:
            basename = os.path.split(path)[1].lower().split('.')[0]
            img = nb.load(path)
            if resample:
                img = resample_img(img, target_affine, target_shape,
                                   copy=False)
            nb.save(img, os.path.join(
                maps_dir, '%s.nii.gz' % safe_name(basename)))
    if 'mask' in doc:
        img = nb.load(doc['mask'])
        if resample:
            # use nearest-neighbour so the resampled mask stays binary
            img = resample_img(img, target_affine, target_shape,
                               interpolation='nearest', copy=False)
        nb.save(img, os.path.join(model_dir, 'mask.nii.gz'))
def transform(self, catalog, subjects_id):
    """Fit subject-level GLMs for every document in *catalog*.

    Writes study-level metadata files (subject/task/scan keys, contrasts,
    condition key) under ``self.data_dir/self.study_id``, encodes the
    design matrices with an ``IntraEncoder``, then computes one GLM per
    subject in parallel and optionally resamples the output maps.

    Parameters
    ----------
    catalog : list of dict
        One document per subject; assumed to carry at least 'tr',
        'contrasts' and 'subject_id' keys — TODO confirm full schema
        against the caller.
    subjects_id : list of str or None
        Subject identifiers; when None they are read from the catalog.

    Returns
    -------
    list
        The per-subject outputs returned by ``_compute_glm``.
    """
    # Work on a deep copy so the caller's catalog is never mutated.
    catalog_ = copy.deepcopy(catalog)
    study_dir = make_dir(self.data_dir, self.study_id, strict=False)
    # subject_key_ may not have been populated as a dict by fit.
    if isinstance(self.subject_key_, dict):
        save_table(self.subject_key_,
                   os.path.join(study_dir, 'subject_key.txt'))
    save_table(self.task_key_, os.path.join(study_dir, 'task_key.txt'),
               merge=self.merge_tasks)
    # NOTE(review): the TR of the first document is assumed to apply to
    # the whole study — confirm all subjects share one TR.
    save_table({'TR': catalog_[0]['tr']},
               os.path.join(study_dir, 'scan_key.txt'))
    model_dir = make_dir(study_dir, 'models', self.model_id, strict=False)
    save_task_contrasts(model_dir, catalog_[0], merge=self.merge_tasks)
    save_condition_key(model_dir, catalog_[0], merge=self.merge_tasks)
    # Use all cores for the encoder unless the caller asked for serial runs.
    n_jobs = -1 if self.n_jobs != 1 else 1
    self.encoder_ = IntraEncoder(hrf_model=self.hrf_model,
                                 drift_model=self.drift_model,
                                 memory=self.memory, n_jobs=n_jobs)
    all_niimgs = self.encoder_.fit_transform(catalog_, subjects_id)
    if subjects_id is None:
        subjects_id = [doc['subject_id'] for doc in catalog]
    # One GLM per subject, fanned out with joblib; the LinearModeler
    # reports into <study>/<subject>/model/<model_id>.
    outputs = Parallel(n_jobs=self.n_jobs)(
        delayed(_compute_glm)(
            LinearModeler(masker=self.masker,
                          reporter=os.path.join(
                              study_dir, subject_id, 'model', self.model_id),
                          glm_model=self.glm_model,
                          hrf_model=self.hrf_model,
                          contrast_type=self.contrast_type,
                          output_z=self.output_z,
                          output_stat=self.output_stat,
                          output_effects=self.output_effects,
                          output_variance=self.output_variance),
            niimgs=niimgs,
            design_matrices=design_matrices,
            contrasts=doc['contrasts'])
        for subject_id, doc, niimgs, design_matrices in zip(
            subjects_id, catalog_, all_niimgs,
            self.encoder_.design_matrices_))
    if self.resample:
        # Resample every produced map in place (presumably _resample_img
        # overwrites the file at the given path — verify).
        Parallel(n_jobs=n_jobs)(
            delayed(_resample_img)(
                doc[dtype][cid], self.target_affine, self.target_shape,)
            for doc in outputs for dtype in doc for cid in doc[dtype])
    return outputs
def transform(self, catalog, subjects_id):
    """Fit one GLM per catalog document and return the per-subject outputs.

    Study-level key files are written first, then an ``IntraEncoder``
    builds the design matrices, and finally the GLMs are computed in
    parallel (with optional resampling of the resulting maps).
    """
    working_catalog = copy.deepcopy(catalog)
    study_dir = make_dir(self.data_dir, self.study_id, strict=False)
    if isinstance(self.subject_key_, dict):
        save_table(self.subject_key_,
                   os.path.join(study_dir, 'subject_key.txt'))
    save_table(self.task_key_, os.path.join(study_dir, 'task_key.txt'),
               merge=self.merge_tasks)
    save_table({'TR': working_catalog[0]['tr']},
               os.path.join(study_dir, 'scan_key.txt'))
    model_dir = make_dir(study_dir, 'models', self.model_id, strict=False)
    save_task_contrasts(model_dir, working_catalog[0],
                        merge=self.merge_tasks)
    save_condition_key(model_dir, working_catalog[0],
                       merge=self.merge_tasks)
    encoder_jobs = -1 if self.n_jobs != 1 else 1
    self.encoder_ = IntraEncoder(hrf_model=self.hrf_model,
                                 drift_model=self.drift_model,
                                 memory=self.memory,
                                 n_jobs=encoder_jobs)
    all_niimgs = self.encoder_.fit_transform(working_catalog, subjects_id)
    if subjects_id is None:
        subjects_id = [item['subject_id'] for item in catalog]

    # Build the per-subject GLM tasks, then run them in parallel.
    glm_tasks = [
        delayed(_compute_glm)(
            LinearModeler(
                masker=self.masker,
                reporter=os.path.join(
                    study_dir, subject_id, 'model', self.model_id),
                glm_model=self.glm_model,
                hrf_model=self.hrf_model,
                contrast_type=self.contrast_type,
                output_z=self.output_z,
                output_stat=self.output_stat,
                output_effects=self.output_effects,
                output_variance=self.output_variance),
            niimgs=niimgs,
            design_matrices=design_matrices,
            contrasts=document['contrasts'])
        for subject_id, document, niimgs, design_matrices in zip(
            subjects_id, working_catalog, all_niimgs,
            self.encoder_.design_matrices_)]
    outputs = Parallel(n_jobs=self.n_jobs)(glm_tasks)

    if self.resample:
        Parallel(n_jobs=encoder_jobs)(
            delayed(_resample_img)(
                document[dtype][cid],
                self.target_affine, self.target_shape,)
            for document in outputs
            for dtype in document
            for cid in document[dtype])
    return outputs
def save_raw(subject_dir, doc):
    """Save raw BOLD runs and the anatomical image into *subject_dir*.

    Each run listed in ``doc['bold']`` (a path, or a sequence of paths
    that are concatenated into a 4D image) is written to
    ``BOLD/<run_label>/bold.nii.gz``; ``doc['anatomy']`` goes to
    ``anatomy/highres001.nii.gz``.
    """
    if 'bold' in doc:
        for run_label, run_data in zip(doc['runs'], doc['bold']):
            # A sequence of volumes is concatenated; a single path is
            # loaded directly.
            if isinstance(run_data, (list, np.ndarray)):
                image = nb.concat_images(run_data, check_affines=False)
            else:
                image = nb.load(run_data)
            run_dir = make_dir(subject_dir, 'BOLD', run_label, strict=False)
            nb.save(image, os.path.join(run_dir, 'bold.nii.gz'))
    if 'anatomy' in doc:
        anat_dir = make_dir(subject_dir, 'anatomy', strict=False)
        nb.save(nb.load(doc['anatomy']),
                os.path.join(anat_dir, 'highres001.nii.gz'))
def save_preproc(model_dir, doc):
    """Save preprocessed BOLD runs, motion files and anatomy to *model_dir*.

    Each run in ``doc['swabold']`` (a path, or a sequence of paths that
    is concatenated) is written to ``BOLD/<run_label>/bold.nii.gz``
    together with its motion parameters (copied when given as a file
    path, otherwise dumped with ``np.savetxt``).  ``doc['wmanatomy']``
    is written to ``anatomy/highres001_brain.nii.gz``.
    """
    # BUGFIX: the original tested isinstance(motion, (str, unicode)),
    # which raises NameError on Python 3 where `unicode` no longer
    # exists.  Build the tuple of string types defensively instead.
    try:
        string_types = (str, unicode)  # noqa: F821 (Python 2)
    except NameError:
        string_types = (str,)
    if 'swabold' in doc:
        run_key = doc['runs']
        for label, session_data, motion in zip(run_key, doc['swabold'],
                                               doc['motion']):
            if isinstance(session_data, (list, np.ndarray)):
                img = nb.concat_images(session_data)
            else:
                img = nb.load(session_data)
            session_dir = make_dir(model_dir, 'BOLD', label)
            nb.save(img, os.path.join(session_dir, 'bold.nii.gz'))
            if isinstance(motion, string_types):
                # motion given as a path to an existing parameter file
                shutil.copyfile(motion,
                                os.path.join(session_dir, 'motion.txt'))
            else:
                # motion given as an array of parameters
                np.savetxt(os.path.join(session_dir, 'motion.txt'), motion)
    if 'wmanatomy' in doc:
        anat_dir = make_dir(model_dir, 'anatomy')
        img = nb.load(doc['wmanatomy'])
        nb.save(img, os.path.join(anat_dir, 'highres001_brain.nii.gz'))
def save_preproc(model_dir, doc):
    """Write preprocessed BOLD, motion parameters and anatomy for a model.

    Runs from ``doc['swabold']`` land in ``BOLD/<label>/bold.nii.gz``
    with a sibling ``motion.txt``; ``doc['wmanatomy']`` is saved as
    ``anatomy/highres001_brain.nii.gz``.
    """
    if 'swabold' in doc:
        triples = zip(doc['runs'], doc['swabold'], doc['motion'])
        for label, session_data, motion in triples:
            image = (nb.concat_images(session_data)
                     if isinstance(session_data, (list, np.ndarray))
                     else nb.load(session_data))
            session_dir = make_dir(model_dir, 'BOLD', label)
            nb.save(image, os.path.join(session_dir, 'bold.nii.gz'))
            motion_path = os.path.join(session_dir, 'motion.txt')
            if isinstance(motion, (str, unicode)):
                # a string is treated as a path to an existing file
                shutil.copyfile(motion, motion_path)
            else:
                np.savetxt(motion_path, motion)
    if 'wmanatomy' in doc:
        anat_dir = make_dir(model_dir, 'anatomy')
        nb.save(nb.load(doc['wmanatomy']),
                os.path.join(anat_dir, 'highres001_brain.nii.gz'))
def transform(self, catalog, subjects_id):
    """Export every catalog document to an OpenfMRI-style directory tree.

    Writes the study-level key files, then fans out per-subject saves
    (maps, preprocessed data, raw data, onsets) with joblib.

    Parameters
    ----------
    catalog : list of dict
        One document per subject; assumed to contain 'tr' and
        'subject_id' — TODO confirm full schema against the caller.
    subjects_id : list of str or None
        Subject identifiers; when None they are read from the catalog.

    Returns
    -------
    list of dict
        A deep copy of *catalog* (the caller's list is never mutated).
    """
    # Deep copy so downstream save_* helpers cannot mutate the input.
    catalog_ = copy.deepcopy(catalog)
    study_dir = make_dir(self.data_dir, self.study_id, strict=False)
    # subject_key_ may not have been populated as a dict by fit.
    if isinstance(self.subject_key_, dict):
        save_table(self.subject_key_,
                   os.path.join(study_dir, 'subject_key.txt'))
    save_table(self.task_key_, os.path.join(study_dir, 'task_key.txt'),
               merge=self.merge_tasks)
    # NOTE(review): TR of the first document is assumed study-wide —
    # confirm all subjects share one TR.
    save_table({'TR': catalog[0]['tr']},
               os.path.join(study_dir, 'scan_key.txt'))
    model_dir = make_dir(study_dir, 'models', self.model_id, strict=False)
    save_task_contrasts(model_dir, catalog_[0], merge=self.merge_tasks)
    save_condition_key(model_dir, catalog_[0], merge=self.merge_tasks)
    if subjects_id is None:
        subjects_id = [doc['subject_id'] for doc in catalog]
    # Statistical maps -> <study>/<subject>/model/<model_id>
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_maps)(os.path.join(study_dir, subject_id, 'model',
                                        self.model_id),
                           doc, self.resample,
                           self.target_affine, self.target_shape)
        for subject_id, doc in zip(subjects_id, catalog_))
    # Preprocessed BOLD + motion + anatomy
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_preproc)(os.path.join(study_dir, subject_id, 'model',
                                           self.model_id), doc)
        for subject_id, doc in zip(subjects_id, catalog_))
    # Raw BOLD + anatomy -> <study>/<subject>
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_raw)(os.path.join(study_dir, subject_id), doc)
        for subject_id, doc in zip(subjects_id, catalog_))
    # Event onsets -> <study>/<subject>/model/<model_id>/onsets
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_onsets)(os.path.join(study_dir, subject_id, 'model',
                                          self.model_id, 'onsets'), doc)
        for subject_id, doc in zip(subjects_id, catalog_))
    return catalog_
def transform(self, catalog, subjects_id):
    """Write each catalog document into an OpenfMRI-style layout.

    First writes the study-level key files, then saves maps,
    preprocessed data, raw data and onsets for every subject, each
    stage fanned out over joblib workers.  Returns a deep copy of the
    catalog, leaving the caller's list untouched.
    """
    docs = copy.deepcopy(catalog)
    study_dir = make_dir(self.data_dir, self.study_id, strict=False)
    if isinstance(self.subject_key_, dict):
        save_table(self.subject_key_,
                   os.path.join(study_dir, 'subject_key.txt'))
    save_table(self.task_key_, os.path.join(study_dir, 'task_key.txt'),
               merge=self.merge_tasks)
    save_table({'TR': catalog[0]['tr']},
               os.path.join(study_dir, 'scan_key.txt'))
    model_dir = make_dir(study_dir, 'models', self.model_id, strict=False)
    save_task_contrasts(model_dir, docs[0], merge=self.merge_tasks)
    save_condition_key(model_dir, docs[0], merge=self.merge_tasks)
    if subjects_id is None:
        subjects_id = [entry['subject_id'] for entry in catalog]

    def _subject_model_dir(sid):
        # <study>/<subject>/model/<model_id>
        return os.path.join(study_dir, sid, 'model', self.model_id)

    pairs = list(zip(subjects_id, docs))
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_maps)(_subject_model_dir(sid), entry, self.resample,
                           self.target_affine, self.target_shape)
        for sid, entry in pairs)
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_preproc)(_subject_model_dir(sid), entry)
        for sid, entry in pairs)
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_raw)(os.path.join(study_dir, sid), entry)
        for sid, entry in pairs)
    Parallel(n_jobs=self.n_jobs)(
        delayed(save_onsets)(
            os.path.join(_subject_model_dir(sid), 'onsets'), entry)
        for sid, entry in pairs)
    return docs
def save_onsets(onsets_dir, doc, merge=False):
    """Write per-condition onset files for each run in *doc*.

    For every session listed in ``doc['onsets']``, each onset row
    ``(cond_id, value, ...)`` is appended (space-separated) to
    ``<onsets_dir>/<session_id>/<cond_id>.txt``.  Unless *merge* is
    true, a session's directory is wiped first so the append mode
    starts from clean files.
    """
    run_key = doc.get('runs')
    if 'onsets' not in doc:
        return
    for session_id, session in zip(run_key, doc['onsets']):
        if not merge:
            # start from a clean directory so 'a' mode does not
            # accumulate rows from a previous export
            del_dir(onsets_dir, session_id)
        session_dir = make_dir(onsets_dir, session_id, strict=False)
        for onset in session:
            cond_id = onset[0]
            row = [str(value) for value in onset[1:]]
            target = os.path.join(session_dir, '%s.txt' % cond_id)
            with open(target, 'a') as stream:
                csv.writer(stream, delimiter=' ',
                           quotechar='"').writerow(row)