def fetch_masker(masker_path, language, path_to_fmridata, path_to_input, smoothing_fwhm=None, logger=None):
    """Fetch, or compute if needed, a global masker from all subjects of a given language.

    The masker is cached on disk as a NIfTI mask image plus a YAML parameter file
    (both derived from ``masker_path``); if both exist they are reloaded instead
    of recomputed.

    Arguments:
        - masker_path: str - path prefix of the cached masker ('.nii.gz' and '.yml' are appended)
        - language: str
        - path_to_fmridata: str
        - path_to_input: str
        - smoothing_fwhm: int (optional) - smoothing passed to the masker computation
        - logger: Logger (optional) - progress reporter; may be None

    Returns:
        - a fitted MultiNiftiMasker
    """
    def _report(message):
        # BUG FIX: logger defaults to None, but the original called
        # logger.report_state() unconditionally and crashed with
        # AttributeError whenever no logger was supplied.
        if logger is not None:
            logger.report_state(message)

    if os.path.exists(masker_path + '.nii.gz') and os.path.exists(masker_path + '.yml'):
        # Cache hit: rebuild the masker from the saved parameters + mask image.
        _report(" loading existing masker...")
        params = read_yaml(masker_path + '.yml')
        mask_img = nib.load(masker_path + '.nii.gz')
        masker = MultiNiftiMasker()
        masker.set_params(**params)
        masker.fit([mask_img])
    else:
        # Cache miss: gather the fMRI runs of every subject for this language
        # and compute a global masker over all of them.
        _report(" recomputing masker...")
        fmri_runs = {}
        # 'subject_id' avoids shadowing the builtin 'id'.
        subjects = [get_subject_name(subject_id) for subject_id in possible_subjects_id(language)]
        for subject in subjects:
            _, fmri_paths = fetch_data(path_to_fmridata, path_to_input, subject, language)
            fmri_runs[subject] = fmri_paths
        masker = compute_global_masker(list(fmri_runs.values()), smoothing_fwhm=smoothing_fwhm)
        # Keep only the simple, YAML-serializable parameters that
        # MultiNiftiMasker.set_params accepts when the cache is reloaded.
        kept_keys = [
            'detrend', 'dtype', 'high_pass', 'low_pass', 'mask_strategy',
            'memory_level', 'n_jobs', 'smoothing_fwhm', 'standardize',
            't_r', 'verbose',
        ]
        params = masker.get_params()
        params = {key: params[key] for key in kept_keys}
        nib.save(masker.mask_img_, masker_path + '.nii.gz')
        save_yaml(params, masker_path + '.yml')
    return masker
def create_raw_rest_data(imgs_list, root, raw_dir, masker_params=None, n_jobs=1, mock=False, memory=Memory(cachedir=None), overwrite=False):
    """Unmask a list of resting-state images into ``raw_dir`` and record metadata.

    Fits a MultiNiftiMasker (or reuses a provided mask), unmasks every image in
    parallel, then writes the updated listing (``data.csv``), the mask image
    (``mask_img.nii.gz``) and the masker parameters (``masker.json``) unless
    ``mock`` is set.

    Parameters
    ----------
    imgs_list : DataFrame with columns 'filename' and optionally 'confounds'
    root : str - base path stripped/used by _unmask_single_img
    raw_dir : str - output directory (created if missing)
    masker_params : dict, optional - extra MultiNiftiMasker parameters
    n_jobs : int - parallel workers for unmasking
    mock : bool - if True, skip writing outputs
    memory : joblib Memory - cache for the masker
        NOTE(review): evaluated once at definition time (mutable-default
        pattern); kept as-is for signature compatibility.
    overwrite : bool - passed through to _unmask_single_img

    Returns
    -------
    None
    """
    if masker_params is None:
        masker_params = {}
    masker = MultiNiftiMasker(verbose=1, memory=memory, memory_level=1, **masker_params)
    # If no mask was supplied via masker_params, fit it from the data.
    if masker.mask_img is None:
        masker.fit(imgs_list['filename'])
    else:
        masker.fit()
    if 'confounds' in imgs_list.columns:
        confounds = imgs_list['confounds']
        # BUG FIX: DataFrame.rename is not in-place; the original discarded the
        # renamed frame, so the 'confounds' column was never actually renamed
        # (unlike the correctly-assigned 'filename' rename below).
        imgs_list = imgs_list.rename(columns={'confounds': 'orig_confounds'})
    else:
        confounds = repeat(None)
    os.makedirs(raw_dir, exist_ok=True)
    filenames = Parallel(n_jobs=n_jobs)(delayed(_unmask_single_img)(
        masker, imgs, confounds, root, raw_dir, mock=mock, overwrite=overwrite)
        for imgs, confounds in zip(imgs_list['filename'], confounds))
    imgs_list = imgs_list.rename(columns={'filename': 'orig_filename'})
    imgs_list = imgs_list.assign(filename=filenames)
    imgs_list = imgs_list.assign(confounds=None)
    if not mock:
        imgs_list.to_csv(os.path.join(raw_dir, 'data.csv'), mode='w+')
        mask_img_file = os.path.join(raw_dir, 'mask_img.nii.gz')
        masker.mask_img_.to_filename(mask_img_file)
        # Strip runtime-only parameters before serializing; point 'mask_img'
        # at the saved file so the masker can be reconstructed later.
        params = masker.get_params()
        params.pop('memory')
        params.pop('memory_level')
        params.pop('n_jobs')
        params.pop('verbose')
        params['mask_img'] = mask_img_file
        # BUG FIX: the original passed an unclosed open(...) handle to
        # json.dump, leaking the file descriptor and risking unflushed output.
        with open(os.path.join(raw_dir, 'masker.json'), 'w+') as fh:
            json.dump(params, fh)