def download_subset():
    """Download fMRI data for a small subset (first 4) of HCP subjects.

    Downloads run in parallel via joblib; failures are recorded under
    ``<data_dir>/failures`` (the directory is created if missing).
    Returns nothing; the work is purely side-effecting (network + disk).
    """
    data_dir = get_data_dirs()[0]
    error_dir = join(data_dir, 'failures')
    # exist_ok avoids the check-then-create race of exists() + makedirs()
    os.makedirs(error_dir, exist_ok=True)
    n_jobs = 4
    # Only the first 4 subjects: this is a quick-start / smoke-test download.
    subjects = fetch_subject_list()[:4]
    Parallel(n_jobs=n_jobs, verbose=10)(
        delayed(download_single)(subject, verbose=2)
        for subject in subjects)
def contrasts():
    """Compute task contrasts for every HCP subject and every task.

    Fans out ``make_contrasts(subject, task)`` over the full cartesian
    product of subjects x TASK_LIST with joblib. Failures are recorded
    under ``<data_dir>/failures`` (created if missing). Existing results
    are overwritten (``overwrite=True``). Purely side-effecting.
    """
    data_dir = get_data_dirs()[0]
    error_dir = join(data_dir, 'failures')
    # exist_ok avoids the check-then-create race of exists() + makedirs()
    os.makedirs(error_dir, exist_ok=True)
    n_jobs = 36
    subjects = fetch_subject_list()
    tasks = TASK_LIST
    Parallel(n_jobs=n_jobs, verbose=10)(
        delayed(make_contrasts)(subject, task, overwrite=True, verbose=1)
        for subject in subjects
        for task in tasks)
"""Clean FSL processing artifacts for every HCP subject, with progress output."""
from hcp_builder.dataset import fetch_subject_list
from hcp_builder.utils.fsl import clean_artifacts

subject_list = fetch_subject_list()
total = len(subject_list)
# start=1 so the progress counter reads "1 / total" .. "total / total"
# instead of starting at 0 and never reaching total.
for i, subject in enumerate(subject_list, start=1):
    # 'Cleaning' (was misspelled 'Clearning' in the original message)
    print('Cleaning subject %s, %i / %i' % (subject, i, total))
    clean_artifacts(subject)
import numpy as np from joblib import Parallel, delayed, Memory import matplotlib.pyplot as plt from nilearn.input_data import NiftiMasker from pybold.bold_signal import bd from pybold.hrf_model import spm_hrf, MIN_DELTA, MAX_DELTA from pybold.utils import inf_norm from hrf_estimation.rank_one_ import glm as pglm from hcp_builder.dataset import fetch_subject_list from utils import (create_result_dir, get_hcp_fmri_fname, get_protocol_hcp, mask_n_max, plot_trial_z_map, hrf_coef_to_hrf, TR_HCP, N_SCANS_HCP, DUR_RUN_HCP) # usefull functions if not fetch_subject_list(): def fetch_subject_list(): """Fix troubles cause by bug/wrong usage with hcp_builder.""" return [100206, 996782] pglm_cached = Memory('./.cachedir').cache(pglm, ignore=['n_jobs', 'verbose']) def _bd(voxels, lbda, hrf_dur, n_jobs, verbose=10): """Blind deconvolution""" res = Parallel(n_jobs=n_jobs, verbose=verbose)(delayed(bd)(voxel, t_r=TR_HCP, lbda=lbda,