def get_bids_df(bids_dir, scans_only=None, keep_defaced=False):
    """Return a pandas DataFrame describing the files of a BIDS dataset.

    Parameters
    ----------
    bids_dir : str or pathlib.Path
        Root directory of the BIDS dataset.
    scans_only : bool, optional
        If truthy, keep only ``.nii.gz`` scan files (excluding anything whose
        path matches ``.git``) and add a ``json_path`` column pointing at each
        scan's JSON sidecar.
    keep_defaced : bool, optional
        If False (default), drop rows whose path contains ``"defaced"``.

    Returns
    -------
    pandas.DataFrame
        One row per file as indexed by the pybids layout.
    """
    # BIDSLayout expects a plain string path.
    if isinstance(bids_dir, Path):
        bids_dir = bids_dir.as_posix()

    layout = BIDSLayout(bids_dir)
    df_pybids = layout.as_data_frame()

    # Use plain boolean masks instead of DataFrame.query with `.str`
    # accessors: the query form needs engine='python' and is fragile.
    # NOTE: both patterns are regexes, as in the original ('.git' matches any
    # character followed by 'git').
    if not keep_defaced:
        df_pybids = df_pybids.loc[~df_pybids.path.str.contains('defaced')]

    if scans_only:
        is_scan = df_pybids.path.str.contains('nii.gz')
        in_git = df_pybids.path.str.contains('.git')
        df_pybids = df_pybids.loc[is_scan & ~in_git].copy()
        # Sidecar path: drop the '.nii.gz' double extension ('.'-split, drop
        # the last two pieces) and append '.json'.
        # NOTE(review): the collapsed original is ambiguous about whether this
        # assignment was inside `if scans_only:`; sidecars only make sense for
        # scans, so it is kept here — confirm against the original layout.
        df_pybids['json_path'] = df_pybids.path.apply(
            lambda x: Path(''.join([*x.split('.')[:-2], '.json'])))

    return df_pybids
def main(sourcedata, derivatives, subject, session, tmp_dir):
    """Fit run-wise first-level GLMs for one subject/session and combine them
    in a fixed-effects second-level model.

    Parameters
    ----------
    sourcedata : str
        Root of the raw BIDS dataset (provides the events files).
    derivatives : str
        Root of the derivatives tree (preprocessed BOLD, confounds, compcor,
        mask); outputs are written under ``<derivatives>/modelfitting/glm8``.
    subject, session : str
        BIDS subject and session labels to process.
    tmp_dir : str
        Unused here; kept for interface compatibility with callers.
    """
    sourcedata_layout = BIDSLayout(sourcedata)
    sourcedata_df = sourcedata_layout.as_data_frame()
    events = sourcedata_df[(sourcedata_df['suffix'] == 'events')
                           & (sourcedata_df['subject'] == subject)
                           & (sourcedata_df['session'] == session)]

    derivatives_layout = BIDSLayout(os.path.join(derivatives), validate=False)
    derivatives_df = derivatives_layout.as_data_frame()
    bold = derivatives_df[(derivatives_df['suffix'] == 'preproc')
                          & (derivatives_df['subject'] == subject)
                          & (derivatives_df['session'] == session)]
    confounds = derivatives_df[(derivatives_df['suffix'] == 'confounds')
                               & (derivatives_df['subject'] == subject)
                               & (derivatives_df['session'] == session)]
    compcor = derivatives_df[(derivatives_df['suffix'] == 'compcor')
                             & (derivatives_df['subject'] == subject)
                             & (derivatives_df['session'] == session)]

    mask = derivatives_layout.get(subject=subject,
                                  session=session,
                                  suffix='mask',
                                  return_type='file')[0]

    # One row per run, pairing events with BOLD/confounds/compcor paths.
    df = events.merge(bold,
                      on=['subject', 'session', 'run'],
                      suffixes=('_events', '_bold'))
    confounds = confounds.rename(columns={'path': 'confounds'})
    df = df.merge(confounds[['subject', 'session', 'run', 'confounds']])
    compcor = compcor.rename(columns={'path': 'compcor'})
    df = df.merge(compcor[['subject', 'session', 'run', 'compcor']])
    df.sort_values('run', inplace=True)

    print(df.iloc[0])

    models = []
    results_dir = None
    for ix, row in df.iterrows():
        results_dir = os.path.join(derivatives, 'modelfitting', 'glm8',
                                   'sub-{}'.format(row['subject']))
        if 'session' in row:
            results_dir = os.path.join(results_dir,
                                       'ses-{}'.format(row['session']))
        results_dir = os.path.join(results_dir, 'func')
        os.makedirs(results_dir, exist_ok=True)

        # Distinct names so the outer `confounds`/`compcor` DataFrames are
        # not shadowed. `.bfill()` replaces the deprecated
        # fillna(method='bfill'); `axis=1` must be a keyword in pandas >= 2.
        run_confounds = pd.read_table(row.confounds).bfill()
        run_compcor = pd.read_table(row.compcor).bfill()
        run_confounds = pd.concat((run_confounds, run_compcor), axis=1)

        # Standardize, then reduce to 6 PCA components to keep the design
        # matrix small.
        run_confounds -= run_confounds.mean()
        run_confounds /= run_confounds.std()
        pca = decomposition.PCA(n_components=6)
        confounds_trans = pd.DataFrame(
            pca.fit_transform(run_confounds),
            columns=['pca_{}'.format(i) for i in range(6)])

        print('Fitting {}'.format(row['path_bold']))
        model = FirstLevelModel(t_r=4,
                                signal_scaling=False,
                                subject_label=int(row['run']),
                                mask_img=mask)

        paradigm = pd.read_table(row['path_events'])
        paradigm_ = paradigm.copy()
        # Two regressor families: a common 'stimulation' regressor for all
        # trials, plus an 'eye' regressor modulated +1/-1 for left/right.
        paradigm['trial_type'] = 'stimulation'
        paradigm['modulation'] = 1
        paradigm_['modulation'] = paradigm_.trial_type.map({
            'eye_L': 1,
            'eye_R': -1
        })
        paradigm_['trial_type'] = 'eye'
        paradigm = pd.concat((paradigm, paradigm_), ignore_index=True)

        model.fit(row['path_bold'], paradigm, confounds=confounds_trans)

        row['run'] = int(row['run'])
        row = dict(row)

        left_right = model.compute_contrast('eye', output_type='z_score')
        left_right.to_filename(
            os.path.join(
                results_dir,
                'sub-{subject}_ses-{session}_task-{task_events}_run-{run:02d}_left_over_right_zmap.nii.gz'
                .format(**row)))
        left_right = model.compute_contrast('eye', output_type='effect_size')
        left_right.to_filename(
            os.path.join(
                results_dir,
                'sub-{subject}_ses-{session}_task-{task_events}_run-{run:02d}_left_over_right_psc.nii.gz'
                .format(**row)))

        stimulation = model.compute_contrast('stimulation',
                                             output_type='effect_size')
        stimulation.to_filename(
            os.path.join(
                results_dir,
                'sub-{subject}_ses-{session}_task-{task_events}_run-{run:02d}_stimulation_psc.nii.gz'
                .format(**row)))
        stimulation = model.compute_contrast('stimulation',
                                             output_type='z_score')
        stimulation.to_filename(
            os.path.join(
                results_dir,
                'sub-{subject}_ses-{session}_task-{task_events}_run-{run:02d}_stimulation_zmap.nii.gz'
                .format(**row)))

        models.append(model)

    # Fixed-effects combination across runs; writes into the last run's
    # results_dir (same directory for every run of this subject/session).
    second_level_model = SecondLevelModel(mask_img=mask)
    second_level_model.fit(models)

    left_right_group = second_level_model.compute_contrast(
        first_level_contrast='eye', output_type='z_score')
    left_right_group.to_filename(
        os.path.join(
            results_dir,
            'sub-{}_ses-{}_left_over_right_zmap.nii.gz'.format(
                row['subject'], row['session'])))
    left_right_group = second_level_model.compute_contrast(
        first_level_contrast='eye', output_type='effect_size')
    left_right_group.to_filename(
        os.path.join(
            results_dir,
            'sub-{}_ses-{}_left_over_right_effect_size.nii.gz'.format(
                row['subject'], row['session'])))

    # BUG FIX: the original wrote `left_right_group` into the stimulation
    # output files; write the stimulation contrast images instead.
    stimulation_group = second_level_model.compute_contrast(
        first_level_contrast='stimulation', output_type='z_score')
    stimulation_group.to_filename(
        os.path.join(
            results_dir,
            'sub-{}_ses-{}_stimulation_zmap.nii.gz'.format(
                row['subject'], row['session'])))
    stimulation_group = second_level_model.compute_contrast(
        first_level_contrast='stimulation', output_type='effect_size')
    stimulation_group.to_filename(
        os.path.join(
            results_dir,
            'sub-{}_ses-{}_stimulation_effect_size.nii.gz'.format(
                row['subject'], row['session'])))
def main(sourcedata, derivatives, subject, session):
    """Run surface-wise (laminar) OLS GLMs per run for one subject/session and
    pickle the resulting betas and t-values.

    Parameters
    ----------
    sourcedata : str
        Root of the raw BIDS dataset (events are read from a fixed template
        run, identical for all subjects).
    derivatives : str
        Derivatives root: provides confounds/compcor TSVs and the
        depth-sampled surface data; results go to
        ``<derivatives>/surfacewise_glms``.
    subject, session : str
        BIDS subject and session labels to process.
    """
    derivatives_layout = BIDSLayout(op.join(derivatives), validate=False)
    derivatives_df = derivatives_layout.as_data_frame()

    confounds = derivatives_df[(derivatives_df['suffix'] == 'confounds')
                               & (derivatives_df['subject'] == subject)
                               & (derivatives_df['session'] == session)].set_index(
                                   ['subject', 'session', 'task', 'run'])
    compcor = derivatives_df[(derivatives_df['suffix'] == 'compcor')
                             & (derivatives_df['subject'] == subject)
                             & (derivatives_df['session'] == session)].set_index(
                                 ['subject', 'session', 'task', 'run'])

    fns = glob.glob(
        op.join(
            derivatives,
            'sampled_giis/sub-{subject}/ses-{session}/func/sub-{subject}_ses-{session}_*.gii'
            .format(**locals())))

    # Events are the same for all subjects
    events = pd.read_table(
        op.join(
            sourcedata,
            'sub-tk/ses-odc2/func/sub-tk_ses-odc2_task-checkerboard_acq-07_run-02_events.tsv'))

    df = pd.read_pickle(
        op.join(
            derivatives,
            'depth_sampled_surfaces/sub-{subject}/sub-{subject}_ses-{session}_depth_sampled_data.pkl.gz'
            .format(**locals())))

    print(df.head())
    df = df.loc[:, 'psc']
    df.index = df.index.droplevel('acq')

    # 66 volumes at TR = 4 s.
    frametimes = np.linspace(0, 66 * 4, 66, endpoint=False)
    # NOTE(review): drift_order=None with drift_model=None — nistats may
    # expect an int for drift_order; confirm this matches the library version.
    X = make_first_level_design_matrix(frametimes,
                                       events,
                                       drift_order=None,
                                       drift_model=None)

    ts = []
    for ix, d in df.loc[:, ['V1l', 'V1r']].groupby(
            ['subject', 'session', 'task', 'run']):
        print(ix)
        # `.bfill()` replaces the deprecated fillna(method='bfill');
        # `axis=1` must be a keyword in pandas >= 2.
        conf = pd.read_table(confounds.loc[ix].path).bfill()
        comc = pd.read_table(compcor.loc[ix].path).bfill()
        conf = pd.concat((conf, comc), axis=1)

        # Standardize and reduce to 6 PCA components.
        conf -= conf.mean()
        conf /= conf.std()
        pca = decomposition.PCA(n_components=6)
        confounds_trans = pd.DataFrame(
            pca.fit_transform(conf),
            columns=['pca_{}'.format(i) for i in range(6)])
        confounds_trans.index = X.index
        X_ = pd.concat((X, confounds_trans), axis=1)

        # BUG FIX: the original fitted with the confound-free `X`, leaving
        # the PCA confound regressors unused. Task regressors remain columns
        # 0 and 1 of X_, so the theta/t indexing below is unaffected.
        labels, results = run_glm(d, X_, noise_model='ols')
        results = results[0.0]

        # Rows: (psc, eye_L), (psc, eye_R), (t, eye_L), (t, eye_R) —
        # matching the order theta[0], theta[1], t(0), t(1).
        ts.append(
            pd.DataFrame(
                [results.theta[0], results.theta[1],
                 results.t(0), results.t(1)],
                index=pd.MultiIndex.from_product(
                    [[e] for e in ix] + [['psc', 't'], ['eye_L', 'eye_R']],
                    names=['subject', 'session', 'task', 'run', 'type',
                           'contrast']),
                columns=d.columns))

    ts = pd.concat(ts, axis=0)

    results_dir = op.join(derivatives, 'surfacewise_glms', 'sub-{subject}',
                          'ses-{session}', 'func').format(**locals())
    os.makedirs(results_dir, exist_ok=True)

    ts.to_pickle(
        op.join(results_dir,
                'sub-{subject}_ses-{session}_desc-laminarglms.pkl').format(
                    **locals()))