def test_SurfaceTransform_outputs():
    """Yield nose-style checks that each SurfaceTransform output trait
    carries the expected nipype metadata."""
    expected_metadata = {
        'out_file': {},
    }
    trait_table = SurfaceTransform.output_spec().traits()
    for trait_name, metadata in expected_metadata.items():
        trait = trait_table[trait_name]
        for meta_name, expected_value in metadata.items():
            yield assert_equal, getattr(trait, meta_name), expected_value
def test_SurfaceTransform_inputs():
    """Yield nose-style checks that each SurfaceTransform input trait
    carries the expected nipype metadata (argstr, mandatory, xor, ...)."""
    expected_metadata = {
        'args': {'argstr': '%s'},
        'environ': {'nohash': True, 'usedefault': True},
        'hemi': {'argstr': '--hemi %s', 'mandatory': True},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'out_file': {'argstr': '--tval %s', 'genfile': True},
        'reshape': {'argstr': '--reshape'},
        'reshape_factor': {'argstr': '--reshape-factor'},
        'source_annot_file': {'argstr': '--sval-annot %s',
                              'mandatory': True,
                              'xor': ['source_file']},
        'source_file': {'argstr': '--sval %s',
                        'mandatory': True,
                        'xor': ['source_annot_file']},
        'source_subject': {'argstr': '--srcsubject %s', 'mandatory': True},
        'source_type': {'argstr': '--sfmt %s', 'requires': ['source_file']},
        'subjects_dir': {},
        'target_ico_order': {'argstr': '--trgicoorder %d'},
        'target_subject': {'argstr': '--trgsubject %s', 'mandatory': True},
        'target_type': {'argstr': '--tfmt %s'},
        'terminal_output': {'mandatory': True, 'nohash': True},
    }
    trait_table = SurfaceTransform.input_spec().traits()
    for trait_name, metadata in expected_metadata.items():
        trait = trait_table[trait_name]
        for meta_name, expected_value in metadata.items():
            yield assert_equal, getattr(trait, meta_name), expected_value
def test_SurfaceTransform_inputs():
    """Yield nose-style checks that each SurfaceTransform input trait
    carries the expected nipype metadata.

    NOTE(review): this redefines ``test_SurfaceTransform_inputs`` from
    earlier in the file and shadows it — likely an artifact of
    regeneration/concatenation; consider keeping only one copy.
    """
    expected_metadata = {
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'hemi': {'mandatory': True, 'argstr': '--hemi %s'},
        'out_file': {'argstr': '--tval %s', 'genfile': True},
        'source_subject': {'mandatory': True, 'argstr': '--srcsubject %s'},
        'reshape': {'argstr': '--reshape'},
        'args': {'argstr': '%s'},
        'target_ico_order': {'argstr': '--trgicoorder %d'},
        'target_type': {'argstr': '--tfmt %s'},
        'reshape_factor': {'argstr': '--reshape-factor'},
        'source_type': {'requires': ['source_file'], 'argstr': '--sfmt %s'},
        'terminal_output': {'mandatory': True, 'nohash': True},
        'environ': {'nohash': True, 'usedefault': True},
        'subjects_dir': {},
        'source_file': {'mandatory': True,
                        'argstr': '--sval %s',
                        'xor': ['source_annot_file']},
        'target_subject': {'mandatory': True, 'argstr': '--trgsubject %s'},
        'source_annot_file': {'mandatory': True,
                              'argstr': '--sval-annot %s',
                              'xor': ['source_file']},
    }
    trait_table = SurfaceTransform.input_spec().traits()
    for trait_name, metadata in expected_metadata.items():
        trait = trait_table[trait_name]
        for meta_name, expected_value in metadata.items():
            yield assert_equal, getattr(trait, meta_name), expected_value
def main(subject, sourcedata, derivatives, smoothed, n_jobs=5):
    """Fit a first-level OLS GLM on fsaverage6 surface BOLD data for one
    subject, write per-stimulus parameter-estimate GIFTIs, and resample
    them to fsaverage via FreeSurfer's SurfaceTransform (mri_surf2surf).

    Parameters
    ----------
    subject : BIDS subject label; used as an integer index into the layouts.
    sourcedata : path to the raw BIDS dataset (events TSVs, TR metadata).
    derivatives : path holding 'freesurfer', 'fmriprep' and optionally
        'smoothed' derivative datasets; outputs are written under it too.
    smoothed : if truthy, read BOLD GIFTIs from the 'smoothed' derivatives
        and write into 'glm_stim1_surf_smoothed'; otherwise read fmriprep
        output and write into 'glm_stim1_surf'.
    n_jobs : number of parallel jobs forwarded to run_glm.
    """
    # FreeSurfer tools (SurfaceTransform below) locate subjects via this env var.
    os.environ['SUBJECTS_DIR'] = op.join(derivatives, 'freesurfer')
    source_layout = BIDSLayout(sourcedata, validate=False, derivatives=False)
    fmriprep_layout = BIDSLayout(op.join(derivatives, 'fmriprep'),
                                 validate=False)
    # Pick the surface BOLD source: pre-smoothed derivatives or raw fmriprep.
    if smoothed:
        bold_layout = BIDSLayout(op.join(derivatives, 'smoothed'),
                                 validate=False)
        bold = bold_layout.get(subject=subject, extension='func.gii')
    else:
        bold = fmriprep_layout.get(subject=subject, extension='func.gii')
    # Keep only fsaverage6-space files, ordered by run number.
    bold = sorted([e for e in bold if 'fsaverage6' in e.filename],
                  key=lambda x: x.run)
    # Reduce the fmriprep layout to the confound-regressor TSVs, indexed by
    # (subject, run) for quick per-run lookup.
    fmriprep_layout_df = fmriprep_layout.to_df()
    fmriprep_layout_df = fmriprep_layout_df[~fmriprep_layout_df.subject.isnull()]
    fmriprep_layout_df['subject'] = fmriprep_layout_df.subject.astype(int)
    fmriprep_layout_df = fmriprep_layout_df[np.in1d(fmriprep_layout_df.suffix,
                                                    ['regressors'])]
    fmriprep_layout_df = fmriprep_layout_df[np.in1d(
        fmriprep_layout_df.extension, ['tsv'])]
    fmriprep_layout_df = fmriprep_layout_df.set_index(['subject', 'run'])
    # Same treatment for the events files from the raw dataset.
    events_df = source_layout.to_df()
    events_df = events_df[events_df.suffix == 'events']
    events_df['subject'] = events_df['subject'].astype(int)
    events_df = events_df.set_index(['subject', 'run'])
    # Repetition time read from the BIDS metadata of the first run.
    tr = source_layout.get_tr(bold[0].path)
    if smoothed:
        base_dir = op.join(derivatives, 'glm_stim1_surf_smoothed',
                           f'sub-{subject}', 'func')
    else:
        base_dir = op.join(derivatives, 'glm_stim1_surf',
                           f'sub-{subject}', 'func')
    if not op.exists(base_dir):
        os.makedirs(base_dir)
    for b in bold:
        run = b.entities['run']
        # In these GIFTI filenames the BIDS 'suffix' entity carries the
        # hemisphere label ('L' or 'R') — presumably; confirm against the
        # naming scheme of the upstream pipeline.
        hemi = b.entities['suffix']
        # print(run)
        # Load this run's confounds; `to_include` is a module-level list of
        # confound column names defined elsewhere in the file (not visible
        # here).
        confounds_ = fmriprep_layout_df.loc[(subject, run), 'path'].iloc[0]
        confounds_ = pd.read_csv(confounds_, sep='\t')
        confounds_ = confounds_[to_include].fillna(method='bfill')
        # Standardize, then compress the confounds into 7 principal
        # components used as nuisance regressors.
        pca = PCA(n_components=7)
        confounds_ -= confounds_.mean(0)
        confounds_ /= confounds_.std(0)
        confounds_pca = pca.fit_transform(confounds_[to_include])
        events_ = events_df.loc[(subject, run), 'path']
        events_ = pd.read_csv(events_, sep='\t')
        # Collapse all 'stim2*' trial types into a single 'stim2' condition;
        # 'stim1-*' conditions are kept distinct and modeled separately.
        events_['trial_type'] = events_['trial_type'].apply(
            lambda x: 'stim2' if x.startswith('stim2') else x)
        # One frame time per confound row (i.e. per volume).
        frametimes = np.arange(0, tr * len(confounds_), tr)
        X = make_first_level_design_matrix(
            frametimes, events_, add_regs=confounds_pca,
            add_reg_names=[f'confound_pca.{i}' for i in range(1, 8)])
        # Time x vertex data, converted to percent signal change and
        # mean-centered per vertex.
        Y = surface.load_surf_data(b.path).T
        Y = (Y / Y.mean(0) * 100)
        Y -= Y.mean(0)
        fit = run_glm(Y, X, noise_model='ols', n_jobs=n_jobs)
        # run_glm returns (labels, results-by-label); with 'ols' everything
        # sits under the single label 0.0.
        r = fit[1][0.0]
        betas = pd.DataFrame(r.theta, index=X.columns)
        # Collect the betas for the six 'stim1-<n>' regressors.
        stim1 = []
        for stim in 5, 7, 10, 14, 20, 28:
            stim1.append(betas.loc[f'stim1-{stim}'])
        # Positional axis argument: concat along columns, then transpose to
        # (n_stimuli, n_vertices).
        result = pd.concat(stim1, 1).T
        print(result.shape)
        # One GIFTI data array per stimulus level, reusing the input header.
        pes = nb.gifti.GiftiImage(header=nb.load(b.path).header, darrays=[
            nb.gifti.GiftiDataArray(row) for ix, row in result.iterrows()
        ])
        fn_template = op.join(
            base_dir,
            'sub-{subject}_run-{run}_space-{space}_desc-stims1_hemi-{hemi}.pe.gii'
        )
        space = 'fsaverage6'
        # NOTE: format(**locals()) pulls subject/run/space/hemi from this
        # scope, so `space` must be set before each format call.
        pes.to_filename(fn_template.format(**locals()))
        # Resample the PE map from fsaverage6 to fsaverage.
        transformer = SurfaceTransform(source_subject='fsaverage6',
                                       target_subject='fsaverage',
                                       hemi={'L': 'lh', 'R': 'rh'}[hemi])
        transformer.inputs.source_file = pes.get_filename()
        space = 'fsaverage'
        transformer.inputs.out_file = fn_template.format(**locals())
        # Disable on MAC OS X (SIP problem)
        transformer.run()
def main(subject, sourcedata, trialwise, clip=(-100, 100)):
    """Leave-one-run-out cross-validated Gaussian receptive-field fit on
    surface GLM parameter estimates, writing per-fold parameter and r2
    GIFTIs (fsaverage6) and fsaverage-resampled copies.

    Parameters
    ----------
    subject : BIDS subject label.
    sourcedata : dataset root; derivatives are read from
        '<sourcedata>/derivatives'.
    trialwise : if truthy, read trialwise GLM output
        ('glm_stim1_trialwise_surf'); otherwise 'glm_stim1_surf'.
    clip : (low, high) bounds applied to the loaded parameter estimates to
        tame outliers before fitting.
    """
    derivatives = op.join(sourcedata, 'derivatives')
    if trialwise:
        layout = BIDSLayout(op.join(derivatives, 'glm_stim1_trialwise_surf'),
                            validate=False)
    else:
        layout = BIDSLayout(op.join(derivatives, 'glm_stim1_surf'),
                            validate=False)
    for hemi in ['L', 'R']:
        # Hemisphere label is stored in the BIDS 'suffix' entity of the PE
        # files — presumably; confirm against the writer's naming scheme.
        pes = layout.get(subject=subject, suffix=hemi)
        print(pes)
        df = []
        for pe in pes:
            # Clip PEs to [clip[0], clip[1]]; rows are stimuli, columns
            # vertices.
            d = pd.DataFrame(
                np.clip(surface.load_surf_data(pe.path).T, clip[0], clip[1]))
            df.append(d)
            # d is appended before these columns are added, but the
            # mutation is in place, so the listed frame gets them too.
            d['run'] = pe.run
            d['number'] = np.log([5, 7, 10, 14, 20, 28])
        df = pd.concat(df).set_index(['run', 'number'])
        # Only fit vertices without NaNs in any run.
        mask = ~df.isnull().any(0)
        print('fitting {} time series'.format(mask.sum()))
        # Leave-one-run-out cross-validation.
        for run in df.index.unique('run'):
            train = df.drop(run)
            test = df.loc[run]
            model = GaussianReceptiveFieldModel()
            costs, parameters, predictions = model.optimize(
                train.index.get_level_values('number').values,
                train.loc[:, mask].values)
            base_dir = op.join(derivatives, 'modelfit_surf_cv',
                               f'sub-{subject}', 'func')
            if not op.exists(base_dir):
                os.makedirs(base_dir)
            # Re-expand fitted parameters to the full vertex set; unmasked
            # vertices stay NaN.
            parameters.columns = df.loc[:, mask].columns
            pars_df = pd.DataFrame(columns=df.columns)
            pars_df = pd.concat((pars_df, parameters))
            par_fn = op.join(
                base_dir,
                f'sub-{subject}_space-fsaverage6_desc-pars_hemi-{hemi}_cvrun-{run}.func.gii'
            )
            # Header is reused from the last PE file loaded in the loop
            # above (`pe` leaks from that loop).
            nb.gifti.GiftiImage(
                header=nb.load(pe.path).header,
                darrays=[
                    nb.gifti.GiftiDataArray(data=p.astype(float))
                    for _, p in pars_df.iterrows()
                ]).to_filename(par_fn)
            # Resample the parameter maps from fsaverage6 to fsaverage.
            transformer = SurfaceTransform(source_subject='fsaverage6',
                                           target_subject='fsaverage',
                                           hemi={'L': 'lh', 'R': 'rh'}[hemi])
            transformer.inputs.source_file = par_fn
            transformer.inputs.out_file = par_fn.replace(
                'fsaverage6', 'fsaverage')
            # Disable on MAC OS X (SIP problem)
            transformer.run()
            # NOTE(review): `predictions` comes from the training fit;
            # slicing its first len(test) rows as out-of-sample predictions
            # for the held-out run looks questionable — verify that
            # model.optimize returns predictions aligned with the test run.
            r2 = get_rsq(test.loc[:, mask].values,
                         predictions.values[:len(test), :]).to_frame('r2').T
            r2.columns = test.loc[:, mask].columns
            r2_df = pd.DataFrame(columns=test.columns)
            r2_df = pd.concat((r2_df, r2), axis=0)
            r2_fn = op.join(
                base_dir,
                f'sub-{subject}_space-fsaverage6_desc-r2_hemi-{hemi}_cvrun-{run}.func.gii'
            )
            nb.gifti.GiftiImage(
                header=nb.load(pe.path).header,
                darrays=[
                    nb.gifti.GiftiDataArray(data=r.astype(float))
                    for _, r in r2_df.iterrows()
                ]).to_filename(r2_fn)
            transformer.inputs.source_file = r2_fn
            transformer.inputs.out_file = r2_fn.replace(
                'fsaverage6', 'fsaverage')
            # Disable on MAC OS X (SIP problem)
            transformer.run()
def main(subject, sourcedata, trialwise, clip=(-100, 100)):
    """Fit a Gaussian receptive-field model to all runs' surface GLM
    parameter estimates at once (no cross-validation), writing parameter
    and r2 GIFTIs (fsaverage6) plus fsaverage-resampled copies.

    Parameters
    ----------
    subject : BIDS subject label.
    sourcedata : dataset root; derivatives are read from
        '<sourcedata>/derivatives'.
    trialwise : if truthy, read 'glm_stim1_trialwise_surf' (one PE per
        trial) and write to 'modelfit_trialwise_surf'; otherwise read
        'glm_stim1_surf' and write to 'modelfit_surf'.
    clip : (low, high) bounds applied to the loaded parameter estimates.
    """
    derivatives = op.join(sourcedata, 'derivatives')
    if trialwise:
        layout = BIDSLayout(op.join(derivatives, 'glm_stim1_trialwise_surf'),
                            validate=False)
    else:
        layout = BIDSLayout(op.join(derivatives, 'glm_stim1_surf'),
                            validate=False)
    for hemi in ['L', 'R']:
        # Hemisphere label is stored in the BIDS 'suffix' entity of the PE
        # files — presumably; confirm against the writer's naming scheme.
        pes = layout.get(subject=subject, suffix=hemi)
        print(pes)
        # Log-stimulus values, one entry per PE row: trialwise files carry
        # 6 repeats of each of the 6 levels per file; otherwise one PE per
        # level per file.
        if trialwise:
            paradigm = np.log(
                pd.Series(
                    np.repeat([5, 7, 10, 14, 20, 28], 6).tolist() * len(pes)))
        else:
            paradigm = np.log(pd.Series([5, 7, 10, 14, 20, 28] * len(pes)))
        df = []
        for pe in pes:
            # Clip PEs to [clip[0], clip[1]]; rows are stimuli, columns
            # vertices.
            d = pd.DataFrame(
                np.clip(surface.load_surf_data(pe.path).T, clip[0], clip[1]))
            df.append(d)
        df = pd.concat(df)
        # Only fit vertices without NaNs in any run.
        mask = ~df.isnull().any(0)
        # mask = mask & (np.random.rand(df.shape[1]) < 0.001)
        print('fitting {} time series'.format(mask.sum()))
        model = GaussianReceptiveFieldModel()
        costs, parameters, predictions = model.optimize(
            paradigm.values.ravel(), df.loc[:, mask].values)
        if trialwise:
            base_dir = op.join(derivatives, 'modelfit_trialwise_surf',
                               f'sub-{subject}', 'func')
        else:
            base_dir = op.join(derivatives, 'modelfit_surf',
                               f'sub-{subject}', 'func')
        if not op.exists(base_dir):
            os.makedirs(base_dir)
        # Re-expand fitted parameters to the full vertex set; unmasked
        # vertices stay NaN.
        parameters.columns = df.loc[:, mask].columns
        pars_df = pd.DataFrame(columns=df.columns)
        pars_df = pd.concat((pars_df, parameters))
        par_fn = op.join(
            base_dir,
            f'sub-{subject}_space-fsaverage6_desc-pars_hemi-{hemi}.func.gii')
        # Header is reused from the last PE file loaded above (`pe` leaks
        # from the loop).
        nb.gifti.GiftiImage(header=nb.load(pe.path).header,
                            darrays=[
                                nb.gifti.GiftiDataArray(data=p.astype(float))
                                for _, p in pars_df.iterrows()
                            ]).to_filename(par_fn)
        # Resample the parameter maps from fsaverage6 to fsaverage.
        transformer = SurfaceTransform(source_subject='fsaverage6',
                                       target_subject='fsaverage',
                                       hemi={'L': 'lh', 'R': 'rh'}[hemi])
        transformer.inputs.source_file = par_fn
        transformer.inputs.out_file = par_fn.replace('fsaverage6',
                                                     'fsaverage')
        # Disable on MAC OS X (SIP problem)
        transformer.run()
        # In-sample goodness of fit (r2) per vertex.
        r2 = get_rsq(df.loc[:, mask].values, predictions).to_frame('r2').T
        r2.columns = df.loc[:, mask].columns
        r2_df = pd.DataFrame(columns=df.columns)
        r2_df = pd.concat((r2_df, r2), axis=0)
        r2_fn = op.join(
            base_dir,
            f'sub-{subject}_space-fsaverage6_desc-r2_hemi-{hemi}.func.gii')
        nb.gifti.GiftiImage(header=nb.load(pe.path).header,
                            darrays=[
                                nb.gifti.GiftiDataArray(data=r.astype(float))
                                for _, r in r2_df.iterrows()
                            ]).to_filename(r2_fn)
        transformer.inputs.source_file = r2_fn
        transformer.inputs.out_file = r2_fn.replace('fsaverage6',
                                                    'fsaverage')
        # Disable on MAC OS X (SIP problem)
        transformer.run()