def model_fitting(source_img, prepped_img, subject_info, task):
    """Fit a first-level FSL/FEAT model for one task run.

    Parameters
    ----------
    source_img : BIDS image object
        Only its ``entities['RepetitionTime']`` value is read here.
    prepped_img : str
        Path to the preprocessed BOLD series used as model input.
    subject_info : Bunch
        Event/condition information consumed by ``SpecifyModel``.
    task : str
        Task label; names the output sub-directory and the contrast.

    Returns
    -------
    str
        Path to ``thresh_zstat1.nii.gz`` inside the resulting FEAT directory.

    Notes
    -----
    Relies on the module-level ``outputdir`` and the ``os``, ``fsl``,
    ``pe`` (nipype.pipeline.engine) and ``model`` imports.
    """
    taskdir = os.path.join(outputdir, task)
    # makedirs(exist_ok=True) avoids the exists()/mkdir() race of the
    # original and also creates missing parents.
    os.makedirs(taskdir, exist_ok=True)

    # Skull strip the preprocessed BOLD before modelling.
    bet = fsl.BET()
    bet.inputs.in_file = prepped_img
    bet.inputs.frac = 0.7
    bet.inputs.functional = True
    bet.inputs.out_file = os.path.join(
        taskdir, task + "_input_functional_bet.nii.gz")
    bet_res = bet.run()
    bettedinput = bet_res.outputs.out_file

    # Single T contrast: the task regressor against the implicit baseline.
    task_vs_baseline = [task + " vs baseline", 'T', [task], [1]]
    contrasts = [task_vs_baseline]

    # Model-fitting workflow:
    #   session info -> fsf file -> .mat design -> FEAT statistics
    modelfit = pe.Workflow(name='modelfit', base_dir=taskdir)
    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(), name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    # NOTE: the original passed iterfield= to this plain Node as well;
    # Node has no such option (it appears to be silently discarded), so
    # it is not passed here.
    feat = pe.Node(interface=fsl.FEAT(), name='feat')

    modelfit.connect([
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (level1design, feat, [('fsf_files', 'fsf_file')]),
    ])

    # Static inputs to the workflow nodes.
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.functional_runs = bettedinput
    modelspec.inputs.time_repetition = source_img.entities['RepetitionTime']
    modelspec.inputs.high_pass_filter_cutoff = 90
    modelspec.inputs.subject_info = subject_info

    level1design.inputs.interscan_interval = \
        source_img.entities['RepetitionTime']
    level1design.inputs.bases = {
        'gamma': {'gammasigma': 3, 'gammadelay': 6, 'derivs': True}}
    level1design.inputs.contrasts = contrasts
    level1design.inputs.model_serial_correlations = True

    # Run the model-fitting pipeline. Main outputs are a FEAT directory
    # (with the functional image) and a design.mat file.
    res = modelfit.run()

    # Look the FEAT node up by name instead of indexing res.nodes at a
    # fixed position (node ordering in the execution graph is not a
    # stable contract).
    feat_node = next(node for node in res.nodes if node.name == 'feat')
    feat_dir = feat_node.result.outputs.feat_dir
    thresh_img = os.path.join(feat_dir, "thresh_zstat1.nii.gz")
    return thresh_img
high_pass_filter_cutoff=100), name="modelspec") # first-level design level1design = Node(fsl.Level1Design(bases={'dgamma':{'derivs': True}}, interscan_interval=TR, model_serial_correlations=True, contrasts=contrast_list), name="level1design") # creating all the other files necessary to run the model modelgen = Node(fsl.FEATModel(), name='modelgen') # then running through FEAT feat = Node(fsl.FEAT(), name="feat") # creating datasink to collect outputs datasink = Node(DataSink(base_directory=outDir), name='datasink') ## Use the following DataSink output substitutions substitutions = [('_subject_id_', 'sub-'), ('_subsession_id_', '/ses-') ] datasink.inputs.substitutions = substitutions ########### #
high_pass_filter_cutoff=100), name="modelspec") # first-level design level1design = Node(fsl.Level1Design(bases={'dgamma': { 'derivs': True }}, interscan_interval=TR, model_serial_correlations=True), name="level1design") # creating all the other files necessary to run the model modelgen = Node(fsl.FEATModel(), name='modelgen') # then running through FEAT feat = Node(fsl.FEAT(), name="feat") # creating datasink to collect outputs datasink = Node(DataSink(base_directory=outDir), name='datasink') ## Use the following DataSink output substitutions substitutions = [('_subject_id_', '/sub-'), ('_run_id_', '/run-')] datasink.inputs.substitutions = substitutions ########### # # SETTING UP THE WORKFLOW NODES # ###########
), name='l1_spec') # l1_model creates a first-level model design l1_model = pe.Node(fsl.Level1Design( bases={'dgamma': {'derivs': True}}, model_serial_correlations=True, interscan_interval = tr, contrasts=contrasts # orthogonalization=orthogonality, ), name='l1_model') # feat_spec generates an fsf model specification file feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec') # feat_fit actually runs FEAT feat_fit = pe.Node(fsl.FEAT(), name='feat_fit', mem_gb=5) ## instead of FEAT #modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True, # mask_size=5, # threshold=1000), # name='modelestimate', # iterfield = ['design_file', # 'in_file', # 'tcon_file']) feat_select = pe.Node(nio.SelectFiles({ 'cope': 'stats/cope*.nii.gz', 'pe': 'stats/pe[0-9][0-9].nii.gz', 'tstat': 'stats/tstat*.nii.gz', 'varcope': 'stats/varcope*.nii.gz', 'zstat': 'stats/zstat*.nii.gz',
preproc.connect(inputnode, 'func', get_totalVoxels, 'in_file') # get the number of volumes in a scan get_nVols = pe.MapNode(interface=Function(input_names=['in_file'], output_names=['output'], function=Get_nVols), name='get_nVols', iterfield=['in_file']) preproc.connect(inputnode, 'func', get_nVols, 'in_file') # customize template melodic.fsf file for data of one participant prepare_design_fsf = pe.MapNode(interface=Function(input_names=['feat_files','initial_highres_files','highres_files','npts','total_voxels'], output_names=['out_file'], function=Prepare_Design_FSF), name='prepare_design_fsf', iterfield=['feat_files','initial_highres_files', 'npts','total_voxels']) preproc.connect(inputnode, 'func', prepare_design_fsf, 'feat_files') preproc.connect(n4biasCorrMbRef, 'output_image', prepare_design_fsf, 'initial_highres_files') preproc.connect(maskT2, 'out_file', prepare_design_fsf, 'highres_files') preproc.connect(get_totalVoxels, 'output', prepare_design_fsf, 'total_voxels') preproc.connect(get_nVols, 'output', prepare_design_fsf, 'npts') # define the feat process for preprocessing the data feat = pe.MapNode(interface=fsl.FEAT(), name='feat', iterfield=['fsf_file']) preproc.connect(prepare_design_fsf, 'out_file', feat, 'fsf_file') # # ----------------- denoise --------- # call the ica-fix feature extraction, mel_ica is the output directory of a melodic run extract_features = pe.MapNode(interface=fix.FeatureExtractor(), name='extract_features', iterfield=['mel_ica']) preproc.connect(feat, 'feat_dir', extract_features, 'mel_ica') # assemble inputs for training the ica-fix classifier
def first_level_wf(in_files, output_dir, fwhm=6.0, name='wf_1st_level'):
    """Build a first-level FSL/FEAT workflow over BIDS-derived inputs.

    Parameters
    ----------
    in_files : dict
        Subject id -> input-file mapping; unpacked by the module-level
        ``_dict_ds`` function into the ``DATA_ITEMS`` outputs
        (``bold``, ``mask``, ``events``, ``regressors``, ``tr`` are the
        fields used by the connections below).
    output_dir
        Base directory handed to the DerivativesDataSink nodes.
    fwhm : float, optional
        SUSAN smoothing kernel width (mm).
    name : str, optional
        Name of the returned workflow.

    Returns
    -------
    pe.Workflow
        Wired (not run) workflow: datasource -> SUSAN smoothing ->
        SpecifyModel -> Level1Design -> FEATModel -> FEAT -> datasinks.
    """
    workflow = pe.Workflow(name=name)

    # One datasource iteration per subject key in in_files.
    datasource = pe.Node(niu.Function(function=_dict_ds, output_names=DATA_ITEMS),
                         name='datasource')
    datasource.inputs.in_dict = in_files
    datasource.iterables = ('sub', sorted(in_files.keys()))

    # Extract motion parameters from regressors file
    runinfo = pe.Node(niu.Function(input_names=[
        'in_file', 'events_file', 'regressors_file', 'regressors_names'],
        function=_bids2nipypeinfo, output_names=['info', 'realign_file']),
        name='runinfo')

    # Set the column names to be used from the confounds file
    # (assumes 6 aCompCor components and 4 cosine drift columns exist in
    # the confounds table -- TODO confirm against the preprocessing output)
    runinfo.inputs.regressors_names = ['dvars', 'framewise_displacement'] + \
        ['a_comp_cor_%02d' % i for i in range(6)] + ['cosine%02d' % i for i in range(4)]

    # SUSAN smoothing
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = fwhm

    # l1_spec turns events + confounds into the GLM session_info.
    l1_spec = pe.Node(SpecifyModel(
        parameter_source='FSL',
        input_units='secs',
        high_pass_filter_cutoff=100), name='l1_spec')

    # l1_model creates a first-level model design
    l1_model = pe.Node(
        fsl.Level1Design(
            bases={'dgamma': {'derivs': True}},
            model_serial_correlations=True,
            #ENTER YOUR OWN CONTRAST HERE
            contrasts=[],
            # orthogonalization=orthogonality,
        ), name='l1_model')

    # feat_spec generates an fsf model specification file
    feat_spec = pe.Node(fsl.FEATModel(), name='feat_spec')

    # feat_fit actually runs FEAT
    feat_fit = pe.Node(fsl.FEAT(), name='feat_fit', mem_gb=12)

    # Select the first-contrast statistic maps from the FEAT directory.
    # NOTE(review): with contrasts=[] above, cope1/tstat1/... may not be
    # produced until contrasts are filled in -- confirm before running.
    feat_select = pe.Node(nio.SelectFiles({
        'cope': 'stats/cope1.nii.gz',
        'pe': 'stats/pe[0-9][0-9].nii.gz',
        'tstat': 'stats/tstat1.nii.gz',
        'varcope': 'stats/varcope1.nii.gz',
        'zstat': 'stats/zstat1.nii.gz',
    }), name='feat_select')

    # One datasink per statistic image, all written under output_dir.
    ds_cope = pe.Node(DerivativesDataSink(
        base_directory=str(output_dir), keep_dtype=False, suffix='cope',
        desc='intask'), name='ds_cope', run_without_submitting=True)

    ds_varcope = pe.Node(DerivativesDataSink(
        base_directory=str(output_dir), keep_dtype=False, suffix='varcope',
        desc='intask'), name='ds_varcope', run_without_submitting=True)

    ds_zstat = pe.Node(DerivativesDataSink(
        base_directory=str(output_dir), keep_dtype=False, suffix='zstat',
        desc='intask'), name='ds_zstat', run_without_submitting=True)

    ds_tstat = pe.Node(DerivativesDataSink(
        base_directory=str(output_dir), keep_dtype=False, suffix='tstat',
        desc='intask'), name='ds_tstat', run_without_submitting=True)

    workflow.connect([
        (datasource, susan, [('bold', 'inputnode.in_files'),
                             ('mask', 'inputnode.mask_file')]),
        (datasource, runinfo, [('events', 'events_file'),
                               ('regressors', 'regressors_file')]),
        (susan, l1_spec, [('outputnode.smoothed_files', 'functional_runs')]),
        (datasource, l1_spec, [('tr', 'time_repetition')]),
        (datasource, l1_model, [('tr', 'interscan_interval')]),
        (datasource, ds_cope, [('bold', 'source_file')]),
        (datasource, ds_varcope, [('bold', 'source_file')]),
        (datasource, ds_zstat, [('bold', 'source_file')]),
        (datasource, ds_tstat, [('bold', 'source_file')]),
        (susan, runinfo, [('outputnode.smoothed_files', 'in_file')]),
        (runinfo, l1_spec, [('info', 'subject_info'),
                            ('realign_file', 'realignment_parameters')]),
        (l1_spec, l1_model, [('session_info', 'session_info')]),
        (l1_model, feat_spec, [('fsf_files', 'fsf_file'),
                               ('ev_files', 'ev_files')]),
        (l1_model, feat_fit, [('fsf_files', 'fsf_file')]),
        (feat_fit, feat_select, [('feat_dir', 'base_directory')]),
        (feat_select, ds_cope, [('cope', 'in_file')]),
        (feat_select, ds_varcope, [('varcope', 'in_file')]),
        (feat_select, ds_zstat, [('zstat', 'in_file')]),
        (feat_select, ds_tstat, [('tstat', 'in_file')]),
    ])
    return workflow
def first_level_wf(pipeline, subject_id, task_id, output_dir):
    """
    First level workflow

    Builds (does not run) a nipype workflow that masks and smooths the
    preprocessed BOLD, fits an FSL first-level GLM via FEAT, estimates
    image smoothness before and after the blur with both FSL and AFNI,
    writes the smoothness values to ``smoothness.csv`` under
    ``output_dir``, and collects FEAT stats through ``_feat_stats``.

    Parameters
    ----------
    pipeline : str
        Preprocessing-variant label; recorded in the CSV and datasinks.
    subject_id, task_id : str
        Identifiers used for node/workflow naming and output routing.
    output_dir : pathlib.Path
        Output root (``/`` operator is used on it below, so a Path is
        expected).

    Returns
    -------
    pe.Workflow
    """
    workflow = pe.Workflow(name='_'.join((pipeline, subject_id, task_id)))

    # External inputs/outputs of the workflow.
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'bold_preproc', 'contrasts', 'confounds', 'brainmask', 'events_file'
    ]), name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['sigma_pre', 'sigma_post', 'out_stats']),
        name='outputnode')

    # Convert a confounds table into FSL-style motion-parameter files.
    conf2movpar = pe.Node(niu.Function(function=_confounds2movpar),
                          name='conf2movpar')

    # Mask the BOLD, then smooth within the mask (AFNI BlurInMask, 5 mm).
    masker = pe.Node(fsl.ApplyMask(), name='masker')
    bim = pe.Node(afni.BlurInMask(fwhm=5.0, outputtype='NIFTI_GZ'),
                  name='bim', mem_gb=20)

    # Task-specific event files (project-local interface).
    ev = pe.Node(EventsFilesForTask(task=task_id), name='events')

    # GLM specification; TR hard-coded to 2 s here and in l1model below.
    l1 = pe.Node(SpecifyModel(
        input_units='secs',
        time_repetition=2,
        high_pass_filter_cutoff=100,
        parameter_source='FSL',
    ), name='l1')

    l1model = pe.Node(fsl.Level1Design(interscan_interval=2,
                                       bases={'dgamma': {'derivs': True}},
                                       model_serial_correlations=True),
                      name='l1design')
    l1featmodel = pe.Node(fsl.FEATModel(), name='l1model')
    l1estimate = pe.Node(fsl.FEAT(), name='l1estimate', mem_gb=40)

    # Smoothness estimation with AFNI's classic FWHM, before and after
    # the in-mask blur.
    pre_smooth_afni = pe.Node(afni.FWHMx(combine=True, detrend=True,
                                         args='-ShowMeClassicFWHM'),
                              name='smooth_pre_afni', mem_gb=20)
    post_smooth_afni = pe.Node(afni.FWHMx(combine=True, detrend=True,
                                          args='-ShowMeClassicFWHM'),
                               name='smooth_post_afni', mem_gb=20)

    # Same estimates with FSL smoothest.
    pre_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_pre', mem_gb=20)
    post_smooth = pe.Node(fsl.SmoothEstimate(), name='smooth_post', mem_gb=20)

    def _resels(val):
        # Cube root: convert a RESEL volume into a per-axis size.
        return val**(1 / 3.)

    def _fwhm(fwhm):
        # Average the per-axis FWHM triple into a single float.
        from numpy import mean
        return float(mean(fwhm, dtype=float))

    workflow.connect([
        (inputnode, masker, [('bold_preproc', 'in_file'),
                             ('brainmask', 'mask_file')]),
        (inputnode, ev, [('events_file', 'in_file')]),
        (inputnode, l1model, [('contrasts', 'contrasts')]),
        (inputnode, conf2movpar, [('confounds', 'in_confounds')]),
        (inputnode, bim, [('brainmask', 'mask')]),
        (masker, bim, [('out_file', 'in_file')]),
        (bim, l1, [('out_file', 'functional_runs')]),
        (ev, l1, [('event_files', 'event_files')]),
        (conf2movpar, l1, [('out', 'realignment_parameters')]),
        (l1, l1model, [('session_info', 'session_info')]),
        (ev, l1model, [('orthogonalization', 'orthogonalization')]),
        (l1model, l1featmodel, [('fsf_files', 'fsf_file'),
                                ('ev_files', 'ev_files')]),
        (l1model, l1estimate, [('fsf_files', 'fsf_file')]),
        # Smooth
        (inputnode, pre_smooth, [('bold_preproc', 'zstat_file'),
                                 ('brainmask', 'mask_file')]),
        (bim, post_smooth, [('out_file', 'zstat_file')]),
        (inputnode, post_smooth, [('brainmask', 'mask_file')]),
        (pre_smooth, outputnode, [(('resels', _resels), 'sigma_pre')]),
        (post_smooth, outputnode, [(('resels', _resels), 'sigma_post')]),
        # Smooth with AFNI
        (inputnode, pre_smooth_afni, [('bold_preproc', 'in_file'),
                                      ('brainmask', 'mask')]),
        (bim, post_smooth_afni, [('out_file', 'in_file')]),
        (inputnode, post_smooth_afni, [('brainmask', 'mask')]),
    ])

    # Writing outputs: one CSV row of smoothness values per run.
    csv = pe.Node(AddCSVRow(in_file=str(output_dir / 'smoothness.csv')),
                  name='addcsv_%s_%s' % (subject_id, pipeline))
    csv.inputs.sub_id = subject_id
    csv.inputs.pipeline = pipeline

    # Datasinks
    ds_stats = pe.Node(niu.Function(function=_feat_stats), name='ds_stats')
    ds_stats.inputs.subject_id = subject_id
    ds_stats.inputs.task_id = task_id
    ds_stats.inputs.variant = pipeline
    ds_stats.inputs.out_path = output_dir
    # Force this node to re-run on every execution (bypass caching).
    setattr(ds_stats.interface, '_always_run', True)

    workflow.connect([
        (outputnode, csv, [('sigma_pre', 'smooth_pre'),
                           ('sigma_post', 'smooth_post')]),
        (pre_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_pre')]),
        (post_smooth_afni, csv, [(('fwhm', _fwhm), 'fwhm_post')]),
        (l1estimate, ds_stats, [('feat_dir', 'feat_dir')]),
        (ds_stats, outputnode, [('out', 'out_stats')]),
    ])
    return workflow
def feat_o(project_name, file_name):
    """Run FSL FEAT using the design.fsf stored under the given
    project/file directory in the fsluser home folder."""
    design_path = (f'/home/fsluser/Documents/{project_name}'
                   f'/{file_name}/design.fsf')
    runner = fsl.FEAT()
    runner.inputs.fsf_file = design_path
    runner.run()