def create_modelfit_workflow(name='modelfit'):
    """Build the FSL first-level model-fitting workflow.

    Example
    -------
    >>> modelfit = create_modelfit_workflow()
    >>> modelfit.base_dir = '.'
    >>> info = dict()
    >>> modelfit.inputs.inputspec.session_info = info
    >>> modelfit.inputs.inputspec.interscan_interval = 3.
    >>> modelfit.inputs.inputspec.film_threshold = 1000
    >>> modelfit.run() #doctest: +SKIP

    Inputs::

        inputspec.session_info : info generated by modelgen.SpecifyModel
        inputspec.interscan_interval : interscan interval
        inputspec.contrasts : list of contrasts
        inputspec.film_threshold : image threshold for FILM estimation

    Outputs::

        outputspec.copes : contrast estimates
        outputspec.varcopes : variance of the contrast estimates
        outputspec.dof_file : degrees-of-freedom files
        outputspec.pfiles : p-value images derived from the z-stats
        outputspec.parameter_estimates : GLM parameter-estimate images
    """
    wf = pe.Workflow(name=name)

    # Single entry point collecting every externally supplied input.
    inputspec = pe.Node(
        util.IdentityInterface(fields=['session_info',
                                       'interscan_interval',
                                       'contrasts',
                                       'film_threshold',
                                       'functional_data',
                                       'bases',
                                       'model_serial_correlations']),
        name='inputspec')

    # Generate the FSF design specification.
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")

    # Turn each FSF file into the .mat/.con files FILM needs.
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          iterfield=['fsf_file', 'ev_files'],
                          name='modelgen')

    # Fit the GLM run by run with prewhitening.
    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        iterfield=['design_file', 'in_file'],
        name='modelestimate')

    # Compute contrast estimates from the per-run fits.
    conestimate = pe.MapNode(
        interface=fsl.ContrastMgr(),
        iterfield=['tcon_file', 'param_estimates', 'sigmasquareds',
                   'corrections', 'dof_file'],
        name='conestimate')

    # Convert z-stat images into p-value images.
    ztopval = pe.MapNode(
        interface=fsl.ImageMaths(op_string='-ztop', suffix='_pval'),
        iterfield=['in_file'],
        name='ztop')

    outputspec = pe.Node(
        util.IdentityInterface(fields=['copes', 'varcopes', 'dof_file',
                                       'pfiles', 'parameter_estimates']),
        name='outputspec')

    def _first(items):
        # Take only the first element of the (nested) zstats list.
        return items[0]

    wf.connect([
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'),
          ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (modelgen, conestimate, [('con_file', 'tcon_file')]),
        (modelestimate, conestimate,
         [('param_estimates', 'param_estimates'),
          ('sigmasquareds', 'sigmasquareds'),
          ('corrections', 'corrections'),
          ('dof_file', 'dof_file')]),
        (conestimate, ztopval, [(('zstats', _first), 'in_file')]),
        (ztopval, outputspec, [('out_file', 'pfiles')]),
        (modelestimate, outputspec,
         [('param_estimates', 'parameter_estimates'),
          ('dof_file', 'dof_file')]),
        (conestimate, outputspec, [('copes', 'copes'),
                                   ('varcopes', 'varcopes')]),
    ])
    return wf
def modelfit_fsl(wf_name='modelfit'):
    """Fit a first-level GLM using FSL routines.

    The workflow smooths (SUSAN) and high-pass filters the input runs,
    builds the design with SpecifyModel / Level1Design / FEATModel,
    estimates it with FILMGLS and computes contrasts with ContrastMgr.

    Usage example (TODO: turn into a doctest)::

        modelfit.inputs.inputspec.fwhm = 12
        modelfit.inputs.inputspec.brain_mask = ['.../example_func_brain_mask.nii.gz',
                                                '.../example_func_brain_mask.nii.gz']
        modelfit.inputs.inputspec.input_units = 'secs'
        modelfit.inputs.inputspec.in_file = ['.../mc_data_brain.nii.gz',
                                             '.../mc_data_brain.nii.gz']
        modelfit.inputs.inputspec.TR = 2
        modelfit.inputs.inputspec.high_pass_filter_cutoff = 100  # sigma in TR
        modelfit.inputs.inputspec.event_files = ['.../a']
        cont1 = ['whisker', 'T', ['a', 'a'], [1.0, 0.0]]
        modelfit.inputs.inputspec.contrasts = [cont1]  # TODO: change condition names
        modelfit.inputs.inputspec.bases_function = {'dgamma': {'derivs': True}}
        modelfit.inputs.inputspec.model_serial_correlations = True
        x = modelfit.run()
    """
    modelfit = pe.Workflow(name=wf_name)

    # All inputs required by the workflow.
    # TODO: eliminate brain_mask
    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=[
            'in_file', 'ev_file', 'confounders', 'contrasts',
            'high_pass_filter_cutoff', 'fwhm', 'interscan_interval', 'TR',
            'input_units', 'bases_function', 'model_serial_correlations',
            'brain_mask'
        ],
        mandatory_inputs=True),
        name='inputspec')

    # Outputs exposed to downstream workflows.
    outputnode = pe.Node(interface=util.IdentityInterface(
        fields=['zstats', 'zfstats', 'copes', 'varcopes'],
        mandatory_inputs=True),
        name='outputspec')

    # collect subject info (one call per confounders file)
    getsubjectinfo = pe.MapNode(util.Function(
        input_names=['ev_file', 'confounders'],
        output_names=['subject_info'],
        function=get_subject_info),
        name='getsubjectinfo',
        iterfield=['confounders'])

    # nipype.algorithms.modelgen.SpecifyModel to generate design information.
    modelspec = pe.MapNode(interface=model.SpecifyModel(),
                           name="modelspec",
                           iterfield=['subject_info'])

    # smooth
    # TODO: move into preproc pipeline
    smooth = preproc.create_susan_smooth("smooth")

    # Convert the high-pass cutoff (given in TRs) into an fslmaths op-string.
    toSigma = pe.Node(interface=util.Function(
        input_names=['high_pass_filter_cutoff', 'TR'],
        output_names=['high_pass_filter_opstring'],
        function=highpass_operand),
        name='toSigma')
    highpass = pe.MapNode(interface=fsl.ImageMaths(suffix='_tempfilt',
                                                   op_string=''),
                          iterfield=['in_file'],
                          name='highpass')

    # Use fsl.Level1Design to generate a run-specific fsf file for analysis.
    # FIX: iterfield is now a list, consistent with every other MapNode here
    # (a bare string relied on nipype's implicit list coercion).
    level1design = pe.MapNode(interface=fsl.Level1Design(),
                              name="level1design",
                              iterfield=['session_info'])

    # Use fsl.FEATModel to generate a run-specific mat file for FILMGLS.
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])

    # Use fsl.FILMGLS to estimate a model specified by a mat file and a run.
    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True,
                              mask_size=5,
                              threshold=200),
        name='modelestimate',
        iterfield=['in_file', 'design_file'])

    # Use fsl.ContrastMgr to generate contrast estimates.
    conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                             name='conestimate',
                             iterfield=[
                                 'param_estimates', 'sigmasquareds',
                                 'corrections', 'dof_file', 'tcon_file'
                             ])

    modelfit.connect([
        (inputnode, smooth, [
            ('in_file', 'inputnode.in_files'),
            ('fwhm', 'inputnode.fwhm'),  # in iterable
            ('brain_mask', 'inputnode.mask_file')
        ]),
        (smooth, highpass, [('outputnode.smoothed_files', 'in_file')]),
        (inputnode, toSigma, [('high_pass_filter_cutoff',
                               'high_pass_filter_cutoff')]),
        (inputnode, toSigma, [('TR', 'TR')]),
        (toSigma, highpass, [('high_pass_filter_opstring', 'op_string')]),
        (inputnode, getsubjectinfo, [('ev_file', 'ev_file'),
                                     ('confounders', 'confounders')]),
        (getsubjectinfo, modelspec, [('subject_info', 'subject_info')]),
        (highpass, modelspec, [('out_file', 'functional_runs')]),
        (highpass, modelestimate, [('out_file', 'in_file')]),
        (inputnode, modelspec, [
            ('input_units', 'input_units'),
            ('TR', 'time_repetition'),
            ('high_pass_filter_cutoff', 'high_pass_filter_cutoff'),
        ]),
        (inputnode, level1design,
         [('TR', 'interscan_interval'),
          ('model_serial_correlations', 'model_serial_correlations'),
          ('bases_function', 'bases'),
          ('contrasts', 'contrasts')]),
        (modelspec, level1design, [('session_info', 'session_info')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (modelgen, conestimate, [('con_file', 'tcon_file')]),
        (modelestimate, conestimate,
         [('param_estimates', 'param_estimates'),
          ('sigmasquareds', 'sigmasquareds'),
          ('corrections', 'corrections'),
          ('dof_file', 'dof_file')]),
        (conestimate, outputnode, [('zstats', 'zstats'),
                                   ('zfstats', 'zfstats'),
                                   ('copes', 'copes'),
                                   ('varcopes', 'varcopes')])
    ])
    return modelfit
def create_timeseries_model_workflow(name="model", exp_info=None):
    """Build the lyman timeseries-model workflow.

    Returns ``(model, inputnode, outputnode)``: the workflow itself plus its
    input and output IdentityInterface nodes.
    """
    # Default experiment parameters for generating graph image, testing, etc.
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define constant inputs
    inputs = ["realign_file", "nuisance_file", "artifact_file", "timeseries"]

    # Possibly add the design and regressor files to the inputs
    if exp_info["design_name"] is not None:
        inputs.append("design_file")
    if exp_info["regressor_file"] is not None:
        inputs.append("regressor_file")

    # Define the workflow inputs
    inputnode = Node(IdentityInterface(inputs), "inputs")

    # Set up the experimental design
    modelsetup = MapNode(
        ModelSetup(exp_info=exp_info),
        ["timeseries", "realign_file", "nuisance_file", "artifact_file"],
        "modelsetup")

    # For some nodes, make it possible to request extra memory
    mem_request = {"qsub_args": "-l h_vmem=%dG" % exp_info["memory_request"]}

    # Use film_gls to estimate the timeseries model
    modelestimate = MapNode(
        fsl.FILMGLS(smooth_autocorr=True, mask_size=5, threshold=100),
        ["design_file", "in_file", "tcon_file"],
        "modelestimate")
    modelestimate.plugin_args = mem_request

    # Compute summary statistics about the model fit
    modelsummary = MapNode(ModelSummary(),
                           ["design_matrix_pkl", "timeseries", "pe_files"],
                           "modelsummary")
    modelsummary.plugin_args = mem_request

    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info), "in_file",
                         "saveparams")

    # Report on the results of the model
    # Note: see below for a conditional iterfield
    modelreport = MapNode(
        ModelReport(),
        ["timeseries", "sigmasquareds_file", "tsnr_file", "r2_files"],
        "modelreport")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface([
            "results", "copes", "varcopes", "zstats", "r2_files", "ss_files",
            "tsnr_file", "report", "design_mat", "contrast_mat", "design_pkl",
            "design_report", "json_file"
        ]), "outputs")

    # Define the workflow and connect the nodes
    model = Workflow(name=name)
    model.connect([
        (inputnode, modelsetup, [("realign_file", "realign_file"),
                                 ("nuisance_file", "nuisance_file"),
                                 ("artifact_file", "artifact_file"),
                                 ("timeseries", "timeseries")]),
        (inputnode, modelestimate, [("timeseries", "in_file")]),
        (inputnode, saveparams, [("timeseries", "in_file")]),
        (modelsetup, modelestimate, [("design_matrix_file", "design_file"),
                                     ("contrast_file", "tcon_file")]),
        (modelsetup, modelsummary, [("design_matrix_pkl", "design_matrix_pkl")
                                    ]),
        (inputnode, modelsummary, [("timeseries", "timeseries")]),
        (modelestimate, modelsummary, [("param_estimates", "pe_files")]),
        (inputnode, modelreport, [("timeseries", "timeseries")]),
        (modelestimate, modelreport, [("sigmasquareds", "sigmasquareds_file")
                                      ]),
        (modelsummary, modelreport, [("r2_files", "r2_files"),
                                     ("tsnr_file", "tsnr_file")]),
        (modelsetup, outputnode, [("design_matrix_file", "design_mat"),
                                  ("contrast_file", "contrast_mat"),
                                  ("design_matrix_pkl", "design_pkl"),
                                  ("report", "design_report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
        (modelestimate, outputnode, [("results_dir", "results"),
                                     ("copes", "copes"),
                                     ("varcopes", "varcopes"),
                                     ("zstats", "zstats")]),
        (modelsummary, outputnode, [("r2_files", "r2_files"),
                                    ("ss_files", "ss_files"),
                                    ("tsnr_file", "tsnr_file")]),
        (modelreport, outputnode, [("out_files", "report")]),
    ])

    # Connect the optional design/regressor inputs only when configured.
    if exp_info["design_name"] is not None:
        model.connect(inputnode, "design_file", modelsetup, "design_file")
    if exp_info["regressor_file"] is not None:
        model.connect(inputnode, "regressor_file", modelsetup,
                      "regressor_file")
    if exp_info["contrasts"]:
        model.connect(modelestimate, "zstats", modelreport, "zstat_files")
        # Extend the iterfield in place so zstats map per run as well.
        modelreport.iterfield.append("zstat_files")

    return model, inputnode, outputnode
# Per-run first-level model setup: one subject_info / session_info per run.
modelspec = pe.MapNode(interface=model.SpecifyModel(),
                       name="modelspec",
                       iterfield=['subject_info'])
level1design = pe.MapNode(interface=fsl.Level1Design(),
                          name="level1design",
                          iterfield=['session_info'])
modelgen = pe.MapNode(interface=fsl.FEATModel(),
                      name='modelgen',
                      iterfield=["fsf_file", "ev_files"])
trim = pe.MapNode(interface=Trim(), name="trim", iterfield=['in_file'])
applymask = pe.MapNode(interface=fsl.ApplyMask(),
                       name="applymask",
                       iterfield=["in_file", "mask_file"])
modelestimate = pe.MapNode(interface=fsl.FILMGLS(),
                           name='modelestimate',
                           iterfield=['design_file', 'in_file', 'tcon_file'])

# combine copes, varcopes, and masks across multiple sessions
copemerge = pe.MapNode(interface=fsl.Merge(dimension='t'),
                       iterfield=['in_files'],
                       name="copemerge")
varcopemerge = pe.MapNode(interface=fsl.Merge(dimension='t'),
                          iterfield=['in_files'],
                          name="varcopemerge")
maskemerge = pe.MapNode(interface=fsl.Merge(dimension='t'),
                        iterfield=['in_files'],
                        name="maskemerge")
# set up and estimate fixed-effects cross-session analysis
# Mean across time (fslmaths -Tmean); used to restore the mean signal that
# temporal filtering removes.
Get_Mean_Image = Node(fsl.MeanImage(), name='Get_Mean_Image')
Get_Mean_Image.inputs.dimension = 'T'

# Add the mean image to the filtered image
Add_Mean_Image = Node(fsl.BinaryMaths(), name='Add_Mean_Image')
Add_Mean_Image.inputs.operation = 'add'

# -----------------------------------------------------------------------------------------------------
# In[15]:
# Fit the design to the voxels time-series

# NOTE(review): hard-coded absolute design paths — consider parameterizing.
design = '/media/amr/HDD/Work/Stimulation/1st_Level_Designs/10s_Stimulation_design.mat'
t_contrast = '/media/amr/HDD/Work/Stimulation/1st_Level_Designs/10s_Stimulation_design.con'
f_contrast = '/media/amr/HDD/Work/Stimulation/1st_Level_Designs/10s_Stimulation_design.fts'

Film_Gls = Node(fsl.FILMGLS(), name='Fit_Design_to_Timeseries')
Film_Gls.inputs.design_file = design
Film_Gls.inputs.tcon_file = t_contrast
Film_Gls.inputs.fcon_file = f_contrast
Film_Gls.inputs.threshold = 1000.0
Film_Gls.inputs.smooth_autocorr = True

# -----------------------------------------------------------------------------------------------------
# In[15]:
# Estimate smoothness of the image

Smooth_Est = Node(fsl.SmoothEstimate(), name='Smooth_Estimation')
Smooth_Est.inputs.dof = 148  # 150 volumes and only one regressor

# -----------------------------------------------------------------------------------------------------
# In[15]:
# Clustering on the statistical output of t-contrasts
# Generate the FSF design specification for the whole model.
level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
""" Use :class:`nipype.interfaces.fsl.FEATModel` to generate a run specific mat file for use by FILMGLS """
modelgen = pe.MapNode(interface=fsl.FEATModel(),
                      name='modelgen',
                      iterfield=['fsf_file', 'ev_files'])
""" Use :class:`nipype.interfaces.fsl.FILMGLS` to estimate a model specified by a mat file and a functional run """
modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                 mask_size=5,
                                                 threshold=1000),
                           name='modelestimate',
                           iterfield=['design_file', 'in_file'])
""" Use :class:`nipype.interfaces.fsl.ContrastMgr` to generate contrast estimates """
conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                         name='conestimate',
                         iterfield=[
                             'tcon_file', 'param_estimates', 'sigmasquareds',
                             'corrections', 'dof_file'
                         ])
# NOTE(review): this chunk ends mid-statement; the connection list for
# modelfit.connect continues beyond the visible source.
modelfit.connect([
def init_glm_wf(conditions, contrasts, repetition_time, use_mov_pars,
                name="glm"):
    """
    create workflow to calculate a first level glm for task functional data

    :param conditions: dictionary of conditions with onsets and durations
        by condition names
    :param contrasts: dictionary of contrasts by names
    :param repetition_time: repetition time
    :param use_mov_pars: if true, regress out movement parameters when
        calculating the glm
    :param name: workflow name (Default value = "glm")
    """
    workflow = pe.Workflow(name=name)

    # inputs are the bold file, the mask file and the confounds file
    # that contains the movement parameters
    inputnode = pe.Node(niu.IdentityInterface(
        fields=["bold_file", "mask_file", "confounds_file"]),
        name="inputnode")

    # transform (unordered) conditions dictionary into three (ordered) lists
    names = list(conditions.keys())
    onsets = [conditions[k]["onsets"] for k in names]
    durations = [conditions[k]["durations"] for k in names]

    # first level model specification
    modelspec = pe.Node(interface=model.SpecifyModel(
        input_units="secs",
        high_pass_filter_cutoff=128.,
        time_repetition=repetition_time,
        subject_info=Bunch(conditions=names,
                           onsets=onsets,
                           durations=durations)),
        name="modelspec")

    # transform contrasts dictionary to nipype list data structure
    contrasts_ = [[k, "T"] +
                  [list(i) for i in zip(*[(n, val) for n, val in v.items()])]
                  for k, v in contrasts.items()]
    connames = [k[0] for k in contrasts_]

    # outputs are cope, varcope and zstat for each contrast and a dof_file
    outputnode = pe.Node(niu.IdentityInterface(fields=sum(
        [["%s_img" % conname, "%s_varcope" % conname, "%s_zstat" % conname]
         for conname in connames], []) + ["dof_file"]),
        name="outputnode")
    outputnode._interface.names = connames

    # generate design from first level specification
    level1design = pe.Node(interface=fsl.Level1Design(
        contrasts=contrasts_,
        interscan_interval=repetition_time,
        model_serial_correlations=True,
        bases={"dgamma": {
            "derivs": False
        }}),
        name="level1design")

    # generate required input files for FILMGLS from design
    # NOTE(review): iterfield is a MapNode argument; passing it to pe.Node
    # looks unintended — confirm whether MapNode was meant here.
    modelgen = pe.Node(interface=fsl.FEATModel(),
                       name="modelgen",
                       iterfield=["fsf_file", "ev_files"])

    # calculate range of image values to determine cutoff value
    # for FILMGLS
    stats = pe.Node(interface=fsl.ImageStats(op_string="-R"), name="stats")

    # actually estimate the first level model
    # NOTE(review): same iterfield-on-Node concern as modelgen above.
    modelestimate = pe.Node(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                  mask_size=5),
                            name="modelestimate",
                            iterfield=["design_file", "in_file", "tcon_file"])

    # mask regression outputs
    maskimgs = pe.MapNode(interface=fsl.ApplyMask(),
                          name="maskimgs",
                          iterfield=["in_file"])
    maskvarcopes = pe.MapNode(interface=fsl.ApplyMask(),
                              name="maskvarcopes",
                              iterfield=["in_file"])
    maskzstats = pe.MapNode(interface=fsl.ApplyMask(),
                            name="maskzstats",
                            iterfield=["in_file"])

    # split regression outputs by name
    splitimgs = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitimgs")
    splitvarcopes = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitvarcopes")
    splitzstats = pe.Node(
        interface=niu.Split(splits=[1 for conname in connames]),
        name="splitzstats")

    # pass movement parameters to glm model specification if requested
    c = [("bold_file", "functional_runs")]
    if use_mov_pars:
        c.append(("confounds_file", "realignment_parameters"))

    workflow.connect([
        (inputnode, modelspec, c),
        (inputnode, modelestimate, [("bold_file", "in_file")]),
        (modelspec, level1design, [("session_info", "session_info")]),
        (level1design, modelgen, [("fsf_files", "fsf_file"),
                                  ("ev_files", "ev_files")]),
        (inputnode, stats, [("bold_file", "in_file")]),
        (stats, modelestimate, [(("out_stat", get_float), "threshold")]),
        (modelgen, modelestimate, [("design_file", "design_file"),
                                   ("con_file", "tcon_file")]),
        (inputnode, maskimgs, [("mask_file", "mask_file")]),
        (inputnode, maskvarcopes, [("mask_file", "mask_file")]),
        (inputnode, maskzstats, [("mask_file", "mask_file")]),
        (modelestimate, maskimgs, [
            (("copes", flatten), "in_file"),
        ]),
        (modelestimate, maskvarcopes, [
            (("varcopes", flatten), "in_file"),
        ]),
        (modelestimate, maskzstats, [
            (("zstats", flatten), "in_file"),
        ]),
        (modelestimate, outputnode, [("dof_file", "dof_file")]),
        (maskimgs, splitimgs, [
            ("out_file", "inlist"),
        ]),
        (maskvarcopes, splitvarcopes, [
            ("out_file", "inlist"),
        ]),
        (maskzstats, splitzstats, [
            ("out_file", "inlist"),
        ]),
    ])

    # connect outputs named for the contrasts
    for i, conname in enumerate(connames):
        workflow.connect(splitimgs, "out%i" % (i + 1), outputnode,
                         "%s_img" % conname)
        workflow.connect(splitvarcopes, "out%i" % (i + 1), outputnode,
                         "%s_varcope" % conname)
        workflow.connect(splitzstats, "out%i" % (i + 1), outputnode,
                         "%s_zstat" % conname)

    return workflow, connames
def create_nuisance_modelfit_workflow(name='modelfit', f_contrasts=False):
    """
    Create an FSL modelfitting workflow that returns also residual4d and
    sigmasquareds.

    Example
    -------
    # >>> modelfit = create_modelfit_workflow()
    # >>> modelfit.base_dir = '.'
    # >>> info = dict()
    # >>> modelfit.inputs.inputspec.session_info = info
    # >>> modelfit.inputs.inputspec.interscan_interval = 3.
    # >>> modelfit.inputs.inputspec.film_threshold = 1000
    # >>> modelfit.run() #doctest: +SKIP

    Inputs::

        inputspec.session_info : info generated by modelgen.SpecifyModel
        inputspec.interscan_interval : interscan interval
        inputspec.contrasts : list of contrasts
        inputspec.film_threshold : image threshold for FILM estimation
        inputspec.model_serial_correlations
        inputspec.bases

    Outputs::

        outputspec.copes
        outputspec.varcopes
        outputspec.dof_file
        outputspec.pfiles
        outputspec.zfiles
        outputspec.parameter_estimates
        outputspec.residual4d
        outputspec.sigmasquareds
    """
    # FSL >= 5.0.7 lets FILMGLS compute contrasts itself; older releases
    # need a separate ContrastMgr node.
    version = 0
    if fsl.Info.version() and \
            LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'):
        version = 507

    modelfit = pe.Workflow(name=name)
    """
    Create the nodes
    """
    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]),
        name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    if version < 507:
        modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                         mask_size=5),
                                   name='modelestimate',
                                   iterfield=['design_file', 'in_file'])
    else:
        # Newer FILMGLS consumes the contrast files directly.
        if f_contrasts:
            iterfield = ['design_file', 'in_file', 'tcon_file', 'fcon_file']
        else:
            iterfield = ['design_file', 'in_file', 'tcon_file']
        modelestimate = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                                         mask_size=5),
                                   name='modelestimate',
                                   iterfield=iterfield)

    if version < 507:
        if f_contrasts:
            iterfield = [
                'tcon_file', 'fcon_file', 'param_estimates', 'sigmasquareds',
                'corrections', 'dof_file'
            ]
        else:
            iterfield = [
                'tcon_file', 'param_estimates', 'sigmasquareds',
                'corrections', 'dof_file'
            ]
        # BUG FIX: the computed iterfield list was previously ignored and a
        # hard-coded list (always containing 'fcon_file') was used, breaking
        # runs with f_contrasts=False.
        conestimate = pe.MapNode(interface=fsl.ContrastMgr(),
                                 name='conestimate',
                                 iterfield=iterfield)

    if f_contrasts:
        iterfield = ['in1', 'in2']
    else:
        iterfield = ['in1']
    # NOTE(review): merge_contrasts and ztopval are created but never wired
    # into the graph here, so outputspec.pfiles / zfiles stay unconnected.
    merge_contrasts = pe.MapNode(interface=util.Merge(2),
                                 name='merge_contrasts',
                                 iterfield=iterfield)
    ztopval = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                                  suffix='_pval'),
                         nested=True,
                         name='ztop',
                         iterfield=['in_file'])
    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'pfiles', 'zfiles',
        'parameter_estimates', 'residual4d', 'sigmasquareds'
    ]),
        name='outputspec')
    """
    Setup the connections
    """
    modelfit.connect([
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        # connect also residual4d and sigmasquared
        (modelestimate, outputspec,
         [('param_estimates', 'parameter_estimates'),
          ('dof_file', 'dof_file'), ('residual4d', 'residual4d'),
          ('sigmasquareds', 'sigmasquareds')]),
    ])

    # Route the contrast files to whichever node computes the contrasts;
    # the F-contrast file is only connected when f_contrasts is requested.
    con_connections = [('con_file', 'tcon_file')]
    if f_contrasts:
        con_connections.append(('fcon_file', 'fcon_file'))
    if version < 507:
        modelfit.connect([
            (modelgen, conestimate, con_connections),
            (modelestimate, conestimate,
             [('param_estimates', 'param_estimates'),
              ('sigmasquareds', 'sigmasquareds'),
              ('corrections', 'corrections'), ('dof_file', 'dof_file')]),
            (conestimate, outputspec, [('copes', 'copes'),
                                       ('varcopes', 'varcopes')]),
        ])
    else:
        modelfit.connect([
            (modelgen, modelestimate, con_connections),
            (modelestimate, outputspec, [('copes', 'copes'),
                                         ('varcopes', 'varcopes')]),
        ])
    return modelfit
# NOTE(review): NodeHash_1e7a3420 (a SpecifyModel-style node, judging by the
# inputs set below) is created before this chunk — not visible here.
NodeHash_1e7a3420.inputs.high_pass_filter_cutoff = 0
NodeHash_1e7a3420.inputs.input_units = 'secs'
NodeHash_1e7a3420.inputs.time_repetition = 2.0

# Generate FEAT specific files
NodeHash_9bb0d40 = pe.MapNode(interface=fsl.Level1Design(),
                              name='NodeName_9bb0d40',
                              iterfield=['session_info'])
NodeHash_9bb0d40.inputs.bases = {'dgamma': {'derivs': False}}
NodeHash_9bb0d40.inputs.contrasts = [('con-incon', 'T',
                                      ['congruent_correct',
                                       'congruent_correct'], [-1, 1])]
NodeHash_9bb0d40.inputs.interscan_interval = 2.0
NodeHash_9bb0d40.inputs.model_serial_correlations = True

# Wraps command **feat_model**
NodeHash_6b33f50 = pe.MapNode(interface=fsl.FEATModel(),
                              name='NodeName_6b33f50',
                              iterfield=['ev_files', 'fsf_file'])

# Wraps command **film_gls**
NodeHash_2762fb60 = pe.MapNode(interface=fsl.FILMGLS(),
                               name='NodeName_2762fb60',
                               iterfield=['design_file', 'in_file',
                                          'tcon_file'])

# Wraps command **fslmaths**
NodeHash_2df82970 = pe.MapNode(interface=fsl.MeanImage(),
                               name='NodeName_2df82970',
                               iterfield=['in_file'])
NodeHash_2df82970.inputs.dimension = 'T'

# Generic datasink module to store structured outputs
NodeHash_33a4bec0 = pe.Node(interface=io.DataSink(),
                            name='NodeName_33a4bec0')
NodeHash_33a4bec0.inputs.base_directory = '/tmp/FIRSTLEVEL'

# Basic interface class to select specific elements from a list
NodeHash_7caa820 = pe.MapNode(interface=utility.Select(),
                              name='NodeName_7caa820',
                              iterfield=['inlist'])
NodeHash_7caa820.inputs.index = 0

# Basic interface class to select specific elements from a list
NodeHash_b8ed090 = pe.MapNode(interface=utility.Select(),
                              name='NodeName_b8ed090',
                              iterfield=['inlist'])
# Design specification for the first-level analysis.
level1design = pe.Node(interface=fsl.model.Level1Design(),
                       name='level1design')
level1design.inputs.interscan_interval = 2.0
level1design.inputs.bases = {'dgamma': {'derivs': True}}
level1design.inputs.model_serial_correlations = True
firstlevel.connect(specifymodel, 'session_info', level1design,
                   'session_info')
# Contrasts are derived from the task name via get_contrasts.
firstlevel.connect(taskinfo, ('taskname', get_contrasts), level1design,
                   'contrasts')

# Convert the fsf design into feat_model's .mat/.con files.
modelgen = pe.Node(interface=fsl.model.FEATModel(), name='modelgen')
firstlevel.connect(level1design, 'fsf_files', modelgen, 'fsf_file')
firstlevel.connect(level1design, 'ev_files', modelgen, 'ev_files')

# Estimate the GLM; prewhitening disabled (autocorr_noestimate=True).
filmgls = pe.Node(interface=fsl.FILMGLS(), name='filmgls')
filmgls.inputs.autocorr_noestimate = True
firstlevel.connect(datasource_func, 'func', filmgls, 'in_file')
firstlevel.connect(modelgen, 'design_file', filmgls, 'design_file')
firstlevel.connect(modelgen, 'con_file', filmgls, 'tcon_file')

# Persist all estimation outputs under the filmgls.* datasink folders.
firstlevel.connect(filmgls, 'param_estimates', datasink,
                   'filmgls.param_estimates')
firstlevel.connect(filmgls, 'sigmasquareds', datasink,
                   'filmgls.sigmasquareds')
firstlevel.connect(filmgls, 'copes', datasink, 'filmgls.copes')
firstlevel.connect(filmgls, 'varcopes', datasink, 'filmgls.varcopes')
firstlevel.connect(filmgls, 'dof_file', datasink, 'filmgls.dof_file')
firstlevel.connect(filmgls, 'tstats', datasink, 'filmgls.tstats')
firstlevel.connect(filmgls, 'zstats', datasink, 'filmgls.zstats')
# NOTE(review): this chunk begins and ends mid-statement — the node whose
# call closes on the first line, and the final pe.MapNode call, continue
# outside the visible source.
iterfield=['session_info'])
NodeHash_2087a210.inputs.bases = {'dgamma': {'derivs': False}}
NodeHash_2087a210.inputs.contrasts = [
    ('con-incon', 'T', ['congruent_correct', 'congruent_correct'], [-1, 1])
]
NodeHash_2087a210.inputs.interscan_interval = 2.0
NodeHash_2087a210.inputs.model_serial_correlations = True

# Wraps command **feat_model**
NodeHash_219c0190 = pe.MapNode(interface=fsl.FEATModel(),
                               name='NodeName_219c0190',
                               iterfield=['ev_files', 'fsf_file'])

# Wraps command **film_gls**
NodeHash_215cb480 = pe.MapNode(
    interface=fsl.FILMGLS(),
    name='NodeName_215cb480',
    iterfield=['design_file', 'in_file', 'tcon_file'])

# Wraps command **fslmaths**
NodeHash_23b7ddc0 = pe.MapNode(interface=fsl.MeanImage(),
                               name='NodeName_23b7ddc0',
                               iterfield=['in_file'])
NodeHash_23b7ddc0.inputs.dimension = 'T'

# Generic datasink module to store structured outputs
NodeHash_23ed28b0 = pe.Node(interface=io.DataSink(),
                            name='NodeName_23ed28b0')
NodeHash_23ed28b0.inputs.base_directory = '/tmp/FIRSTLEVEL'

# Basic interface class to select specific elements from a list
NodeHash_256f1c70 = pe.MapNode(interface=utility.Select(),
# One-shot (non-workflow) first-level estimation script.
cont1 = ['Bundling-Control', 'T', ['Bundling', 'Control'], [1, -1]]

# Build the session information from the functional runs and event files.
s = SpecifyModel()
s.inputs.input_units = 'secs'
s.inputs.functional_runs = results.outputs.func
s.inputs.time_repetition = 2
s.inputs.high_pass_filter_cutoff = 128.
s.inputs.event_files = results.outputs.evs
model = s.run()

# Generate the fsf/ev design files for the single contrast above.
level1design = Level1Design()
level1design.inputs.interscan_interval = 2.5
level1design.inputs.bases = {'dgamma': {'derivs': False}}
level1design.inputs.model_serial_correlations = False
level1design.inputs.session_info = model.outputs.session_info
level1design.inputs.contrasts = [cont1]
l1d = level1design.run()
# BUG FIX: Python 2 print statement is a SyntaxError on Python 3.
print(l1d.outputs.ev_files)

# Convert the fsf design into the .mat/.con files film_gls consumes.
modelgen = FEATModel()
modelgen.inputs.ev_files = l1d.outputs.ev_files
modelgen.inputs.fsf_file = l1d.outputs.fsf_files
model = modelgen.run()

# Estimate the GLM.
fgls = fsl.FILMGLS()
fgls.inputs.in_file = results.outputs.func
fgls.inputs.design_file = model.outputs.design_file
fgls.inputs.threshold = 10
fgls.inputs.results_dir = 'stats'
res = fgls.run()
def init_taskbased_wf(analysis=None, memcalc=MemoryCalculator()):
    """
    create workflow to calculate a first level glm for task functional data
    """
    assert isinstance(analysis, Analysis)
    assert isinstance(analysis.tags, Tags)

    # make bold file variant specification
    boldfilefields = ["bold_file"]
    varianttupls = [("space", analysis.tags.space)]
    if analysis.tags.grand_mean_scaled is not None:
        assert isinstance(analysis.tags.grand_mean_scaled,
                          GrandMeanScaledTag)
        varianttupls.append(analysis.tags.grand_mean_scaled.as_tupl())
    if analysis.tags.band_pass_filtered is not None:
        assert isinstance(analysis.tags.band_pass_filtered,
                          BandPassFilteredTag)
        assert analysis.tags.band_pass_filtered.type == "gaussian"
        varianttupls.append(analysis.tags.band_pass_filtered.as_tupl())
    if analysis.tags.confounds_removed is not None:
        assert isinstance(analysis.tags.confounds_removed,
                          ConfoundsRemovedTag)
        # aroma_motion confounds were already regressed out upstream; the
        # remaining names are extracted here and handed to the model.
        confounds_removed_names = tuple(
            name for name in analysis.tags.confounds_removed.names
            if "aroma_motion" in name)
        varianttupls.append(("confounds_removed", confounds_removed_names))
        confounds_extract_names = tuple(
            name for name in analysis.tags.confounds_removed.names
            if "aroma_motion" not in name)
        if len(confounds_extract_names) > 0:
            boldfilefields.append("confounds_file")
            varianttupls.append(
                ("confounds_extract", confounds_extract_names))
    if analysis.tags.smoothed is not None:
        assert isinstance(analysis.tags.smoothed, SmoothedTag)
        varianttupls.append(analysis.tags.smoothed.as_tupl())
    variantdict = dict(varianttupls)

    boldfilevariant = (tuple(boldfilefields), tuple(varianttupls))

    assert analysis.name is not None
    workflow = pe.Workflow(name=analysis.name)

    # inputs are the bold file, the mask file and the confounds file
    inputnode = pe.Node(
        niu.IdentityInterface(fields=[
            *boldfilefields, "mask_file", "condition_files", "metadata"
        ]),
        name="inputnode",
    )

    # parse condition files into three (ordered) lists
    parseconditionfile = pe.Node(
        interface=ParseConditionFile(),
        name="parseconditionfile",
    )
    workflow.connect(inputnode, "condition_files", parseconditionfile,
                     "in_any")

    def get_repetition_time(dic):
        # Pull TR (seconds) out of the metadata dict; None when absent.
        return dic.get("RepetitionTime")

    # first level model specification
    modelspec = pe.Node(
        interface=model.SpecifyModel(input_units="secs", ),
        name="modelspec",
    )
    workflow.connect([
        (
            inputnode,
            modelspec,
            [
                ("bold_file", "functional_runs"),
                (("metadata", get_repetition_time), "time_repetition"),
            ],
        ),
        (parseconditionfile, modelspec, [("subject_info", "subject_info")]),
    ])
    if "band_pass_filtered" in variantdict:
        modelspec.inputs.high_pass_filter_cutoff = float(
            analysis.tags.band_pass_filtered.high)
    if "confounds_extract" in variantdict:
        workflow.connect([(inputnode, modelspec,
                           [("confounds_file", "realignment_parameters")])])

    # transform contrasts dictionary to nipype list data structure
    contrasts = [[
        contrast.name,
        contrast.type.upper(), *map(list, zip(*contrast.values.items()))
    ] for contrast in analysis.contrasts]

    # generate design from first level specification
    level1design = pe.Node(
        interface=fsl.Level1Design(
            contrasts=contrasts,
            model_serial_correlations=True,
            bases={"dgamma": {
                "derivs": False
            }},
        ),
        name="level1design",
    )
    workflow.connect([
        (
            inputnode,
            level1design,
            [(("metadata", get_repetition_time), "interscan_interval")],
        ),
        (modelspec, level1design, [("session_info", "session_info")]),
    ])

    # generate required input files for FILMGLS from design
    # NOTE(review): iterfield is a MapNode argument; passing it to pe.Node
    # looks unintended — confirm whether MapNode was meant here.
    modelgen = pe.Node(interface=fsl.FEATModel(),
                       name="modelgen",
                       iterfield=["fsf_file", "ev_files"])
    workflow.connect([(
        level1design,
        modelgen,
        [("fsf_files", "fsf_file"), ("ev_files", "ev_files")],
    )])

    # calculate range of image values to determine cutoff value
    # for FILMGLS
    boldfilecutoff = pe.Node(interface=fsl.ImageStats(op_string="-R"),
                             name="boldfilecutoff")
    workflow.connect([(inputnode, boldfilecutoff, [("bold_file", "in_file")])
                      ])

    # actually estimate the first level model
    # NOTE(review): same iterfield-on-Node concern as modelgen above.
    modelestimate = pe.Node(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name="modelestimate",
        iterfield=["design_file", "in_file", "tcon_file"],
    )
    workflow.connect([
        (inputnode, modelestimate, [("bold_file", "in_file")]),
        (boldfilecutoff, modelestimate, [(("out_stat", firstfloat),
                                          "threshold")]),
        (
            modelgen,
            modelestimate,
            [("design_file", "design_file"), ("con_file", "tcon_file")],
        ),
    ])

    # make dof volume
    makedofvolume = pe.MapNode(
        interface=MakeDofVolume(),
        iterfield=["dof_file", "cope_file"],
        name="makedofvolume",
    )
    workflow.connect([
        (
            modelestimate,
            makedofvolume,
            [(("copes", first), "cope_file"), ("dof_file", "dof_file")],
        ),
    ])

    outputnode = pe.Node(
        interface=MakeResultdicts(keys=[
            "firstlevelanalysisname",
            "firstlevelfeaturename",
            "cope",
            "varcope",
            "zstat",
            "dof_file",
            "mask_file",
        ]),
        name="outputnode",
    )
    outputnode.inputs.firstlevelanalysisname = analysis.name
    outputnode.inputs.firstlevelfeaturename = list(map(first, contrasts))
    workflow.connect([
        (inputnode, outputnode, [("metadata", "basedict"),
                                 ("mask_file", "mask_file")]),
        (
            modelestimate,
            outputnode,
            [
                (("copes", ravel), "cope"),
                (("varcopes", ravel), "varcope"),
                (("zstats", ravel), "zstat"),
            ],
        ),
        (makedofvolume, outputnode, [("out_file", "dof_file")]),
    ])

    return workflow, (boldfilevariant, )
def init_taskbased_wf(
    workdir=None,
    feature=None,
    condition_files=None,
    condition_units=None,
    memcalc=MemoryCalculator(),
):
    """Create a workflow that fits a first-level GLM to task functional data.

    Parameters
    ----------
    workdir : str or path-like, optional
        Base directory handed to the ``ResultdictDatasink`` node.
    feature : optional
        Feature specification object providing ``name``, ``conditions``,
        ``contrasts`` and optionally ``high_pass_filter_cutoff``.  When
        ``None``, a generic workflow named ``"taskbased_wf"`` is built with
        no contrasts.
    condition_files : optional
        Pre-resolved condition files fed to the ``inputnode`` if given.
    condition_units : optional
        Units for event onsets/durations (e.g. ``"secs"``) if given.
    memcalc : MemoryCalculator, optional
        Memory estimator (kept for interface compatibility).

    Returns
    -------
    nipype.pipeline.engine.Workflow
        Workflow whose ``outputnode.resultdicts`` carries the collected
        result dictionaries.
    """
    if feature is not None:
        name = f"{formatlikebids(feature.name)}_wf"
    else:
        name = "taskbased_wf"
    workflow = pe.Workflow(name=name)

    # Workflow-level input/output hubs.
    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=[
                "tags",
                "vals",
                "metadata",
                "bold",
                "mask",
                "repetition_time",
                "confounds_selected",
                "condition_names",
                "condition_files",
                "condition_units",
            ]
        ),
        name="inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(fields=["resultdicts"]), name="outputnode")

    if feature is not None:
        inputnode.inputs.condition_names = feature.conditions
    if condition_files is not None:
        inputnode.inputs.condition_files = condition_files
    if condition_units is not None:
        inputnode.inputs.condition_units = condition_units

    # Result dictionaries: (a) design/contrast matrices, (b) statistical maps.
    make_resultdicts_a = pe.Node(
        MakeResultdicts(tagkeys=["feature"], imagekeys=["design_matrix", "contrast_matrix"]),
        name="make_resultdicts_a",
    )
    if feature is not None:
        make_resultdicts_a.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts_a, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts_a, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts_a, "metadata")

    make_resultdicts_b = pe.Node(
        MakeResultdicts(
            tagkeys=["feature", "taskcontrast"],
            imagekeys=["effect", "variance", "z", "dof", "mask"],
            metadatakeys=["sources"],
        ),
        name="make_resultdicts_b",
    )
    if feature is not None:
        make_resultdicts_b.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts_b, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts_b, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts_b, "metadata")
    workflow.connect(inputnode, "mask", make_resultdicts_b, "mask")
    workflow.connect(make_resultdicts_b, "resultdicts", outputnode, "resultdicts")

    # Sink all result dictionaries to disk.
    merge_resultdicts = pe.Node(niu.Merge(2), name="merge_resultdicts")
    workflow.connect(make_resultdicts_a, "resultdicts", merge_resultdicts, "in1")
    workflow.connect(make_resultdicts_b, "resultdicts", merge_resultdicts, "in2")
    resultdict_datasink = pe.Node(
        ResultdictDatasink(base_directory=workdir), name="resultdict_datasink"
    )
    workflow.connect(merge_resultdicts, "out", resultdict_datasink, "indicts")

    # parse condition files into three (ordered) lists
    parseconditionfile = pe.Node(ParseConditionFile(), name="parseconditionfile")
    workflow.connect(inputnode, "condition_names", parseconditionfile, "condition_names")
    workflow.connect(inputnode, "condition_files", parseconditionfile, "in_any")

    fillna = pe.Node(FillNA(), name="fillna")
    workflow.connect(inputnode, "confounds_selected", fillna, "in_tsv")

    # first level model specification
    modelspec = pe.Node(model.SpecifyModel(), name="modelspec")
    if hasattr(feature, "high_pass_filter_cutoff"):
        modelspec.inputs.high_pass_filter_cutoff = feature.high_pass_filter_cutoff
    else:
        # np.inf disables high-pass filtering when no cutoff was specified.
        modelspec.inputs.high_pass_filter_cutoff = np.inf
    workflow.connect(inputnode, "bold", modelspec, "functional_runs")
    workflow.connect(inputnode, "condition_units", modelspec, "input_units")
    workflow.connect(inputnode, "repetition_time", modelspec, "time_repetition")
    workflow.connect(fillna, "out_no_header", modelspec, "realignment_parameters")
    workflow.connect(parseconditionfile, "subject_info", modelspec, "subject_info")

    # transform contrasts dictionary to nipype list data structure
    # BUGFIX: default both name lists so the design/contrast labeling below
    # does not raise NameError when feature is None.
    contrasts = []
    condition_names = []
    if feature is not None:
        condition_names = feature.conditions
        for contrast in feature.contrasts:
            # Conditions absent from a contrast get an implicit weight of 0.
            contrast_values = [contrast["values"].get(c, 0.0) for c in condition_names]
            contrasts.append(
                [contrast["name"], contrast["type"].upper(), condition_names, contrast_values]
            )
    contrast_names = list(map(firststr, contrasts))
    make_resultdicts_b.inputs.taskcontrast = contrast_names

    # generate design from first level specification
    level1design = pe.Node(
        fsl.Level1Design(
            contrasts=contrasts,
            model_serial_correlations=True,
            bases={"dgamma": {"derivs": False}},
        ),
        name="level1design",
    )
    workflow.connect(inputnode, "repetition_time", level1design, "interscan_interval")
    workflow.connect(modelspec, "session_info", level1design, "session_info")

    # generate required input files for FILMGLS from design
    modelgen = pe.Node(fsl.FEATModel(), name="modelgen")
    workflow.connect([(level1design, modelgen, [(("fsf_files", firststr), "fsf_file")])])
    workflow.connect([(level1design, modelgen, [(("ev_files", ravel), "ev_files")])])

    # calculate range of image values to determine cutoff value
    stats = pe.Node(fsl.ImageStats(op_string="-R"), name="stats")
    workflow.connect(inputnode, "bold", stats, "in_file")
    cutoff = pe.Node(
        niu.Function(input_names=["obj"], output_names=["min_val"], function=firstfloat),
        name="cutoff",
    )
    workflow.connect(stats, "out_stat", cutoff, "obj")

    # actually estimate the first level model
    modelestimate = pe.Node(
        fsl.FILMGLS(smooth_autocorr=True, mask_size=5), name="modelestimate"
    )
    workflow.connect(inputnode, "bold", modelestimate, "in_file")
    workflow.connect(cutoff, "min_val", modelestimate, "threshold")
    workflow.connect(modelgen, "design_file", modelestimate, "design_file")
    workflow.connect(modelgen, "con_file", modelestimate, "tcon_file")

    # make dof volume
    makedofvolume = pe.Node(
        MakeDofVolume(), iterfield=["dof_file", "copes"], name="makedofvolume"
    )
    workflow.connect(modelestimate, "copes", makedofvolume, "copes")
    workflow.connect(modelestimate, "dof_file", makedofvolume, "dof_file")

    workflow.connect(modelestimate, "copes", make_resultdicts_b, "effect")
    workflow.connect(modelestimate, "varcopes", make_resultdicts_b, "variance")
    workflow.connect(modelestimate, "zstats", make_resultdicts_b, "z")
    workflow.connect(makedofvolume, "out_file", make_resultdicts_b, "dof")

    # Label design-matrix columns: task conditions first, then confounds.
    mergecolumnnames = pe.Node(niu.Merge(2), name="mergecolumnnames")
    mergecolumnnames.inputs.in1 = condition_names
    workflow.connect(fillna, "column_names", mergecolumnnames, "in2")

    design_unvest = pe.Node(Unvest(), name="design_unvest")
    workflow.connect(modelgen, "design_file", design_unvest, "in_vest")
    design_tsv = pe.Node(MergeColumns(1), name="design_tsv")
    workflow.connect(design_unvest, "out_no_header", design_tsv, "in1")
    workflow.connect(mergecolumnnames, "out", design_tsv, "column_names1")

    contrast_unvest = pe.Node(Unvest(), name="contrast_unvest")
    workflow.connect(modelgen, "con_file", contrast_unvest, "in_vest")
    contrast_tsv = pe.Node(MergeColumns(1), name="contrast_tsv")
    contrast_tsv.inputs.row_index = contrast_names
    workflow.connect(contrast_unvest, "out_no_header", contrast_tsv, "in1")
    workflow.connect(mergecolumnnames, "out", contrast_tsv, "column_names1")

    workflow.connect(design_tsv, "out_with_header", make_resultdicts_a, "design_matrix")
    workflow.connect(contrast_tsv, "out_with_header", make_resultdicts_a, "contrast_matrix")

    return workflow
# **ApplyMask:** Prepare brainmask for modeling # In[20]: mask = pe.Node(fsl.maths.ApplyMask(), 'mask') # **FILM:** Run-specific model # In[21]: filmgls = pe.Node(fsl.FILMGLS(), 'filmgls') filmgls.inputs.autocorr_noestimate = True # ## Subject-level fit # Helper function: Sort FILM outputs # In[22]: pass_run_data = pe.Node(niu.IdentityInterface(fields = ['mask', 'dof_file', 'copes', 'varcopes']), 'pass_run_data') join_run_data = pe.JoinNode( niu.IdentityInterface(fields=['masks', 'dof_files', 'copes', 'varcopes']),
def fsl_run_level_wf(
    model,
    step,
    bids_dir,
    output_dir,
    work_dir,
    subject_id,
    database_path,
    smoothing_fwhm=None,
    smoothing_level=None,
    smoothing_type=None,
    use_rapidart=False,
    detrend_poly=None,
    align_volumes=None,
    smooth_autocorrelations=False,
    despike=False,
    name="fsl_run_level_wf",
):
    """Generate run level workflow for a given model.

    Builds an FSL first-level pipeline for one subject: BIDS file selection,
    optional despiking/realignment/RapidArt outlier handling, optional SUSAN
    smoothing, model specification, FEAT design generation, FILMGLS
    estimation, and collation/datasinking of contrast maps.

    Parameters
    ----------
    model : dict
        BIDS StatsModel dictionary; ``model["Name"]`` names the working
        directory and ``model["Input"]["Include"]`` (if present) restricts
        the file query.
    step : dict
        The model step to run; ``step["Level"]`` labels the node names.
    bids_dir, output_dir, work_dir : path-like
        Dataset root, derivative output root, and scratch directory.
    subject_id : str
        Subject label added to the BIDS query entities.
    database_path : str
        Path to the pre-indexed BIDS database used by ``BIDSGet``.
    smoothing_fwhm, smoothing_level, smoothing_type :
        SUSAN smoothing kernel, at which level to smooth ("l1"/"run"
        triggers run-level smoothing here), and "inp" for in-plane (2D).
    use_rapidart : bool
        Add RapidArt outlier regressors to the design.
    detrend_poly : int or None
        If set, polynomial detrending columns are added and the design
        matrix passes through ``correct_matrices``.
    align_volumes : bool-like
        Realign runs to reference volumes with MCFLIRT.
    smooth_autocorrelations : bool
        Enable FILM prewhitening (serial-correlation modelling).
    despike : bool
        Run AFNI 3dDespike before modelling.
    name : str
        Workflow name.

    Returns
    -------
    nipype.pipeline.engine.Workflow
    """
    bids_dir = Path(bids_dir)
    work_dir = Path(work_dir)
    workflow = pe.Workflow(name=name)
    level = step["Level"]
    dimensionality = 3  # Nipype FSL.SUSAN Default
    if smoothing_type == "inp":
        # In-plane smoothing: restrict SUSAN to 2D.
        dimensionality = 2
    workflow.__desc__ = ""
    (work_dir / model["Name"]).mkdir(exist_ok=True)

    # BIDS query entities: model-level includes plus this subject.
    include_entities = {}
    if "Input" in model:
        if "Include" in model["Input"]:
            include_entities = model["Input"]["Include"]
    include_entities.update({"subject": subject_id})

    # --- Node definitions -------------------------------------------------
    # Select functional/metadata/regressor/event files from the BIDS index.
    getter = pe.Node(
        BIDSGet(
            database_path=database_path,
            fixed_entities=include_entities,
            align_volumes=align_volumes,
        ),
        name="func_select",
    )
    # Per-run model info (regressors, contrasts, metadata), one map item per run.
    get_info = pe.MapNode(
        GetRunModelInfo(model=step, detrend_poly=detrend_poly),
        iterfield=["metadata_file", "regressor_file", "events_file", "entities"],
        name=f"get_{level}_info",
    )
    despiker = pe.MapNode(
        afni.Despike(outputtype="NIFTI_GZ"),
        iterfield=["in_file"],
        name="despiker",
    )
    realign_runs = pe.MapNode(
        fsl.MCFLIRT(output_type="NIFTI_GZ", interpolation="sinc"),
        iterfield=["in_file", "ref_file"],
        name="func_realign",
    )
    # Funnel for the functional files regardless of preprocessing path taken.
    wrangle_volumes = pe.MapNode(
        IdentityInterface(fields=["functional_file"]),
        iterfield=["functional_file"],
        name="wrangle_volumes",
    )
    # high_pass_filter_cutoff=-1.0 disables nipype's own high-pass filter;
    # filtering is presumably handled upstream — TODO confirm.
    specify_model = pe.MapNode(
        modelgen.SpecifyModel(high_pass_filter_cutoff=-1.0, input_units="secs"),
        iterfield=["functional_runs", "subject_info", "time_repetition"],
        name=f"model_{level}_specify",
    )
    # Pass-through hub that pairs each run's data with its design parameters.
    fit_model = pe.MapNode(
        IdentityInterface(
            fields=["session_info", "interscan_interval", "contrasts", "functional_data"],
            mandatory_inputs=True,
        ),
        iterfield=["functional_data", "session_info", "interscan_interval", "contrasts"],
        name=f"model_{level}_fit",
    )
    first_level_design = pe.MapNode(
        fsl.Level1Design(
            bases={"dgamma": {"derivs": False}},
            model_serial_correlations=False,
        ),
        iterfield=["session_info", "interscan_interval", "contrasts"],
        name=f"model_{level}_design",
    )
    generate_model = pe.MapNode(
        fsl.FEATModel(output_type="NIFTI_GZ"),
        iterfield=["fsf_file", "ev_files"],
        name=f"model_{level}_generate",
    )
    # FILM GLM fit; prewhitening off by default, toggled below.
    estimate_model = pe.MapNode(
        fsl.FILMGLS(
            threshold=0.0,  # smooth_autocorr=True
            output_type="NIFTI_GZ",
            results_dir="results",
            smooth_autocorr=False,
            autocorr_noestimate=True,
        ),
        iterfield=["design_file", "in_file", "tcon_file"],
        name=f"model_{level}_estimate",
    )
    if smooth_autocorrelations:
        first_level_design.inputs.model_serial_correlations = True
        estimate_model.inputs.smooth_autocorr = True
        estimate_model.inputs.autocorr_noestimate = False
    # Convert z-stat maps to p-value maps.
    # NOTE(review): "caculate" typo kept — changing the node name would
    # change on-disk working-directory paths.
    calculate_p = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ", op_string="-ztop", suffix="_pval"),
        iterfield=["in_file"],
        name=f"model_{level}_caculate_p",
    )
    # BIDS-derivative naming pattern for the statistical maps.
    image_pattern = ("[sub-{subject}/][ses-{session}/]"
                     "[sub-{subject}_][ses-{session}_]"
                     "task-{task}_[acq-{acquisition}_]"
                     "[rec-{reconstruction}_][run-{run}_]"
                     "[echo-{echo}_][space-{space}_]contrast-{contrast}_"
                     "stat-{stat<effect|variance|z|p|t|F>}_statmap.nii.gz")
    run_rapidart = pe.MapNode(
        ra.ArtifactDetect(
            use_differences=[True, False],
            use_norm=True,
            zintensity_threshold=3,
            norm_threshold=1,
            bound_by_brainmask=True,
            mask_type="file",
            parameter_source="FSL",
        ),
        iterfield=["realignment_parameters", "realigned_files", "mask_file"],
        name="rapidart_run",
    )
    # Fold RapidArt outliers back into the run info / contrast entities.
    reshape_rapidart = pe.MapNode(
        Function(
            input_names=["run_info", "functional_file", "outlier_file", "contrast_entities"],
            output_names=["run_info", "contrast_entities"],
            function=utils.reshape_ra,
        ),
        iterfield=["run_info", "functional_file", "outlier_file", "contrast_entities"],
        name="reshape_rapidart",
    )
    # SUSAN needs a mean image and the median intensity within the mask.
    mean_img = pe.MapNode(
        fsl.ImageMaths(output_type="NIFTI_GZ", op_string="-Tmean", suffix="_mean"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_avgimg",
    )
    median_img = pe.MapNode(
        fsl.ImageStats(output_type="NIFTI_GZ", op_string="-k %s -p 50"),
        iterfield=["in_file", "mask_file"],
        name="smooth_susan_medimg",
    )
    merge = pe.Node(Merge(2, axis="hstack"), name="smooth_merge")
    run_susan = pe.MapNode(
        fsl.SUSAN(output_type="NIFTI_GZ"),
        iterfield=["in_file", "brightness_threshold", "usans"],
        name="smooth_susan",
    )
    mask_functional = pe.MapNode(
        ApplyMask(), iterfield=["in_file", "mask_file"], name="mask_functional"
    )
    # Exists solely to correct undesirable behavior of FSL
    # that results in loss of constant columns
    correct_matrices = pe.MapNode(
        Function(
            input_names=["design_matrix"],
            output_names=["design_matrix"],
            function=utils.correct_matrix,
        ),
        iterfield=["design_matrix"],
        run_without_submitting=True,
        name=f"correct_{level}_matrices",
    )
    # Concatenate per-run map lists into flat lists with paired metadata.
    collate = pe.Node(
        MergeAll(
            fields=[
                "effect_maps",
                "variance_maps",
                "zscore_maps",
                "pvalue_maps",
                "tstat_maps",
                "contrast_metadata",
            ],
            check_lengths=True,
        ),
        name=f"collate_{level}",
    )
    # Tag each map type with its "stat" entity for BIDS naming.
    collate_outputs = pe.Node(
        CollateWithMetadata(
            fields=["effect_maps", "variance_maps", "zscore_maps", "pvalue_maps", "tstat_maps"],
            field_to_metadata_map={
                "effect_maps": {"stat": "effect"},
                "variance_maps": {"stat": "variance"},
                "zscore_maps": {"stat": "z"},
                "pvalue_maps": {"stat": "p"},
                "tstat_maps": {"stat": "t"},
            },
        ),
        name=f"collate_{level}_outputs",
    )
    plot_matrices = pe.MapNode(
        PlotMatrices(output_dir=output_dir, database_path=database_path),
        iterfield=["mat_file", "con_file", "entities", "run_info"],
        run_without_submitting=True,
        name=f"plot_{level}_matrices",
    )
    ds_contrast_maps = pe.MapNode(
        BIDSDataSink(base_directory=output_dir, path_patterns=image_pattern),
        iterfield=["entities", "in_file"],
        run_without_submitting=True,
        name=f"ds_{level}_contrast_maps",
    )
    wrangle_outputs = pe.Node(
        IdentityInterface(fields=["contrast_metadata", "contrast_maps"]),
        name=f"wrangle_{level}_outputs",
    )

    # Setup connections among nodes
    workflow.connect([(
        getter,
        get_info,
        [
            ("metadata_files", "metadata_file"),
            ("events_files", "events_file"),
            ("regressor_files", "regressor_file"),
            ("entities", "entities"),
        ],
    )])

    # Route functional files through despike and/or realignment as requested.
    if align_volumes and despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, realign_runs, [("out_file", "in_file")]),
            (getter, realign_runs, [("reference_files", "ref_file")]),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif align_volumes and not despike:
        workflow.connect([
            (
                getter,
                realign_runs,
                [("functional_files", "in_file"), ("reference_files", "ref_file")],
            ),
            (
                realign_runs,
                wrangle_volumes,
                [("out_file", "functional_file")],
            ),
        ])
    elif despike:
        workflow.connect([
            (getter, despiker, [("functional_files", "in_file")]),
            (despiker, wrangle_volumes, [("out_file", "functional_file")]),
        ])
    else:
        workflow.connect([(getter, wrangle_volumes, [("functional_files", "functional_file")])])

    # With RapidArt, run info is augmented with outlier regressors first.
    if use_rapidart:
        workflow.connect([
            (get_info, run_rapidart, [("motion_parameters", "realignment_parameters")]),
            (getter, run_rapidart, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                run_rapidart,
                [("functional_file", "realigned_files")],
            ),
            (
                run_rapidart,
                reshape_rapidart,
                [("outlier_files", "outlier_file")],
            ),
            (
                get_info,
                reshape_rapidart,
                [("run_info", "run_info"), ("contrast_entities", "contrast_entities")],
            ),
            (wrangle_volumes, reshape_rapidart, [("functional_file", "functional_file")]),
            (
                reshape_rapidart,
                specify_model,
                [("run_info", "subject_info")],
            ),
            (reshape_rapidart, plot_matrices, [("run_info", "run_info")]),
            (reshape_rapidart, collate, [("contrast_entities", "contrast_metadata")]),
        ])
    else:
        workflow.connect([
            (get_info, specify_model, [("run_info", "subject_info")]),
            (get_info, plot_matrices, [("run_info", "run_info")]),
            (
                get_info,
                collate,
                [("contrast_entities", "contrast_metadata")],
            ),
        ])

    # Run-level SUSAN smoothing (mean/median feed brightness threshold and usans).
    if smoothing_level == "l1" or smoothing_level == "run":
        run_susan.inputs.fwhm = smoothing_fwhm
        run_susan.inputs.dimension = dimensionality
        estimate_model.inputs.mask_size = smoothing_fwhm
        workflow.connect([
            (wrangle_volumes, mean_img, [("functional_file", "in_file")]),
            (
                wrangle_volumes,
                median_img,
                [("functional_file", "in_file")],
            ),
            (getter, mean_img, [("mask_files", "mask_file")]),
            (getter, median_img, [("mask_files", "mask_file")]),
            (mean_img, merge, [("out_file", "in1")]),
            (median_img, merge, [("out_stat", "in2")]),
            (wrangle_volumes, run_susan, [("functional_file", "in_file")]),
            (
                median_img,
                run_susan,
                [(
                    ("out_stat", utils.get_btthresh),
                    "brightness_threshold",
                )],
            ),
            (merge, run_susan, [(("out", utils.get_usans), "usans")]),
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (run_susan, mask_functional, [("smoothed_file", "in_file")]),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])
    else:
        # No smoothing: mask the (possibly realigned/despiked) data directly.
        workflow.connect([
            (getter, mask_functional, [("mask_files", "mask_file")]),
            (
                wrangle_volumes,
                mask_functional,
                [("functional_file", "in_file")],
            ),
            (
                mask_functional,
                specify_model,
                [("out_file", "functional_runs")],
            ),
            (
                mask_functional,
                fit_model,
                [("out_file", "functional_data")],
            ),
        ])

    # Core modelling chain: specify -> fit hub -> design -> FEAT files.
    workflow.connect([
        (
            get_info,
            specify_model,
            [("repetition_time", "time_repetition")],
        ),
        (specify_model, fit_model, [("session_info", "session_info")]),
        (
            get_info,
            fit_model,
            [("repetition_time", "interscan_interval"), ("run_contrasts", "contrasts")],
        ),
        (
            fit_model,
            first_level_design,
            [
                ("interscan_interval", "interscan_interval"),
                ("session_info", "session_info"),
                ("contrasts", "contrasts"),
            ],
        ),
        (first_level_design, generate_model, [("fsf_files", "fsf_file")]),
        (first_level_design, generate_model, [("ev_files", "ev_files")]),
    ])

    # With detrending, the design matrix is repaired before estimation.
    if detrend_poly:
        workflow.connect([
            (
                generate_model,
                correct_matrices,
                [("design_file", "design_matrix")],
            ),
            (
                correct_matrices,
                plot_matrices,
                [("design_matrix", "mat_file")],
            ),
            (
                correct_matrices,
                estimate_model,
                [("design_matrix", "design_file")],
            ),
        ])
    else:
        workflow.connect([
            (generate_model, plot_matrices, [("design_file", "mat_file")]),
            (
                generate_model,
                estimate_model,
                [("design_file", "design_file")],
            ),
        ])

    # Estimation, p-value conversion, collation, and datasinking.
    workflow.connect([
        (getter, plot_matrices, [("entities", "entities")]),
        (generate_model, plot_matrices, [("con_file", "con_file")]),
        (fit_model, estimate_model, [("functional_data", "in_file")]),
        (generate_model, estimate_model, [("con_file", "tcon_file")]),
        (
            estimate_model,
            calculate_p,
            [(("zstats", utils.flatten), "in_file")],
        ),
        (
            estimate_model,
            collate,
            [
                ("copes", "effect_maps"),
                ("varcopes", "variance_maps"),
                ("zstats", "zscore_maps"),
                ("tstats", "tstat_maps"),
            ],
        ),
        (calculate_p, collate, [("out_file", "pvalue_maps")]),
        (
            collate,
            collate_outputs,
            [
                ("effect_maps", "effect_maps"),
                ("variance_maps", "variance_maps"),
                ("zscore_maps", "zscore_maps"),
                ("pvalue_maps", "pvalue_maps"),
                ("tstat_maps", "tstat_maps"),
                ("contrast_metadata", "metadata"),
            ],
        ),
        (
            collate_outputs,
            ds_contrast_maps,
            [("out", "in_file"), ("metadata", "entities")],
        ),
        (
            collate_outputs,
            wrangle_outputs,
            [("metadata", "contrast_metadata"), ("out", "contrast_maps")],
        ),
    ])
    return workflow
def L1PIPE():
    """Interactive FSL first-level pipeline.

    Prompts the user (via drag-and-drop paths on stdin) for preprocessed
    functional data, TR, high-pass cutoff, and a directory of 3-column event
    files; builds a SpecifyModel -> Level1Design -> FEATModel -> FILMGLS
    workflow with four fixed T contrasts, plots the z-stat maps, writes the
    execution graph, and runs the workflow in the functional data's folder.

    Side effects: reads stdin, shows matplotlib windows, writes workflow
    outputs under the functional data directory. Returns None.
    """
    # ---1) Import modules
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.algorithms.modelgen as model
    import glob
    from nipype import Function
    import matplotlib
    import nipype.interfaces.utility as util
    import os

    # --- 2) Specify model node
    specify_model = pe.Node(interface=model.SpecifyModel(), name="SPECIFY_MODEL")
    specify_model.inputs.input_units = 'secs'

    # BUGFIX: raw_input is Python 2 only; use input() on Python 3.
    runs = input('Please drag in the pre-processsed functional data\n')
    runs2 = runs.strip('\'"')  # drop quotes added by terminal drag-and-drop
    NIFTIDIR = os.path.split(runs2)[0]
    specify_model.inputs.functional_runs = [runs2]
    specify_model.inputs.time_repetition = float(input('Enter the TR (s)\n'))
    specify_model.inputs.high_pass_filter_cutoff = float(
        input('Enter the High pass filter cutoff (s)\n'))
    EVENTFILES = input('Please drag in the directory of 3 column event files')
    EVENTFILES2 = EVENTFILES.strip('\'"')
    EVENTFILESLIST = glob.glob(EVENTFILES2 + '/*')
    # Sorted so EV order (and thus contrast weights) is deterministic.
    specify_model.inputs.event_files = sorted(EVENTFILESLIST)

    # --- 3) Level 1 design node.
    Designer = pe.Node(interface=fsl.Level1Design(), name='DESIGN')
    Designer.inputs.interscan_interval = float(specify_model.inputs.time_repetition)
    Designer.inputs.bases = {'dgamma': {'derivs': False}}
    Designer.inputs.model_serial_correlations = False

    # --- 4) Make some contrasts
    # All four contrasts weight the same ten EVs (one per sorted event file).
    ev_names = [
        'B1INVFEAR.RUN001', 'B1INVINVFEAR.RUN001', 'B1INVINVNEUT.RUN001',
        'B1INVNEUT.RUN001', 'B1SCFEAR.RUN001', 'B1SCNEUT.RUN001',
        'B1UPFEAR.RUN001', 'B1UPINVFEAR.RUN001', 'B1UPINVNEUT.RUN001',
        'B1UPNEUT.RUN001'
    ]
    cont1 = ('Task', 'T', ev_names, [1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
    cont2 = ('Up', 'T', ev_names, [0, 0, 0, 0, 0, 0, 1, 0, 0, 1])
    cont3 = ('SC', 'T', ev_names, [0, 0, 0, 0, 1, 1, 0, 0, 0, 0])
    cont4 = ('UpvSC', 'T', ev_names, [0, 0, 0, 0, -1, -1, 1, 0, 0, 1])
    Designer.inputs.contrasts = [cont1, cont2, cont3, cont4]

    # --- 5) FSL model node
    Model = pe.Node(interface=fsl.FEATModel(), name='FEATMODEL')

    # --- 6) FILM GSL node
    fgls = pe.Node(interface=fsl.FILMGLS(), name='FILM_GLS')
    fgls.inputs.in_file = runs2

    # --- 7) outputnode for the design image (gets binned otherwise)
    outputnode = pe.Node(
        interface=util.IdentityInterface(
            fields=['im', 'cope', 'varcope', 'dof', 'resid', 'params', 'sigmas']),
        name='outputnode')

    # --- 8) Plotting node
    def plot(in_file):
        from nilearn import plotting
        # BUGFIX: import the pyplot submodule explicitly; a bare
        # "import matplotlib" does not make matplotlib.pyplot available.
        import matplotlib.pyplot
        display = plotting.plot_stat_map(stat_map_img=in_file,
                                         display_mode='z',
                                         cut_coords=10,
                                         threshold=float(0))
        matplotlib.pyplot.show()

    plotter = pe.MapNode(
        Function(input_names=['in_file'], output_names='display', function=plot),
        iterfield=['in_file'],
        name='PLOTTER')

    workflow = pe.Workflow(name='L1PIPE')

    workflow.connect(specify_model, 'session_info', Designer, 'session_info')
    workflow.connect(Designer, 'fsf_files', Model, 'fsf_file')
    workflow.connect(Designer, 'ev_files', Model, 'ev_files')
    workflow.connect(Model, 'design_file', fgls, 'design_file')
    workflow.connect(Model, 'con_file', fgls, 'tcon_file')
    workflow.connect(Model, 'design_image', outputnode, 'im')

    # Feed the z stats to the plotter.
    workflow.connect(fgls, 'zstats', plotter, 'in_file')
    workflow.connect(fgls, 'copes', outputnode, 'cope')
    workflow.connect(fgls, 'varcopes', outputnode, 'varcope')
    workflow.connect(fgls, 'dof_file', outputnode, 'dof')
    workflow.connect(fgls, 'residual4d', outputnode, 'resid')
    workflow.connect(fgls, 'param_estimates', outputnode, 'params')
    workflow.connect(fgls, 'sigmasquareds', outputnode, 'sigmas')

    workflow.base_dir = NIFTIDIR
    workflow.write_graph(graph2use='exec')
    workflow.run()
def create_first(name='modelfit'):
    """Build the first-level task-fMRI modelling workflow.

    Parameters
    ----------
    name : str
        Name of workflow. Default = 'modelfit'

    Inputs (on ``inputspec``)
    -------------------------
    session_info, interscan_interval, contrasts, film_threshold,
    functional_data, bases, model_serial_correlations

    Outputs (on ``outputspec``)
    ---------------------------
    copes, varcopes, dof_file, pfiles, parameter_estimates, zstats,
    tstats, design_image, design_file, design_cov, sigmasquareds

    Returns
    -------
    workflow : first-level workflow
    """
    import nipype.interfaces.fsl as fsl  # fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    wf = pe.Workflow(name=name)

    # Single entry point for all workflow parameters.
    inputspec = pe.Node(
        util.IdentityInterface(fields=[
            'session_info', 'interscan_interval', 'contrasts',
            'film_threshold', 'functional_data', 'bases',
            'model_serial_correlations'
        ]),
        name='inputspec')

    # Processing chain: design -> FEAT files -> GLM fit -> contrasts -> p.
    design = pe.Node(interface=fsl.Level1Design(),
                     name="create_level1_design")
    featmodel = pe.MapNode(interface=fsl.FEATModel(),
                           iterfield=['fsf_file', 'ev_files'],
                           name='generate_model')
    film = pe.MapNode(interface=fsl.FILMGLS(smooth_autocorr=True,
                                            mask_size=5),
                      iterfield=['design_file', 'in_file'],
                      name='estimate_model')
    contrast = pe.MapNode(interface=fsl.ContrastMgr(),
                          iterfield=['tcon_file', 'param_estimates',
                                     'sigmasquareds', 'corrections',
                                     'dof_file'],
                          name='estimate_contrast')
    z2p = pe.MapNode(interface=fsl.ImageMaths(op_string='-ztop',
                                              suffix='_pval'),
                     iterfield=['in_file'],
                     name='z2pval')

    # Single exit point so downstream workflows see stable field names.
    outputspec = pe.Node(
        util.IdentityInterface(fields=[
            'copes', 'varcopes', 'dof_file', 'pfiles',
            'parameter_estimates', 'zstats', 'tstats', 'design_image',
            'design_file', 'design_cov', 'sigmasquareds'
        ]),
        name='outputspec')

    # Pick the first element of a MapNode output list.
    pop_lambda = lambda x: x[0]

    # Wire everything in one pass.
    wf.connect([
        (inputspec, design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'),
          ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, film,
         [('film_threshold', 'threshold'),
          ('functional_data', 'in_file')]),
        (design, featmodel,
         [('fsf_files', 'fsf_file'),
          ('ev_files', 'ev_files')]),
        (featmodel, film, [('design_file', 'design_file')]),
        (featmodel, contrast, [('con_file', 'tcon_file')]),
        (film, contrast,
         [('param_estimates', 'param_estimates'),
          ('sigmasquareds', 'sigmasquareds'),
          ('corrections', 'corrections'),
          ('dof_file', 'dof_file')]),
        (contrast, z2p, [(('zstats', pop_lambda), 'in_file')]),
        (z2p, outputspec, [('out_file', 'pfiles')]),
        (film, outputspec,
         [('param_estimates', 'parameter_estimates'),
          ('dof_file', 'dof_file'),
          ('sigmasquareds', 'sigmasquareds')]),
        (contrast, outputspec,
         [('copes', 'copes'),
          ('varcopes', 'varcopes'),
          ('tstats', 'tstats'),
          ('zstats', 'zstats')]),
        (featmodel, outputspec,
         [('design_image', 'design_image'),
          ('design_file', 'design_file'),
          ('design_cov', 'design_cov')]),
    ])
    return wf
def model_fitting(source_img, prepped_img, subject_info, aroma, task, args, mask_file, run_number):
    """Fit a single-task first-level GLM and return an FDR-thresholded,
    template-resampled z-map path.

    Parameters
    ----------
    source_img : BIDS image object exposing ``.entities['RepetitionTime']``.
    prepped_img : preprocessed functional image passed to ``preprocess``.
    subject_info : nipype SpecifyModel subject_info (events/regressors).
    aroma : AROMA flag forwarded to ``preprocess``.
    task : str task label; used for directory and file naming and as the
        active EV name in the contrast.
    args : namespace with ``outputdir``, ``fwhm``, ``cthresh``, ``alpha``.
    mask_file : brain mask image used by thresholding.
    run_number : int; > 0 selects a per-run output directory.

    Side effects: creates output directories, runs the nipype workflow,
    writes an outputs text file, a thresholded z-map, cluster stats, and a
    resampled map; removes the intermediate thresholded file.

    Returns
    -------
    str
        Path to the resampled FDR-thresholded z-map.
    """
    # Get the necessary parameters
    outputdir = args.outputdir
    fwhm = args.fwhm
    cthresh = args.cthresh
    alpha = args.alpha

    # Make a task directory in the output folder
    # NOTE(review): run_number is offset by one here ("run-0" + run_number+1),
    # presumably because run_number is a 0-based index of runs after the
    # first — TODO confirm against the caller.
    if run_number > 0:
        taskdir = os.path.join(outputdir, task + "_run-0" + str(run_number + 1))
    else:
        taskdir = os.path.join(outputdir, task)

    if not os.path.exists(taskdir):
        os.mkdir(taskdir)
    # NOTE(review): these two mkdir calls raise FileExistsError on rerun of
    # an existing task directory.
    os.mkdir(os.path.join(taskdir, 'stats'))
    os.mkdir(os.path.join(taskdir, 'figs'))

    processed_image = preprocess(aroma, fwhm, prepped_img, mask_file, taskdir, task)

    task_vs_baseline = [
        task + " vs baseline", 'T', [task, 'baseline'], [1, -1]
    ]  # set up contrasts
    contrasts = [task_vs_baseline]
    """
    Model fitting workflow

    Inputs::
         inputspec.session_info : info generated by modelgen.SpecifyModel
         inputspec.interscan_interval : interscan interval
         inputspec.contrasts : list of contrasts
         inputspec.film_threshold : image threshold for FILM estimation
         inputspec.model_serial_correlations
         inputspec.bases
    Outputs::
         outputspec.copes
         outputspec.varcopes
         outputspec.dof_file
         outputspec.zfiles
         outputspec.parameter_estimates
    """
    modelfit = pe.Workflow(name='modelfit', base_dir=taskdir)

    modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec")  # generate design info
    inputspec = pe.Node(util.IdentityInterface(fields=[
        'session_info', 'interscan_interval', 'contrasts', 'film_threshold',
        'functional_data', 'bases', 'model_serial_correlations'
    ]),
                        name='inputspec')
    level1design = pe.Node(interface=fsl.Level1Design(), name="level1design")
    modelgen = pe.MapNode(interface=fsl.FEATModel(),
                          name='modelgen',
                          iterfield=['fsf_file', 'ev_files'])
    modelestimate = pe.MapNode(
        interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5),
        name='modelestimate',
        iterfield=['design_file', 'in_file', 'tcon_file'])
    # Merges z-stats and z-F-stats into a single list per run.
    merge_contrasts = pe.MapNode(interface=util.Merge(2),
                                 name='merge_contrasts',
                                 iterfield=['in1'])
    outputspec = pe.Node(util.IdentityInterface(fields=[
        'copes', 'varcopes', 'dof_file', 'zfiles', 'parameter_estimates'
    ]),
                         name='outputspec')

    modelfit.connect([
        (modelspec, inputspec, [('session_info', 'session_info')]),
        (inputspec, level1design,
         [('interscan_interval', 'interscan_interval'),
          ('session_info', 'session_info'), ('contrasts', 'contrasts'),
          ('bases', 'bases'),
          ('model_serial_correlations', 'model_serial_correlations')]),
        (inputspec, modelestimate, [('film_threshold', 'threshold'),
                                    ('functional_data', 'in_file')]),
        (level1design, modelgen, [('fsf_files', 'fsf_file'),
                                  ('ev_files', 'ev_files')]),
        (modelgen, modelestimate, [('design_file', 'design_file')]),
        (merge_contrasts, outputspec, [('out', 'zfiles')]),
        (modelestimate, outputspec, [('param_estimates',
                                      'parameter_estimates'),
                                     ('dof_file', 'dof_file')]),
    ])

    modelfit.connect([
        (modelgen, modelestimate, [('con_file', 'tcon_file'),
                                   ('fcon_file', 'fcon_file')]),
        (modelestimate, merge_contrasts, [('zstats', 'in1'),
                                          ('zfstats', 'in2')]),
        (modelestimate, outputspec, [('copes', 'copes'),
                                     ('varcopes', 'varcopes')]),
    ])

    # Define inputs to workflow
    modelspec.inputs.functional_runs = processed_image
    inputspec.inputs.functional_data = processed_image
    modelspec.inputs.subject_info = subject_info
    modelspec.inputs.input_units = 'secs'
    modelspec.inputs.time_repetition = source_img.entities['RepetitionTime']
    modelspec.inputs.high_pass_filter_cutoff = 90
    inputspec.inputs.model_serial_correlations = True
    inputspec.inputs.film_threshold = 10.0
    inputspec.inputs.interscan_interval = source_img.entities['RepetitionTime']
    inputspec.inputs.bases = {
        'gamma': {
            'gammasigma': 3,
            'gammadelay': 6,
            'derivs': True
        }
    }
    inputspec.inputs.contrasts = contrasts

    # Run the model-fitting pipeline. Main outputs are a feat directory (w/ functional img) and a design.mat file
    res = modelfit.run()

    # outputs
    output_txt = open(os.path.join(taskdir, task + '_outputs.txt'), 'w')
    print_outputs(output_txt, res)

    # The third node, FILM's, first element (i.e. only element) of its 'zstats' output
    # NOTE(review): relies on the iteration order of res.nodes placing
    # modelestimate at index 2 — fragile; verify if node set changes.
    z_img = list(res.nodes)[2].result.outputs.zstats[0]

    # Use False Discovery Rate theory to correct for multiple comparisons
    fdr_thresh_img, fdr_threshold = thresholding.map_threshold(
        stat_img=z_img,
        mask_img=mask_file,
        alpha=alpha,
        height_control='fdr',
        cluster_threshold=cthresh)
    print("Thresholding at FDR corrected threshold of " + str(fdr_threshold))
    fdr_thresh_img_path = os.path.join(taskdir, task + '_fdr_thresholded_z.nii.gz')
    nibabel.save(fdr_thresh_img, fdr_thresh_img_path)

    # Do a cluster analysis using the FDR corrected threshold on the original z_img
    print("Performing cluster analysis.")
    cl = fsl.Cluster(in_file=z_img, threshold=fdr_threshold)
    cluster_file = os.path.join(taskdir, 'stats', task + "_cluster_stats.txt")
    cluster_analysis(cluster_file, cl)

    # Resample the result image with AFNI
    resample_fdr_thresh_img_path = os.path.join(
        taskdir, task + '_fdr_thresholded_z_resample.nii.gz')
    print("Resampling thresholded image to MNI space")
    # `template` is presumably a module-level MNI template path — defined
    # outside this view; confirm before reuse.
    resample = afni.Resample(master=template,
                             out_file=resample_fdr_thresh_img_path,
                             in_file=fdr_thresh_img_path)
    resample.run()
    os.remove(fdr_thresh_img_path)

    print("Image to be returned: " + resample_fdr_thresh_img_path)
    return resample_fdr_thresh_img_path
run specific fsf file for analysis """ level1design = pe.Node(interface=fsl.Level1Design(), name="fsfdesign") """ e. Use :class:`nipype.interfaces.fsl.FEATModel` to generate a run specific mat file for use by FILMGLS """ modelgen = pe.Node(interface=fsl.FEATModel(), name='modelgen') """ f. Use :class:`nipype.interfaces.fsl.FILMGLS` to estimate a model specified by a mat file and a functional run """ modelestimate = pe.Node(interface=fsl.FILMGLS(), name='modelestimate') #iterfield = ['design_file','in_file']) modelfit.connect([ (modelspec, level1design, [('session_info', 'session_info')]), (level1design, modelgen, [('fsf_files', 'fsf_file'), ('ev_files', 'ev_files')]), (modelgen, modelestimate, [('design_file', 'design_file')]), ]) """ The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``s1`` and ``s2``. Each subject directory contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And one anatomical volume named struct.nii. Below we set some variables to inform the ``datasource`` about the
#Generate FEAT specific files NodeHash_8241250 = pe.Node(interface=fsl.Level1Design(), name='NodeName_8241250') NodeHash_8241250.inputs.bases = {'dgamma': {'derivs': False}} NodeHash_8241250.inputs.contrasts = [ ('con-incon', 'T', ['congruent_correct', 'congruent_correct'], [-1, 1]) ] NodeHash_8241250.inputs.interscan_interval = 2.0 NodeHash_8241250.inputs.model_serial_correlations = True #Wraps command **feat_model** NodeHash_8b12580 = pe.Node(interface=fsl.FEATModel(), name='NodeName_8b12580') #Wraps command **film_gls** NodeHash_5015c80 = pe.Node(interface=fsl.FILMGLS(), name='NodeName_5015c80') #Generic datasink module to store structured outputs NodeHash_8a104d0 = pe.Node(interface=io.DataSink(), name='NodeName_8a104d0') NodeHash_8a104d0.inputs.base_directory = '/tmp/FIRSTLEVEL' #Create a workflow to connect all those nodes analysisflow = nipype.Workflow('MyWorkflow') analysisflow.connect(NodeHash_3042f20, 'subject_info', NodeHash_6bef320, 'subject_info') analysisflow.connect(NodeHash_32c4e30, 'events', NodeHash_3042f20, 'in_file') analysisflow.connect(NodeHash_6bef320, 'session_info', NodeHash_8241250, 'session_info') analysisflow.connect(NodeHash_8241250, 'fsf_files', NodeHash_8b12580, 'fsf_file') analysisflow.connect(NodeHash_8241250, 'ev_files', NodeHash_8b12580,
file for use by FILMGLS
"""

modelgen = pe.Node(
    interface=fsl.FEATModel(),
    name='modelgen',
)

"""
Use :class:`nipype.interfaces.fsl.FILMGLS` to estimate a model specified by a
mat file and a functional run
"""

# NOTE(review): this mask node is declared but not wired in the visible
# connection list below — confirm it is connected later in the script.
mask = pe.Node(interface=fsl.maths.ApplyMask(), name='mask')

modelestimate = pe.Node(
    interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5, threshold=1000),
    name='modelestimate',
)

# %%
# Wire the preprocessing and model-specification nodes (infosource,
# selectfiles, runinfo, skip, susan, modelspec, level1design — all defined
# earlier in this script, outside the visible fragment) into the workflow.
modelfit.connect([
    (infosource, selectfiles, [('subject_id', 'subject_id')]),
    (selectfiles, runinfo, [('events', 'events_file'),
                            ('regressors', 'regressors_file')]),
    (selectfiles, skip, [('func', 'in_file')]),
    (skip, susan, [('roi_file', 'in_file')]),
    (susan, runinfo, [('smoothed_file', 'in_file')]),
    (susan, modelspec, [('smoothed_file', 'functional_runs')]),
    (runinfo, modelspec, [('info', 'subject_info'),
                          ('realign_file', 'realignment_parameters')]),
    (modelspec, level1design, [('session_info', 'session_info')]),
# First-level FSL model nodes (script-style; `contrasts` and the Node/MapNode
# imports come from earlier in this file, outside the visible fragment).
level1design = Node(interface=fsl.model.Level1Design(), name="level1Design")
# NOTE(review): interscan_interval is the TR in seconds — confirm 1 s matches
# the acquisition.
level1design.inputs.interscan_interval = 1
level1design.inputs.bases = {'dgamma': {'derivs': True}}
level1design.inputs.model_serial_correlations = True
level1design.inputs.contrasts = contrasts

# One FEATModel per run:
# in_file is fsf_file=level1design_results.outputs.fsf_files,
# ev_files=level1design_results.outputs.ev_file
modelgen = MapNode(interface=fsl.model.FEATModel(), name="modelgen",
                   iterfield=['fsf_file', 'ev_files'])

# in_file is subject info
mask = MapNode(interface=fsl.maths.ApplyMask(), name="mask",
               iterfield=['mask_file', 'in_file'])

# One FILMGLS estimation per run.
filmgls = MapNode(interface=fsl.FILMGLS(), name="filmgls",
                  iterfield=['design_file', 'in_file', 'tcon_file'])
# NOTE(review): autocorr_noestimate=True disables prewhitening even though
# level1design sets model_serial_correlations=True — confirm this mismatch is
# intentional.
filmgls.inputs.autocorr_noestimate = True
filmgls.inputs.results_dir = 'stats'
'''
in_file=mask_results.outputs.out_file,
design_file = modelgen_results.outputs.design_file,
tcon_file = modelgen_results.outputs.con_file,
fcon_file = modelgen_results.outputs.fcon_file,
'''

wFSL = Workflow(name="l1FSL", base_dir="/media/Data/work")