def create_ffx_workflow(name="mni_ffx", space="mni", contrasts=None, exp_info=None):
    """Build a workflow that fits the across-run fixed effects model.

    Parameters
    ----------
    name : string
        Workflow object name.
    space : string
        Name of the common space of the inputs (passed to the report node).
    contrasts : list
        Contrast specifications handed to the fixed effects model.
    exp_info : dict
        Experiment parameters; falls back to the lyman defaults.

    Returns
    -------
    workflow, input node, output node

    """
    contrasts = [] if contrasts is None else contrasts
    exp_info = (lyman.default_experiment_parameters()
                if exp_info is None else exp_info)

    # External inputs to the workflow
    in_fields = ["copes", "varcopes", "masks", "means", "dofs",
                 "ss_files", "anatomy", "reg_file", "timeseries"]
    inputnode = Node(IdentityInterface(in_fields), name="inputnode")

    # Fit the fixed effects model for each contrast
    ffxmodel = Node(FFXModel(contrasts=contrasts), "ffxmodel")

    # Summarize the model fit (R^2 maps, tSNR, mean functional)
    ffxsummary = Node(FFXSummary(), "ffxsummary")

    # Generate static report images of the results
    report = Node(FFXReport(space=space), "report")

    # Persist the experiment parameters alongside the results
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    # Final products exposed by the workflow
    out_fields = ["flame_results", "r2_files", "tsnr_file", "mean_file",
                  "summary_report", "json_file", "zstat_report"]
    outputnode = Node(IdentityInterface(out_fields), "outputs")

    ffx = Workflow(name=name)

    # Route the external inputs into each processing node
    ffx.connect([
        (inputnode, ffxmodel,
         [("copes", "copes"),
          ("varcopes", "varcopes"),
          ("dofs", "dofs"),
          ("masks", "masks"),
          ("reg_file", "reg_file")]),
        (inputnode, ffxsummary,
         [("ss_files", "ss_files"),
          ("means", "means"),
          ("timeseries", "timeseries")]),
        (inputnode, report,
         [("anatomy", "anatomy"),
          ("masks", "masks")]),
        (inputnode, saveparams, [("timeseries", "in_file")]),
    ])

    # Feed the intermediate results into the report node
    ffx.connect([
        (ffxmodel, report, [("zstat_files", "zstat_files")]),
        (ffxsummary, report,
         [("r2_files", "r2_files"),
          ("tsnr_file", "tsnr_file")]),
    ])

    # Collect everything on the output node
    ffx.connect([
        (ffxmodel, outputnode, [("flame_results", "flame_results")]),
        (ffxsummary, outputnode,
         [("r2_files", "r2_files"),
          ("tsnr_file", "tsnr_file"),
          ("mean_file", "mean_file")]),
        (report, outputnode,
         [("summary_files", "summary_report"),
          ("zstat_files", "zstat_report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
    ])

    return ffx, inputnode, outputnode
def create_preprocessing_workflow(name="preproc", exp_info=None):
    """Return a Nipype workflow for fMRI preprocessing.

    This mostly follows the preprocessing in FSL, although some
    of the processing has been moved into pure Python.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information

    Returns
    -------
    workflow, input node, output node

    """
    preproc = Workflow(name)

    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = ["timeseries", "subject_id"]
    if exp_info["whole_brain_template"]:
        in_fields.append("whole_brain")
    if exp_info["fieldmap_template"]:
        in_fields.append("fieldmap")

    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Remove equilibrium frames and convert to float
    prepare = MapNode(PrepTimeseries(), "in_file", "prep_timeseries")
    prepare.inputs.frames_to_toss = exp_info["frames_to_toss"]

    # Unwarp using fieldmap images (only built when a fieldmap is configured)
    if exp_info["fieldmap_template"]:
        unwarp = create_unwarp_workflow(fieldmap_pe=exp_info["fieldmap_pe"])

    # Spatial realignment
    realign = create_realignment_workflow()

    # Temporal interpolation (only built when slice-time correction is on)
    if exp_info["temporal_interp"]:
        slicetime = create_slicetime_workflow(
            TR=exp_info["TR"],
            slice_order=exp_info["slice_order"],
            interleaved=exp_info["interleaved"],
        )

    # Estimate a registration from functional to anatomical space
    coregister = create_bbregister_workflow(
        partial_brain=bool(exp_info["whole_brain_template"]),
        init_with=exp_info["coreg_init"])

    # Skullstrip the brain using the Freesurfer segmentation
    skullstrip = create_skullstrip_workflow()

    # Smooth intelligently in the volume
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = exp_info["smooth_fwhm"]

    # Scale and filter the timeseries (one smoothed, one unsmoothed stream)
    filter_smooth = create_filtering_workflow("filter_smooth",
                                              exp_info["hpf_cutoff"],
                                              exp_info["TR"],
                                              "smoothed_timeseries")

    filter_rough = create_filtering_workflow("filter_rough",
                                             exp_info["hpf_cutoff"],
                                             exp_info["TR"],
                                             "unsmoothed_timeseries")

    # Automatically detect motion and intensity outliers
    artifacts = MapNode(ArtifactDetection(),
                        ["timeseries", "mask_file", "motion_file"],
                        "artifacts")
    artifacts.inputs.intensity_thresh = exp_info["intensity_threshold"]
    artifacts.inputs.motion_thresh = exp_info["motion_threshold"]
    artifacts.inputs.spike_thresh = exp_info["spike_threshold"]

    # Extract nuisance variables from anatomical sources
    confounds = create_confound_extraction_workflow("confounds",
                                                    exp_info["wm_components"])

    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info),
                         "in_file", "saveparams")

    preproc.connect([
        (inputnode, prepare,
            [("timeseries", "in_file")]),
        (realign, artifacts,
            [("outputs.motion_file", "motion_file")]),
        (realign, coregister,
            [("outputs.timeseries", "inputs.timeseries")]),
        (inputnode, coregister,
            [("subject_id", "inputs.subject_id")]),
        (inputnode, skullstrip,
            [("subject_id", "inputs.subject_id")]),
        (coregister, skullstrip,
            [("outputs.tkreg_mat", "inputs.reg_file")]),
        (skullstrip, artifacts,
            [("outputs.mask_file", "mask_file")]),
        (skullstrip, susan,
            [("outputs.mask_file", "inputnode.mask_file"),
             ("outputs.timeseries", "inputnode.in_files")]),
        (susan, filter_smooth,
            [("outputnode.smoothed_files", "inputs.timeseries")]),
        (skullstrip, filter_smooth,
            [("outputs.mask_file", "inputs.mask_file")]),
        (skullstrip, filter_rough,
            [("outputs.timeseries", "inputs.timeseries")]),
        (skullstrip, filter_rough,
            [("outputs.mask_file", "inputs.mask_file")]),
        (filter_rough, artifacts,
            [("outputs.timeseries", "timeseries")]),
        (filter_rough, confounds,
            [("outputs.timeseries", "inputs.timeseries")]),
        (inputnode, confounds,
            [("subject_id", "inputs.subject_id")]),
        (skullstrip, confounds,
            [("outputs.mask_file", "inputs.brain_mask")]),
        (coregister, confounds,
            [("outputs.tkreg_mat", "inputs.reg_file")]),
        (inputnode, saveparams,
            [("timeseries", "in_file")]),
    ])

    # Optionally add a connection for unwarping
    if bool(exp_info["fieldmap_template"]):
        preproc.connect([
            (inputnode, unwarp,
                [("fieldmap", "inputs.fieldmap")]),
            (prepare, unwarp,
                [("out_file", "inputs.timeseries")]),
            (unwarp, realign,
                [("outputs.timeseries", "inputs.timeseries")]),
        ])
    else:
        preproc.connect([
            (prepare, realign,
                [("out_file", "inputs.timeseries")]),
        ])

    # Optionally add a connection for slice time correction
    if exp_info["temporal_interp"]:
        preproc.connect([
            (realign, slicetime,
                [("outputs.timeseries", "inputs.timeseries")]),
            (slicetime, skullstrip,
                [("outputs.timeseries", "inputs.timeseries")]),
        ])
    else:
        preproc.connect([
            (realign, skullstrip,
                [("outputs.timeseries", "inputs.timeseries")]),
        ])

    # Optionally connect the whole brain template
    # BUGFIX: the inputnode field is named "whole_brain" (see in_fields
    # above); the previous code read a nonexistent "whole_brain_template"
    # field, which fails when the connection is made.
    if bool(exp_info["whole_brain_template"]):
        preproc.connect([
            (inputnode, coregister,
                [("whole_brain", "inputs.whole_brain_template")]),
        ])

    # Define the outputs of the top-level workflow
    output_fields = ["smoothed_timeseries",
                     "unsmoothed_timeseries",
                     "example_func",
                     "mean_func",
                     "functional_mask",
                     "realign_report",
                     "mask_report",
                     "artifact_report",
                     "confound_file",
                     "flirt_affine",
                     "tkreg_affine",
                     "coreg_report",
                     "json_file"]

    if bool(exp_info["fieldmap_template"]):
        output_fields.append("unwarp_report")

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    preproc.connect([
        (realign, outputnode,
            [("outputs.example_func", "example_func"),
             ("outputs.report", "realign_report")]),
        (skullstrip, outputnode,
            [("outputs.mask_file", "functional_mask"),
             ("outputs.report", "mask_report")]),
        (artifacts, outputnode,
            [("out_files", "artifact_report")]),
        (coregister, outputnode,
            [("outputs.tkreg_mat", "tkreg_affine"),
             ("outputs.flirt_mat", "flirt_affine"),
             ("outputs.report", "coreg_report")]),
        (filter_smooth, outputnode,
            [("outputs.timeseries", "smoothed_timeseries")]),
        (filter_rough, outputnode,
            [("outputs.timeseries", "unsmoothed_timeseries"),
             ("outputs.mean_file", "mean_func")]),
        (confounds, outputnode,
            [("outputs.confound_file", "confound_file")]),
        (saveparams, outputnode,
            [("json_file", "json_file")]),
    ])

    if bool(exp_info["fieldmap_template"]):
        preproc.connect([
            (unwarp, outputnode,
                [("outputs.report", "unwarp_report")]),
        ])

    return preproc, inputnode, outputnode
def create_volume_mixedfx_workflow(name="volume_group", subject_list=None,
                                   regressors=None, contrasts=None,
                                   exp_info=None):
    """Build a workflow for the volume-based mixed effects group analysis.

    Merges fixed effects images across subjects, fits a FLAME mixed
    effects model, applies GRF cluster correction, projects results to
    the surface, segments the thresholded map, and generates reports.

    Parameters
    ----------
    name : string
        Workflow object name.
    subject_list : list
        Subject identifiers, passed to the report node.
    regressors : dict
        Design regressors; defaults to an (empty) group mean column.
    contrasts : list
        FSL-style contrast specifications; defaults to a group mean t-test.
    exp_info : dict
        Experiment parameters; falls back to the lyman defaults.

    Returns
    -------
    workflow, input node, output node

    """
    # Handle default arguments
    if subject_list is None:
        subject_list = []
    if regressors is None:
        regressors = dict(group_mean=[])
    if contrasts is None:
        contrasts = [["group_mean", "T", ["group_mean"], [1]]]
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define workflow inputs
    inputnode = Node(
        IdentityInterface(["l1_contrast", "copes", "varcopes", "dofs"]),
        "inputnode")

    # Merge the fixed effect summary images into one 4D image
    merge = Node(MergeAcrossSubjects(regressors=regressors), "merge")

    # Make a simple design
    design = Node(fsl.MultipleRegressDesign(contrasts=contrasts), "design")

    # Fit the mixed effects model
    flameo = Node(fsl.FLAMEO(run_mode=exp_info["flame_mode"]), "flameo")

    # Estimate the smoothness of the data (inputs to GRF correction)
    smoothest = Node(fsl.SmoothEstimate(), "smoothest")

    # Correct for multiple comparisons with cluster-based thresholding
    cluster = Node(
        fsl.Cluster(threshold=exp_info["cluster_zthresh"],
                    pthreshold=exp_info["grf_pthresh"],
                    out_threshold_file=True,
                    out_index_file=True,
                    out_localmax_txt_file=True,
                    peak_distance=exp_info["peak_distance"],
                    use_mm=True),
        "cluster")

    # Project the mask and thresholded zstat onto the surface
    surfproj = create_surface_projection_workflow(exp_info=exp_info)

    # Segment the z stat image with a watershed algorithm
    watershed = Node(Watershed(), "watershed")

    # Make static report images in the volume
    report = Node(MFXReport(), "report")
    report.inputs.subjects = subject_list

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface(["copes", "varcopes", "mask_file", "flameo_stats",
                           "thresh_zstat", "surf_zstat", "surf_mask",
                           "cluster_image", "seg_file", "peak_file",
                           "lut_file", "report", "json_file"]),
        "outputnode")

    # Define and connect up the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, merge,
            [("copes", "cope_files"),
             ("varcopes", "varcope_files"),
             ("dofs", "dof_files")]),
        (inputnode, saveparams,
            [("copes", "in_file")]),
        (merge, flameo,
            [("cope_file", "cope_file"),
             ("varcope_file", "var_cope_file"),
             ("dof_file", "dof_var_cope_file"),
             ("mask_file", "mask_file")]),
        (merge, design,
            [("regressors", "regressors")]),
        (design, flameo,
            [("design_con", "t_con_file"),
             ("design_grp", "cov_split_file"),
             ("design_mat", "design_file")]),
        (flameo, smoothest,
            [("zstats", "zstat_file")]),
        (merge, smoothest,
            [("mask_file", "mask_file")]),
        (smoothest, cluster,
            [("dlh", "dlh"),
             ("volume", "volume")]),
        (flameo, cluster,
            [("zstats", "in_file")]),
        (cluster, watershed,
            [("threshold_file", "zstat_file"),
             ("localmax_txt_file", "localmax_file")]),
        (merge, report,
            [("mask_file", "mask_file"),
             ("cope_file", "cope_file")]),
        (flameo, report,
            [("zstats", "zstat_file")]),
        (cluster, report,
            [("threshold_file", "zstat_thresh_file"),
             ("localmax_txt_file", "localmax_file")]),
        (watershed, report,
            [("seg_file", "seg_file")]),
        (merge, surfproj,
            [("mask_file", "inputs.mask_file")]),
        (cluster, surfproj,
            [("threshold_file", "inputs.zstat_file")]),
        (merge, outputnode,
            [("cope_file", "copes"),
             ("varcope_file", "varcopes"),
             ("mask_file", "mask_file")]),
        (flameo, outputnode,
            [("stats_dir", "flameo_stats")]),
        (cluster, outputnode,
            [("threshold_file", "thresh_zstat"),
             ("index_file", "cluster_image")]),
        (watershed, outputnode,
            [("seg_file", "seg_file"),
             ("peak_file", "peak_file"),
             ("lut_file", "lut_file")]),
        (surfproj, outputnode,
            [("outputs.surf_zstat", "surf_zstat"),
             ("outputs.surf_mask", "surf_mask")]),
        (report, outputnode,
            [("out_files", "report")]),
        (saveparams, outputnode,
            [("json_file", "json_file")]),
    ])

    return group, inputnode, outputnode
def create_timeseries_model_workflow(name="model", exp_info=None):
    """Build a workflow that fits the first-level timeseries model.

    Sets up the design, estimates the model with FSL's film_gls,
    summarizes the fit, and produces report images.

    Parameters
    ----------
    name : string
        Workflow object name.
    exp_info : dict
        Experiment parameters; falls back to the lyman defaults
        (also useful for generating graph images and for testing).

    Returns
    -------
    workflow, input node, output node

    """
    # Default experiment parameters for generating graph image, testing, etc.
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define constant inputs
    inputs = ["realign_file", "nuisance_file", "artifact_file", "timeseries"]

    # Possibly add the design and regressor files to the inputs
    if exp_info["design_name"] is not None:
        inputs.append("design_file")
    if exp_info["regressor_file"] is not None:
        inputs.append("regressor_file")

    # Define the workflow inputs
    inputnode = Node(IdentityInterface(inputs), "inputs")

    # Set up the experimental design
    modelsetup = MapNode(ModelSetup(exp_info=exp_info),
                         ["timeseries", "realign_file",
                          "nuisance_file", "artifact_file"],
                         "modelsetup")

    # For some nodes, make it possible to request extra memory
    mem_request = {"qsub_args": "-l h_vmem=%dG" % exp_info["memory_request"]}

    # Use film_gls to estimate the timeseries model
    modelestimate = MapNode(fsl.FILMGLS(smooth_autocorr=True,
                                        mask_size=5,
                                        threshold=100),
                            ["design_file", "in_file", "tcon_file"],
                            "modelestimate")
    modelestimate.plugin_args = mem_request

    # Compute summary statistics about the model fit
    modelsummary = MapNode(ModelSummary(),
                           ["design_matrix_pkl", "timeseries", "pe_files"],
                           "modelsummary")
    modelsummary.plugin_args = mem_request

    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info),
                         "in_file", "saveparams")

    # Report on the results of the model
    # Note: see below for a conditional iterfield
    modelreport = MapNode(ModelReport(),
                          ["timeseries", "sigmasquareds_file",
                           "tsnr_file", "r2_files"],
                          "modelreport")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface(["results", "copes", "varcopes", "zstats",
                           "r2_files", "ss_files", "tsnr_file", "report",
                           "design_mat", "contrast_mat", "design_pkl",
                           "design_report", "json_file"]),
        "outputs")

    # Define the workflow and connect the nodes
    model = Workflow(name=name)
    model.connect([
        (inputnode, modelsetup,
            [("realign_file", "realign_file"),
             ("nuisance_file", "nuisance_file"),
             ("artifact_file", "artifact_file"),
             ("timeseries", "timeseries")]),
        (inputnode, modelestimate,
            [("timeseries", "in_file")]),
        (inputnode, saveparams,
            [("timeseries", "in_file")]),
        (modelsetup, modelestimate,
            [("design_matrix_file", "design_file"),
             ("contrast_file", "tcon_file")]),
        (modelsetup, modelsummary,
            [("design_matrix_pkl", "design_matrix_pkl")]),
        (inputnode, modelsummary,
            [("timeseries", "timeseries")]),
        (modelestimate, modelsummary,
            [("param_estimates", "pe_files")]),
        (inputnode, modelreport,
            [("timeseries", "timeseries")]),
        (modelestimate, modelreport,
            [("sigmasquareds", "sigmasquareds_file")]),
        (modelsummary, modelreport,
            [("r2_files", "r2_files"),
             ("tsnr_file", "tsnr_file")]),
        (modelsetup, outputnode,
            [("design_matrix_file", "design_mat"),
             ("contrast_file", "contrast_mat"),
             ("design_matrix_pkl", "design_pkl"),
             ("report", "design_report")]),
        (saveparams, outputnode,
            [("json_file", "json_file")]),
        (modelestimate, outputnode,
            [("results_dir", "results"),
             ("copes", "copes"),
             ("varcopes", "varcopes"),
             ("zstats", "zstats")]),
        (modelsummary, outputnode,
            [("r2_files", "r2_files"),
             ("ss_files", "ss_files"),
             ("tsnr_file", "tsnr_file")]),
        (modelreport, outputnode,
            [("out_files", "report")]),
    ])

    # Connect the optional design/regressor inputs only when configured
    if exp_info["design_name"] is not None:
        model.connect(inputnode, "design_file", modelsetup, "design_file")
    if exp_info["regressor_file"] is not None:
        model.connect(inputnode, "regressor_file",
                      modelsetup, "regressor_file")
    if exp_info["contrasts"]:
        # Z-stat images exist only when contrasts are defined, so the
        # report connection and its iterfield are added conditionally
        model.connect(modelestimate, "zstats",
                      modelreport, "zstat_files")
        modelreport.iterfield.append("zstat_files")

    return model, inputnode, outputnode