Example #1
def create_ffx_workflow(name="mni_ffx",
                        space="mni",
                        contrasts=None,
                        exp_info=None):
    """Return a workflow object to execute a fixed-effects mode."""
    if contrasts is None:
        contrasts = []
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface([
        "copes", "varcopes", "masks", "means", "dofs", "ss_files", "anatomy",
        "reg_file", "timeseries"
    ]),
                     name="inputnode")

    # Fit the fixedfx model for each contrast
    ffxmodel = Node(FFXModel(contrasts=contrasts), "ffxmodel")

    # Calculate the fixed effects Rsquared maps
    ffxsummary = Node(FFXSummary(), "ffxsummary")

    # Plot the fixedfx results
    report = Node(FFXReport(space=space), "report")

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    outputnode = Node(
        IdentityInterface([
            "flame_results", "r2_files", "tsnr_file", "mean_file",
            "summary_report", "json_file", "zstat_report"
        ]), "outputs")

    ffx = Workflow(name=name)
    ffx.connect([
        (inputnode, ffxmodel, [("copes", "copes"), ("varcopes", "varcopes"),
                               ("dofs", "dofs"), ("masks", "masks"),
                               ("reg_file", "reg_file")]),
        (inputnode, ffxsummary, [("ss_files", "ss_files"), ("means", "means"),
                                 ("timeseries", "timeseries")]),
        (inputnode, report, [("anatomy", "anatomy"), ("masks", "masks")]),
        (inputnode, saveparams, [("timeseries", "in_file")]),
        (ffxmodel, report, [("zstat_files", "zstat_files")]),
        (ffxsummary, report, [("r2_files", "r2_files"),
                              ("tsnr_file", "tsnr_file")]),
        (ffxmodel, outputnode, [("flame_results", "flame_results")]),
        (ffxsummary, outputnode, [("r2_files", "r2_files"),
                                  ("tsnr_file", "tsnr_file"),
                                  ("mean_file", "mean_file")]),
        (report, outputnode, [("summary_files", "summary_report"),
                              ("zstat_files", "zstat_report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
    ])

    return ffx, inputnode, outputnode
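
A minimal usage sketch for the workflow above (the input file paths are hypothetical; base_dir and Workflow.run() are standard Nipype, and every inputnode field used by the connections must be set before running):

    ffx, inputnode, outputnode = create_ffx_workflow(space="mni")
    # Hypothetical input files; masks, means, dofs, etc. are wired the same way
    inputnode.inputs.copes = ["run1_cope1.nii.gz", "run2_cope1.nii.gz"]
    inputnode.inputs.varcopes = ["run1_varcope1.nii.gz", "run2_varcope1.nii.gz"]
    ffx.base_dir = "/tmp/ffx_work"  # where Nipype caches intermediate results
    ffx.run()
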
Example #2
def create_surface_projection_workflow(name="surfproj", exp_info=None):
    """Project the group mask and thresholded zstat file onto the surface."""
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["zstat_file", "mask_file"]), "inputs")

    # Sample the zstat image to the surface
    hemisource = Node(IdentityInterface(["mni_hemi"]), "hemisource")
    hemisource.iterables = ("mni_hemi", ["lh", "rh"])

    zstatproj = Node(freesurfer.SampleToSurface(
        sampling_method=exp_info["sampling_method"],
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        smooth_surf=exp_info["surf_smooth"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "zstatproj")

    # Sample the mask to the surface
    maskproj = Node(freesurfer.SampleToSurface(
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        subject_id="fsaverage",
        mni152reg=True,
        target_subject="fsaverage"),
        "maskproj")
    if exp_info["sampling_method"] == "point":
        maskproj.inputs.sampling_method = "point"
    else:
        maskproj.inputs.sampling_method = "max"

    outputnode = Node(IdentityInterface(["surf_zstat",
                                         "surf_mask"]), "outputs")

    # Define and connect the workflow
    proj = Workflow(name)
    proj.connect([
        (inputnode, zstatproj,
            [("zstat_file", "source_file")]),
        (inputnode, maskproj,
            [("mask_file", "source_file")]),
        (hemisource, zstatproj,
            [("mni_hemi", "hemi")]),
        (hemisource, maskproj,
            [("mni_hemi", "hemi")]),
        (zstatproj, outputnode,
            [("out_file", "surf_zstat")]),
        (maskproj, outputnode,
            [("out_file", "surf_mask")]),
        ])

    return proj
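
Note that this builder returns a bare Workflow rather than a (workflow, inputnode, outputnode) tuple, so callers address its I/O through the dotted "inputs." / "outputs." field names, as the mixed-effects workflows below do. A sketch with hypothetical neighbor nodes inside a parent workflow:

    surfproj = create_surface_projection_workflow(exp_info=exp_info)
    group = Workflow(name="group")
    group.connect([
        (cluster, surfproj, [("threshold_file", "inputs.zstat_file")]),  # hypothetical source node
        (surfproj, sink, [("outputs.surf_zstat", "surf_zstat")]),        # hypothetical sink node
    ])
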
Example #3
def create_surface_projection_workflow(name="surfproj", exp_info=None):
    """Project the group mask and thresholded zstat file onto the surface."""
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["zstat_file", "mask_file"]), "inputs")

    # Sample the zstat image to the surface
    hemisource = Node(IdentityInterface(["mni_hemi"]), "hemisource")
    hemisource.iterables = ("mni_hemi", ["lh", "rh"])

    zstatproj = Node(
        freesurfer.SampleToSurface(sampling_method=exp_info["sampling_method"],
                                   sampling_range=exp_info["sampling_range"],
                                   sampling_units=exp_info["sampling_units"],
                                   smooth_surf=exp_info["surf_smooth"],
                                   subject_id="fsaverage",
                                   mni152reg=True,
                                   target_subject="fsaverage"), "zstatproj")

    # Sample the mask to the surface
    maskproj = Node(
        freesurfer.SampleToSurface(sampling_range=exp_info["sampling_range"],
                                   sampling_units=exp_info["sampling_units"],
                                   subject_id="fsaverage",
                                   mni152reg=True,
                                   target_subject="fsaverage"), "maskproj")
    if exp_info["sampling_method"] == "point":
        maskproj.inputs.sampling_method = "point"
    else:
        maskproj.inputs.sampling_method = "max"

    outputnode = Node(IdentityInterface(["surf_zstat", "surf_mask"]),
                      "outputs")

    # Define and connect the workflow
    proj = Workflow(name)
    proj.connect([
        (inputnode, zstatproj, [("zstat_file", "source_file")]),
        (inputnode, maskproj, [("mask_file", "source_file")]),
        (hemisource, zstatproj, [("mni_hemi", "hemi")]),
        (hemisource, maskproj, [("mni_hemi", "hemi")]),
        (zstatproj, outputnode, [("out_file", "surf_zstat")]),
        (maskproj, outputnode, [("out_file", "surf_mask")]),
    ])

    return proj
Example #4
def create_surface_ols_workflow(name="surface_group",
                                subject_list=None,
                                exp_info=None):
    """Workflow to project ffx copes onto surface and run ols."""
    if subject_list is None:
        subject_list = []
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(
        IdentityInterface(["l1_contrast", "copes", "reg_file", "subject_id"]),
        "inputnode")

    hemisource = Node(IdentityInterface(["hemi"]), "hemisource")
    hemisource.iterables = ("hemi", ["lh", "rh"])

    # Sample the volume-encoded native data onto the fsaverage surface
    # manifold with projection + spherical transform
    surfsample = MapNode(
        fs.SampleToSurface(sampling_method=exp_info["sampling_method"],
                           sampling_range=exp_info["sampling_range"],
                           sampling_units=exp_info["sampling_units"],
                           smooth_surf=exp_info["surf_smooth"],
                           target_subject="fsaverage"),
        ["subject_id", "reg_file", "source_file"], "surfsample")

    # Remove subjects with completely empty images
    removeempty = Node(RemoveEmpty(), "removeempty")

    # Concatenate the subject files into a 4D image
    mergecope = Node(fs.Concatenate(), "mergecope")

    # Run the one-sample OLS model
    glmfit = Node(
        fs.GLMFit(one_sample=True,
                  surf=True,
                  cortex=True,
                  glm_dir="_glm_results",
                  subject_id="fsaverage"), "glmfit")

    # Use the cached Monte-Carlo simulations for correction
    cluster = Node(
        Function(["y_file", "glm_dir", "sign", "cluster_zthresh", "p_thresh"],
                 ["glm_dir", "thresholded_file"], glm_corrections, imports),
        "cluster")
    cluster.inputs.cluster_zthresh = exp_info["cluster_zthresh"]
    cluster.inputs.p_thresh = exp_info["grf_pthresh"]
    cluster.inputs.sign = exp_info["surf_corr_sign"]

    # Return the outputs
    outputnode = Node(IdentityInterface(["glm_dir", "sig_file"]), "outputnode")

    # Define and connect the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, surfsample, [("copes", "source_file"),
                                 ("reg_file", "reg_file"),
                                 ("subject_id", "subject_id")]),
        (hemisource, surfsample, [("hemi", "hemi")]),
        (surfsample, removeempty, [("out_file", "in_files")]),
        (removeempty, mergecope, [("out_files", "in_files")]),
        (mergecope, glmfit, [("concatenated_file", "in_file")]),
        (hemisource, glmfit, [("hemi", "hemi")]),
        (mergecope, cluster, [("concatenated_file", "y_file")]),
        (glmfit, cluster, [("glm_dir", "glm_dir")]),
        (glmfit, outputnode, [("glm_dir", "glm_dir")]),
        (cluster, outputnode, [("thresholded_file", "sig_file")]),
    ])

    return group, inputnode, outputnode
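
Because hemisource defines iterables over ("hemi", ["lh", "rh"]), Nipype expands the downstream nodes into separate lh and rh branches at execution time. A usage sketch (file names are hypothetical; the plugin arguments are standard Nipype and let both hemisphere branches run concurrently):

    group, inputnode, outputnode = create_surface_ols_workflow()
    inputnode.inputs.copes = ["s1_cope.nii.gz", "s2_cope.nii.gz"]  # one file per subject
    inputnode.inputs.reg_file = ["s1_reg.dat", "s2_reg.dat"]
    inputnode.inputs.subject_id = ["s1", "s2"]
    group.base_dir = "/tmp/group_work"
    group.run(plugin="MultiProc", plugin_args={"n_procs": 2})
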
Example #5
def create_volume_mixedfx_workflow(name="volume_group",
                                   subject_list=None,
                                   regressors=None,
                                   contrasts=None,
                                   exp_info=None):
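    """Return a workflow to fit a volume-based mixed-effects group model."""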

    # Handle default arguments
    if subject_list is None:
        subject_list = []
    if regressors is None:
        regressors = dict(group_mean=[])
    if contrasts is None:
        contrasts = [["group_mean", "T", ["group_mean"], [1]]]
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define workflow inputs
    inputnode = Node(IdentityInterface(["l1_contrast",
                                        "copes",
                                        "varcopes",
                                        "dofs"]),
                     "inputnode")

    # Merge the fixed effect summary images into one 4D image
    merge = Node(MergeAcrossSubjects(), "merge")

    # Make a simple design
    design = Node(fsl.MultipleRegressDesign(regressors=regressors,
                                            contrasts=contrasts),
                  "design")

    # Fit the mixed effects model
    flameo = Node(fsl.FLAMEO(run_mode=exp_info["flame_mode"]), "flameo")

    # Estimate the smoothness of the data
    smoothest = Node(fsl.SmoothEstimate(), "smoothest")

    # Correct for multiple comparisons
    cluster = Node(fsl.Cluster(threshold=exp_info["cluster_zthresh"],
                               pthreshold=exp_info["grf_pthresh"],
                               out_threshold_file=True,
                               out_index_file=True,
                               out_localmax_txt_file=True,
                               peak_distance=exp_info["peak_distance"],
                               use_mm=True),
                   "cluster")

    # Project the mask and thresholded zstat onto the surface
    surfproj = create_surface_projection_workflow(exp_info=exp_info)

    # Segment the z stat image with a watershed algorithm
    watershed = Node(Watershed(), "watershed")

    # Make static report images in the volume
    report = Node(MFXReport(), "report")
    report.inputs.subjects = subject_list

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["copes",
                                         "varcopes",
                                         "mask_file",
                                         "flameo_stats",
                                         "thresh_zstat",
                                         "surf_zstat",
                                         "surf_mask",
                                         "cluster_image",
                                         "seg_file",
                                         "peak_file",
                                         "lut_file",
                                         "report",
                                         "json_file"]),
                      "outputnode")

    # Define and connect up the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, merge,
            [("copes", "cope_files"),
             ("varcopes", "varcope_files"),
             ("dofs", "dof_files")]),
        (inputnode, saveparams,
            [("copes", "in_file")]),
        (merge, flameo,
            [("cope_file", "cope_file"),
             ("varcope_file", "var_cope_file"),
             ("dof_file", "dof_var_cope_file"),
             ("mask_file", "mask_file")]),
        (design, flameo,
            [("design_con", "t_con_file"),
             ("design_grp", "cov_split_file"),
             ("design_mat", "design_file")]),
        (flameo, smoothest,
            [("zstats", "zstat_file")]),
        (merge, smoothest,
            [("mask_file", "mask_file")]),
        (smoothest, cluster,
            [("dlh", "dlh"),
             ("volume", "volume")]),
        (flameo, cluster,
            [("zstats", "in_file")]),
        (cluster, watershed,
            [("threshold_file", "zstat_file"),
             ("localmax_txt_file", "localmax_file")]),
        (merge, report,
            [("mask_file", "mask_file"),
             ("cope_file", "cope_file")]),
        (flameo, report,
            [("zstats", "zstat_file")]),
        (cluster, report,
            [("threshold_file", "zstat_thresh_file"),
             ("localmax_txt_file", "localmax_file")]),
        (watershed, report,
            [("seg_file", "seg_file")]),
        (merge, surfproj,
            [("mask_file", "inputs.mask_file")]),
        (cluster, surfproj,
            [("threshold_file", "inputs.zstat_file")]),
        (merge, outputnode,
            [("cope_file", "copes"),
             ("varcope_file", "varcopes"),
             ("mask_file", "mask_file")]),
        (flameo, outputnode,
            [("stats_dir", "flameo_stats")]),
        (cluster, outputnode,
            [("threshold_file", "thresh_zstat"),
             ("index_file", "cluster_image")]),
        (watershed, outputnode,
            [("seg_file", "seg_file"),
             ("peak_file", "peak_file"),
             ("lut_file", "lut_file")]),
        (surfproj, outputnode,
            [("outputs.surf_zstat", "surf_zstat"),
             ("outputs.surf_mask", "surf_mask")]),
        (report, outputnode,
            [("out_files", "report")]),
        (saveparams, outputnode,
            [("json_file", "json_file")]),
        ])

    return group, inputnode, outputnode
Example #6
def create_preprocessing_workflow(name="preproc", exp_info=None):
    """Return a Nipype workflow for fMRI preprocessing.

    This mostly follows the preprocessing in FSL, although some
    of the processing has been moved into pure Python.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information

    """
    preproc = Workflow(name)

    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = ["timeseries", "subject_id"]

    if exp_info["whole_brain_template"]:
        in_fields.append("whole_brain")

    if exp_info["fieldmap_template"]:
        in_fields.append("fieldmap")

    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Remove equilibrium frames and convert to float
    prepare = MapNode(PrepTimeseries(), "in_file", "prep_timeseries")
    prepare.inputs.frames_to_toss = exp_info["frames_to_toss"]

    # Unwarp using fieldmap images
    if exp_info["fieldmap_template"]:
        unwarp = create_unwarp_workflow(fieldmap_pe=exp_info["fieldmap_pe"])

    # Spatial realignment
    realign = create_realignment_workflow()

    # Temporal interpolation
    if exp_info["temporal_interp"]:
        slicetime = create_slicetime_workflow(
            TR=exp_info["TR"],
            slice_order=exp_info["slice_order"],
            interleaved=exp_info["interleaved"],
        )

    # Estimate a registration from functional to anatomical space
    coregister = create_bbregister_workflow(
        partial_brain=bool(exp_info["whole_brain_template"]),
        init_with=exp_info["coreg_init"])

    # Skullstrip the brain using the Freesurfer segmentation
    skullstrip = create_skullstrip_workflow()

    # Smooth intelligently in the volume
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = exp_info["smooth_fwhm"]

    # Scale and filter the timeseries
    filter_smooth = create_filtering_workflow("filter_smooth",
                                              exp_info["hpf_cutoff"],
                                              exp_info["TR"],
                                              "smoothed_timeseries")

    filter_rough = create_filtering_workflow("filter_rough",
                                             exp_info["hpf_cutoff"],
                                             exp_info["TR"],
                                             "unsmoothed_timeseries")

    # Automatically detect motion and intensity outliers
    artifacts = MapNode(ArtifactDetection(),
                        ["timeseries", "mask_file", "motion_file"],
                        "artifacts")
    artifacts.inputs.intensity_thresh = exp_info["intensity_threshold"]
    artifacts.inputs.motion_thresh = exp_info["motion_threshold"]
    artifacts.inputs.spike_thresh = exp_info["spike_threshold"]

    # Extract nuisance variables from anatomical sources
    confounds = create_confound_extraction_workflow("confounds",
                                                    exp_info["wm_components"])

    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info), "in_file",
                         "saveparams")

    preproc.connect([
        (inputnode, prepare, [("timeseries", "in_file")]),
        (realign, artifacts, [("outputs.motion_file", "motion_file")]),
        (realign, coregister, [("outputs.timeseries", "inputs.timeseries")]),
        (inputnode, coregister, [("subject_id", "inputs.subject_id")]),
        (inputnode, skullstrip, [("subject_id", "inputs.subject_id")]),
        (coregister, skullstrip, [("outputs.tkreg_mat", "inputs.reg_file")]),
        (skullstrip, artifacts, [("outputs.mask_file", "mask_file")]),
        (skullstrip, susan, [("outputs.mask_file", "inputnode.mask_file"),
                             ("outputs.timeseries", "inputnode.in_files")]),
        (susan, filter_smooth, [("outputnode.smoothed_files",
                                 "inputs.timeseries")]),
        (skullstrip, filter_smooth, [("outputs.mask_file", "inputs.mask_file")
                                     ]),
        (skullstrip, filter_rough, [("outputs.timeseries", "inputs.timeseries")
                                    ]),
        (skullstrip, filter_rough, [("outputs.mask_file", "inputs.mask_file")
                                    ]),
        (filter_rough, artifacts, [("outputs.timeseries", "timeseries")]),
        (filter_rough, confounds,
         [("outputs.timeseries", "inputs.timeseries")]),
        (inputnode, confounds, [("subject_id", "inputs.subject_id")]),
        (skullstrip, confounds, [("outputs.mask_file", "inputs.brain_mask")]),
        (coregister, confounds, [("outputs.tkreg_mat", "inputs.reg_file")]),
        (inputnode, saveparams, [("timeseries", "in_file")]),
    ])

    # Optionally add a connection for unwarping
    if bool(exp_info["fieldmap_template"]):
        preproc.connect([
            (inputnode, unwarp, [("fieldmap", "inputs.fieldmap")]),
            (prepare, unwarp, [("out_file", "inputs.timeseries")]),
            (unwarp, realign, [("outputs.timeseries", "inputs.timeseries")])
        ])
    else:
        preproc.connect([
            (prepare, realign, [("out_file", "inputs.timeseries")]),
        ])

    # Optionally add a connection for slice time correction
    if exp_info["temporal_interp"]:
        preproc.connect([
            (realign, slicetime,
             [("outputs.timeseries", "inputs.timeseries")]),
            (slicetime, skullstrip, [("outputs.timeseries",
                                      "inputs.timeseries")]),
        ])
    else:
        preproc.connect([
            (realign, skullstrip,
             [("outputs.timeseries", "inputs.timeseries")]),
        ])

    # Optionally connect the whole brain template
    if bool(exp_info["whole_brain_template"]):
        preproc.connect([
            (inputnode, coregister,
             [("whole_brain", "inputs.whole_brain_template")]),
        ])

    # Define the outputs of the top-level workflow
    output_fields = [
        "smoothed_timeseries", "unsmoothed_timeseries", "example_func",
        "mean_func", "functional_mask", "realign_report", "mask_report",
        "artifact_report", "confound_file", "flirt_affine", "tkreg_affine",
        "coreg_report", "json_file"
    ]

    if bool(exp_info["fieldmap_template"]):
        output_fields.append("unwarp_report")

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    preproc.connect([
        (realign, outputnode, [("outputs.example_func", "example_func"),
                               ("outputs.report", "realign_report")]),
        (skullstrip, outputnode, [("outputs.mask_file", "functional_mask"),
                                  ("outputs.report", "mask_report")]),
        (artifacts, outputnode, [("out_files", "artifact_report")]),
        (coregister, outputnode, [("outputs.tkreg_mat", "tkreg_affine"),
                                  ("outputs.flirt_mat", "flirt_affine"),
                                  ("outputs.report", "coreg_report")]),
        (filter_smooth, outputnode, [("outputs.timeseries",
                                      "smoothed_timeseries")]),
        (filter_rough, outputnode, [("outputs.timeseries",
                                     "unsmoothed_timeseries"),
                                    ("outputs.mean_file", "mean_func")]),
        (confounds, outputnode, [("outputs.confound_file", "confound_file")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
    ])

    if bool(exp_info["fieldmap_template"]):
        preproc.connect([
            (unwarp, outputnode, [("outputs.report", "unwarp_report")]),
        ])

    return preproc, inputnode, outputnode
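
A quick way to inspect the conditional graph this builder assembles (the fieldmap and slice-timing branches only appear when exp_info enables them) is Nipype's write_graph; a sketch with an arbitrary output location:

    preproc, inputnode, outputnode = create_preprocessing_workflow()
    preproc.base_dir = "/tmp/preproc_work"
    preproc.write_graph("preproc_graph.dot", graph2use="orig")
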
Example #7
def create_volume_mixedfx_workflow(name="volume_group",
                                   subject_list=None,
                                   regressors=None,
                                   contrasts=None,
                                   exp_info=None):
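    """Return a workflow to fit a volume-based mixed-effects group model."""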

    # Handle default arguments
    if subject_list is None:
        subject_list = []
    if regressors is None:
        regressors = dict(group_mean=[])
    if contrasts is None:
        contrasts = [["group_mean", "T", ["group_mean"], [1]]]
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define workflow inputs
    inputnode = Node(
        IdentityInterface(["l1_contrast", "copes", "varcopes", "dofs"]),
        "inputnode")

    # Merge the fixed effect summary images into one 4D image
    merge = Node(MergeAcrossSubjects(regressors=regressors), "merge")

    # Make a simple design
    design = Node(fsl.MultipleRegressDesign(contrasts=contrasts), "design")

    # Fit the mixed effects model
    flameo = Node(fsl.FLAMEO(run_mode=exp_info["flame_mode"]), "flameo")

    # Estimate the smoothness of the data
    smoothest = Node(fsl.SmoothEstimate(), "smoothest")

    # Correct for multiple comparisons
    cluster = Node(
        fsl.Cluster(threshold=exp_info["cluster_zthresh"],
                    pthreshold=exp_info["grf_pthresh"],
                    out_threshold_file=True,
                    out_index_file=True,
                    out_localmax_txt_file=True,
                    peak_distance=exp_info["peak_distance"],
                    use_mm=True), "cluster")

    # Project the mask and thresholded zstat onto the surface
    surfproj = create_surface_projection_workflow(exp_info=exp_info)

    # Segment the z stat image with a watershed algorithm
    watershed = Node(Watershed(), "watershed")

    # Make static report images in the volume
    report = Node(MFXReport(), "report")
    report.inputs.subjects = subject_list

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface([
            "copes", "varcopes", "mask_file", "flameo_stats", "thresh_zstat",
            "surf_zstat", "surf_mask", "cluster_image", "seg_file",
            "peak_file", "lut_file", "report", "json_file"
        ]), "outputnode")

    # Define and connect up the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, merge, [("copes", "cope_files"),
                            ("varcopes", "varcope_files"),
                            ("dofs", "dof_files")]),
        (inputnode, saveparams, [("copes", "in_file")]),
        (merge, flameo, [("cope_file", "cope_file"),
                         ("varcope_file", "var_cope_file"),
                         ("dof_file", "dof_var_cope_file"),
                         ("mask_file", "mask_file")]),
        (merge, design, [("regressors", "regressors")]),
        (design, flameo, [("design_con", "t_con_file"),
                          ("design_grp", "cov_split_file"),
                          ("design_mat", "design_file")]),
        (flameo, smoothest, [("zstats", "zstat_file")]),
        (merge, smoothest, [("mask_file", "mask_file")]),
        (smoothest, cluster, [("dlh", "dlh"), ("volume", "volume")]),
        (flameo, cluster, [("zstats", "in_file")]),
        (cluster, watershed, [("threshold_file", "zstat_file"),
                              ("localmax_txt_file", "localmax_file")]),
        (merge, report, [("mask_file", "mask_file"),
                         ("cope_file", "cope_file")]),
        (flameo, report, [("zstats", "zstat_file")]),
        (cluster, report, [("threshold_file", "zstat_thresh_file"),
                           ("localmax_txt_file", "localmax_file")]),
        (watershed, report, [("seg_file", "seg_file")]),
        (merge, surfproj, [("mask_file", "inputs.mask_file")]),
        (cluster, surfproj, [("threshold_file", "inputs.zstat_file")]),
        (merge, outputnode, [("cope_file", "copes"),
                             ("varcope_file", "varcopes"),
                             ("mask_file", "mask_file")]),
        (flameo, outputnode, [("stats_dir", "flameo_stats")]),
        (cluster, outputnode, [("threshold_file", "thresh_zstat"),
                               ("index_file", "cluster_image")]),
        (watershed, outputnode, [("seg_file", "seg_file"),
                                 ("peak_file", "peak_file"),
                                 ("lut_file", "lut_file")]),
        (surfproj, outputnode, [("outputs.surf_zstat", "surf_zstat"),
                                ("outputs.surf_mask", "surf_mask")]),
        (report, outputnode, [("out_files", "report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
    ])

    return group, inputnode, outputnode
Example #8
def create_preprocessing_workflow(name="preproc", exp_info=None):
    """Return a Nipype workflow for fMRI preprocessing.

    This mostly follows the preprocessing in FSL, although some
    of the processing has been moved into pure Python.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information

    """
    preproc = Workflow(name)

    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = ["timeseries", "subject_id"]

    if exp_info["whole_brain_template"]:
        in_fields.append("whole_brain_template")

    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Remove equilibrium frames and convert to float
    prepare = MapNode(Function(["in_file", "frames_to_toss"],
                               ["out_file"],
                               prep_timeseries,
                               imports),
                      "in_file",
                      "prep_timeseries")
    prepare.inputs.frames_to_toss = exp_info["frames_to_toss"]

    # Motion and slice time correct
    realign = create_realignment_workflow(
        temporal_interp=exp_info["temporal_interp"],
        TR=exp_info["TR"],
        slice_order=exp_info["slice_order"],
        interleaved=exp_info["interleaved"])

    # Run a conservative skull strip and get a brain mask
    skullstrip = create_skullstrip_workflow()

    # Estimate a registration from functional to anatomical space
    coregister = create_bbregister_workflow(
        partial_brain=bool(exp_info["whole_brain_template"]))

    # Smooth intelligently in the volume
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = exp_info["smooth_fwhm"]

    # Scale and filter the timeseries
    filter_smooth = create_filtering_workflow("filter_smooth",
                                              exp_info["hpf_cutoff"],
                                              exp_info["TR"],
                                              "smoothed_timeseries")

    filter_rough = create_filtering_workflow("filter_rough",
                                              exp_info["hpf_cutoff"],
                                              exp_info["TR"],
                                              "unsmoothed_timeseries")

    # Automatically detect motion and intensity outliers
    artifacts = MapNode(Function(["timeseries",
                                  "mask_file",
                                  "motion_file",
                                  "intensity_thresh",
                                  "motion_thresh"],
                                 ["artifact_report"],
                                 detect_artifacts,
                                 imports),
                        ["timeseries", "mask_file", "motion_file"],
                        "artifacts")
    artifacts.inputs.intensity_thresh = exp_info["intensity_threshold"]
    artifacts.inputs.motion_thresh = exp_info["motion_threshold"]

    # Save the experiment info for this run
    dumpjson = MapNode(Function(["exp_info", "timeseries"], ["json_file"],
                                dump_exp_info, imports),
                       "timeseries", "dumpjson")
    dumpjson.inputs.exp_info = exp_info

    preproc.connect([
        (inputnode, prepare,
            [("timeseries", "in_file")]),
        (prepare, realign,
            [("out_file", "inputs.timeseries")]),
        (realign, skullstrip,
            [("outputs.timeseries", "inputs.timeseries")]),
        (realign, artifacts,
            [("outputs.motion_file", "motion_file")]),
        (skullstrip, artifacts,
            [("outputs.mask_file", "mask_file")]),
        (skullstrip, coregister,
            [("outputs.mean_file", "inputs.source_file")]),
        (inputnode, coregister,
            [("subject_id", "inputs.subject_id")]),
        (skullstrip, susan,
            [("outputs.mask_file", "inputnode.mask_file"),
             ("outputs.timeseries", "inputnode.in_files")]),
        (susan, filter_smooth,
            [("outputnode.smoothed_files", "inputs.timeseries")]),
        (skullstrip, filter_smooth,
            [("outputs.mask_file", "inputs.mask_file")]),
        (skullstrip, filter_rough,
            [("outputs.timeseries", "inputs.timeseries")]),
        (skullstrip, filter_rough,
            [("outputs.mask_file", "inputs.mask_file")]),
        (filter_rough, artifacts,
            [("outputs.timeseries", "timeseries")]),
        (inputnode, dumpjson,
            [("timeseries", "timeseries")]),
        ])

    if bool(exp_info["whole_brain_template"]):
        preproc.connect([
            (inputnode, coregister,
             [("whole_brain_template", "inputs.whole_brain_template")]),
        ])

    # Define the outputs of the top-level workflow
    output_fields = ["smoothed_timeseries",
                     "unsmoothed_timeseries",
                     "example_func",
                     "mean_func",
                     "functional_mask",
                     "realign_report",
                     "mask_report",
                     "artifact_report",
                     "flirt_affine",
                     "tkreg_affine",
                     "coreg_report",
                     "json_file"]

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    preproc.connect([
        (realign, outputnode,
            [("outputs.example_func", "example_func"),
             ("outputs.report", "realign_report")]),
        (skullstrip, outputnode,
            [("outputs.mean_file", "mean_func"),
             ("outputs.mask_file", "functional_mask"),
             ("outputs.report", "mask_report")]),
        (artifacts, outputnode,
            [("artifact_report", "artifact_report")]),
        (coregister, outputnode,
            [("outputs.tkreg_mat", "tkreg_affine"),
             ("outputs.flirt_mat", "flirt_affine"),
             ("outputs.report", "coreg_report")]),
        (filter_smooth, outputnode,
            [("outputs.timeseries", "smoothed_timeseries")]),
        (filter_rough, outputnode,
            [("outputs.timeseries", "unsmoothed_timeseries")]),
        (dumpjson, outputnode,
            [("json_file", "json_file")]),
        ])

    return preproc, inputnode, outputnode
Example #9
def create_surface_ols_workflow(name="surface_group",
                                subject_list=None,
                                exp_info=None):
    """Workflow to project ffx copes onto surface and run ols."""
    if subject_list is None:
        subject_list = []
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["l1_contrast",
                                        "copes",
                                        "reg_file",
                                        "subject_id"]),
                     "inputnode")

    hemisource = Node(IdentityInterface(["hemi"]), "hemisource")
    hemisource.iterables = ("hemi", ["lh", "rh"])

    # Sample the volume-encoded native data onto the fsaverage surface
    # manifold with projection + spherical transform
    surfsample = MapNode(fs.SampleToSurface(
        sampling_method=exp_info["sampling_method"],
        sampling_range=exp_info["sampling_range"],
        sampling_units=exp_info["sampling_units"],
        smooth_surf=exp_info["surf_smooth"],
        target_subject="fsaverage"),
        ["subject_id", "reg_file", "source_file"], "surfsample")

    # Remove subjects with completely empty images
    removeempty = Node(RemoveEmpty(), "removeempty")

    # Concatenate the subject files into a 4D image
    mergecope = Node(fs.Concatenate(), "mergecope")

    # Run the one-sample OLS model
    glmfit = Node(fs.GLMFit(one_sample=True,
                            surf=True,
                            cortex=True,
                            glm_dir="_glm_results",
                            subject_id="fsaverage"),
                  "glmfit")

    # Use the cached Monte-Carlo simulations for correction
    cluster = Node(Function(["y_file",
                             "glm_dir",
                             "sign",
                             "cluster_zthresh",
                             "p_thresh"],
                            ["glm_dir",
                             "thresholded_file"],
                            glm_corrections,
                            imports),
                   "cluster")
    cluster.inputs.cluster_zthresh = exp_info["cluster_zthresh"]
    cluster.inputs.p_thresh = exp_info["grf_pthresh"]
    cluster.inputs.sign = exp_info["surf_corr_sign"]

    # Return the outputs
    outputnode = Node(IdentityInterface(["glm_dir", "sig_file"]), "outputnode")

    # Define and connect the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, surfsample,
            [("copes", "source_file"),
             ("reg_file", "reg_file"),
             ("subject_id", "subject_id")]),
        (hemisource, surfsample,
            [("hemi", "hemi")]),
        (surfsample, removeempty,
            [("out_file", "in_files")]),
        (removeempty, mergecope,
            [("out_files", "in_files")]),
        (mergecope, glmfit,
            [("concatenated_file", "in_file")]),
        (hemisource, glmfit,
            [("hemi", "hemi")]),
        (mergecope, cluster,
            [("concatenated_file", "y_file")]),
        (glmfit, cluster,
            [("glm_dir", "glm_dir")]),
        (glmfit, outputnode,
            [("glm_dir", "glm_dir")]),
        (cluster, outputnode,
            [("thresholded_file", "sig_file")]),
        ])

    return group, inputnode, outputnode
Example #10
def create_ffx_workflow(name="mni_ffx", space="mni",
                        contrasts=None, exp_info=None):
    """Return a workflow object to execute a fixed-effects mode."""
    if contrasts is None:
        contrasts = []
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["copes",
                                        "varcopes",
                                        "masks",
                                        "means",
                                        "dofs",
                                        "ss_files",
                                        "anatomy",
                                        "reg_file",
                                        "timeseries"]),
                     name="inputnode")

    # Fit the fixedfx model for each contrast
    ffxmodel = Node(FFXModel(contrasts=contrasts), "ffxmodel")

    # Calculate the fixed effects Rsquared maps
    ffxsummary = Node(FFXSummary(), "ffxsummary")

    # Plot the fixedfx results
    report = Node(FFXReport(space=space), "report")

    # Save the experiment info
    saveparams = Node(SaveParameters(exp_info=exp_info), "saveparams")

    outputnode = Node(IdentityInterface(["flame_results",
                                         "r2_files",
                                         "tsnr_file",
                                         "summary_report",
                                         "json_file",
                                         "zstat_report"]),
                      "outputs")

    ffx = Workflow(name=name)
    ffx.connect([
        (inputnode, ffxmodel,
            [("copes", "copes"),
             ("varcopes", "varcopes"),
             ("dofs", "dofs"),
             ("masks", "masks"),
             ("reg_file", "reg_file")]),
        (inputnode, ffxsummary,
            [("ss_files", "ss_files"),
             ("means", "means"),
             ("timeseries", "timeseries")]),
        (inputnode, report,
            [("anatomy", "anatomy"),
             ("masks", "masks")]),
        (inputnode, saveparams,
            [("timeseries", "in_file")]),
        (ffxmodel, report,
            [("zstat_files", "zstat_files")]),
        (ffxsummary, report,
            [("r2_files", "r2_files"),
             ("tsnr_file", "tsnr_file")]),
        (ffxmodel, outputnode,
            [("flame_results", "flame_results")]),
        (ffxsummary, outputnode,
            [("r2_files", "r2_files"),
             ("tsnr_file", "tsnr_file")]),
        (report, outputnode,
            [("summary_files", "summary_report"),
             ("zstat_files", "zstat_report")]),
        (saveparams, outputnode,
            [("json_file", "json_file")]),
    ])

    return ffx, inputnode, outputnode
Example #11
def create_timeseries_model_workflow(name="model", exp_info=None):
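    """Return a workflow to fit a timeseries model with FSL's film_gls."""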

    # Default experiment parameters for generating graph image, testing, etc.
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define constant inputs
    inputs = ["realign_file", "nuisance_file", "artifact_file", "timeseries"]

    # Possibly add the design and regressor files to the inputs
    if exp_info["design_name"] is not None:
        inputs.append("design_file")
    if exp_info["regressor_file"] is not None:
        inputs.append("regressor_file")

    # Define the workflow inputs
    inputnode = Node(IdentityInterface(inputs), "inputs")

    # Set up the experimental design
    modelsetup = MapNode(
        ModelSetup(exp_info=exp_info),
        ["timeseries", "realign_file", "nuisance_file", "artifact_file"],
        "modelsetup")

    # For some nodes, make it possible to request extra memory
    mem_request = {"qsub_args": "-l h_vmem=%dG" % exp_info["memory_request"]}

    # Use film_gls to estimate the timeseries model
    modelestimate = MapNode(
        fsl.FILMGLS(smooth_autocorr=True, mask_size=5, threshold=100),
        ["design_file", "in_file", "tcon_file"], "modelestimate")
    modelestimate.plugin_args = mem_request

    # Compute summary statistics about the model fit
    modelsummary = MapNode(ModelSummary(),
                           ["design_matrix_pkl", "timeseries", "pe_files"],
                           "modelsummary")
    modelsummary.plugin_args = mem_request

    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info), "in_file",
                         "saveparams")

    # Report on the results of the model
    # Note: see below for a conditional iterfield
    modelreport = MapNode(
        ModelReport(),
        ["timeseries", "sigmasquareds_file", "tsnr_file", "r2_files"],
        "modelreport")

    # Define the workflow outputs
    outputnode = Node(
        IdentityInterface([
            "results", "copes", "varcopes", "zstats", "r2_files", "ss_files",
            "tsnr_file", "report", "design_mat", "contrast_mat", "design_pkl",
            "design_report", "json_file"
        ]), "outputs")

    # Define the workflow and connect the nodes
    model = Workflow(name=name)
    model.connect([
        (inputnode, modelsetup, [("realign_file", "realign_file"),
                                 ("nuisance_file", "nuisance_file"),
                                 ("artifact_file", "artifact_file"),
                                 ("timeseries", "timeseries")]),
        (inputnode, modelestimate, [("timeseries", "in_file")]),
        (inputnode, saveparams, [("timeseries", "in_file")]),
        (modelsetup, modelestimate, [("design_matrix_file", "design_file"),
                                     ("contrast_file", "tcon_file")]),
        (modelsetup, modelsummary,
         [("design_matrix_pkl", "design_matrix_pkl")]),
        (inputnode, modelsummary, [("timeseries", "timeseries")]),
        (modelestimate, modelsummary, [("param_estimates", "pe_files")]),
        (inputnode, modelreport, [("timeseries", "timeseries")]),
        (modelestimate, modelreport,
         [("sigmasquareds", "sigmasquareds_file")]),
        (modelsummary, modelreport, [("r2_files", "r2_files"),
                                     ("tsnr_file", "tsnr_file")]),
        (modelsetup, outputnode, [("design_matrix_file", "design_mat"),
                                  ("contrast_file", "contrast_mat"),
                                  ("design_matrix_pkl", "design_pkl"),
                                  ("report", "design_report")]),
        (saveparams, outputnode, [("json_file", "json_file")]),
        (modelestimate, outputnode, [("results_dir", "results"),
                                     ("copes", "copes"),
                                     ("varcopes", "varcopes"),
                                     ("zstats", "zstats")]),
        (modelsummary, outputnode, [("r2_files", "r2_files"),
                                    ("ss_files", "ss_files"),
                                    ("tsnr_file", "tsnr_file")]),
        (modelreport, outputnode, [("out_files", "report")]),
    ])

    if exp_info["design_name"] is not None:
        model.connect(inputnode, "design_file", modelsetup, "design_file")
    if exp_info["regressor_file"] is not None:
        model.connect(inputnode, "regressor_file", modelsetup,
                      "regressor_file")
    if exp_info["contrasts"]:
        model.connect(modelestimate, "zstats", modelreport, "zstat_files")
        modelreport.iterfield.append("zstat_files")

    return model, inputnode, outputnode
Example #12
File: model.py Project: toddt/lyman
def create_timeseries_model_workflow(name="model", exp_info=None):
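    """Return a workflow to fit a timeseries model with FSL's film_gls."""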

    # Default experiment parameters for generating graph image, testing, etc.
    if exp_info is None:
        exp_info = default_experiment_parameters()

    # Define constant inputs
    inputs = ["design_file", "realign_file", "artifact_file", "timeseries"]

    # Possibly add the regressor file to the inputs
    if exp_info["regressor_file"] is not None:
        inputs.append("regressor_file")

    # Define the workflow inputs
    inputnode = Node(IdentityInterface(inputs), "inputs")

    # Set up the experimental design
    modelsetup = MapNode(Function(["exp_info",
                                   "design_file",
                                   "realign_file",
                                   "artifact_file",
                                   "regressor_file",
                                   "run"],
                                  ["design_matrix_file",
                                   "contrast_file",
                                   "design_matrix_pkl",
                                   "report"],
                                  setup_model,
                                  imports),
                          ["realign_file", "artifact_file", "run"],
                          "modelsetup")
    modelsetup.inputs.exp_info = exp_info
    if exp_info["regressor_file"] is None:
        modelsetup.inputs.regressor_file = None

    # Use film_gls to estimate the timeseries model
    modelestimate = MapNode(fsl.FILMGLS(smooth_autocorr=True,
                                        mask_size=5,
                                        threshold=1000),
                            ["design_file", "in_file"],
                            "modelestimate")

    # Run the contrast estimation routine
    contrastestimate = MapNode(fsl.ContrastMgr(),
                               ["tcon_file",
                                "dof_file",
                                "corrections",
                                "param_estimates",
                                "sigmasquareds"],
                               "contrastestimate")

    calcrsquared = MapNode(Function(["design_matrix_pkl",
                                     "timeseries",
                                     "pe_files"],
                                    ["r2_files",
                                     "ss_files"],
                                    compute_rsquareds,
                                    imports),
                           ["design_matrix_pkl",
                            "timeseries",
                            "pe_files"],
                           "calcrsquared")
    calcrsquared.plugin_args = dict(qsub_args="-l h_vmem=8G")

    # Save the experiment info for this run
    dumpjson = MapNode(Function(["exp_info", "timeseries"], ["json_file"],
                                dump_exp_info, imports),
                       "timeseries", "dumpjson")
    dumpjson.inputs.exp_info = exp_info

    # Report on the results of the model
    modelreport = MapNode(Function(["timeseries",
                                    "sigmasquareds_file",
                                    "zstat_files",
                                    "r2_files"],
                                   ["report"],
                                   report_model,
                                   imports),
                          ["timeseries", "sigmasquareds_file",
                           "zstat_files", "r2_files"],
                          "modelreport")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["results",
                                         "copes",
                                         "varcopes",
                                         "zstats",
                                         "r2_files",
                                         "ss_files",
                                         "report",
                                         "design_mat",
                                         "contrast_mat",
                                         "design_pkl",
                                         "design_report",
                                         "json_file"]),
                      "outputs")

    # Define the workflow and connect the nodes
    model = Workflow(name=name)
    model.connect([
        (inputnode, modelsetup,
            [("design_file", "design_file"),
             ("realign_file", "realign_file"),
             ("artifact_file", "artifact_file"),
             (("timeseries", run_indices), "run")]),
        (inputnode, modelestimate,
            [("timeseries", "in_file")]),
        (inputnode, dumpjson,
            [("timeseries", "timeseries")]),
        (modelsetup, modelestimate,
            [("design_matrix_file", "design_file")]),
        (modelestimate, contrastestimate,
            [("dof_file", "dof_file"),
             ("corrections", "corrections"),
             ("param_estimates", "param_estimates"),
             ("sigmasquareds", "sigmasquareds")]),
        (modelsetup, contrastestimate,
            [("contrast_file", "tcon_file")]),
        (modelsetup, calcrsquared,
            [("design_matrix_pkl", "design_matrix_pkl")]),
        (inputnode, calcrsquared,
            [("timeseries", "timeseries")]),
        (modelestimate, calcrsquared,
            [("param_estimates", "pe_files")]),
        (inputnode, modelreport,
            [("timeseries", "timeseries")]),
        (modelestimate, modelreport,
            [("sigmasquareds", "sigmasquareds_file")]),
        (contrastestimate, modelreport,
            [("zstats", "zstat_files")]),
        (calcrsquared, modelreport,
            [("r2_files", "r2_files")]),
        (modelsetup, outputnode,
            [("design_matrix_file", "design_mat"),
             ("contrast_file", "contrast_mat"),
             ("design_matrix_pkl", "design_pkl"),
             ("report", "design_report")]),
        (dumpjson, outputnode,
            [("json_file", "json_file")]),
        (modelestimate, outputnode,
            [("results_dir", "results")]),
        (contrastestimate, outputnode,
            [("copes", "copes"),
             ("varcopes", "varcopes"),
             ("zstats", "zstats")]),
        (calcrsquared, outputnode,
            [("r2_files", "r2_files"),
             ("ss_files", "ss_files")]),
        (modelreport, outputnode,
            [("report", "report")]),
        ])

    if exp_info["regressor_file"] is not None:
        model.connect([
            (inputnode, modelsetup,
                [("regressor_file", "regressor_file")])
                       ])

    return model, inputnode, outputnode
Example #13
def create_timeseries_model_workflow(name="model", exp_info=None):
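    """Return a workflow to fit a timeseries model with FSL's film_gls."""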

    # Default experiment parameters for generating graph image, testing, etc.
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define constant inputs
    inputs = ["realign_file", "artifact_file", "timeseries"]

    # Possibly add the design and regressor files to the inputs
    if exp_info["design_name"] is not None:
        inputs.append("design_file")
    if exp_info["regressor_file"] is not None:
        inputs.append("regressor_file")

    # Define the workflow inputs
    inputnode = Node(IdentityInterface(inputs), "inputs")

    # Set up the experimental design
    modelsetup = MapNode(ModelSetup(exp_info=exp_info),
                         ["timeseries", "realign_file", "artifact_file"],
                         "modelsetup")

    # For some nodes, make it possible to request extra memory
    mem_request = {"qsub_args": "-l h_vmem=%dG" % exp_info["memory_request"]}

    # Use film_gls to estimate the timeseries model
    modelestimate = MapNode(fsl.FILMGLS(smooth_autocorr=True,
                                        mask_size=5,
                                        threshold=100),
                            ["design_file", "in_file"],
                            "modelestimate")
    modelestimate.plugin_args = mem_request

    # Run the contrast estimation routine
    contrastestimate = MapNode(fsl.ContrastMgr(),
                               ["tcon_file",
                                "dof_file",
                                "corrections",
                                "param_estimates",
                                "sigmasquareds"],
                               "contrastestimate")
    contrastestimate.plugin_args = mem_request

    # Compute summary statistics about the model fit
    modelsummary = MapNode(ModelSummary(),
                           ["design_matrix_pkl",
                            "timeseries",
                            "pe_files"],
                           "modelsummary")
    modelsummary.plugin_args = mem_request

    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info),
                         "in_file", "saveparams")

    # Report on the results of the model
    # Note: see below for a conditional iterfield
    modelreport = MapNode(ModelReport(),
                          ["timeseries", "sigmasquareds_file",
                           "tsnr_file", "r2_files"],
                          "modelreport")

    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["results",
                                         "copes",
                                         "varcopes",
                                         "zstats",
                                         "r2_files",
                                         "ss_files",
                                         "tsnr_file",
                                         "report",
                                         "design_mat",
                                         "contrast_mat",
                                         "design_pkl",
                                         "design_report",
                                         "json_file"]),
                      "outputs")

    # Define the workflow and connect the nodes
    model = Workflow(name=name)
    model.connect([
        (inputnode, modelsetup,
            [("realign_file", "realign_file"),
             ("artifact_file", "artifact_file"),
             ("timeseries", "timeseries")]),
        (inputnode, modelestimate,
            [("timeseries", "in_file")]),
        (inputnode, saveparams,
            [("timeseries", "in_file")]),
        (modelsetup, modelestimate,
            [("design_matrix_file", "design_file")]),
        (modelestimate, contrastestimate,
            [("dof_file", "dof_file"),
             ("corrections", "corrections"),
             ("param_estimates", "param_estimates"),
             ("sigmasquareds", "sigmasquareds")]),
        (modelsetup, contrastestimate,
            [("contrast_file", "tcon_file")]),
        (modelsetup, modelsummary,
            [("design_matrix_pkl", "design_matrix_pkl")]),
        (inputnode, modelsummary,
            [("timeseries", "timeseries")]),
        (modelestimate, modelsummary,
            [("param_estimates", "pe_files")]),
        (inputnode, modelreport,
            [("timeseries", "timeseries")]),
        (modelestimate, modelreport,
            [("sigmasquareds", "sigmasquareds_file")]),
        (modelsummary, modelreport,
            [("r2_files", "r2_files"),
             ("tsnr_file", "tsnr_file")]),
        (modelsetup, outputnode,
            [("design_matrix_file", "design_mat"),
             ("contrast_file", "contrast_mat"),
             ("design_matrix_pkl", "design_pkl"),
             ("report", "design_report")]),
        (saveparams, outputnode,
            [("json_file", "json_file")]),
        (modelestimate, outputnode,
            [("results_dir", "results")]),
        (contrastestimate, outputnode,
            [("copes", "copes"),
             ("varcopes", "varcopes"),
             ("zstats", "zstats")]),
        (modelsummary, outputnode,
            [("r2_files", "r2_files"),
             ("ss_files", "ss_files"),
             ("tsnr_file", "tsnr_file")]),
        (modelreport, outputnode,
            [("out_files", "report")]),
        ])

    if exp_info["design_name"] is not None:
        model.connect(inputnode, "design_file",
                      modelsetup, "design_file")
    if exp_info["regressor_file"] is not None:
        model.connect(inputnode, "regressor_file",
                      modelsetup, "regressor_file")
    if exp_info["contrasts"]:
        model.connect(contrastestimate, "zstats",
                      modelreport, "zstat_files")
        modelreport.iterfield.append("zstat_files")

    return model, inputnode, outputnode
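
The factory returns the workflow along with its identity input and output
nodes, and the conditional iterfield above works because a MapNode exposes
its iterfield as a plain mutable list. Below is a minimal, self-contained
sketch of that same pattern using a toy Function interface in place of
lyman's ModelReport; the node name, the toy function body, and the
have_contrasts flag are illustrative assumptions, not part of the example
above.

from nipype import MapNode
from nipype.interfaces.utility import Function

def toy_report(timeseries, zstat_files=None):
    # Stand-in for ModelReport: just echo back what it received
    return [timeseries, zstat_files]

report = MapNode(Function(input_names=["timeseries", "zstat_files"],
                          output_names=["out"],
                          function=toy_report),
                 iterfield=["timeseries"],
                 name="toy_report")

have_contrasts = True  # stands in for bool(exp_info["contrasts"])
if have_contrasts:
    # Iterate over zstat_files only when contrasts will actually be estimated
    report.iterfield.append("zstat_files")
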
Example No. 14
0
def create_preprocessing_workflow(name="preproc", exp_info=None):
    """Return a Nipype workflow for fMRI preprocessing.

    This mostly follows the preprocessing in FSL, although some
    of the processing has been moved into pure Python.

    Parameters
    ----------
    name : string
        workflow object name
    exp_info : dict
        dictionary with experimental information

    """
    preproc = Workflow(name)

    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    # Define the inputs for the preprocessing workflow
    in_fields = ["timeseries", "subject_id"]

    if exp_info["whole_brain_template"]:
        in_fields.append("whole_brain")

    if exp_info["fieldmap_template"]:
        in_fields.append("fieldmap")

    inputnode = Node(IdentityInterface(in_fields), "inputs")

    # Remove equilibrium frames and convert to float
    prepare = MapNode(PrepTimeseries(), "in_file", "prep_timeseries")
    prepare.inputs.frames_to_toss = exp_info["frames_to_toss"]

    # Unwarp using fieldmap images
    if exp_info["fieldmap_template"]:
        unwarp = create_unwarp_workflow(fieldmap_pe=exp_info["fieldmap_pe"])

    # Motion and slice time correct
    realign = create_realignment_workflow(
        temporal_interp=exp_info["temporal_interp"],
        TR=exp_info["TR"],
        slice_order=exp_info["slice_order"],
        interleaved=exp_info["interleaved"],
    )

    # Estimate a registration from functional to anatomical space
    coregister = create_bbregister_workflow(
        partial_brain=bool(exp_info["whole_brain_template"]), init_with=exp_info["coreg_init"]
    )

    # Skullstrip the brain using the Freesurfer segmentation
    skullstrip = create_skullstrip_workflow()

    # Smooth intelligently in the volume
    susan = create_susan_smooth()
    susan.inputs.inputnode.fwhm = exp_info["smooth_fwhm"]

    # Scale and filter the timeseries
    filter_smooth = create_filtering_workflow(
        "filter_smooth", exp_info["hpf_cutoff"], exp_info["TR"], "smoothed_timeseries"
    )

    filter_rough = create_filtering_workflow(
        "filter_rough", exp_info["hpf_cutoff"], exp_info["TR"], "unsmoothed_timeseries"
    )

    # Automatically detect motion and intensity outliers
    artifacts = MapNode(ArtifactDetection(), ["timeseries", "mask_file", "motion_file"], "artifacts")
    artifacts.inputs.intensity_thresh = exp_info["intensity_threshold"]
    artifacts.inputs.motion_thresh = exp_info["motion_threshold"]
    artifacts.inputs.spike_thresh = exp_info["spike_threshold"]

    # Extract nuisance variables from anatomical sources
    confounds = create_confound_extraction_workflow("confounds", exp_info["wm_components"])

    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info), "in_file", "saveparams")

    preproc.connect(
        [
            (inputnode, prepare, [("timeseries", "in_file")]),
            (realign, artifacts, [("outputs.motion_file", "motion_file")]),
            (realign, coregister, [("outputs.timeseries", "inputs.timeseries")]),
            (inputnode, coregister, [("subject_id", "inputs.subject_id")]),
            (realign, skullstrip, [("outputs.timeseries", "inputs.timeseries")]),
            (inputnode, skullstrip, [("subject_id", "inputs.subject_id")]),
            (coregister, skullstrip, [("outputs.tkreg_mat", "inputs.reg_file")]),
            (skullstrip, artifacts, [("outputs.mask_file", "mask_file")]),
            (
                skullstrip,
                susan,
                [("outputs.mask_file", "inputnode.mask_file"), ("outputs.timeseries", "inputnode.in_files")],
            ),
            (susan, filter_smooth, [("outputnode.smoothed_files", "inputs.timeseries")]),
            (skullstrip, filter_smooth, [("outputs.mask_file", "inputs.mask_file")]),
            (skullstrip, filter_rough, [("outputs.timeseries", "inputs.timeseries")]),
            (skullstrip, filter_rough, [("outputs.mask_file", "inputs.mask_file")]),
            (filter_rough, artifacts, [("outputs.timeseries", "timeseries")]),
            (filter_rough, confounds, [("outputs.timeseries", "inputs.timeseries")]),
            (inputnode, confounds, [("subject_id", "inputs.subject_id")]),
            (skullstrip, confounds, [("outputs.mask_file", "inputs.brain_mask")]),
            (coregister, confounds, [("outputs.tkreg_mat", "inputs.reg_file")]),
            (inputnode, saveparams, [("timeseries", "in_file")]),
        ]
    )

    # Optionally add a connection for unwarping
    if bool(exp_info["fieldmap_template"]):
        preproc.connect(
            [
                (inputnode, unwarp, [("fieldmap", "inputs.fieldmap")]),
                (prepare, unwarp, [("out_file", "inputs.timeseries")]),
                (unwarp, realign, [("outputs.timeseries", "inputs.timeseries")]),
            ]
        )
    else:
        preproc.connect([(prepare, realign, [("out_file", "inputs.timeseries")])])

    # Optionally connect the whole brain template
    if bool(exp_info["whole_brain_template"]):
        preproc.connect([(inputnode, coregister, [("whole_brain", "inputs.whole_brain_template")])])

    # Define the outputs of the top-level workflow
    output_fields = [
        "smoothed_timeseries",
        "unsmoothed_timeseries",
        "example_func",
        "mean_func",
        "functional_mask",
        "realign_report",
        "mask_report",
        "artifact_report",
        "confound_file",
        "flirt_affine",
        "tkreg_affine",
        "coreg_report",
        "json_file",
    ]

    if bool(exp_info["fieldmap_template"]):
        output_fields.append("unwarp_report")

    outputnode = Node(IdentityInterface(output_fields), "outputs")

    preproc.connect(
        [
            (realign, outputnode, [("outputs.example_func", "example_func"), ("outputs.report", "realign_report")]),
            (skullstrip, outputnode, [("outputs.mask_file", "functional_mask"), ("outputs.report", "mask_report")]),
            (artifacts, outputnode, [("out_files", "artifact_report")]),
            (
                coregister,
                outputnode,
                [
                    ("outputs.tkreg_mat", "tkreg_affine"),
                    ("outputs.flirt_mat", "flirt_affine"),
                    ("outputs.report", "coreg_report"),
                ],
            ),
            (filter_smooth, outputnode, [("outputs.timeseries", "smoothed_timeseries")]),
            (
                filter_rough,
                outputnode,
                [("outputs.timeseries", "unsmoothed_timeseries"), ("outputs.mean_file", "mean_func")],
            ),
            (confounds, outputnode, [("outputs.confound_file", "confound_file")]),
            (saveparams, outputnode, [("json_file", "json_file")]),
        ]
    )

    if bool(exp_info["fieldmap_template"]):
        preproc.connect([(unwarp, outputnode, [("outputs.report", "unwarp_report")])])

    return preproc, inputnode, outputnode
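
As with the model workflow, the caller receives the workflow plus its
identity nodes and is responsible for supplying files and executing it. A
minimal driver sketch follows, assuming default experiment parameters (no
fieldmap or whole-brain template); the subject ID, file paths, base_dir,
and plugin settings are placeholders rather than anything prescribed above.

import lyman

# Hypothetical driver for the preprocessing workflow above
exp_info = lyman.default_experiment_parameters()
preproc, inputnode, outputnode = create_preprocessing_workflow(
    name="preproc", exp_info=exp_info)

# "timeseries" feeds MapNode iterfields downstream, so pass one path per run
inputnode.inputs.subject_id = "subj01"
inputnode.inputs.timeseries = ["run1_bold.nii.gz", "run2_bold.nii.gz"]

preproc.base_dir = "/tmp/preproc_work"
preproc.run(plugin="MultiProc", plugin_args={"n_procs": 4})

If a fieldmap template were configured, inputnode.inputs.fieldmap would also
need to be set before running.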