Example #1
def define_preproc_workflow(info, subjects, sessions, qc=True):

    # --- Workflow parameterization and data input

    scan_info = info.scan_info
    experiment = info.experiment_name

    iterables = generate_iterables(scan_info, experiment, subjects, sessions)
    subject_iterables, session_iterables, run_iterables = iterables

    subject_source = Node(IdentityInterface(["subject"]),
                          name="subject_source",
                          iterables=("subject", subject_iterables))

    session_source = Node(IdentityInterface(["subject", "session"]),
                          name="session_source",
                          itersource=("subject_source", "subject"),
                          iterables=("session", session_iterables))

    run_source = Node(IdentityInterface(["subject", "session", "run"]),
                      name="run_source",
                      itersource=("session_source", "session"),
                      iterables=("run", run_iterables))

    session_input = Node(SessionInput(data_dir=info.data_dir,
                                      proc_dir=info.proc_dir,
                                      fm_template=info.fm_template,
                                      phase_encoding=info.phase_encoding),
                         "session_input")

    run_input = Node(RunInput(experiment=experiment,
                              data_dir=info.data_dir,
                              proc_dir=info.proc_dir,
                              sb_template=info.sb_template,
                              ts_template=info.ts_template,
                              crop_frames=info.crop_frames),
                     name="run_input")

    # --- Warpfield estimation using topup

    # Distortion warpfield estimation
    #  TODO figure out how to parameterize for testing
    # topup_config = op.realpath(op.join(__file__, "../../../topup_fast.cnf"))
    topup_config = "b02b0.cnf"
    estimate_distortions = Node(fsl.TOPUP(config=topup_config),
                                "estimate_distortions")

    # Post-process the TOPUP outputs
    finalize_unwarping = Node(FinalizeUnwarping(), "finalize_unwarping")

    # --- Registration of SE-EPI (without distortions) to Freesurfer anatomy

    fm2anat = Node(fs.BBRegister(init="fsl",
                                 contrast_type="t2",
                                 registered_file=True,
                                 out_fsl_file="sess2anat.mat",
                                 out_reg_file="sess2anat.dat"),
                   "fm2anat")

    fm2anat_qc = Node(AnatRegReport(data_dir=info.data_dir), "fm2anat_qc")

    # --- Registration of SBRef to SE-EPI (with distortions)

    sb2fm = Node(fsl.FLIRT(dof=6, interp="spline"), "sb2fm")

    sb2fm_qc = Node(CoregGIF(out_file="coreg.gif"), "sb2fm_qc")

    # --- Motion correction of time series to SBRef (with distortions)

    ts2sb = Node(fsl.MCFLIRT(save_mats=True, save_plots=True),
                 "ts2sb")

    ts2sb_qc = Node(RealignmentReport(), "ts2sb_qc")

    # --- Combined motion correction, unwarping, and template registration

    # Combine pre- and post-warp linear transforms
    combine_premats = MapNode(fsl.ConvertXFM(concat_xfm=True),
                              "in_file", "combine_premats")

    combine_postmats = Node(fsl.ConvertXFM(concat_xfm=True),
                            "combine_postmats")

    # Transform Jacobian images into the template space
    transform_jacobian = Node(fsl.ApplyWarp(relwarp=True),
                              "transform_jacobian")

    # Apply rigid transforms and nonlinear warpfield to time series frames
    restore_timeseries = MapNode(fsl.ApplyWarp(interp="spline", relwarp=True),
                                 ["in_file", "premat"],
                                 "restore_timeseries")

    # Apply rigid transforms and nonlinear warpfield to template frames
    restore_template = MapNode(fsl.ApplyWarp(interp="spline", relwarp=True),
                               ["in_file", "premat", "field_file"],
                               "restore_template")

    # Perform final preprocessing operations on timeseries
    finalize_timeseries = Node(FinalizeTimeseries(experiment=experiment),
                               "finalize_timeseries")

    # Perform final preprocessing operations on template
    finalize_template = JoinNode(FinalizeTemplate(experiment=experiment),
                                 name="finalize_template",
                                 joinsource="run_source",
                                 joinfield=["mean_files", "tsnr_files",
                                            "mask_files", "noise_files"])

    # --- Workflow output

    save_info = Node(SaveInfo(info_dict=info.trait_get()), "save_info")

    template_output = Node(DataSink(base_directory=info.proc_dir,
                                    parameterization=False),
                           "template_output")

    timeseries_output = Node(DataSink(base_directory=info.proc_dir,
                                      parameterization=False),
                             "timeseries_output")

    # === Assemble pipeline

    cache_base = op.join(info.cache_dir, info.experiment_name)
    workflow = Workflow(name="preproc", base_dir=cache_base)

    # Connect processing nodes

    processing_edges = [

        (subject_source, session_source,
            [("subject", "subject")]),
        (subject_source, run_source,
            [("subject", "subject")]),
        (session_source, run_source,
            [("session", "session")]),
        (session_source, session_input,
            [("session", "session")]),
        (run_source, run_input,
            [("run", "run")]),

        # Phase-encode distortion estimation

        (session_input, estimate_distortions,
            [("fm_file", "in_file"),
             ("phase_encoding", "encoding_direction"),
             ("readout_times", "readout_times")]),
        (session_input, finalize_unwarping,
            [("fm_file", "raw_file"),
             ("phase_encoding", "phase_encoding")]),
        (estimate_distortions, finalize_unwarping,
            [("out_corrected", "corrected_file"),
             ("out_warps", "warp_files"),
             ("out_jacs", "jacobian_files")]),

        # Registration of corrected SE-EPI to anatomy

        (session_input, fm2anat,
            [("subject", "subject_id")]),
        (finalize_unwarping, fm2anat,
            [("corrected_file", "source_file")]),

        # Registration of each frame to SBRef image

        (run_input, ts2sb,
            [("ts_file", "in_file"),
             ("sb_file", "ref_file")]),
        (ts2sb, finalize_timeseries,
            [("par_file", "mc_file")]),

        # Registration of SBRef volume to SE-EPI fieldmap

        (run_input, sb2fm,
            [("sb_file", "in_file")]),
        (finalize_unwarping, sb2fm,
            [("raw_file", "reference"),
             ("mask_file", "ref_weight")]),

        # Single-interpolation spatial realignment and unwarping

        (ts2sb, combine_premats,
            [("mat_file", "in_file")]),
        (sb2fm, combine_premats,
            [("out_matrix_file", "in_file2")]),
        (fm2anat, combine_postmats,
            [("out_fsl_file", "in_file")]),
        (session_input, combine_postmats,
            [("reg_file", "in_file2")]),

        (run_input, transform_jacobian,
            [("anat_file", "ref_file")]),
        (finalize_unwarping, transform_jacobian,
            [("jacobian_file", "in_file")]),
        (combine_postmats, transform_jacobian,
            [("out_file", "premat")]),

        (run_input, restore_timeseries,
            [("ts_frames", "in_file")]),
        (run_input, restore_timeseries,
            [("anat_file", "ref_file")]),
        (combine_premats, restore_timeseries,
            [("out_file", "premat")]),
        (finalize_unwarping, restore_timeseries,
            [("warp_file", "field_file")]),
        (combine_postmats, restore_timeseries,
            [("out_file", "postmat")]),
        (run_input, finalize_timeseries,
            [("run_tuple", "run_tuple"),
             ("anat_file", "anat_file"),
             ("seg_file", "seg_file"),
             ("mask_file", "mask_file")]),
        (transform_jacobian, finalize_timeseries,
            [("out_file", "jacobian_file")]),
        (restore_timeseries, finalize_timeseries,
            [("out_file", "in_files")]),

        (session_input, restore_template,
            [("fm_frames", "in_file"),
             ("anat_file", "ref_file")]),
        (estimate_distortions, restore_template,
            [("out_mats", "premat"),
             ("out_warps", "field_file")]),
        (combine_postmats, restore_template,
            [("out_file", "postmat")]),
        (session_input, finalize_template,
            [("session_tuple", "session_tuple"),
             ("seg_file", "seg_file"),
             ("anat_file", "anat_file")]),
        (transform_jacobian, finalize_template,
            [("out_file", "jacobian_file")]),
        (restore_template, finalize_template,
            [("out_file", "in_files")]),

        (finalize_timeseries, finalize_template,
            [("mean_file", "mean_files"),
             ("tsnr_file", "tsnr_files"),
             ("mask_file", "mask_files"),
             ("noise_file", "noise_files")]),

        # --- Persistent data storage

        # Outputs associated with each scanner run

        (finalize_timeseries, timeseries_output,
            [("output_path", "container"),
             ("out_file", "@func"),
             ("mean_file", "@mean"),
             ("mask_file", "@mask"),
             ("tsnr_file", "@tsnr"),
             ("noise_file", "@noise"),
             ("mc_file", "@mc")]),

        # Outputs associated with the session template

        (finalize_template, template_output,
            [("output_path", "container"),
             ("out_file", "@func"),
             ("mean_file", "@mean"),
             ("tsnr_file", "@tsnr"),
             ("mask_file", "@mask"),
             ("noise_file", "@noise")]),

    ]
    workflow.connect(processing_edges)

    # Optionally connect QC nodes

    qc_edges = [

        # Registration of each frame to SBRef image

        (run_input, ts2sb_qc,
            [("sb_file", "target_file")]),
        (ts2sb, ts2sb_qc,
            [("par_file", "realign_params")]),

        # Registration of corrected SE-EPI to anatomy

        (session_input, fm2anat_qc,
            [("subject", "subject_id")]),
        (fm2anat, fm2anat_qc,
            [("registered_file", "in_file"),
             ("min_cost_file", "cost_file")]),

        # Registration of SBRef volume to SE-EPI fieldmap

        (sb2fm, sb2fm_qc,
            [("out_file", "in_file")]),
        (finalize_unwarping, sb2fm_qc,
            [("raw_file", "ref_file")]),

        # Outputs associated with each scanner run

        (run_source, save_info,
            [("run", "parameterization")]),
        (save_info, timeseries_output,
            [("info_file", "qc.@info_json")]),

        (run_input, timeseries_output,
            [("ts_plot", "qc.@raw_gif")]),
        (sb2fm_qc, timeseries_output,
            [("out_file", "qc.@sb2fm_gif")]),
        (ts2sb_qc, timeseries_output,
            [("params_plot", "qc.@params_plot"),
             ("target_plot", "qc.@target_plot")]),
        (finalize_timeseries, timeseries_output,
            [("out_gif", "qc.@ts_gif"),
             ("out_png", "qc.@ts_png"),
             ("mask_plot", "qc.@mask_plot"),
             ("mean_plot", "qc.@ts_mean_plot"),
             ("tsnr_plot", "qc.@ts_tsnr_plot"),
             ("noise_plot", "qc.@noise_plot")]),

        # Outputs associated with the session template

        (finalize_unwarping, template_output,
            [("warp_plot", "qc.@warp_png"),
             ("unwarp_gif", "qc.@unwarp_gif")]),
        (fm2anat_qc, template_output,
            [("out_file", "qc.@reg_png")]),
        (finalize_template, template_output,
            [("out_plot", "qc.@func_png"),
             ("mean_plot", "qc.@mean"),
             ("tsnr_plot", "qc.@tsnr"),
             ("mask_plot", "qc.@mask"),
             ("noise_plot", "qc.@noise")]),

    ]

    if qc:
        workflow.connect(qc_edges)

    return workflow
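
A minimal sketch of how a workflow factory like this might be driven; the
`info` object (a configuration exposing scan_info, data_dir, proc_dir,
cache_dir, and related traits) and the plugin settings are assumptions, not
part of the example:

# Hypothetical driver for the factory above.
wf = define_preproc_workflow(info, subjects=["subj01"], sessions=None)
wf.write_graph(graph2use="colored")  # inspect the expanded graph
wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})
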
Example #2

from nipype import Node, JoinNode, Workflow
from nipype.interfaces.utility import IdentityInterface
from nipype.interfaces import ants, fsl

#make workflow object
wf = Workflow(name='preprocess')
#inputspec is an IdentityInterface node that provides the subject identity; this is what Subject.py was trying to accomplish
inputspec = Node(IdentityInterface(fields=['image']),
                    name='inputspec')
#these are the images that will be processed by this node; Subject.seq might work here
inputspec.iterables = [('image',
                       ['img1.nii', 'img2.nii', 'img3.nii'])]
#img2flt converts the image data to float (fsl.ImageMaths with out_data_type='float'); confirm details with Ryan
img2flt = Node(fsl.ImageMaths(out_data_type='float'),
                  name='img2flt')
#connects the first node with the second node
wf.connect(inputspec, 'image', img2flt, 'in_file')
#3rd node: a JoinNode that averages the converted images
average = JoinNode(ants.AverageImages(), joinsource='inputspec',
                      joinfield='images', name='average')
#add new node to wf
wf.connect(img2flt, 'out_file', average, 'images')
#new node
realign = Node(fsl.FLIRT(), name='realign')
#add to wf after img2flt & after average
wf.connect(img2flt, 'out_file', realign, 'in_file')
wf.connect(average, 'output_average_image', realign, 'reference')
strip = Node(fsl.BET(), name='strip')
wf.connect(realign, 'out_file', strip, 'in_file')
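
When this graph runs, `inputspec` expands into three parallel branches (one
per image) and the JoinNode collapses them again: `average` receives all
three float-converted files as a list on its `images` input, while `realign`
runs once per image against the shared average. A sketch of executing it,
assuming the image files exist in the working directory:

# Hypothetical execution; nipype caches node results under base_dir.
wf.base_dir = "/tmp/preprocess_work"
wf.run()
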
Example #3
def define_model_results_workflow(info, subjects, qc=True):

    # TODO I am copying a lot from above ...

    # --- Workflow parameterization and data input

    # We just need two levels of iterables here: one subject-level and
    # one "flat" run-level iterable (i.e. all runs collapsing over
    # sessions). Unlike in the model fit workflow, we always want to process
    # all sessions.

    scan_info = info.scan_info
    experiment = info.experiment_name
    model = info.model_name

    iterables = generate_iterables(scan_info, experiment, subjects)
    subject_iterables, run_iterables = iterables
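    # With itersource (below), run_iterables is expected to be a dict keyed
    # by subject, e.g. {"subj01": [("sess01", "run_1"), ...]}, flattening
    # sessions and runs into a single level per subject.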

    subject_source = Node(IdentityInterface(["subject"]),
                          name="subject_source",
                          iterables=("subject", subject_iterables))

    run_source = Node(IdentityInterface(["subject", "run"]),
                      name="run_source",
                      itersource=("subject_source", "subject"),
                      iterables=("run", run_iterables))

    data_input = Node(
        ModelResultsInput(experiment=experiment,
                          model=model,
                          proc_dir=info.proc_dir), "data_input")

    # --- Run-level contrast estimation

    estimate_contrasts = Node(EstimateContrasts(info=info.trait_get()),
                              "estimate_contrasts")

    # --- Subject-level contrast estimation

    model_results = JoinNode(
        ModelResults(info=info.trait_get()),
        name="model_results",
        joinsource="run_source",
        joinfield=["contrast_files", "variance_files", "name_files"])

    # --- Data output

    save_info = Node(SaveInfo(info_dict=info.trait_get()), "save_info")

    run_output = Node(
        DataSink(base_directory=info.proc_dir, parameterization=False),
        "run_output")

    results_path = Node(
        ModelResultsPath(proc_dir=info.proc_dir,
                         experiment=experiment,
                         model=model), "results_path")

    subject_output = Node(
        DataSink(base_directory=info.proc_dir, parameterization=False),
        "subject_output")

    # === Assemble pipeline

    cache_base = op.join(info.cache_dir, experiment)
    workflow = Workflow(name="model_results", base_dir=cache_base)

    # Connect processing nodes

    processing_edges = [
        (subject_source, run_source, [("subject", "subject")]),
        (subject_source, data_input, [("subject", "subject")]),
        (run_source, data_input, [("run", "run_tuple")]),
        (data_input, estimate_contrasts, [("mask_file", "mask_file"),
                                          ("beta_file", "beta_file"),
                                          ("error_file", "error_file"),
                                          ("ols_file", "ols_file"),
                                          ("design_file", "design_file")]),
        (data_input, model_results, [("anat_file", "anat_file")]),
        (estimate_contrasts, model_results,
         [("contrast_file", "contrast_files"),
          ("variance_file", "variance_files"), ("name_file", "name_files")]),
        (run_source, save_info, [("run", "parameterization")]),
        (save_info, run_output, [("info_file", "qc.@info_json")]),
        (data_input, run_output, [("output_path", "container")]),
        (estimate_contrasts, run_output, [("contrast_file", "@contrast"),
                                          ("variance_file", "@variance"),
                                          ("tstat_file", "@tstat"),
                                          ("name_file", "@names")]),
        (subject_source, results_path, [("subject", "subject")]),
        (results_path, subject_output, [("output_path", "container")]),
        (model_results, subject_output, [("result_directories", "@results")]),
    ]
    workflow.connect(processing_edges)

    return workflow
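
The run-level contrast estimates are collapsed to the subject level by the
`model_results` JoinNode (one list entry per run, via
joinsource="run_source"). A hedged sketch of driving this workflow, with
`info` again a stand-in configuration object:

# Hypothetical driver; runs serially on the local machine.
wf = define_model_results_workflow(info, subjects=["subj01", "subj02"])
wf.run(plugin="Linear")
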
Example #4
def define_template_workflow(info, subjects, qc=True):

    # --- Workflow parameterization

    subject_source = Node(IdentityInterface(["subject"]),
                          name="subject_source",
                          iterables=("subject", subjects))

    # Data input
    template_input = Node(TemplateInput(data_dir=info.data_dir),
                          "template_input")

    # --- Definition of functional template space

    crop_image = Node(fs.ApplyMask(args="-bb 4"), "crop_image")

    zoom_image = Node(fs.MRIConvert(resample_type="cubic",
                                    out_type="niigz",
                                    vox_size=info.voxel_size,
                                    ),
                      "zoom_image")

    reorient_image = Node(fsl.Reorient2Std(out_file="anat.nii.gz"),
                          "reorient_image")

    generate_reg = Node(fs.Tkregister2(fsl_out="anat2func.mat",
                                       reg_file="anat2func.dat",
                                       reg_header=True),
                        "generate_reg")

    invert_reg = Node(fs.Tkregister2(reg_file="func2anat.dat",
                                     reg_header=True),
                      "invert_reg")

    # --- Identification of surface vertices

    hemi_source = Node(IdentityInterface(["hemi"]), "hemi_source",
                       iterables=("hemi", ["lh", "rh"]))

    tag_surf = Node(fs.Surface2VolTransform(surf_name="graymid",
                                            transformed_file="ribbon.nii.gz",
                                            vertexvol_file="vertices.nii.gz",
                                            mkmask=True),
                    "tag_surf")

    mask_cortex = Node(MaskWithLabel(fill_value=-1), "mask_cortex")

    combine_hemis = JoinNode(fsl.Merge(dimension="t",
                                       merged_file="surf.nii.gz"),
                             name="combine_hemis",
                             joinsource="hemi_source",
                             joinfield="in_files")

    make_ribbon = Node(MakeRibbon(), "make_ribbon")

    # --- Segmentation of anatomical tissue in functional space

    transform_wmparc = Node(fs.ApplyVolTransform(inverse=True,
                                                 interp="nearest",
                                                 args="--keep-precision"),
                            "transform_wmparc")

    anat_segment = Node(AnatomicalSegmentation(), "anat_segment")

    # --- Template QC

    template_qc = Node(TemplateReport(), "template_qc")

    # --- Workflow output

    save_info = Node(SaveInfo(info_dict=info.trait_get()), "save_info")

    template_output = Node(DataSink(base_directory=info.proc_dir,
                                    parameterization=False),
                           "template_output")

    # === Assemble pipeline

    workflow = Workflow(name="template", base_dir=info.cache_dir)

    processing_edges = [

        (subject_source, template_input,
            [("subject", "subject")]),
        (template_input, crop_image,
            [("norm_file", "in_file"),
             ("wmparc_file", "mask_file")]),
        (crop_image, zoom_image,
            [("out_file", "in_file")]),
        (zoom_image, reorient_image,
            [("out_file", "in_file")]),

        (subject_source, generate_reg,
            [("subject", "subject_id")]),
        (template_input, generate_reg,
            [("norm_file", "moving_image")]),
        (reorient_image, generate_reg,
            [("out_file", "target_image")]),

        (subject_source, invert_reg,
            [("subject", "subject_id")]),
        (template_input, invert_reg,
            [("norm_file", "target_image")]),
        (reorient_image, invert_reg,
            [("out_file", "moving_image")]),

        (hemi_source, tag_surf,
            [("hemi", "hemi")]),
        (invert_reg, tag_surf,
            [("reg_file", "reg_file")]),
        (reorient_image, tag_surf,
            [("out_file", "template_file")]),
        (template_input, mask_cortex,
            [("label_files", "label_files")]),
        (hemi_source, mask_cortex,
            [("hemi", "hemi")]),
        (tag_surf, mask_cortex,
            [("vertexvol_file", "in_file")]),
        (mask_cortex, combine_hemis,
            [("out_file", "in_files")]),
        (combine_hemis, make_ribbon,
            [("merged_file", "in_file")]),

        (reorient_image, transform_wmparc,
            [("out_file", "source_file")]),
        (template_input, transform_wmparc,
            [("wmparc_file", "target_file")]),
        (invert_reg, transform_wmparc,
            [("reg_file", "reg_file")]),
        (reorient_image, anat_segment,
            [("out_file", "anat_file")]),
        (transform_wmparc, anat_segment,
            [("transformed_file", "wmparc_file")]),
        (combine_hemis, anat_segment,
            [("merged_file", "surf_file")]),

        (template_input, template_output,
            [("output_path", "container")]),
        (reorient_image, template_output,
            [("out_file", "@anat")]),
        (generate_reg, template_output,
            [("fsl_file", "@anat2func")]),
        (anat_segment, template_output,
            [("seg_file", "@seg"),
             ("lut_file", "@lut"),
             ("edge_file", "@edge"),
             ("mask_file", "@mask")]),
        (combine_hemis, template_output,
            [("merged_file", "@surf")]),
        (make_ribbon, template_output,
            [("out_file", "@ribon")]),

    ]
    workflow.connect(processing_edges)

    # Optionally connect QC nodes

    qc_edges = [

        (reorient_image, template_qc,
            [("out_file", "anat_file")]),
        (combine_hemis, template_qc,
            [("merged_file", "surf_file")]),
        (anat_segment, template_qc,
            [("lut_file", "lut_file"),
             ("seg_file", "seg_file"),
             ("edge_file", "edge_file"),
             ("mask_file", "mask_file")]),

        (subject_source, save_info,
            [("subject", "parameterization")]),
        (save_info, template_output,
            [("info_file", "qc.@info_json")]),

        (template_qc, template_output,
            [("seg_plot", "qc.@seg_plot"),
             ("mask_plot", "qc.@mask_plot"),
             ("edge_plot", "qc.@edge_plot"),
             ("surf_plot", "qc.@surf_plot"),
             ("anat_plot", "qc.@anat_plot")]),

    ]
    if qc:
        workflow.connect(qc_edges)

    return workflow
Example #5
def create_grvx_workflow(parameters):

    parameters['paths']['output'].mkdir(exist_ok=True, parents=True)

    parameters['timestamp'] = datetime.now().isoformat()
    parameters_json = parameters['paths']['output'] / 'parameters.json'
    with parameters_json.open('w') as f:
        json_dump(parameters, f, indent=2, cls=JSONEncoder_path)

    bids = bids_node(parameters)

    node_reconall = Node(ReconAll(), name='freesurfer')
    node_reconall.inputs.subjects_dir = str(parameters['paths']['freesurfer_subjects_dir'])
    node_reconall.inputs.flags = ['-cw256', ]

    node_corr = Node(function_corr, name='corr_fmri_ecog')
    node_corr.inputs.pvalue = parameters['corr']['pvalue']

    node_corr_allfreq = Node(function_corr_allfreq, name='corr_fmri_ecog_allfreq')
    node_corr_allfreq.inputs.pvalue = parameters['corr']['pvalue']
    node_corr_allfreq.inputs.min_n_sign_elec = parameters['corr']['min_n_sign_elec']

    node_corr_summary = JoinNode(
        function_corr_summary,
        name='corr_fmri_ecog_summary',
        joinsource='bids',
        joinfield=('in_files', 'ecog_files', 'fmri_files'),
        )

    w_fmri = workflow_fmri(parameters)
    w_ieeg = workflow_ieeg(parameters)

    w = Workflow('workflow')
    w.base_dir = str(parameters['paths']['output'])

    if parameters['fmri']['graymatter']:
        w.connect(bids, 'subject', node_reconall, 'subject_id')  # we might use freesurfer for other stuff too
        w.connect(bids, 'anat', node_reconall, 'T1_files')
        w.connect(node_reconall, 'ribbon', w_fmri, 'graymatter.ribbon')

    w.connect(bids, 'ieeg', w_ieeg, 'read.ieeg')
    w.connect(bids, 'elec', w_ieeg, 'read.electrodes')

    w.connect(bids, 'anat', w_fmri, 'bet.in_file')
    w.connect(bids, 'func', w_fmri, 'feat_design.func')

    w.connect(bids, 'elec', w_fmri, 'at_elec.electrodes')

    w.connect(w_ieeg, 'ecog_compare.tsv_compare', node_corr, 'ecog_file')
    w.connect(w_fmri, 'at_elec.fmri_vals', node_corr, 'fmri_file')

    w.connect(w_ieeg, 'ecog_compare_allfreq.compare', node_corr_allfreq, 'ecog_file')
    w.connect(w_fmri, 'at_elec.fmri_vals', node_corr_allfreq, 'fmri_file')

    w.connect(node_corr, 'out_file', node_corr_summary, 'in_files')
    w.connect(w_ieeg, 'ecog_compare.tsv_compare', node_corr_summary, 'ecog_files')
    w.connect(w_fmri, 'at_elec.fmri_vals', node_corr_summary, 'fmri_files')

    w.write_graph(graph2use='flat')
    log_dir = parameters['paths']['output'] / 'log'

    config.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True,
            },
        })

    rmtree(log_dir, ignore_errors=True)
    log_dir.mkdir()
    logging.update_logging(config)

    return w
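
The `parameters` argument is a nested mapping. A minimal sketch of the keys
this function itself reads (the paths and values are assumptions, and the
nested bids_node/workflow_fmri/workflow_ieeg helpers will need further
entries):

from pathlib import Path

parameters = {
    'paths': {
        'output': Path('/data/grvx/output'),
        'freesurfer_subjects_dir': Path('/data/freesurfer/subjects'),
        },
    'corr': {
        'pvalue': 0.05,           # hypothetical significance threshold
        'min_n_sign_elec': 1,     # hypothetical electrode-count cutoff
        },
    'fmri': {
        'graymatter': True,       # enables the ReconAll branch
        },
    }
w = create_grvx_workflow(parameters)
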
Example #6
def create_flatten_identity_join_node(name: str, fields: t.List[str],
                                      joinsource: t.Union[Node, str], flatten_fields: t.List[str]) -> JoinNode:
    return JoinNode(FlattenIdentityInterface(fields=fields, flatten_fields=flatten_fields),
                    name=name, joinsource=joinsource, joinfield=fields)
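
A hypothetical usage, mirroring how Example #7 employs this helper: join
per-subject outputs over a subject iterable and flatten the nested
`warnings` lists into a single flat list:

join_warnings = create_flatten_identity_join_node(
    name='join_warnings_over_subjects',
    fields=['warnings', 'excluded_subjects'],
    joinsource='subject_source',  # name of the upstream iterable node
    flatten_fields=['warnings'])
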
Example #7
    def __init__(self, bids_dir: str, subjects: t.List[str],
                 tasks: t.List[str], conf_raw: t.List[str],
                 conf_json: t.List[str], tr_dic: dict,
                 pipelines_paths: t.List[str], high_pass: float,
                 low_pass: float):
        self.fmri_prep_aroma_files = []
        self.fmri_prep_files = []
        # 1) --- Itersources for all further processing
        # Inputs: fulfilled
        self.pipelineselector = Node(PipelineSelector(),
                                     name="PipelineSelector")
        self.pipelineselector.iterables = ('pipeline_path', pipelines_paths)
        # Outputs: pipeline, pipeline_name, low_pass, high_pass

        # Inputs: fulfilled
        self.subjectselector = Node(IdentityInterface(fields=['subject']),
                                    name="SubjectSelector")
        self.subjectselector.iterables = ('subject', subjects)
        # Outputs: subject

        # Inputs: fulfilled
        self.taskselector = Node(IdentityInterface(fields=['task']),
                                 name="TaskSelector")
        self.taskselector.iterables = ('task', tasks)
        # Outputs: task

        # 2) --- Loading BIDS files

        # Inputs: subject, session, task
        self.bidsgrabber = Node(BIDSGrab(conf_raw_files=conf_raw,
                                         conf_json_files=conf_json),
                                name="BidsGrabber")
        # Outputs: fmri_prep, fmri_prep_aroma, conf_raw, conf_json

        # 3) --- Confounds preprocessing

        # Inputs: pipeline, conf_raw, conf_json
        self.prep_conf = Node(Confounds(output_dir=temps.mkdtemp('prep_conf')),
                              name="ConfPrep")
        # Outputs: conf_prep, conf_summary

        # 4) --- Denoising
        # Inputs: fmri_prep, fmri_prep_aroma, conf_prep, pipeline, entity, tr_dict
        self.denoise = Node(Denoise(high_pass=high_pass,
                                    low_pass=low_pass,
                                    tr_dict=tr_dic,
                                    output_dir=temps.mkdtemp('denoise')),
                            name="Denoiser",
                            mem_gb=12)
        # Outputs: fmri_denoised

        # 5) --- Connectivity estimation

        # Inputs: fmri_denoised
        self.connectivity = Node(
            Connectivity(output_dir=temps.mkdtemp('connectivity')),
            name='ConnCalc')
        # Outputs: conn_mat, carpet_plot

        # 6) --- Group confounds

        # Inputs: conf_summary, pipeline_name

        self.group_conf_summary = JoinNode(
            GroupConfounds(output_dir=temps.mkdtemp('group_conf_summary'), ),
            joinfield=["conf_summary_json_files"],
            joinsource=self.subjectselector,
            name="GroupConf")

        # Outputs: group_conf_summary

        # 7) --- Group connectivity

        # Inputs: corr_mat, pipeline_name

        self.group_connectivity = JoinNode(GroupConnectivity(
            output_dir=temps.mkdtemp('group_connectivity'), ),
                                           joinfield=["corr_mat"],
                                           joinsource=self.subjectselector,
                                           name="GroupConn")

        # Outputs: group_corr_mat

        # 8) --- Quality measures

        # Inputs: group_corr_mat, group_conf_summary, pipeline_name

        self.quality_measures = Node(QualityMeasures(
            output_dir=temps.mkdtemp('quality_measures'),
            distance_matrix=get_distance_matrix_file_path()),
                                     name="QualityMeasures")
        # Outputs: fc_fd_summary, edges_weight, edges_weight_clean
        self.quality_measures_join = create_flatten_identity_join_node(
            name='JoinQualityMeasuresOverPipeline',
            joinsource=self.pipelineselector,
            fields=[
                'excluded_subjects', 'warnings', 'corr_matrix_plot',
                'corr_matrix_no_high_motion_plot'
            ],
            flatten_fields=['warnings'])
        # 10) --- Quality measures across pipelines

        # Inputs: fc_fd_summary, edges_weight
        self.pipelines_join = JoinNode(IdentityInterface(fields=['pipelines']),
                                       name='JoinPipelines',
                                       joinsource=self.pipelineselector,
                                       joinfield=['pipelines'])
        self.pipelines_quality_measures = JoinNode(
            PipelinesQualityMeasures(
                output_dir=temps.mkdtemp('pipelines_quality_measures'),
                # TODO: Replace with datasinks for needed output
            ),
            joinsource=self.pipelineselector,
            joinfield=[
                'fc_fd_summary', 'edges_weight', 'edges_weight_clean',
                'fc_fd_corr_values', 'fc_fd_corr_values_clean'
            ],
            name="PipelinesQualityMeasures")
        self.pipeline_quality_measures_join_tasks = create_flatten_identity_join_node(
            name="JoinPipelinesQualityMeasuresOverTasks",
            joinsource=self.taskselector,
            fields=[
                'warnings', 'excluded_subjects',
                'plot_pipelines_edges_density',
                'plot_pipelines_edges_density_no_high_motion',
                'plot_pipelines_fc_fd_pearson',
                'plot_pipelines_fc_fd_pearson_no_high_motion',
                'plot_pipelines_fc_fd_uncorr',
                'plot_pipelines_distance_dependence',
                'plot_pipelines_distance_dependence_no_high_motion',
                'plot_pipelines_tdof_loss', 'corr_matrix_plot',
                'corr_matrix_no_high_motion_plot'
            ],
            flatten_fields=[
                'warnings', 'excluded_subjects', 'corr_matrix_plot',
                'corr_matrix_no_high_motion_plot'
            ])
        # Outputs: pipelines_fc_fd_summary, pipelines_edges_weight
        # 11) --- Report from data
        report_dir = os.path.join(bids_dir, 'derivatives', 'fmridenoise',
                                  'report')
        os.makedirs(report_dir, exist_ok=True)
        self.report_creator = Node(ReportCreator(runtime_info=RuntimeInfo(
            input_args=str(reduce(lambda x, y: f"{x} {y}", sys.argv)),
            version=get_versions().get('version')),
                                                 output_dir=report_dir),
                                   name='ReportCreator')
        self.report_creator.inputs.tasks = tasks
        # 12) --- Save derivatives
        base_entities = {'bids_dir': bids_dir, 'derivative': 'fmridenoise'}
        self.ds_confounds = Node(BIDSDataSink(base_entities=base_entities),
                                 name="ds_confounds")
        self.ds_denoise = Node(BIDSDataSink(base_entities=base_entities),
                               name="ds_denoise")
        self.ds_connectivity_corr_mat = Node(
            BIDSDataSink(base_entities=base_entities), name="ds_connectivity")
        self.ds_connectivity_carpet_plot = Node(
            BIDSDataSink(base_entities=base_entities), name="ds_carpet_plot")
        self.ds_connectivity_matrix_plot = Node(
            BIDSDataSink(base_entities=base_entities), name="ds_matrix_plot")
        self.ds_group_conf_summary = Node(
            BIDSDataSink(base_entities=base_entities),
            name="ds_group_conf_summary")
        self.ds_group_connectivity = Node(
            BIDSDataSink(base_entities=base_entities),
            name="ds_group_connectivity")
        self.ds_qm_motion_plot = Node(
            BIDSDataSink(base_entities=base_entities),
            name="ds_quality_measures_motion_plot")
        self.ds_qm_corr_matrix_plot_no_high = Node(
            BIDSDataSink(base_entities=base_entities),
            name="ds_quality_measures_corr_matrix_plot_no_high")
        self.ds_qm_corr_matrix_plot = Node(
            BIDSDataSink(base_entities=base_entities),
            name="ds_quality_measures_corr_matrix_plot")
        self.ds_pqm_fc_fd_summary = Node(
            BIDSDataSink(base_entities=base_entities),
            name="ds_pipeline_qm_fc_fd_summery")
        self.ds_pqm_edges_weight = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_edges_weight')
        self.ds_pqm_edges_weight_clean = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_edges_weight_clean')
        self.ds_pqm_plot_edges_density = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_edges_density')
        self.ds_pqm_plot_edges_density_no_high = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_edges_density_no_high')
        self.ds_pqm_plot_fc_fd = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_fc_fd')
        self.ds_pqm_plot_fc_fd_no_high = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_fc_fd_no_high')
        self.ds_pqm_plot_fc_fd_uncorr = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_fc_fd_uncorr')
        self.ds_pqm_plot_distance_dependence = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_distance_dependence')
        self.ds_pqm_plot_distance_dependence_no_high = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_distance_dependence_no_high')
        self.ds_pqm_plot_tdof_loss = Node(
            BIDSDataSink(base_entities=base_entities),
            name='ds_pipeline_qm_plot_tdof_loss')

        self.connections = [
            # bidsgrabber
            (self.subjectselector, self.bidsgrabber, [('subject', 'subject')]),
            (self.taskselector, self.bidsgrabber, [('task', 'task')]),
            # prep_conf
            (self.pipelineselector, self.prep_conf, [('pipeline', 'pipeline')]
             ),
            (self.bidsgrabber, self.prep_conf, [('conf_raw', 'conf_raw'),
                                                ('conf_json', 'conf_json')]),
            # denoise
            (self.prep_conf, self.denoise, [('conf_prep', 'conf_prep')]),
            (self.pipelineselector, self.denoise, [('pipeline', 'pipeline')]),
            # group conf summary
            (self.prep_conf, self.group_conf_summary,
             [('conf_summary', 'conf_summary_json_files')]),
            # connectivity
            (self.denoise, self.connectivity, [('fmri_denoised',
                                                'fmri_denoised')]),
            # group connectivity
            (self.connectivity, self.group_connectivity, [("corr_mat",
                                                           "corr_mat")]),
            # quality measures
            (self.pipelineselector, self.quality_measures, [('pipeline',
                                                             'pipeline')]),
            (self.group_connectivity, self.quality_measures,
             [('group_corr_mat', 'group_corr_mat')]),
            (self.group_conf_summary, self.quality_measures,
             [('group_conf_summary', 'group_conf_summary')]),
            # quality measure join over pipelines
            (self.quality_measures, self.quality_measures_join, [
                ('excluded_subjects', 'excluded_subjects'),
                ('warnings', 'warnings'),
                ('corr_matrix_plot', 'corr_matrix_plot'),
                ('corr_matrix_no_high_motion_plot',
                 'corr_matrix_no_high_motion_plot')
            ]),
            # pipeline quality measures
            (self.quality_measures, self.pipelines_quality_measures,
             [('fc_fd_summary', 'fc_fd_summary'),
              ('edges_weight', 'edges_weight'),
              ('edges_weight_clean', 'edges_weight_clean'),
              ('fc_fd_corr_values', 'fc_fd_corr_values'),
              ('fc_fd_corr_values_clean', 'fc_fd_corr_values_clean')]),
            (self.taskselector, self.pipelines_quality_measures, [('task',
                                                                   'task')]),
            # pipelines_join
            (self.pipelineselector, self.pipelines_join, [('pipeline',
                                                           'pipelines')]),
            # pipeline_quality_measures_join
            (self.pipelines_quality_measures,
             self.pipeline_quality_measures_join_tasks, [
                 ('pipelines_fc_fd_summary', 'pipelines_fc_fd_summary'),
                 ('plot_pipelines_edges_density',
                  'plot_pipelines_edges_density'),
                 ('plot_pipelines_edges_density_no_high_motion',
                  'plot_pipelines_edges_density_no_high_motion'),
                 ('plot_pipelines_fc_fd_pearson',
                  'plot_pipelines_fc_fd_pearson'),
                 ('plot_pipelines_fc_fd_pearson_no_high_motion',
                  'plot_pipelines_fc_fd_pearson_no_high_motion'),
                 ('plot_pipelines_fc_fd_uncorr',
                  'plot_pipelines_fc_fd_uncorr'),
                 ('plot_pipelines_distance_dependence',
                  'plot_pipelines_distance_dependence'),
                 ('plot_pipelines_distance_dependence_no_high_motion',
                  'plot_pipelines_distance_dependence_no_high_motion'),
                 ('plot_pipelines_tdof_loss', 'plot_pipelines_tdof_loss'),
             ]),
            (self.quality_measures_join,
             self.pipeline_quality_measures_join_tasks, [
                 ('excluded_subjects', 'excluded_subjects'),
                 ('warnings', 'warnings'),
                 ('corr_matrix_plot', 'corr_matrix_plot'),
                 ('corr_matrix_no_high_motion_plot',
                  'corr_matrix_no_high_motion_plot')
             ]),
            # report creator
            (self.pipelines_join, self.report_creator, [('pipelines',
                                                         'pipelines')]),
            # all datasinks
            # # ds_denoise
            (self.denoise, self.ds_denoise, [("fmri_denoised", "in_file")]),
            # # ds_connectivity
            (self.connectivity, self.ds_connectivity_corr_mat, [("corr_mat",
                                                                 "in_file")]),
            (self.connectivity, self.ds_connectivity_matrix_plot,
             [("matrix_plot", "in_file")]),
            (self.connectivity, self.ds_connectivity_carpet_plot,
             [("carpet_plot", "in_file")]),
            # # ds_confounds
            (self.prep_conf, self.ds_confounds, [("conf_prep", "in_file")]),
            # # ds_group_conf
            (self.group_conf_summary, self.ds_group_conf_summary,
             [('group_conf_summary', 'in_file')]),
            # # ds_group_connectivity
            (self.group_connectivity, self.ds_group_connectivity,
             [('group_corr_mat', 'in_file')]),
            # # ds_quality_measures
            (self.quality_measures, self.ds_qm_motion_plot, [('motion_plot',
                                                              'in_file')]),
            (self.quality_measures, self.ds_qm_corr_matrix_plot,
             [('corr_matrix_plot', 'in_file')]),
            (self.quality_measures, self.ds_qm_corr_matrix_plot_no_high,
             [('corr_matrix_no_high_motion_plot', 'in_file')]),
            # # ds_pipelines_quality_measures
            (self.pipelines_quality_measures, self.ds_pqm_fc_fd_summary,
             [('pipelines_fc_fd_summary', 'in_file')]),
            (self.pipelines_quality_measures, self.ds_pqm_edges_weight,
             [('pipelines_edges_weight', 'in_file')]),
            (self.pipelines_quality_measures, self.ds_pqm_edges_weight_clean,
             [('pipelines_edges_weight_clean', 'in_file')]),
            (self.pipelines_quality_measures, self.ds_pqm_plot_edges_density,
             [('plot_pipelines_edges_density', 'in_file')]),
            (self.pipelines_quality_measures,
             self.ds_pqm_plot_edges_density_no_high, [
                 ('plot_pipelines_edges_density_no_high_motion', 'in_file')
             ]),
            (self.pipelines_quality_measures, self.ds_pqm_plot_fc_fd,
             [('plot_pipelines_fc_fd_pearson', 'in_file')]),
            (self.pipelines_quality_measures, self.ds_pqm_plot_fc_fd_no_high,
             [('plot_pipelines_fc_fd_pearson_no_high_motion', 'in_file')]),
            (self.pipelines_quality_measures, self.ds_pqm_plot_fc_fd_uncorr,
             [('plot_pipelines_fc_fd_uncorr', 'in_file')]),
            (self.pipelines_quality_measures,
             self.ds_pqm_plot_distance_dependence, [
                 ('plot_pipelines_distance_dependence', 'in_file')
             ]),
            (self.pipelines_quality_measures,
             self.ds_pqm_plot_distance_dependence_no_high, [
                 ('plot_pipelines_distance_dependence_no_high_motion',
                  'in_file')
             ]),
            (self.pipelines_quality_measures, self.ds_pqm_plot_tdof_loss,
             [('plot_pipelines_tdof_loss', 'in_file')])
        ]
        self.last_join = self.pipeline_quality_measures_join_tasks
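
Nothing here executes yet: the __init__ only declares nodes and an edge
list. A hedged sketch of how a caller would presumably assemble and run it,
assuming `builder` is an instance of the class above:

# Hypothetical assembly; the workflow name and base_dir are assumptions.
wf = Workflow(name="fmridenoise", base_dir="/tmp/fmridenoise_work")
wf.connect(builder.connections)
wf.run(plugin="MultiProc")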