def qap_functional_temporal_workflow(workflow, resource_pool, config):

    # resource pool should have:
    #     functional_brain_mask
    #     func_motion_correct
    #     coordinate_transformation

    import os
    import sys

    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe

    import nipype.interfaces.utility as util

    from qap_workflows_utils import qap_functional_temporal, write_to_csv

    """
    if "mean_functional" not in resource_pool.keys():

        from functional_preproc import mean_functional_workflow

        workflow, resource_pool = \
            mean_functional_workflow(workflow, resource_pool, config)
    """

    if "functional_brain_mask" not in resource_pool.keys():

        from functional_preproc import functional_brain_mask_workflow

        workflow, resource_pool = functional_brain_mask_workflow(workflow, resource_pool, config)

    if ("func_motion_correct" not in resource_pool.keys()) or (
        "coordinate_transformation" not in resource_pool.keys() and "mcflirt_rel_rms" not in resource_pool.keys()
    ):

        from functional_preproc import func_motion_correct_workflow

        workflow, resource_pool = func_motion_correct_workflow(workflow, resource_pool, config)

    temporal = pe.Node(
        util.Function(
            input_names=[
                "func_motion_correct",
                "func_brain_mask",
                "coord_xfm_matrix",
                "subject_id",
                "session_id",
                "scan_id",
                "site_name",
            ],
            output_names=["qc"],
            function=qap_functional_temporal,
        ),
        name="qap_functional_temporal",
    )

    temporal_to_csv = pe.Node(
        util.Function(input_names=["sub_qap_dict"], output_names=["outfile"], function=write_to_csv),
        name="qap_functional_temporal_to_csv",
    )

    if len(resource_pool["func_motion_correct"]) == 2:
        node, out_file = resource_pool["func_motion_correct"]
        workflow.connect(node, out_file, temporal, "func_motion_correct")
    else:
        temporal.inputs.func_motion_correct = resource_pool["func_motion_correct"]

    if len(resource_pool["functional_brain_mask"]) == 2:
        node, out_file = resource_pool["functional_brain_mask"]
        workflow.connect(node, out_file, temporal, "func_brain_mask")
    else:
        temporal.inputs.func_brain_mask = resource_pool["functional_brain_mask"]

    if "mcflirt_rel_rms" in resource_pool.keys():

        temporal.inputs.coord_xfm_matrix = resource_pool["mcflirt_rel_rms"]

    else:

        if len(resource_pool["coordinate_transformation"]) == 2:
            node, out_file = resource_pool["coordinate_transformation"]
            workflow.connect(node, out_file, temporal, "coord_xfm_matrix")
        else:
            temporal.inputs.coord_xfm_matrix = resource_pool["coordinate_transformation"]

    # Subject infos

    temporal.inputs.subject_id = config["subject_id"]
    temporal.inputs.session_id = config["session_id"]
    temporal.inputs.scan_id = config["scan_id"]

    if "site_name" in config.keys():
        temporal.inputs.site_name = config["site_name"]

    workflow.connect(temporal, "qc", temporal_to_csv, "sub_qap_dict")

    resource_pool["qap_functional_temporal"] = (temporal_to_csv, "outfile")

    return workflow, resource_pool
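
# A hedged usage sketch for the builder above: the file paths, output
# directory and ID values are hypothetical placeholders, and the overall
# pattern (an empty pe.Workflow, a resource pool of file paths or
# (node, output_name) pairs, then a DataSink) mirrors the run_* helpers shown
# later in this collection rather than a documented entry point.
import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio

wf = pe.Workflow(name="qap_temporal_example")
wf.base_dir = "/tmp/qap_example"  # hypothetical scratch directory

resource_pool = {
    "func_motion_correct": "/data/sub01/func_mc.nii.gz",          # hypothetical
    "functional_brain_mask": "/data/sub01/func_mask.nii.gz",      # hypothetical
    "coordinate_transformation": "/data/sub01/func_mc.aff12.1D",  # hypothetical
}
config = {"subject_id": "sub01", "session_id": "ses01", "scan_id": "scan01"}

wf, resource_pool = qap_functional_temporal_workflow(wf, resource_pool, config)

ds = pe.Node(nio.DataSink(), name="datasink")
ds.inputs.base_directory = wf.base_dir
node, out_file = resource_pool["qap_functional_temporal"]
wf.connect(node, out_file, ds, "qap_functional_temporal")
# wf.run()
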
def qap_functional_temporal_workflow(workflow, resource_pool, config):

    # resource pool should have:
    #     functional_brain_mask
    #     func_motion_correct
    #     coordinate_transformation

    import os
    import os.path as op
    import sys
    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.algorithms.misc as nam

    from qap_workflows_utils import qap_functional_temporal
    from temporal_qc import fd_jenkinson
    from qap.viz.interfaces import PlotMosaic, PlotFD

    def _getfirst(inlist):
        if isinstance(inlist, list):
            return inlist[0]

        return inlist

    # if 'mean_functional' not in resource_pool.keys():
    #     from functional_preproc import mean_functional_workflow
    #     workflow, resource_pool = \
    #         mean_functional_workflow(workflow, resource_pool, config)

    if "functional_brain_mask" not in resource_pool.keys():
        from functional_preproc import functional_brain_mask_workflow

        workflow, resource_pool = functional_brain_mask_workflow(workflow, resource_pool, config)

    if ("func_motion_correct" not in resource_pool.keys()) or (
        "coordinate_transformation" not in resource_pool.keys() and "mcflirt_rel_rms" not in resource_pool.keys()
    ):
        from functional_preproc import func_motion_correct_workflow

        workflow, resource_pool = func_motion_correct_workflow(workflow, resource_pool, config)

    # Frame-wise displacement (Jenkinson) computed from the motion estimates
    fd = pe.Node(
        niu.Function(input_names=["in_file"], output_names=["out_file"], function=fd_jenkinson), name="generate_FD_file"
    )

    if "mcflirt_rel_rms" in resource_pool.keys():
        fd.inputs.in_file = resource_pool["mcflirt_rel_rms"]
    else:
        if len(resource_pool["coordinate_transformation"]) == 2:
            node, out_file = resource_pool["coordinate_transformation"]
            workflow.connect(node, out_file, fd, "in_file")
        else:
            fd.inputs.in_file = resource_pool["coordinate_transformation"]

    temporal = pe.Node(
        niu.Function(
            input_names=[
                "func_motion_correct",
                "func_brain_mask",
                "tsnr_volume",
                "fd_file",
                "subject_id",
                "session_id",
                "scan_id",
                "site_name",
            ],
            output_names=["qc"],
            function=qap_functional_temporal,
        ),
        name="qap_functional_temporal",
    )
    temporal.inputs.subject_id = config["subject_id"]
    temporal.inputs.session_id = config["session_id"]
    temporal.inputs.scan_id = config["scan_id"]
    workflow.connect(fd, "out_file", temporal, "fd_file")

    if "site_name" in config.keys():
        temporal.inputs.site_name = config["site_name"]

    # Temporal SNR (tSNR) map of the motion-corrected time series
    tsnr = pe.Node(nam.TSNR(), name="compute_tsnr")
    if len(resource_pool["func_motion_correct"]) == 2:
        node, out_file = resource_pool["func_motion_correct"]
        workflow.connect(node, out_file, tsnr, "in_file")
        workflow.connect(node, out_file, temporal, "func_motion_correct")
    else:
        from workflow_utils import check_input_resources

        check_input_resources(resource_pool, "func_motion_correct")
        input_file = resource_pool["func_motion_correct"]
        tsnr.inputs.in_file = input_file
        temporal.inputs.func_motion_correct = input_file

    if len(resource_pool["functional_brain_mask"]) == 2:
        node, out_file = resource_pool["functional_brain_mask"]
        workflow.connect(node, out_file, temporal, "func_brain_mask")
    else:
        temporal.inputs.func_brain_mask = resource_pool["functional_brain_mask"]

    # Write mosaic and FD plot
    if config.get("write_report", False):
        plot = pe.Node(PlotMosaic(), name="plot_mosaic")
        plot.inputs.subject = config["subject_id"]

        metadata = [config["session_id"], config["scan_id"]]
        if "site_name" in config.keys():
            metadata.append(config["site_name"])

        plot.inputs.metadata = metadata
        plot.inputs.title = "tSNR volume"
        workflow.connect(tsnr, "tsnr_file", plot, "in_file")

        # Enable this if we want masks
        # if len(resource_pool['functional_brain_mask']) == 2:
        #     node, out_file = resource_pool['functional_brain_mask']
        #     workflow.connect(node, out_file, plot, 'in_mask')
        # else:
        #     plot.inputs.in_mask = resource_pool['functional_brain_mask']
        resource_pool["qap_mosaic"] = (plot, "out_file")

        fdplot = pe.Node(PlotFD(), name="plot_fd")
        fdplot.inputs.subject = config["subject_id"]
        fdplot.inputs.metadata = metadata
        workflow.connect(fd, "out_file", fdplot, "in_file")
        resource_pool["qap_fd"] = (fdplot, "out_file")

    out_csv = op.join(config["output_directory"], "qap_functional_temporal.csv")
    temporal_to_csv = pe.Node(nam.AddCSVRow(in_file=out_csv), name="qap_functional_temporal_to_csv")

    workflow.connect(tsnr, "tsnr_file", temporal, "tsnr_volume")
    workflow.connect(temporal, "qc", temporal_to_csv, "_outputs")
    resource_pool["qap_functional_temporal"] = (temporal_to_csv, "csv_file")
    return workflow, resource_pool
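
# The fd_jenkinson helper imported above is not reproduced in this collection.
# For reference, a minimal sketch of a Jenkinson-style relative RMS
# displacement follows; it assumes the input text file holds one flattened
# 3x4 affine per volume (e.g. AFNI 3dvolreg's -1Dmatrix_save output) and uses
# the conventional 80 mm head radius. Treat it as an illustration of the
# metric, not as the actual QAP implementation.
import numpy as np

def fd_jenkinson_sketch(in_file, rmax=80.0):
    mats = np.loadtxt(in_file)  # n_vols x 12
    fd = [0.0]                  # first volume is the reference
    prev = np.vstack([mats[0].reshape(3, 4), [0, 0, 0, 1]])
    for row in mats[1:]:
        cur = np.vstack([row.reshape(3, 4), [0, 0, 0, 1]])
        m = cur.dot(np.linalg.inv(prev)) - np.eye(4)  # relative transform minus identity
        a, t = m[:3, :3], m[:3, 3]
        fd.append(np.sqrt((rmax ** 2 / 5.0) * np.trace(a.T.dot(a)) + t.dot(t)))
        prev = cur
    return np.asarray(fd)
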
def qap_functional_spatial_workflow(workflow, resource_pool, config):

    # resource pool should have:
    #     mean_functional
    #     functional_brain_mask

    import os
    import sys

    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe

    import nipype.interfaces.utility as util

    from qap_workflows_utils import qap_functional_spatial, write_to_csv

    from workflow_utils import check_input_resources

    if "mean_functional" not in resource_pool.keys():

        from functional_preproc import mean_functional_workflow

        workflow, resource_pool = mean_functional_workflow(workflow, resource_pool, config)

    if "functional_brain_mask" not in resource_pool.keys():

        from functional_preproc import functional_brain_mask_workflow

        workflow, resource_pool = functional_brain_mask_workflow(workflow, resource_pool, config)

    spatial_epi = pe.Node(
        util.Function(
            input_names=[
                "mean_epi",
                "func_brain_mask",
                "direction",
                "subject_id",
                "session_id",
                "scan_id",
                "site_name",
            ],
            output_names=["qc"],
            function=qap_functional_spatial,
        ),
        name="qap_functional_spatial",
    )

    spatial_epi_to_csv = pe.Node(
        util.Function(input_names=["sub_qap_dict"], output_names=["outfile"], function=write_to_csv),
        name="qap_functional_spatial_to_csv",
    )

    if len(resource_pool["mean_functional"]) == 2:
        node, out_file = resource_pool["mean_functional"]
        workflow.connect(node, out_file, spatial_epi, "mean_epi")
    else:
        spatial_epi.inputs.mean_epi = resource_pool["mean_functional"]

    if len(resource_pool["functional_brain_mask"]) == 2:
        node, out_file = resource_pool["functional_brain_mask"]
        workflow.connect(node, out_file, spatial_epi, "func_brain_mask")
    else:
        spatial_epi.inputs.func_brain_mask = resource_pool["functional_brain_mask"]

    # Subject infos
    if "ghost_direction" not in config.keys():
        config["ghost_direction"] = "y"

    spatial_epi.inputs.direction = config["ghost_direction"]

    spatial_epi.inputs.subject_id = config["subject_id"]
    spatial_epi.inputs.session_id = config["session_id"]
    spatial_epi.inputs.scan_id = config["scan_id"]

    if "site_name" in config.keys():
        spatial_epi.inputs.site_name = config["site_name"]

    workflow.connect(spatial_epi, "qc", spatial_epi_to_csv, "sub_qap_dict")

    resource_pool["qap_functional_spatial"] = (spatial_epi_to_csv, "outfile")

    return workflow, resource_pool
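
# write_to_csv (imported above from qap_workflows_utils) is also not shown in
# this collection. Conceptually it flattens the qc dictionary produced by the
# measure node into a one-row CSV; a rough stand-alone illustration of that
# idea, with a hypothetical output filename, might look like this:
import csv

def write_qc_row_sketch(sub_qap_dict, outfile="qap_row.csv"):
    with open(outfile, "w") as f:
        writer = csv.DictWriter(f, fieldnames=sorted(sub_qap_dict.keys()))
        writer.writeheader()
        writer.writerow(sub_qap_dict)
    return outfile
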
def qap_functional_spatial_workflow(workflow, resource_pool, config):

    # resource pool should have:
    #     mean_functional
    #     functional_brain_mask

    import os
    import os.path as op
    import sys

    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.algorithms.misc as nam

    from qap_workflows_utils import qap_functional_spatial
    from qap.viz.interfaces import PlotMosaic

    from workflow_utils import check_input_resources

    if "mean_functional" not in resource_pool.keys():
        from functional_preproc import mean_functional_workflow

        workflow, resource_pool = mean_functional_workflow(workflow, resource_pool, config)

    if "functional_brain_mask" not in resource_pool.keys():
        from functional_preproc import functional_brain_mask_workflow

        workflow, resource_pool = functional_brain_mask_workflow(workflow, resource_pool, config)

    spatial_epi = pe.Node(
        niu.Function(
            input_names=[
                "mean_epi",
                "func_brain_mask",
                "direction",
                "subject_id",
                "session_id",
                "scan_id",
                "site_name",
            ],
            output_names=["qc"],
            function=qap_functional_spatial,
        ),
        name="qap_functional_spatial",
    )

    # Subject infos
    if "ghost_direction" not in config.keys():
        config["ghost_direction"] = "y"

    spatial_epi.inputs.direction = config["ghost_direction"]
    spatial_epi.inputs.subject_id = config["subject_id"]
    spatial_epi.inputs.session_id = config["session_id"]
    spatial_epi.inputs.scan_id = config["scan_id"]

    if "site_name" in config.keys():
        spatial_epi.inputs.site_name = config["site_name"]

    if len(resource_pool["mean_functional"]) == 2:
        node, out_file = resource_pool["mean_functional"]
        workflow.connect(node, out_file, spatial_epi, "mean_epi")
    else:
        spatial_epi.inputs.mean_epi = resource_pool["mean_functional"]

    if len(resource_pool["functional_brain_mask"]) == 2:
        node, out_file = resource_pool["functional_brain_mask"]
        workflow.connect(node, out_file, spatial_epi, "func_brain_mask")
    else:
        spatial_epi.inputs.func_brain_mask = resource_pool["functional_brain_mask"]

    if config.get("write_report", False):
        plot = pe.Node(PlotMosaic(), name="plot_mosaic")
        plot.inputs.subject = config["subject_id"]

        metadata = [config["session_id"], config["scan_id"]]
        if "site_name" in config.keys():
            metadata.append(config["site_name"])

        plot.inputs.metadata = metadata
        plot.inputs.title = "Mean EPI"

        if len(resource_pool["mean_functional"]) == 2:
            node, out_file = resource_pool["mean_functional"]
            workflow.connect(node, out_file, plot, "in_file")
        else:
            plot.inputs.in_file = resource_pool["mean_functional"]

        # Enable this if we want masks
        # if len(resource_pool['functional_brain_mask']) == 2:
        #     node, out_file = resource_pool['functional_brain_mask']
        #     workflow.connect(node, out_file, plot, 'in_mask')
        # else:
        #     plot.inputs.in_mask = resource_pool['functional_brain_mask']
        resource_pool["qap_mosaic"] = (plot, "out_file")

    out_csv = op.join(config["output_directory"], "qap_functional_spatial.csv")
    spatial_epi_to_csv = pe.Node(nam.AddCSVRow(in_file=out_csv), name="qap_functional_spatial_to_csv")
    workflow.connect(spatial_epi, "qc", spatial_epi_to_csv, "_outputs")
    resource_pool["qap_functional_spatial"] = (spatial_epi_to_csv, "csv_file")

    return workflow, resource_pool
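
# Every builder in this collection repeats the same branch: a resource-pool
# entry is either a (node, output_name) pair to be wired with
# workflow.connect(), or a plain filepath to be set directly on the node's
# inputs. A small helper capturing that convention (the name is hypothetical,
# not part of the original module) could read:
def connect_or_set(workflow, resource, node, input_name):
    if isinstance(resource, tuple) and len(resource) == 2:
        src_node, src_output = resource
        workflow.connect(src_node, src_output, node, input_name)
    else:
        setattr(node.inputs, input_name, resource)
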
def invert_functional_brain_mask_workflow(workflow, resource_pool, config,
    name="_"):
    """Build and run a Nipype workflow to generate a background mask of a
    functional scan (the inversion of the functional brain mask) using AFNI's
    3dCalc.

    - If any resources/outputs required by this workflow are not in the
      resource pool, this workflow will call pre-requisite workflow builder
      functions to further populate the pipeline with workflows which will
      calculate/generate these necessary pre-requisites.

    Expected Resources in Resource Pool
      - functional_brain_mask: The binary brain mask of the functional time
                               series.

    New Resources Added to Resource Pool
      - inverted_functional_brain_mask: The inversion of the functional brain
                                        mask, a binary brain mask of the
                                        background of the functional time
                                        series.

    Workflow Steps:
      1. AFNI's 3dcalc to invert the functional brain mask

    :type workflow: Nipype workflow object
    :param workflow: A Nipype workflow object which can already contain other
                     connected nodes; this function will insert the following
                     workflow into this one provided.
    :type resource_pool: dict
    :param resource_pool: A dictionary defining input files and pointers to
                          Nipype node outputs / workflow connections; the keys
                          are the resource names.
    :type config: dict
    :param config: A dictionary defining the configuration settings for the
                   workflow, such as directory paths or toggled options.
    :type name: str
    :param name: (default: "_") A string to append to the end of each node
                 name.
    :rtype: Nipype workflow object
    :return: The Nipype workflow originally provided, but with this function's
              sub-workflow connected into it.
    :rtype: dict
    :return: The resource pool originally provided, but updated (if
             applicable) with the newest outputs and connections.
    """

    import copy
    import nipype.pipeline.engine as pe
    from nipype.interfaces.afni import preprocess

    if "functional_brain_mask" not in resource_pool.keys():

        from functional_preproc import functional_brain_mask_workflow
        old_rp = copy.copy(resource_pool)
        workflow, resource_pool = \
            functional_brain_mask_workflow(workflow, resource_pool, config, name)
        if resource_pool == old_rp:
            return workflow, resource_pool
  
    # 3dcalc to invert the binary functional brain mask
    invert_mask = pe.Node(interface=preprocess.Calc(), 
                          name='invert_mask%s' % name)

    invert_mask.inputs.expr = "iszero(a)"
    invert_mask.inputs.outputtype = "NIFTI_GZ"

    # functional_brain_mask -> 3dcalc        
    if len(resource_pool["functional_brain_mask"]) == 2:
        node, out_file = resource_pool["functional_brain_mask"]
        workflow.connect(node, out_file, invert_mask, 'in_file_a')
    else:
        invert_mask.inputs.in_file_a = resource_pool["functional_brain_mask"]

    resource_pool["inverted_functional_brain_mask"] = (invert_mask, 'out_file')

    return workflow, resource_pool
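
# A hedged usage sketch for the builder above, following the same run_*
# pattern used elsewhere in this collection; the filepath and output
# directory are hypothetical placeholders.
import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio

wf = pe.Workflow(name="invert_mask_example")
wf.base_dir = "/tmp/qap_example"  # hypothetical

resource_pool = {"functional_brain_mask": "/data/sub01/func_mask.nii.gz"}  # hypothetical path
wf, resource_pool = invert_functional_brain_mask_workflow(wf, resource_pool, config={})

ds = pe.Node(nio.DataSink(), name="datasink_inverted_mask")
ds.inputs.base_directory = wf.base_dir
node, out_file = resource_pool["inverted_functional_brain_mask"]
wf.connect(node, out_file, ds, "inverted_functional_brain_mask")
# wf.run()
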
def run_functional_brain_mask(func_reorient, out_dir=None, run=True):
    """Run the 'functional_brain_mask_workflow' function to execute the 
    modular workflow with the provided inputs.

    :type func_reorient: str
    :param func_reorient: Filepath to the deobliqued, reoriented functional
                          timeseries.
    :type out_dir: str
    :param out_dir: (default: None) The output directory to write the results
                    to; if left as None, will write to the current directory.
    :type run: bool
    :param run: (default: True) Will run the workflow; if set to False, will
                connect the Nipype workflow and return the workflow object
                instead.
    :rtype: str
    :return: (if run=True) The filepath of the generated functional_brain_mask
             file.
    :rtype: Nipype workflow object
    :return: (if run=False) The connected Nipype workflow object.
    :rtype: str
    :return: (if run=False) The base directory of the workflow if it were to
             be run.
    """

    import os
    import glob

    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe

    from functional_preproc import functional_brain_mask_workflow

    output = "functional_brain_mask"

    workflow = pe.Workflow(name='%s_workflow' % output)

    if not out_dir:
        out_dir = os.getcwd()

    workflow_dir = os.path.join(out_dir, "workflow_output", output)

    workflow.base_dir = workflow_dir

    resource_pool = {}
    config = {}
    num_cores_per_subject = 1

    resource_pool["func_reorient"] = func_reorient
    
    workflow, resource_pool = \
            functional_brain_mask_workflow(workflow, resource_pool, config)

    ds = pe.Node(nio.DataSink(), name='datasink_%s' % output)
    ds.inputs.base_directory = workflow_dir
    
    node, out_file = resource_pool[output]

    workflow.connect(node, out_file, ds, output)

    if run:
        workflow.run(plugin='MultiProc', plugin_args= \
                         {'n_procs': num_cores_per_subject})
        outpath = glob.glob(os.path.join(workflow_dir, "functional_brain" \
                                "_mask", "*"))[0]
        return outpath      
    else:
        return workflow, workflow.base_dir
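
# A hedged example call for the runner above; the input path and output
# directory are hypothetical placeholders. With run=True the workflow is
# executed with the MultiProc plugin and the path of the generated mask is
# returned; with run=False the connected workflow and its base directory are
# returned instead.
if __name__ == "__main__":
    mask_path = run_functional_brain_mask(
        "/data/sub01/func_reorient.nii.gz",  # hypothetical reoriented 4D EPI
        out_dir="/tmp/qap_example",
        run=True,
    )
    print(mask_path)
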