def anatomical_reorient_workflow(workflow, resource_pool, config):

    # resource pool should have:
    #     anatomical_scan

    import os
    import sys

    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe

    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl.maths as fsl

    from nipype.interfaces.afni import preprocess

    from workflow_utils import check_input_resources

    check_input_resources(resource_pool, "anatomical_scan")

    # remove any oblique transform from the header (AFNI 3drefit -deoblique)
    anat_deoblique = pe.Node(interface=preprocess.Refit(), name="anat_deoblique")

    anat_deoblique.inputs.in_file = resource_pool["anatomical_scan"]
    anat_deoblique.inputs.deoblique = True

    # resample the deobliqued image into RPI orientation (AFNI 3dresample)
    anat_reorient = pe.Node(interface=preprocess.Resample(), name="anat_reorient")

    anat_reorient.inputs.orientation = "RPI"
    anat_reorient.inputs.outputtype = "NIFTI_GZ"

    workflow.connect(anat_deoblique, "out_file", anat_reorient, "in_file")

    resource_pool["anatomical_reorient"] = (anat_reorient, "out_file")

    return workflow, resource_pool
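
# Usage sketch (illustrative, not from the original source): one way this
# builder could be attached to a Nipype workflow. The working directory and
# scan path below are hypothetical placeholders, and running it requires
# Nipype plus AFNI on the PATH.
def _example_run_anatomical_reorient():
    import nipype.pipeline.engine as pe

    workflow = pe.Workflow(name="anat_reorient_example")
    workflow.base_dir = "/tmp/qap_example_work"  # hypothetical scratch directory

    resource_pool = {"anatomical_scan": "/data/sub-01/anat.nii.gz"}  # hypothetical path
    config = {}

    workflow, resource_pool = \
        anatomical_reorient_workflow(workflow, resource_pool, config)

    # the resource pool now maps "anatomical_reorient" to a (node, output_name)
    # pair; connect it to a DataSink or a downstream workflow before run()
    node, out_file = resource_pool["anatomical_reorient"]
    return workflow, node, out_file
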
def func_motion_correct_workflow(workflow, resource_pool, config):

    # resource pool should have:
    #     functional_scan

    import os
    import sys

    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe

    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl.maths as fsl

    from nipype.interfaces.afni import preprocess

    from workflow_utils import check_input_resources, \
                               check_config_settings

    check_input_resources(resource_pool, "functional_scan")
    check_config_settings(config, "start_idx")
    check_config_settings(config, "stop_idx")
    check_config_settings(config, "slice_timing_correction")

    # get_idx is a module-level helper that clamps the requested start/stop
    # volume indices to the length of the functional time series; it is not
    # shown in this listing, so a reconstructed sketch is included after this
    # function
    func_get_idx = pe.Node(util.Function(
        input_names=['in_files', 'stop_idx', 'start_idx'],
        output_names=['stopidx', 'startidx'],
        function=get_idx),
        name='func_get_idx')

    func_get_idx.inputs.in_files = resource_pool["functional_scan"]
    func_get_idx.inputs.start_idx = config["start_idx"]
    func_get_idx.inputs.stop_idx = config["stop_idx"]

    # drop the volumes outside [start_idx, stop_idx] (AFNI 3dcalc with
    # expression 'a' acts as a pass-through copy over the kept range)
    func_drop_trs = pe.Node(interface=preprocess.Calc(), name='func_drop_trs')

    func_drop_trs.inputs.in_file_a = resource_pool["functional_scan"]
    func_drop_trs.inputs.expr = 'a'
    func_drop_trs.inputs.outputtype = 'NIFTI_GZ'

    workflow.connect(func_get_idx, 'startidx', func_drop_trs, 'start_idx')

    workflow.connect(func_get_idx, 'stopidx', func_drop_trs, 'stop_idx')

    #workflow.connect(func_drop_trs, 'out_file',
    #                outputNode, 'drop_tr')

    # slice-timing correction (AFNI 3dTshift); only wired in if enabled below
    func_slice_timing_correction = pe.Node(interface=preprocess.TShift(),
                                           name='func_slice_time_correction')

    func_slice_timing_correction.inputs.outputtype = 'NIFTI_GZ'

    func_deoblique = pe.Node(interface=preprocess.Refit(),
                             name='func_deoblique')

    func_deoblique.inputs.deoblique = True

    if config["slice_timing_correction"]:

        workflow.connect(func_drop_trs, 'out_file',
                         func_slice_timing_correction, 'in_file')

        workflow.connect(func_slice_timing_correction, 'out_file',
                         func_deoblique, 'in_file')

    else:

        workflow.connect(func_drop_trs, 'out_file', func_deoblique, 'in_file')

    func_reorient = pe.Node(interface=preprocess.Resample(),
                            name='func_reorient')
    func_reorient.inputs.orientation = 'RPI'
    func_reorient.inputs.outputtype = 'NIFTI_GZ'

    workflow.connect(func_deoblique, 'out_file', func_reorient, 'in_file')

    func_get_mean_RPI = pe.Node(interface=preprocess.TStat(),
                                name='func_get_mean_RPI')
    func_get_mean_RPI.inputs.options = '-mean'
    func_get_mean_RPI.inputs.outputtype = 'NIFTI_GZ'

    workflow.connect(func_reorient, 'out_file', func_get_mean_RPI, 'in_file')

    # first motion-correction pass: estimate motion parameters by registering
    # every volume to the mean EPI (AFNI 3dvolreg)
    func_motion_correct = pe.Node(interface=preprocess.Volreg(),
                                  name='func_motion_correct')

    func_motion_correct.inputs.args = '-Fourier -twopass'
    func_motion_correct.inputs.zpad = 4
    func_motion_correct.inputs.outputtype = 'NIFTI_GZ'

    workflow.connect(func_reorient, 'out_file', func_motion_correct, 'in_file')

    workflow.connect(func_get_mean_RPI, 'out_file', func_motion_correct,
                     'basefile')

    func_get_mean_motion = func_get_mean_RPI.clone('func_get_mean_motion')

    workflow.connect(func_motion_correct, 'out_file', func_get_mean_motion,
                     'in_file')

    # second pass: re-run 3dvolreg against the mean of the first-pass output,
    # saving the maximum-displacement file alongside the corrected data
    func_motion_correct_A = func_motion_correct.clone('func_motion_correct_A')
    func_motion_correct_A.inputs.md1d_file = 'max_displacement.1D'

    workflow.connect(func_reorient, 'out_file', func_motion_correct_A,
                     'in_file')

    workflow.connect(func_get_mean_motion, 'out_file', func_motion_correct_A,
                     'basefile')

    resource_pool["func_motion_correct"] = (func_motion_correct_A, 'out_file')
    resource_pool["coordinate_transformation"] = \
        (func_motion_correct_A, 'oned_matrix_save')

    return workflow, resource_pool
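
# Sketch of the get_idx helper that func_get_idx above relies on. The original
# module defines get_idx at module level but it is not included in this
# listing; this reconstruction is based only on how the node uses it (clamping
# the requested start/stop volume indices to the scan length) and is an
# assumption, not a verbatim copy.
def get_idx(in_files, stop_idx=None, start_idx=None):
    """Return (stopidx, startidx) bounded by the number of volumes."""
    from nibabel import load

    # number of time points in the 4D functional scan
    nvols = load(in_files).shape[3]

    # missing or out-of-range start index falls back to the first volume
    if start_idx is None or start_idx < 0 or start_idx > nvols - 1:
        startidx = 0
    else:
        startidx = start_idx

    # missing or out-of-range stop index falls back to the last volume
    if stop_idx is None or stop_idx < startidx or stop_idx > nvols - 1:
        stopidx = nvols - 1
    else:
        stopidx = stop_idx

    return stopidx, startidx
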
def qap_functional_temporal_workflow(workflow, resource_pool, config):

    # resource pool should have:
    #     functional_brain_mask
    #     func_motion_correct
    #     coordinate_transformation

    import os
    import os.path as op  # used below when building the output CSV path
    import sys
    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.algorithms.misc as nam

    from qap_workflows_utils import qap_functional_temporal
    from temporal_qc import fd_jenkinson
    from qap.viz.interfaces import PlotMosaic, PlotFD

    def _getfirst(inlist):
        if isinstance(inlist, list):
            return inlist[0]

        return inlist

    # if 'mean_functional' not in resource_pool.keys():
    #     from functional_preproc import mean_functional_workflow
    #     workflow, resource_pool = \
    #         mean_functional_workflow(workflow, resource_pool, config)

    if "functional_brain_mask" not in resource_pool.keys():
        from functional_preproc import functional_brain_mask_workflow

        workflow, resource_pool = functional_brain_mask_workflow(workflow, resource_pool, config)

    if ("func_motion_correct" not in resource_pool.keys()) or (
        "coordinate_transformation" not in resource_pool.keys() and "mcflirt_rel_rms" not in resource_pool.keys()
    ):
        from functional_preproc import func_motion_correct_workflow

        workflow, resource_pool = func_motion_correct_workflow(workflow, resource_pool, config)

    # framewise displacement (Jenkinson) computed from the motion estimates
    fd = pe.Node(
        niu.Function(input_names=["in_file"], output_names=["out_file"], function=fd_jenkinson), name="generate_FD_file"
    )

    if "mcflirt_rel_rms" in resource_pool.keys():
        fd.inputs.in_file = resource_pool["mcflirt_rel_rms"]
    else:
        if len(resource_pool["coordinate_transformation"]) == 2:
            node, out_file = resource_pool["coordinate_transformation"]
            workflow.connect(node, out_file, fd, "in_file")
        else:
            fd.inputs.in_file = resource_pool["coordinate_transformation"]

    temporal = pe.Node(
        niu.Function(
            input_names=[
                "func_motion_correct",
                "func_brain_mask",
                "tsnr_volume",
                "fd_file",
                "subject_id",
                "session_id",
                "scan_id",
                "site_name",
            ],
            output_names=["qc"],
            function=qap_functional_temporal,
        ),
        name="qap_functional_temporal",
    )
    temporal.inputs.subject_id = config["subject_id"]
    temporal.inputs.session_id = config["session_id"]
    temporal.inputs.scan_id = config["scan_id"]
    workflow.connect(fd, "out_file", temporal, "fd_file")

    if "site_name" in config.keys():
        temporal.inputs.site_name = config["site_name"]

    tsnr = pe.Node(nam.TSNR(), name="compute_tsnr")
    if len(resource_pool["func_motion_correct"]) == 2:
        node, out_file = resource_pool["func_motion_correct"]
        workflow.connect(node, out_file, tsnr, "in_file")
        workflow.connect(node, out_file, temporal, "func_motion_correct")
    else:
        from workflow_utils import check_input_resources

        check_input_resources(resource_pool, "func_motion_correct")
        input_file = resource_pool["func_motion_correct"]
        tsnr.inputs.in_file = input_file
        temporal.inputs.func_motion_correct = input_file

    if len(resource_pool["functional_brain_mask"]) == 2:
        node, out_file = resource_pool["functional_brain_mask"]
        workflow.connect(node, out_file, temporal, "func_brain_mask")
    else:
        temporal.inputs.func_brain_mask = resource_pool["functional_brain_mask"]

    # Write mosaic and FD plot
    if config.get("write_report", False):
        plot = pe.Node(PlotMosaic(), name="plot_mosaic")
        plot.inputs.subject = config["subject_id"]

        metadata = [config["session_id"], config["scan_id"]]
        if "site_name" in config.keys():
            metadata.append(config["site_name"])

        plot.inputs.metadata = metadata
        plot.inputs.title = "tSNR volume"
        workflow.connect(tsnr, "tsnr_file", plot, "in_file")

        # Enable this if we want masks
        # if len(resource_pool['functional_brain_mask']) == 2:
        #     node, out_file = resource_pool['functional_brain_mask']
        #     workflow.connect(node, out_file, plot, 'in_mask')
        # else:
        #     plot.inputs.in_mask = resource_pool['functional_brain_mask']
        resource_pool["qap_mosaic"] = (plot, "out_file")

        fdplot = pe.Node(PlotFD(), name="plot_fd")
        fdplot.inputs.subject = config["subject_id"]
        fdplot.inputs.metadata = metadata
        workflow.connect(fd, "out_file", fdplot, "in_file")
        resource_pool["qap_fd"] = (fdplot, "out_file")

    out_csv = op.join(config["output_directory"], "qap_functional_temporal.csv")
    temporal_to_csv = pe.Node(nam.AddCSVRow(in_file=out_csv), name="qap_functional_temporal_to_csv")

    workflow.connect(tsnr, "tsnr_file", temporal, "tsnr_volume")
    workflow.connect(temporal, "qc", temporal_to_csv, "_outputs")
    resource_pool["qap_functional_temporal"] = (temporal_to_csv, "csv_file")
    return workflow, resource_pool
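
# Usage sketch (illustrative, not from the original source): builds the
# temporal QC workflow from a raw functional scan and lets the builder pull in
# the motion-correction and brain-mask sub-workflows it needs. All paths,
# identifiers, and config values below are hypothetical placeholders.
def _example_run_qap_functional_temporal():
    import nipype.pipeline.engine as pe

    workflow = pe.Workflow(name="qap_functional_temporal_example")
    workflow.base_dir = "/tmp/qap_example_work"  # hypothetical scratch directory

    resource_pool = {"functional_scan": "/data/sub-01/func.nii.gz"}  # hypothetical
    config = {
        "start_idx": 0,
        "stop_idx": None,                  # None -> keep through the last volume
        "slice_timing_correction": False,
        "subject_id": "sub-01",
        "session_id": "ses-01",
        "scan_id": "func-01",
        "output_directory": "/tmp/qap_example_out",
        "write_report": False,
    }

    workflow, resource_pool = \
        qap_functional_temporal_workflow(workflow, resource_pool, config)

    # "qap_functional_temporal" now points at the node that appends one row of
    # temporal QC metrics to qap_functional_temporal.csv
    return workflow, resource_pool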