def row_to_table(row_data, output_table):
    """
    Append one row of data to a CSV table using nipype's AddCSVRow
    (thread-safe in multi-processor execution).

    (Requires Python module lockfile)

    Parameters
    ----------
    row_data : pandas Series
        row of data
    output_table : string
        add row to this table file

    Examples
    --------
    >>> import pandas as pd
    >>> from mhealthx.xio import row_to_table
    >>> row_data = pd.Series({'A': ['A0'], 'B': ['B0'], 'C': ['C0']})
    >>> output_table = 'test.csv'
    >>> row_to_table(row_data, output_table)
    """
    # Imported lazily so the module does not require nipype unless used.
    from nipype.algorithms import misc

    # Target file is passed at construction; each Series entry becomes a
    # column value on the appended row.
    writer = misc.AddCSVRow(in_file=output_table)
    writer.inputs.set(**row_data.to_dict())
    writer.run()
# Exemple #2  (snippet-scraper artifact — commented out so the file stays importable)
# 0
def map_energy(name='EnergyMapping', out_csv='energiesmapping.csv'):
    """
    Build a nipype workflow that computes a reference ("zero") energy for a
    set of surfaces and maps per-surface warp differences, appending results
    to a shared CSV file.

    Parameters
    ----------
    name : str
        Name of the returned workflow.
    out_csv : str
        CSV file collecting one row per evaluation (made absolute here so
        all nodes write to the same file).

    Returns
    -------
    wf : nipype Workflow
        Inputs: reference, surfaces0, surfaces1, in_mask, subject_id.
        Outputs: desc_zero (zero-energy descriptor file), out_diff
        (per-surface warp differences).

    NOTE(review): depends on module-level names (pe, niu, op, nio, namesh,
    namisc, ComputeEnergy, warp_n_map) imported elsewhere in this file.
    """

    # Resolve once so this workflow and the nested warp_n_map() sub-workflow
    # both append to the same absolute CSV path.
    out_csv = op.abspath(out_csv)
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'reference', 'surfaces0', 'surfaces1', 'in_mask', 'subject_id'
    ]),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['desc_zero', 'out_diff']),
        name='outputnode')

    # Energy of the unperturbed reference surfaces.
    ref_e = pe.Node(ComputeEnergy(), name='ComputeZeroEnergy')
    # Element-wise warp between corresponding surfaces of the two sets.
    diff = pe.MapNode(namesh.ComputeMeshWarp(),
                      name='ComputeError',
                      iterfield=['surface1', 'surface2'])

    getval = pe.Node(nio.JSONFileGrabber(), name='GetZeroEnergy')
    # Reference row is written with error factor fixed to 0.0.
    csv = pe.Node(namisc.AddCSVRow(in_file=out_csv), name="AddReferenceRow")
    csv.inputs.error = 0.0

    # Sub-workflow that perturbs the warp by a range of error factors and
    # records the resulting energies in the same CSV.
    mapper = warp_n_map(out_csv=out_csv)
    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, ref_e, [('reference', 'reference'),
                                    ('surfaces0', 'surfaces'),
                                    ('in_mask', 'in_mask')]),
                (ref_e, outputnode, [('out_file', 'desc_zero')]),
                (ref_e, getval, [('out_file', 'in_file')]),
                (inputnode, csv, [('subject_id', 'subject_id')]),
                (getval, csv, [('total', 'total')]),
                (inputnode, diff, [('surfaces0', 'surface1'),
                                   ('surfaces1', 'surface2')]),
                (diff, outputnode, [('out_warp', 'out_diff')]),
                (inputnode, mapper, [('subject_id', 'inputnode.subject_id'),
                                     ('reference', 'inputnode.reference'),
                                     ('in_mask', 'inputnode.in_mask')]),
                (diff, mapper, [('out_warp', 'inputnode.surf_warp')]),
                (ref_e, mapper, [('out_desc', 'inputnode.descriptors')])])
    return wf
# Exemple #3  (snippet-scraper artifact — commented out so the file stays importable)
# 0
def warp_n_map(name='EnergyWarpAndMap', out_csv='energies.csv'):
    """
    Build a nipype workflow that scales a surface warp by a sweep of error
    factors, computes the resulting energy for each factor, and appends one
    CSV row per (subject, error factor).

    Parameters
    ----------
    name : str
        Name of the returned workflow.
    out_csv : str
        CSV file to which AddCSVRow appends results.

    Returns
    -------
    wf : nipype Workflow
        Inputs: reference, surf_warp, in_mask, errfactor, descriptors,
        subject_id. Output: out_energy (energy JSON file).

    NOTE(review): depends on module-level names (pe, niu, np, nio, namesh,
    namisc, ComputeEnergy) imported elsewhere in this file.
    """
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'reference', 'surf_warp', 'in_mask', 'errfactor', 'descriptors',
        'subject_id'
    ]),
                        name='inputnode')
    # Sweep the error factor over 100 evenly spaced values in [-1.2, 1.2];
    # nipype iterables expand the downstream graph once per value.
    inputnode.iterables = ('errfactor',
                           np.linspace(-1.2, 1.2, num=100,
                                       endpoint=True).tolist())

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_energy']),
                         name='outputnode')

    # Multiply each surface warp by the current error factor.
    applyef = pe.MapNode(namesh.MeshWarpMaths(operation='mul'),
                         name='MeshMaths',
                         iterfield=['in_surf'])
    mapeneg = pe.Node(ComputeEnergy(), name='ComputeEnergy')
    # Extract the scalar 'total' energy from the JSON output for the CSV row.
    getval = pe.Node(nio.JSONFileGrabber(), name='GetEnergy')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, applyef, [('surf_warp', 'in_surf'),
                                      ('errfactor', 'operator')]),
                (applyef, mapeneg, [('out_file', 'surfaces')]),
                (inputnode, mapeneg, [('reference', 'reference'),
                                      ('in_mask', 'in_mask'),
                                      ('descriptors', 'descriptors')]),
                (mapeneg, getval, [('out_file', 'in_file')]),
                (mapeneg, outputnode, [('out_file', 'out_energy')])])

    csv = pe.Node(namisc.AddCSVRow(in_file=out_csv), name="AddRow")

    # One row per iteration: total energy, the error factor, and subject id.
    wf.connect([(getval, csv, [('total', 'total')]),
                (inputnode, csv, [('errfactor', 'error'),
                                  ('subject_id', 'subject_id')])])

    return wf
# Exemple #4  (snippet-scraper artifact — commented out so the file stays importable)
# 0
def registration_ev(name='EvaluateMapping'):
    """
    Workflow that provides different scores comparing two registration methods.
    It compares images similarity, displacement fields difference,
    mesh distances, and overlap indices.

    NOTE(review): most of the comparison graph (overlap, image similarity,
    field error) is currently commented out below — only the surface
    (Hausdorff) distance path is wired up. Depends on module-level names
    (pe, niu, namev, namisc, Merge, InverseField, HausdorffDistance)
    imported elsewhere in this file.
    """
    def _stats(in_file):
        # Summary statistics [mean, std, max, min, median] of the non-zero
        # voxels of an image; all zeros if the image is (near-)empty.
        import numpy as np
        import nibabel as nb
        data = nb.load(in_file).get_data()
        if np.all(data < 1.0e-5):
            return [0.0] * 5
        data = np.ma.masked_equal(data, 0)
        result = np.array([
            data.mean(),
            data.std(),
            data.max(),
            data.min(),
            np.ma.extras.median(data)
        ])
        return result.tolist()

    def _get_id(inlist):
        # Positional ids (0..n-1) used to label each surface's CSV row.
        return range(len(inlist))

    input_ref = pe.Node(niu.IdentityInterface(
        fields=['in_imag', 'in_tpms', 'in_surf', 'in_field', 'in_mask']),
                        name='refnode')
    input_tst = pe.Node(niu.IdentityInterface(
        fields=['in_imag', 'in_tpms', 'in_surf', 'in_field']),
                        name='tstnode')
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'snr', 'shape', 'method', 'repetition', 'resolution', 'out_csv'
    ]),
                        name='infonode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_tpm_diff', 'out_field_err']),
                         name='outputnode')
    merge_ref = pe.Node(Merge(), name='ConcatRefInputs')
    merge_tst = pe.Node(Merge(), name='ConcatTestInputs')
    overlap = pe.Node(namev.FuzzyOverlap(weighting='volume'), name='Overlap')
    diff_im = pe.Node(namev.Similarity(metric='cc'), name='ContrastDiff')
    inv_fld = pe.Node(InverseField(), name='InvertField')
    diff_fld = pe.Node(namev.ErrorMap(), name='FieldDiff')
    # Pairwise surface distance between reference and test surfaces.
    mesh = pe.MapNode(HausdorffDistance(cells_mode=True),
                      iterfield=['surface1', 'surface2'],
                      name='SurfDistance')
    csv = pe.MapNode(namisc.AddCSVRow(infields=['surf_id', 'surfdist_avg']),
                     name="AddRow",
                     iterfield=['surf_id', 'surfdist_avg'])
    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, csv, [('shape', 'model_type'), ('snr', 'snr'),
                          ('method', 'method'), ('resolution', 'resolution'),
                          ('repetition', 'repetition'),
                          ('out_csv', 'in_file')]),
        # (input_ref,  merge_ref, [('in_imag', 'in_files')]),
        # (input_tst,  merge_tst, [('in_imag', 'in_files')]),
        # (input_ref,    overlap, [('in_tpms', 'in_ref')]),
        # (input_tst,    overlap, [('in_tpms', 'in_tst')]),
        # (input_ref,    diff_im, [('in_mask', 'mask1'),
        #                          ('in_mask', 'mask2')]),
        # (merge_ref,    diff_im, [('merged_file', 'volume1')]),
        # (merge_tst,    diff_im, [('merged_file', 'volume2')]),
        # (input_ref,    inv_fld, [('in_field', 'in_field')]),
        # (input_ref,   diff_fld, [('in_mask', 'mask')]),
        # (inv_fld,     diff_fld, [('out_field', 'in_ref')]),
        # (input_tst,   diff_fld, [('in_field', 'in_tst')]),
        # (overlap,          csv, [('jaccard', 'fji_avg'),
        #                          ('class_fji', 'fji_tpm'),
        #                          ('dice', 'fdi_avg'),
        #                          ('class_fdi', 'fdi_tpm')]),
        # (diff_im,          csv, [('similarity', 'cc_image')]),
        # (diff_fld,         csv, [(('out_map', _stats), 'fmap_error')]),
        # (csv,       outputnode, [('csv_file', 'out_file')]),
        # (overlap,   outputnode, [('diff_file', 'out_tpm_diff')]),
        # (diff_fld,  outputnode, [('out_map', 'out_field_err')]),
        (input_ref, mesh, [('in_surf', 'surface1')]),
        (input_tst, mesh, [('in_surf', 'surface2')]),
        (mesh, csv, [('avg_hd', 'surfdist_avg'),
                     (('avg_hd', _get_id), 'surf_id')])
        # (mesh,             csv, [('max_hd', 'surfdist_hausdorff'),
        #                          ('avg_hd', 'surfdist_avg'),
        #                          ('std_hd', 'surfdist_std'),
        #                          ('stats_hd', 'surfdist_stats')])
    ])
    return wf
# Exemple #5  (snippet-scraper artifact — commented out so the file stays importable)
# 0
def qap_functional_temporal_workflow(workflow, resource_pool, config):
    """
    Attach QAP functional-temporal quality-metric nodes to an existing
    nipype workflow.

    Parameters
    ----------
    workflow : nipype Workflow
        Workflow to extend in place.
    resource_pool : dict
        Maps resource names either to a (node, output_name) tuple or to a
        concrete file path. Missing prerequisites are built on demand.
    config : dict
        Must contain subject_id, session_id, scan_id, output_directory;
        may contain site_name, write_report, mcflirt-related keys.

    Returns
    -------
    (workflow, resource_pool) : tuple
        The extended workflow and pool, with 'qap_functional_temporal'
        (and optionally 'qap_mosaic' / 'qap_fd') entries added.
    """

    # resource pool should have:
    #     functional_brain_mask
    #     func_motion_correct
    #     coordinate_transformation

    import os
    import sys
    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.algorithms.misc as nam

    from qap_workflows_utils import qap_functional_temporal
    from temporal_qc import fd_jenkinson
    from qap.viz.interfaces import PlotMosaic, PlotFD

    def _getfirst(inlist):
        # Unwrap single-file lists coming out of upstream nodes.
        if isinstance(inlist, list):
            return inlist[0]

        return inlist

    # if 'mean_functional' not in resource_pool.keys():
    #     from functional_preproc import mean_functional_workflow
    #     workflow, resource_pool = \
    #         mean_functional_workflow(workflow, resource_pool, config)

    # Build any missing prerequisite resources before wiring the QC nodes.
    if 'functional_brain_mask' not in resource_pool.keys():
        from functional_preproc import functional_brain_mask_workflow
        workflow, resource_pool = \
            functional_brain_mask_workflow(workflow, resource_pool, config)

    if ('func_motion_correct' not in resource_pool.keys()) or \
        ('coordinate_transformation' not in resource_pool.keys() and
            'mcflirt_rel_rms' not in resource_pool.keys()):
        from functional_preproc import func_motion_correct_workflow
        workflow, resource_pool = \
            func_motion_correct_workflow(workflow, resource_pool, config)

    # Framewise-displacement file (Jenkinson method).
    fd = pe.Node(niu.Function(input_names=['in_file'],
                              output_names=['out_file'],
                              function=fd_jenkinson),
                 name='generate_FD_file')

    # A pool entry is either (node, output_name) to connect, or a file path
    # to assign directly.
    if 'mcflirt_rel_rms' in resource_pool.keys():
        fd.inputs.in_file = resource_pool['mcflirt_rel_rms']
    else:
        if len(resource_pool['coordinate_transformation']) == 2:
            node, out_file = resource_pool['coordinate_transformation']
            workflow.connect(node, out_file, fd, 'in_file')
        else:
            fd.inputs.in_file = resource_pool['coordinate_transformation']

    temporal = pe.Node(niu.Function(input_names=[
        'func_motion_correct', 'func_brain_mask', 'tsnr_volume', 'fd_file',
        'subject_id', 'session_id', 'scan_id', 'site_name'
    ],
                                    output_names=['qc'],
                                    function=qap_functional_temporal),
                       name='qap_functional_temporal')
    temporal.inputs.subject_id = config['subject_id']
    temporal.inputs.session_id = config['session_id']
    temporal.inputs.scan_id = config['scan_id']
    workflow.connect(fd, 'out_file', temporal, 'fd_file')

    if 'site_name' in config.keys():
        temporal.inputs.site_name = config['site_name']

    # Temporal signal-to-noise ratio volume.
    tsnr = pe.Node(nam.TSNR(), name='compute_tsnr')
    if len(resource_pool['func_motion_correct']) == 2:
        node, out_file = resource_pool['func_motion_correct']
        workflow.connect(node, out_file, tsnr, 'in_file')
        workflow.connect(node, out_file, temporal, 'func_motion_correct')
    else:
        from workflow_utils import check_input_resources
        check_input_resources(resource_pool, 'func_motion_correct')
        input_file = resource_pool['func_motion_correct']
        tsnr.inputs.in_file = input_file
        temporal.inputs.func_motion_correct = input_file

    if len(resource_pool['functional_brain_mask']) == 2:
        node, out_file = resource_pool['functional_brain_mask']
        workflow.connect(node, out_file, temporal, 'func_brain_mask')
    else:
        temporal.inputs.func_brain_mask = \
            resource_pool['functional_brain_mask']

    # Write mosaic and FD plot
    if config.get('write_report', False):
        plot = pe.Node(PlotMosaic(), name='plot_mosaic')
        plot.inputs.subject = config['subject_id']

        metadata = [config['session_id'], config['scan_id']]
        if 'site_name' in config.keys():
            metadata.append(config['site_name'])

        plot.inputs.metadata = metadata
        plot.inputs.title = 'tSNR volume'
        workflow.connect(tsnr, 'tsnr_file', plot, 'in_file')

        # Enable this if we want masks
        # if len(resource_pool['functional_brain_mask']) == 2:
        #     node, out_file = resource_pool['functional_brain_mask']
        #     workflow.connect(node, out_file, plot, 'in_mask')
        # else:
        #     plot.inputs.in_mask = resource_pool['functional_brain_mask']
        resource_pool['qap_mosaic'] = (plot, 'out_file')

        fdplot = pe.Node(PlotFD(), name='plot_fd')
        fdplot.inputs.subject = config['subject_id']
        fdplot.inputs.metadata = metadata
        workflow.connect(fd, 'out_file', fdplot, 'in_file')
        resource_pool['qap_fd'] = (fdplot, 'out_file')

    # Append the QC dict as one row of the temporal metrics CSV.
    # NOTE(review): 'op' is assumed to be os.path imported at module level.
    out_csv = op.join(config['output_directory'],
                      'qap_functional_temporal.csv')
    temporal_to_csv = pe.Node(nam.AddCSVRow(in_file=out_csv),
                              name='qap_functional_temporal_to_csv')

    workflow.connect(tsnr, 'tsnr_file', temporal, 'tsnr_volume')
    workflow.connect(temporal, 'qc', temporal_to_csv, '_outputs')
    resource_pool['qap_functional_temporal'] = (temporal_to_csv, 'csv_file')
    return workflow, resource_pool
# Exemple #6  (snippet-scraper artifact — commented out so the file stays importable)
# 0
def qap_functional_spatial_workflow(workflow, resource_pool, config):
    """
    Attach QAP functional-spatial quality-metric nodes to an existing
    nipype workflow.

    Parameters
    ----------
    workflow : nipype Workflow
        Workflow to extend in place.
    resource_pool : dict
        Maps resource names either to a (node, output_name) tuple or to a
        concrete file path. Missing prerequisites are built on demand.
    config : dict
        Must contain subject_id, session_id, scan_id, output_directory;
        may contain ghost_direction (defaults to 'y'), site_name,
        write_report.

    Returns
    -------
    (workflow, resource_pool) : tuple
        The extended workflow and pool, with 'qap_functional_spatial'
        (and optionally 'qap_mosaic') entries added.
    """

    # resource pool should have:
    #     mean_functional
    #     functional_brain_mask

    # FIX: 'nipype.algorithms.misc' was imported twice as 'nam'; the unused
    # os/sys/nio/check_input_resources imports were dropped as well.
    import nipype.pipeline.engine as pe
    import nipype.algorithms.misc as nam
    import nipype.interfaces.utility as niu

    from qap_workflows_utils import qap_functional_spatial
    from qap.viz.interfaces import PlotMosaic

    # Build any missing prerequisite resources before wiring the QC nodes.
    if 'mean_functional' not in resource_pool.keys():
        from functional_preproc import mean_functional_workflow
        workflow, resource_pool = \
            mean_functional_workflow(workflow, resource_pool, config)

    if 'functional_brain_mask' not in resource_pool.keys():
        from functional_preproc import functional_brain_mask_workflow
        workflow, resource_pool = \
            functional_brain_mask_workflow(workflow, resource_pool, config)

    spatial_epi = pe.Node(niu.Function(input_names=[
        'mean_epi', 'func_brain_mask', 'direction', 'subject_id', 'session_id',
        'scan_id', 'site_name'
    ],
                                       output_names=['qc'],
                                       function=qap_functional_spatial),
                          name='qap_functional_spatial')

    # Subject infos
    if 'ghost_direction' not in config.keys():
        config['ghost_direction'] = 'y'

    spatial_epi.inputs.direction = config['ghost_direction']
    spatial_epi.inputs.subject_id = config['subject_id']
    spatial_epi.inputs.session_id = config['session_id']
    spatial_epi.inputs.scan_id = config['scan_id']

    if 'site_name' in config.keys():
        spatial_epi.inputs.site_name = config['site_name']

    # A pool entry is either (node, output_name) to connect, or a file path
    # to assign directly.
    if len(resource_pool['mean_functional']) == 2:
        node, out_file = resource_pool['mean_functional']
        workflow.connect(node, out_file, spatial_epi, 'mean_epi')
    else:
        spatial_epi.inputs.mean_epi = resource_pool['mean_functional']

    if len(resource_pool['functional_brain_mask']) == 2:
        node, out_file = resource_pool['functional_brain_mask']
        workflow.connect(node, out_file, spatial_epi, 'func_brain_mask')
    else:
        spatial_epi.inputs.func_brain_mask = \
            resource_pool['functional_brain_mask']

    if config.get('write_report', False):
        plot = pe.Node(PlotMosaic(), name='plot_mosaic')
        plot.inputs.subject = config['subject_id']

        metadata = [config['session_id'], config['scan_id']]
        if 'site_name' in config.keys():
            metadata.append(config['site_name'])

        plot.inputs.metadata = metadata
        plot.inputs.title = 'Mean EPI'

        if len(resource_pool['mean_functional']) == 2:
            node, out_file = resource_pool['mean_functional']
            workflow.connect(node, out_file, plot, 'in_file')
        else:
            plot.inputs.in_file = resource_pool['mean_functional']

        # Enable this if we want masks
        # if len(resource_pool['functional_brain_mask']) == 2:
        #     node, out_file = resource_pool['functional_brain_mask']
        #     workflow.connect(node, out_file, plot, 'in_mask')
        # else:
        #     plot.inputs.in_mask = resource_pool['functional_brain_mask']
        resource_pool['qap_mosaic'] = (plot, 'out_file')

    # Append the QC dict as one row of the spatial metrics CSV.
    # NOTE(review): 'op' is assumed to be os.path imported at module level.
    out_csv = op.join(config['output_directory'], 'qap_functional_spatial.csv')
    spatial_epi_to_csv = pe.Node(nam.AddCSVRow(in_file=out_csv),
                                 name='qap_functional_spatial_to_csv')
    workflow.connect(spatial_epi, 'qc', spatial_epi_to_csv, '_outputs')
    resource_pool['qap_functional_spatial'] = (spatial_epi_to_csv, 'csv_file')

    return workflow, resource_pool
# Exemple #7  (snippet-scraper artifact — commented out so the file stays importable)
# 0
def qap_anatomical_spatial_workflow(workflow,
                                    resource_pool,
                                    config,
                                    report=False):
    """
    Attach QAP anatomical-spatial quality-metric nodes to an existing
    nipype workflow.

    Parameters
    ----------
    workflow : nipype Workflow
        Workflow to extend in place.
    resource_pool : dict
        Maps resource names either to a (node, output_name) tuple or to a
        concrete file path. Missing prerequisites are built on demand.
    config : dict
        Must contain subject_id, session_id, scan_id, output_directory;
        may contain site_name, write_report.
    report : bool
        NOTE(review): unused in this body — reporting is driven by
        config['write_report'] instead; kept for interface compatibility.

    Returns
    -------
    (workflow, resource_pool) : tuple
        The extended workflow and pool, with 'qap_anatomical_spatial'
        (and optionally 'qap_mosaic') entries added.
    """
    # resource pool should have:
    #     anatomical_reorient
    #     qap_head_mask
    #     anatomical_gm_mask
    #     anatomical_wm_mask
    #     anatomical_csf_mask

    import os
    import sys

    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.algorithms.misc as nam
    from qap_workflows_utils import qap_anatomical_spatial
    from qap.viz.interfaces import PlotMosaic

    # Build any missing prerequisite resources before wiring the QC nodes.
    if 'qap_head_mask' not in resource_pool.keys():

        from qap_workflows import qap_mask_workflow

        workflow, resource_pool = \
            qap_mask_workflow(workflow, resource_pool, config)

    if ('anatomical_gm_mask' not in resource_pool.keys()) or \
            ('anatomical_wm_mask' not in resource_pool.keys()) or \
            ('anatomical_csf_mask' not in resource_pool.keys()):

        from anatomical_preproc import segmentation_workflow

        workflow, resource_pool = \
            segmentation_workflow(workflow, resource_pool, config)

    if 'anatomical_reorient' not in resource_pool.keys():
        from anatomical_preproc import anatomical_reorient_workflow
        workflow, resource_pool = \
            anatomical_reorient_workflow(workflow, resource_pool, config)

    spatial = pe.Node(niu.Function(input_names=[
        'anatomical_reorient', 'head_mask_path', 'anatomical_gm_mask',
        'anatomical_wm_mask', 'anatomical_csf_mask', 'subject_id',
        'session_id', 'scan_id', 'site_name'
    ],
                                   output_names=['qc'],
                                   function=qap_anatomical_spatial),
                      name='qap_anatomical_spatial')

    # Subject infos
    spatial.inputs.subject_id = config['subject_id']
    spatial.inputs.session_id = config['session_id']
    spatial.inputs.scan_id = config['scan_id']

    if 'site_name' in config.keys():
        spatial.inputs.site_name = config['site_name']

    # A pool entry is either (node, output_name) to connect, or a file path
    # to assign directly.
    if len(resource_pool['anatomical_reorient']) == 2:
        node, out_file = resource_pool['anatomical_reorient']
        workflow.connect(node, out_file, spatial, 'anatomical_reorient')
    else:
        spatial.inputs.anatomical_reorient = \
            resource_pool['anatomical_reorient']

    if len(resource_pool['qap_head_mask']) == 2:
        node, out_file = resource_pool['qap_head_mask']
        workflow.connect(node, out_file, spatial, 'head_mask_path')
    else:
        spatial.inputs.head_mask_path = resource_pool['qap_head_mask']

    if len(resource_pool['anatomical_gm_mask']) == 2:
        node, out_file = resource_pool['anatomical_gm_mask']
        workflow.connect(node, out_file, spatial, 'anatomical_gm_mask')
    else:
        spatial.inputs.anatomical_gm_mask = \
            resource_pool['anatomical_gm_mask']

    if len(resource_pool['anatomical_wm_mask']) == 2:
        node, out_file = resource_pool['anatomical_wm_mask']
        workflow.connect(node, out_file, spatial, 'anatomical_wm_mask')
    else:
        spatial.inputs.anatomical_wm_mask = \
            resource_pool['anatomical_wm_mask']

    if len(resource_pool['anatomical_csf_mask']) == 2:
        node, out_file = resource_pool['anatomical_csf_mask']
        workflow.connect(node, out_file, spatial, 'anatomical_csf_mask')
    else:
        spatial.inputs.anatomical_csf_mask = \
            resource_pool['anatomical_csf_mask']

    if config.get('write_report', False):
        plot = pe.Node(PlotMosaic(), name='plot_mosaic')
        plot.inputs.subject = config['subject_id']

        metadata = [config['session_id'], config['scan_id']]
        if 'site_name' in config.keys():
            metadata.append(config['site_name'])

        plot.inputs.metadata = metadata
        plot.inputs.title = 'Anatomical reoriented'

        if len(resource_pool['anatomical_reorient']) == 2:
            node, out_file = resource_pool['anatomical_reorient']
            workflow.connect(node, out_file, plot, 'in_file')
        else:
            plot.inputs.in_file = resource_pool['anatomical_reorient']

        # Enable this if we want masks
        # if len(resource_pool['qap_head_mask']) == 2:
        #     node, out_file = resource_pool['qap_head_mask']
        #     workflow.connect(node, out_file, plot, 'in_mask')
        # else:
        #     plot.inputs.in_mask = resource_pool['qap_head_mask']

        resource_pool['qap_mosaic'] = (plot, 'out_file')

    # Append the QC dict as one row of the anatomical metrics CSV.
    # NOTE(review): 'op' is assumed to be os.path imported at module level.
    out_csv = op.join(config['output_directory'], 'qap_anatomical_spatial.csv')
    spatial_to_csv = pe.Node(nam.AddCSVRow(in_file=out_csv),
                             name='qap_anatomical_spatial_to_csv')

    workflow.connect(spatial, 'qc', spatial_to_csv, '_outputs')
    resource_pool['qap_anatomical_spatial'] = (spatial_to_csv, 'csv_file')
    return workflow, resource_pool
def get_ICV(subject_list, base_directory):
    """
    Run a nipype workflow that segments each subject's T1 image with FSL
    FAST and appends grey- and white-matter volume estimates to a CSV file.

    Parameters
    ----------
    subject_list : list of str
        Subject identifiers iterated over by the workflow.
    base_directory : str
        Working directory for the workflow; the results CSV
        ('volume_results.csv') is written inside it.

    Notes
    -----
    Executes immediately on a 'PBSGraph' plugin; returns nothing.
    NOTE(review): the SelectFiles template is an absolute path, so
    base_directory does not affect input file selection. The FLIRT/MAT2DET
    nodes are defined but their connections are commented out below.
    """
    #==============================================================
    # Loading required packages
    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    from nipype.algorithms import misc
    from nipype import SelectFiles
    from nipype.interfaces import fsl
    from own_nipype import MAT2DET
    import os

    #====================================
    # Defining the nodes for the workflow

    # Getting the subject ID
    infosource = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id']),
        name='infosource')
    infosource.iterables = ('subject_id', subject_list)

    # Getting the relevant diffusion-weighted data
    templates = dict(
        in_file=
        '/imaging/jb07/CALM/CALM_BIDS/{subject_id}/anat/{subject_id}_T1w.nii.gz'
    )

    selectfiles = pe.Node(SelectFiles(templates), name="selectfiles")
    selectfiles.inputs.base_directory = os.path.abspath(base_directory)

    # Segment the image with FSL FAST
    fast = pe.Node(interface=fsl.FAST(), name='fast')
    fast.inputs.img_type = 1
    fast.inputs.no_bias = True

    # Select files from the FAST output (partial-volume maps 1 and 2)
    GM_select = pe.Node(interface=util.Select(index=[1]), name='GM_select')
    WM_select = pe.Node(interface=util.Select(index=[2]), name='WM_select')

    # Calculate GM and WM volume with FSL stats
    GM_volume = pe.Node(interface=fsl.ImageStats(), name='GM_volume')
    GM_volume.inputs.op_string = '-M -V'

    WM_volume = pe.Node(interface=fsl.ImageStats(), name='WM_volume')
    WM_volume.inputs.op_string = '-M -V'

    flt = pe.Node(interface=fsl.FLIRT(), name='flt')
    flt.inputs.reference = os.environ[
        'FSLDIR'] + '/data/standard/MNI152_T1_1mm_brain.nii.gz'

    mat2det = pe.Node(interface=MAT2DET(), name='mat2det')

    # Create an output csv file
    addrow = pe.Node(interface=misc.AddCSVRow(), name='addrow')
    # FIX: was `base_directory + 'volume_results.csv'`, which silently drops
    # the path separator when base_directory has no trailing slash.
    addrow.inputs.in_file = os.path.join(base_directory,
                                         'volume_results.csv')

    #====================================
    # Setting up the workflow
    get_ICV = pe.Workflow(name='get_ICV')
    get_ICV.connect(infosource, 'subject_id', selectfiles, 'subject_id')
    #get_ICV.connect(selectfiles, 'in_file', flt, 'in_file')
    #get_ICV.connect(flt, 'out_matrix_file', mat2det, 'in_matrix')
    #get_ICV.connect(infosource, 'subject_id', mat2det, 'subject_id')
    get_ICV.connect(infosource, 'subject_id', fast, 'out_basename')
    get_ICV.connect(selectfiles, 'in_file', fast, 'in_files')
    get_ICV.connect(fast, 'partial_volume_files', GM_select, 'inlist')
    get_ICV.connect(GM_select, 'out', GM_volume, 'in_file')
    get_ICV.connect(fast, 'partial_volume_files', WM_select, 'inlist')
    get_ICV.connect(WM_select, 'out', WM_volume, 'in_file')
    get_ICV.connect(infosource, 'subject_id', addrow, 'MRI.ID')
    get_ICV.connect(GM_volume, 'out_stat', addrow, 'GM_volume')
    get_ICV.connect(WM_volume, 'out_stat', addrow, 'WM_volume')

    #====================================
    # Running the workflow
    get_ICV.base_dir = os.path.abspath(base_directory)
    get_ICV.write_graph()
    get_ICV.run('PBSGraph')