Example #1
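These snippets are excerpted from nipype's group-level connectivity module, so the module-level imports they depend on are not shown. The block below is an assumption based on the names used in the code (op, pe, util, nio, cmtk, misc, Function); the helper concatcsv referenced in Examples #1 and #3 is sketched after Example #1.

# Assumed module-level imports for the snippets on this page; adjust to your
# installed nipype version and package layout.
import os.path as op

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.io as nio
import nipype.interfaces.cmtk as cmtk
import nipype.algorithms.misc as misc
from nipype.interfaces.utility import Function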
def create_merge_group_network_results_workflow(group_list,
                                                data_dir,
                                                subjects_dir,
                                                output_dir,
                                                title='group'):
    """Creates a third-level pipeline to merge the Connectome File Format (CFF) outputs from each group
    and combines them into a single CFF file for each group. This version of the third-level pipeline also
    concatenates the comma-separated value files for the NetworkX metrics and the connectivity matrices
    into single files.

    Example
    -------

    >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork
    >>> from nipype.testing import example_data
    >>> subjects_dir = '.'
    >>> data_dir = '.'
    >>> output_dir = '.'
    >>> group_list = {}
    >>> group_list['group1'] = ['subj1', 'subj2']
    >>> group_list['group2'] = ['subj3', 'subj4']
    >>> l3pipeline = groupwork.create_merge_group_network_results_workflow(group_list, data_dir, subjects_dir, output_dir)
    >>> l3pipeline.run()                 # doctest: +SKIP

    Inputs::

        group_list: Dictionary of subject lists, keyed by group name
        data_dir: Path to the data directory
        subjects_dir: Path to the Freesurfer 'subjects' directory
        output_dir: Path for the output files
        title: String to use as a title for the output merged CFF file (default 'group')
    """
    l3infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id']),
        name='l3infosource')
    l3infosource.inputs.group_id = list(group_list.keys())  # materialize the dict view for Python 3

    l3source = pe.Node(
        nio.DataGrabber(
            infields=['group_id'],
            outfields=['CFFfiles', 'CSVmetrics', 'CSVmatrices']),
        name='l3source')
    l3source.inputs.template_args = dict(CFFfiles=[['group_id']],
                                         CSVmetrics=[['group_id']],
                                         CSVmatrices=[['group_id']])
    l3source.inputs.template = op.join(output_dir, '%s/%s')

    l3source.inputs.field_template = dict(
        CFFfiles=op.join(output_dir, '%s/*.cff'),
        CSVmetrics=op.join(output_dir, '%s/csv/*.csv'),
        CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv'))

    l3inputnode = pe.Node(
        interface=util.IdentityInterface(
            fields=['Group_CFFs', 'Group_CSVmetrics', 'Group_CSVmatrices']),
        name='l3inputnode')

    MergeCNetworks_grp = pe.Node(interface=cmtk.MergeCNetworks(),
                                 name="MergeCNetworks_grp")
    MergeCNetworks_grp.inputs.out_file = title

    l3datasink = pe.Node(interface=nio.DataSink(), name="l3datasink")
    l3datasink.inputs.base_directory = output_dir

    l3pipeline = pe.Workflow(name="l3output")
    l3pipeline.base_dir = output_dir
    l3pipeline.connect([
        (l3infosource, l3source, [('group_id', 'group_id')]),
        (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]),
        (l3source, l3inputnode, [('CSVmetrics', 'Group_CSVmetrics')]),
        (l3source, l3inputnode, [('CSVmatrices', 'Group_CSVmatrices')]),
    ])

    l3pipeline.connect([(l3inputnode, MergeCNetworks_grp, [('Group_CFFs',
                                                            'in_files')])])
    l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file',
                                                           '@l3output')])])
    l3pipeline.connect([(l3inputnode, l3datasink, [
        (('Group_CSVmetrics', concatcsv), '@l3output.csvmetrics')
    ])])
    l3pipeline.connect([(l3inputnode, l3datasink, [
        (('Group_CSVmatrices', concatcsv), '@l3output.csvmatrices')
    ])])
    return l3pipeline
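The concatcsv helper connected to the DataSink above (and wrapped in a Function interface in Example #3) is defined elsewhere in the source module. Below is a minimal sketch of what it might look like, inferred only from how it is used on this page: it accepts a list of CSV paths, writes a single concatenated file that keeps one header row, and returns the new file's absolute path. The import sits inside the function body because nipype Function nodes execute only the function source, so outside imports are not visible unless passed in explicitly.

def concatcsv(in_files):
    """Concatenate CSV files, keeping only the first file's header row.

    Sketch inferred from usage on this page; the helper shipped with nipype
    may differ in naming and edge-case handling.
    """
    import os.path as op
    if not isinstance(in_files, list):      # a single path passes through
        return in_files
    if isinstance(in_files[0], list):       # flatten one level of nesting
        in_files = in_files[0]
    out_name = op.abspath('concat.csv')
    with open(out_name, 'w') as out_file:
        with open(in_files[0]) as first:
            out_file.write(first.readline())  # copy the header once
        for in_file in in_files:
            with open(in_file) as f:
                f.readline()                  # skip this file's header
                for line in f:
                    out_file.write(line)
    return out_name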
Example #2
def create_merge_networks_by_group_workflow(group_list, group_id, data_dir,
                                            subjects_dir, output_dir):
    """Creates a second-level pipeline to merge the Connectome File Format (CFF) outputs from the group-level
    MRtrix structural connectivity processing pipeline into a single CFF file for each group.

    Example
    -------

    >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork
    >>> from nipype.testing import example_data
    >>> subjects_dir = '.'
    >>> data_dir = '.'
    >>> output_dir = '.'
    >>> group_list = {}
    >>> group_list['group1'] = ['subj1', 'subj2']
    >>> group_list['group2'] = ['subj3', 'subj4']
    >>> group_id = 'group1'
    >>> l2pipeline = groupwork.create_merge_networks_by_group_workflow(group_list, group_id, data_dir, subjects_dir, output_dir)
    >>> l2pipeline.run()                 # doctest: +SKIP

    Inputs::

        group_list: Dictionary of subject lists, keyed by group name
        group_id: String containing the group name
        data_dir: Path to the data directory
        subjects_dir: Path to the Freesurfer 'subjects' directory
        output_dir: Path for the output files
    """
    group_infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id']),
        name="group_infosource")
    group_infosource.inputs.group_id = group_id

    l2infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id']),
        name='l2infosource')

    l2source = pe.Node(nio.DataGrabber(infields=['group_id'],
                                       outfields=['CFFfiles']),
                       name='l2source')
    l2source.inputs.template_args = dict(CFFfiles=[['group_id']])
    l2source.inputs.template = op.join(output_dir, '%s/cff/*/connectome.cff')
    l2source.inputs.base_directory = data_dir

    l2inputnode = pe.Node(
        interface=util.IdentityInterface(fields=['CFFfiles']),
        name='l2inputnode')
    MergeCNetworks = pe.Node(interface=cmtk.MergeCNetworks(),
                             name="MergeCNetworks")

    l2datasink = pe.Node(interface=nio.DataSink(), name="l2datasink")
    l2datasink.inputs.base_directory = output_dir
    l2datasink.inputs.container = group_id

    l2pipeline = pe.Workflow(name="l2output")
    l2pipeline.base_dir = op.join(output_dir, 'l2output')
    l2pipeline.connect([(group_infosource, l2infosource, [('group_id',
                                                           'group_id')])])

    l2pipeline.connect([
        (l2infosource, l2source, [('group_id', 'group_id')]),
        (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]),
    ])

    l2pipeline.connect([(l2inputnode, MergeCNetworks, [('CFFfiles', 'in_files')
                                                       ])])
    l2pipeline.connect([(group_infosource, MergeCNetworks, [('group_id',
                                                             'out_file')])])
    l2pipeline.connect([(MergeCNetworks, l2datasink, [('connectome_file',
                                                       '@l2output')])])
    l2pipeline.connect([(group_infosource, l2datasink, [('group_id',
                                                         '@group_id')])])
    return l2pipeline
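A hedged sketch of how this second-level workflow is typically driven: loop over the groups, build and run one merge workflow per group, then run the cross-group merge from Example #1 on the same output directory. The paths and group dictionary are placeholders, and the sketch assumes the first-level pipelines have already written the '<group_id>/cff/*/connectome.cff' files and the CSV outputs that the DataGrabber templates above expect under output_dir.

# Hypothetical driver script; directory names and contents are assumptions.
group_list = {'group1': ['subj1', 'subj2'],
              'group2': ['subj3', 'subj4']}
data_dir = subjects_dir = '.'
output_dir = './grouped'

for group_id in group_list:
    l2pipeline = create_merge_networks_by_group_workflow(
        group_list, group_id, data_dir, subjects_dir, output_dir)
    l2pipeline.run()

# Third-level merge across all groups (Example #1).
l3pipeline = create_merge_group_network_results_workflow(
    group_list, data_dir, subjects_dir, output_dir)
l3pipeline.run()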
Example #3
def create_merge_network_results_by_group_workflow(group_list, group_id,
                                                   data_dir, subjects_dir,
                                                   output_dir):
    """Creates a second-level pipeline to merge the Connectome File Format (CFF) outputs from the group-level
    MRtrix structural connectivity processing pipeline into a single CFF file for each group.

    Example
    -------

    >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork
    >>> from nipype.testing import example_data
    >>> subjects_dir = '.'
    >>> data_dir = '.'
    >>> output_dir = '.'
    >>> group_list = {}
    >>> group_list['group1'] = ['subj1', 'subj2']
    >>> group_list['group2'] = ['subj3', 'subj4']
    >>> group_id = 'group1'
    >>> l2pipeline = groupwork.create_merge_network_results_by_group_workflow(group_list, group_id, data_dir, subjects_dir, output_dir)
    >>> l2pipeline.run()                 # doctest: +SKIP

    Inputs::

        group_list: Dictionary of subject lists, keyed by group name
        group_id: String containing the group name
        data_dir: Path to the data directory
        subjects_dir: Path to the Freesurfer 'subjects' directory
        output_dir: Path for the output files
    """
    group_infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id']),
        name="group_infosource")
    group_infosource.inputs.group_id = group_id

    l2infosource = pe.Node(
        interface=util.IdentityInterface(fields=['group_id', 'merged']),
        name='l2infosource')

    l2source = pe.Node(
        nio.DataGrabber(
            infields=['group_id'],
            outfields=['CFFfiles', 'CSVmatrices', 'CSVfibers', 'CSVnodal',
                       'CSVglobal']),
        name='l2source')

    l2source.inputs.template_args = dict(CFFfiles=[['group_id']],
                                         CSVmatrices=[['group_id']],
                                         CSVnodal=[['group_id']],
                                         CSVglobal=[['group_id']],
                                         CSVfibers=[['group_id']])
    l2source.inputs.base_directory = data_dir
    l2source.inputs.template = '%s/%s'
    l2source.inputs.field_template = dict(
        CFFfiles=op.join(output_dir, '%s/cff/*/connectome.cff'),
        CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv'),
        CSVnodal=op.join(output_dir, '%s/nxcsv/*/*nodal*.csv'),
        CSVglobal=op.join(output_dir, '%s/nxcsv/*/*global*.csv'),
        CSVfibers=op.join(output_dir, '%s/fiber_csv/*/*fibers*.csv'))

    l2inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'CFFfiles', 'CSVfibers', 'CSVmatrices', 'CSVnodal', 'CSVglobal',
        'network_file'
    ]),
                          name='l2inputnode')

    MergeCNetworks = pe.Node(interface=cmtk.MergeCNetworks(),
                             name="MergeCNetworks")

    l2datasink = pe.Node(interface=nio.DataSink(), name="l2datasink")
    l2datasink.inputs.base_directory = output_dir
    l2datasink.inputs.container = group_id

    l2pipeline = pe.Workflow(name="l2output_" + group_id)
    l2pipeline.base_dir = op.join(output_dir, 'l2output')
    l2pipeline.connect([(group_infosource, l2infosource, [('group_id',
                                                           'group_id')])])

    l2pipeline.connect([
        (l2infosource, l2source, [('group_id', 'group_id')]),
        (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]),
        (l2source, l2inputnode, [('CSVmatrices', 'CSVmatrices')]),
        (l2source, l2inputnode, [('CSVnodal', 'CSVnodal')]),
        (l2source, l2inputnode, [('CSVglobal', 'CSVglobal')]),
        (l2source, l2inputnode, [('CSVfibers', 'CSVfibers')]),
    ])

    l2pipeline.connect([(l2inputnode, MergeCNetworks, [('CFFfiles', 'in_files')
                                                       ])])

    l2pipeline.connect([(group_infosource, MergeCNetworks, [('group_id',
                                                             'out_file')])])
    l2pipeline.connect([(MergeCNetworks, l2datasink, [('connectome_file',
                                                       '@l2output')])])

    AddCSVColumn_node = pe.Node(interface=misc.AddCSVColumn(),
                                name="AddCSVColumn_node")
    AddCSVColumn_node.inputs.extra_column_heading = 'group'
    AddCSVColumn_global = AddCSVColumn_node.clone(name="AddCSVColumn_global")
    AddCSVColumn_matrices = AddCSVColumn_node.clone(
        name="AddCSVColumn_matrices")
    AddCSVColumn_fibers = AddCSVColumn_node.clone(name="AddCSVColumn_fibers")

    concat_csv_interface = Function(input_names=["in_files"],
                                    output_names=["out_name"],
                                    function=concatcsv)

    concat_node_csvs = pe.Node(interface=concat_csv_interface,
                               name='concat_node_csvs')
    concat_global_csvs = pe.Node(interface=concat_csv_interface,
                                 name='concat_global_csvs')
    concat_matrix_csvs = pe.Node(interface=concat_csv_interface,
                                 name='concat_matrix_csvs')
    concat_fiber_csvs = pe.Node(interface=concat_csv_interface,
                                name='concat_fiber_csvs')

    l2pipeline.connect([(l2inputnode, concat_node_csvs, [('CSVnodal',
                                                          'in_files')])])
    l2pipeline.connect([(concat_node_csvs, AddCSVColumn_node, [('out_name',
                                                                'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_node,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_node, l2datasink,
                         [('csv_file', '@l2output.node_csv')])])
    l2pipeline.connect([(group_infosource, l2datasink, [('group_id',
                                                         '@group_id')])])

    l2pipeline.connect([(l2inputnode, concat_global_csvs, [('CSVglobal',
                                                            'in_files')])])
    l2pipeline.connect([(concat_global_csvs, AddCSVColumn_global,
                         [('out_name', 'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_global,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_global, l2datasink,
                         [('csv_file', '@l2output.global_csv')])])

    l2pipeline.connect([(l2inputnode, concat_matrix_csvs, [('CSVmatrices',
                                                            'in_files')])])
    l2pipeline.connect([(concat_matrix_csvs, AddCSVColumn_matrices,
                         [('out_name', 'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_matrices,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_matrices, l2datasink,
                         [('csv_file', '@l2output.cmatrices_csv')])])

    l2pipeline.connect([(l2inputnode, concat_fiber_csvs, [('CSVfibers',
                                                           'in_files')])])
    l2pipeline.connect([(concat_fiber_csvs, AddCSVColumn_fibers,
                         [('out_name', 'in_file')])])
    l2pipeline.connect([(group_infosource, AddCSVColumn_fibers,
                         [('group_id', 'extra_field')])])
    l2pipeline.connect([(AddCSVColumn_fibers, l2datasink,
                         [('csv_file', '@l2output.fibers_csv')])])
    return l2pipeline
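The group-tagging pattern at the end of Example #3 (concatenate per-subject CSVs, then append a 'group' column) can also be exercised on its own. Below is a small standalone sketch using only the AddCSVColumn inputs already shown above; the input file name is a placeholder.

# Standalone use of the AddCSVColumn group-tagging step; 'nodal_concat.csv'
# is a hypothetical concatenated metrics file.
import nipype.pipeline.engine as pe
import nipype.algorithms.misc as misc

add_group = pe.Node(interface=misc.AddCSVColumn(), name='add_group')
add_group.inputs.in_file = 'nodal_concat.csv'
add_group.inputs.extra_column_heading = 'group'
add_group.inputs.extra_field = 'group1'
# add_group.run()  # produces a csv_file output with the extra column appended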