Example #1
def RunSubjectWorkflow(args):
    """
                           .-----------.
                       --- | Session 1 | ---> /project/subjectA/session1/phase/
                     /     *-----------*
    .-----------.   /
    | Subject A | <
    *-----------*   \
                     \     .-----------.
                       --- | Session 2 | ---> /project/subjectA/session2/phase/
                           *-----------*
    **** Replaces WorkflowT1T2.py ****
    """
    database, start_time, subject, master_config = args
    assert 'baseline' in master_config['components'] or \
        'longitudinal' in master_config['components'], \
        "Baseline or Longitudinal is not in WORKFLOW_COMPONENTS!"
    # HACK:
    #    To avoid a "sqlite3.ProgrammingError: Base Cursor.__init__ not called" error
    #    using multiprocessing.map_async(), re-instantiate database
    # database.__init__(defaultDBName=database.dbName, subject_list=database.subjectList)
    #
    # END HACK
    import time

    from nipype import config, logging
    config.update_config(master_config)  # Set universal pipeline options
    assert config.get('execution',
                      'plugin') == master_config['execution']['plugin']
    # DEBUG
    # config.enable_debug_mode()
    # config.set('execution', 'stop_on_first_rerun', 'true')
    # END DEBUG
    logging.update_logging(config)

    import nipype.pipeline.engine as pe
    import nipype.interfaces.base as nbase
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import traits

    from baw_exp import OpenSubjectDatabase
    from SessionDB import SessionDB
    from PipeLineFunctionHelpers import convertToList
    from atlasNode import MakeAtlasNode
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.misc import GenerateWFName

    while time.time() < start_time:
        time.sleep(start_time - time.time() + 1)
        print "Delaying start for {subject}".format(subject=subject)
    print("===================== SUBJECT: {0} ===========================".
          format(subject))

    subjectWorkflow = pe.Workflow(
        name="BAW_StandardWorkup_subject_{0}".format(subject))
    subjectWorkflow.base_dir = config.get('logging', 'log_directory')
    # subjectWorkflow.config['execution']['plugin'] = 'Linear'  # Hardcoded in WorkupT1T2.py - why?
    # DEBUG
    # subjectWorkflow.config['execution']['stop_on_first_rerun'] = 'true'
    # END DEBUG
    atlasNode = MakeAtlasNode(master_config['atlascache'], 'BAtlas')

    sessionWorkflow = dict()
    inputsSpec = dict()
    sessions = database.getSessionsFromSubject(subject)
    # print "These are the sessions: ", sessions
    if 'baseline' in master_config['components']:
        current_phase = 'baseline'
        from baseline import create_baseline as create_wkfl
    elif 'longitudinal' in master_config['components']:
        current_phase = 'longitudinal'
        from longitudinal import create_longitudinal as create_wkfl

    for session in sessions:  # TODO (future): Replace with iterable inputSpec node and add Function node for getAllFiles()
        project = database.getProjFromSession(session)
        pname = "{0}_{1}".format(
            session,
            current_phase)  # Long node names make graphs a pain to read/print
        # pname = GenerateWFName(project, subject, session, current_phase)
        print "Building session pipeline for {0}".format(session)
        inputsSpec[session] = pe.Node(
            name='inputspec_{0}'.format(session),
            interface=IdentityInterface(
                fields=['T1s', 'T2s', 'PDs', 'FLs', 'OTs']))
        inputsSpec[session].inputs.T1s = database.getFilenamesByScantype(
            session, ['T1-15', 'T1-30'])
        inputsSpec[session].inputs.T2s = database.getFilenamesByScantype(
            session, ['T2-15', 'T2-30'])
        inputsSpec[session].inputs.PDs = database.getFilenamesByScantype(
            session, ['PD-15', 'PD-30'])
        inputsSpec[session].inputs.FLs = database.getFilenamesByScantype(
            session, ['FL-15', 'FL-30'])
        inputsSpec[session].inputs.OTs = database.getFilenamesByScantype(
            session, ['OTHER-15', 'OTHER-30'])

        sessionWorkflow[session] = create_wkfl(project,
                                               subject,
                                               session,
                                               master_config,
                                               interpMode='Linear',
                                               pipeline_name=pname)

        subjectWorkflow.connect([
            (inputsSpec[session], sessionWorkflow[session], [
                ('T1s', 'inputspec.T1s'),
                ('T2s', 'inputspec.T2s'),
                ('PDs', 'inputspec.PDs'),
                ('FLs', 'inputspec.FLs'),
                ('OTs', 'inputspec.OTHERs'),
            ]),
            (atlasNode, sessionWorkflow[session],
             [('template_landmarks_50Lmks_fcsv',
               'inputspec.atlasLandmarkFilename'),
              ('template_weights_50Lmks_wts', 'inputspec.atlasWeightFilename'),
              ('LLSModel_50Lmks_hdf5', 'inputspec.LLSModel'),
              ('T1_50Lmks_mdl', 'inputspec.inputTemplateModel')]),
        ])
        if current_phase == 'baseline':
            subjectWorkflow.connect([
                (atlasNode, sessionWorkflow[session],
                 [('template_t1', 'inputspec.template_t1'),
                  ('ExtendedAtlasDefinition_xml', 'inputspec.atlasDefinition')
                  ]),
            ])
        else:
            assert current_phase == 'longitudinal', "Phase value is unknown: {0}".format(
                current_phase)

    from utils import run_workflow, print_workflow
    if False:
        print_workflow(subjectWorkflow,
                       plugin=master_config['execution']['plugin'],
                       dotfilename='subjectWorkflow')
    return run_workflow(subjectWorkflow,
                        plugin=master_config['execution']['plugin'],
                        plugin_args=master_config['plugin_args'])
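
The HACK comment above hints at how this function is meant to be driven: one worker per subject via multiprocessing.map_async(), with staggered start_time values. The sketch below is not from the original source; it assumes RunSubjectWorkflow is importable and that the subject list and master_config come from the caller. Note that shipping an already-open database in the args tuple is exactly what provokes the sqlite3 cursor error the HACK describes; Example #5 below sidesteps it by opening the database inside the worker, so swapping to that variant's (start_time, subject, master_config) tuple avoids the pickling hazard entirely.

import multiprocessing
import time

def dispatch_subjects(database, subjects, master_config, stagger_seconds=300):
    """Fan RunSubjectWorkflow out over a process pool, one worker per subject."""
    now = time.time()
    # Stagger start times so subjects do not all hit the scheduler at once;
    # the 300-second spacing is an arbitrary placeholder, not a project default.
    args_list = [(database, now + i * stagger_seconds, subject, master_config)
                 for i, subject in enumerate(subjects)]
    pool = multiprocessing.Pool(processes=len(subjects))
    result = pool.map_async(RunSubjectWorkflow, args_list)
    pool.close()
    pool.join()
    return result.get()
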
Example #2
def main(args):
    subjects, master_config = args

    import os
    import sys
    import traceback

    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)
    assert config.get('execution',
                      'plugin') == master_config['execution']['plugin']

    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.ants as ants

    from template import MergeByExtendListElements, xml_filename
    from PipeLineFunctionHelpers import mapPosteriorList
    from atlasNode import GetAtlasNode, MakeNewAtlasTemplate
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.distributed import modify_qsub_args

    template = pe.Workflow(name='SubjectAtlas_Template')
    template.base_dir = master_config['logging']['log_directory']

    BAtlas = GetAtlasNode(master_config['previouscache'], 'BAtlas')

    inputspec = pe.Node(interface=IdentityInterface(fields=['subject']),
                        name='inputspec')
    inputspec.iterables = ('subject', subjects)

    baselineDG = pe.Node(nio.DataGrabber(infields=['subject'],
                                         outfields=[
                                             't1_average', 't2_average',
                                             'pd_average', 'fl_average',
                                             'outputLabels', 'posteriorImages'
                                         ]),
                         name='Baseline_DG')
    baselineDG.inputs.base_directory = master_config['previousresult']
    baselineDG.inputs.sort_filelist = True
    baselineDG.inputs.raise_on_empty = False
    baselineDG.inputs.template = '*/%s/*/Baseline/%s.nii.gz'
    baselineDG.inputs.template_args['t1_average'] = [['subject', 't1_average_BRAINSABC']]
    baselineDG.inputs.template_args['t2_average'] = [['subject', 't2_average_BRAINSABC']]
    baselineDG.inputs.template_args['pd_average'] = [['subject', 'pd_average_BRAINSABC']]
    baselineDG.inputs.template_args['fl_average'] = [['subject', 'fl_average_BRAINSABC']]
    baselineDG.inputs.template_args['outputLabels'] = [['subject', 'brain_label_seg']]
    baselineDG.inputs.field_template = {
        'posteriorImages': '*/%s/*/TissueClassify/POSTERIOR_%s.nii.gz'
    }
    posterior_files = [
        'AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
        'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM'
    ]
    baselineDG.inputs.template_args['posteriorImages'] = [['subject', posterior_files]]

    MergeByExtendListElementsNode = pe.Node(
        Function(
            function=MergeByExtendListElements,
            input_names=['t1s', 't2s', 'pds', 'fls', 'labels', 'posteriors'],
            output_names=[
                'ListOfImagesDictionaries', 'registrationImageTypes',
                'interpolationMapping'
            ]),
        run_without_submitting=True,
        name="99_MergeByExtendListElements")
    from PipeLineFunctionHelpers import WrapPosteriorImagesFromDictionaryFunction as wrapfunc
    template.connect([(inputspec, baselineDG, [('subject', 'subject')]),
                      (baselineDG, MergeByExtendListElementsNode,
                       [('t1_average', 't1s'), ('t2_average', 't2s'),
                        ('pd_average', 'pds'), ('fl_average', 'fls'),
                        ('outputLabels', 'labels'),
                        (('posteriorImages', wrapfunc), 'posteriors')])])

    myInitAvgWF = pe.Node(
        interface=ants.AverageImages(),
        name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
    myInitAvgWF.inputs.dimension = 3
    myInitAvgWF.inputs.normalize = True
    template.connect(baselineDG, 't1_average', myInitAvgWF, "images")
    ####################################################################################################
    # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
    # if numSessions == 1:
    #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
    ####################################################################################################
    from BAWantsRegistrationBuildTemplate import BAWantsRegistrationTemplateBuildSingleIterationWF as registrationWF
    buildTemplateIteration1 = registrationWF('iteration01')
    # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
    buildTemplateIteration2 = registrationWF('Iteration02')

    MakeNewAtlasTemplateNode = pe.Node(
        interface=Function(
            function=MakeNewAtlasTemplate,
            input_names=[
                't1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'
            ],
            output_names=['outAtlasFullPath', 'clean_deformed_list']),
        # This is a lot of work, so submit it run_without_submitting=True,
        run_without_submitting=True,  # HACK:  THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
        name='99_MakeNewAtlasTemplate')

    if master_config['execution']['plugin'] == 'SGE':
        # for some nodes, the qsub call needs to be modified on the cluster
        MakeNewAtlasTemplateNode.plugin_args = {
            'template': master_config['plugin_args']['template'],
            'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1, 1),
            'overwrite': True
        }
        for bt in [buildTemplateIteration1, buildTemplateIteration2]:
            ##################################################
            # *** Hans, is this TODO already addressed? ***  #
            # ---->  # TODO:  Change these parameters  <---- #
            ##################################################
            BeginANTS = bt.get_node("BeginANTS")
            BeginANTS.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '9000M', 4, hard=False)
            }
            wimtdeformed = bt.get_node("wimtdeformed")
            wimtdeformed.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }
            AvgAffineTransform = bt.get_node("AvgAffineTransform")
            AvgAffineTransform.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1)
            }
            wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
            wimtPassivedeformed.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }

    template.connect([
        (myInitAvgWF, buildTemplateIteration1, [('output_average_image',
                                                 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration1,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (buildTemplateIteration1, buildTemplateIteration2,
         [('outputspec.template', 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration2,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (inputspec, MakeNewAtlasTemplateNode, [(('subject', xml_filename),
                                                'outDefinition')]),
        (BAtlas, MakeNewAtlasTemplateNode, [('ExtendedAtlasDefinition_xml_in',
                                             'AtlasTemplate')]),
        (buildTemplateIteration2, MakeNewAtlasTemplateNode,
         [('outputspec.template', 't1_image'),
          ('outputspec.passive_deformed_templates', 'deformed_list')]),
    ])

    # Create DataSinks
    Atlas_DataSink = pe.Node(nio.DataSink(), name="Atlas_DS")
    Atlas_DataSink.overwrite = master_config['ds_overwrite']
    Atlas_DataSink.inputs.base_directory = master_config['resultdir']

    Subject_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
    Subject_DataSink.overwrite = master_config['ds_overwrite']
    Subject_DataSink.inputs.base_directory = master_config['resultdir']

    template.connect([
        (inputspec, Atlas_DataSink, [('subject', 'container')]),
        (buildTemplateIteration1, Atlas_DataSink,
         [('outputspec.template', 'Atlas.iteration1')]),  # Unnecessary
        (MakeNewAtlasTemplateNode, Atlas_DataSink, [('outAtlasFullPath',
                                                     'Atlas.definitions')]),
        (BAtlas, Atlas_DataSink,
         [('template_landmarks_50Lmks_fcsv', 'Atlas.20111119_BCD.@fcsv'),
          ('template_weights_50Lmks_wts', 'Atlas.20111119_BCD.@wts'),
          ('LLSModel_50Lmks_hdf5', 'Atlas.20111119_BCD.@hdf5'),
          ('T1_50Lmks_mdl', 'Atlas.20111119_BCD.@mdl')]),
        (inputspec, Subject_DataSink, [(('subject', outputPattern),
                                        'regexp_substitutions')]),
        (buildTemplateIteration2, Subject_DataSink,
         [('outputspec.template', 'ANTSTemplate.@template')]),
        (MakeNewAtlasTemplateNode, Subject_DataSink, [
            ('clean_deformed_list', 'ANTSTemplate.@passive_deformed_templates')
        ]),
    ])

    from utils import run_workflow, print_workflow
    if False:
        print_workflow(template,
                       plugin=master_config['execution']['plugin'],
                       dotfilename='template')
    return run_workflow(template,
                        plugin=master_config['execution']['plugin'],
                        plugin_args=master_config['plugin_args'])
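
main() above dereferences a specific set of master_config keys. The skeleton below simply collects those keys in one place; every value is a placeholder invented for illustration, not a default from the original project.

master_config = {
    'execution': {'plugin': 'SGE'},                       # any nipype plugin name
    'logging': {'log_directory': '/tmp/logs'},            # Workflow.base_dir
    'previouscache': '/prev/experiment/Atlas',            # GetAtlasNode source for BAtlas
    'previousresult': '/prev/experiment/results',         # DataGrabber base_directory
    'resultdir': '/curr/experiment/results',              # DataSink base_directory
    'ds_overwrite': False,                                # DataSink overwrite flag
    'queue': 'all.q',                                     # SGE queue for modify_qsub_args
    'plugin_args': {'template': '/path/to/qsub_template.sh'},
}
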
Example #3
def RunSubjectWorkflow(args):
    """
                           .-----------.
                       --- | Session 1 | ---> /project/subjectA/session1/phase/
                     /     *-----------*
    .-----------.   /
    | Subject A | <
    *-----------*   \
                     \     .-----------.
                       --- | Session 2 | ---> /project/subjectA/session2/phase/
                           *-----------*
    **** Replaces WorkflowT1T2.py ****
    """
    database, start_time, subject, master_config = args
    assert 'baseline' in master_config['components'] or 'longitudinal' in master_config['components'], "Baseline or Longitudinal is not in WORKFLOW_COMPONENTS!"
    # HACK:
    #    To avoid a "sqlite3.ProgrammingError: Base Cursor.__init__ not called" error
    #    using multiprocessing.map_async(), re-instantiate database
    # database.__init__(defaultDBName=database.dbName, subject_list=database.subjectList)
    #
    # END HACK
    import time

    from nipype import config, logging
    config.update_config(master_config)  # Set universal pipeline options
    assert config.get('execution', 'plugin') == master_config['execution']['plugin']
    # DEBUG
    # config.enable_debug_mode()
    # config.set('execution', 'stop_on_first_rerun', 'true')
    # END DEBUG
    logging.update_logging(config)

    import nipype.pipeline.engine as pe
    import nipype.interfaces.base as nbase
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import traits

    from baw_exp import OpenSubjectDatabase
    from SessionDB import SessionDB
    from PipeLineFunctionHelpers import convertToList
    from atlasNode import MakeAtlasNode
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.misc import GenerateWFName

    while time.time() < start_time:
        time.sleep(start_time - time.time() + 1)
        print "Delaying start for {subject}".format(subject=subject)
    print("===================== SUBJECT: {0} ===========================".format(subject))

    subjectWorkflow = pe.Workflow(name="BAW_StandardWorkup_subject_{0}".format(subject))
    subjectWorkflow.base_dir = config.get('logging', 'log_directory')
    # subjectWorkflow.config['execution']['plugin'] = 'Linear'  # Hardcoded in WorkupT1T2.py - why?
    # DEBUG
    # subjectWorkflow.config['execution']['stop_on_first_rerun'] = 'true'
    # END DEBUG
    atlasNode = MakeAtlasNode(master_config['atlascache'], 'BAtlas')

    sessionWorkflow = dict()
    inputsSpec = dict()
    sessions = database.getSessionsFromSubject(subject)
    # print "These are the sessions: ", sessions
    if 'baseline' in master_config['components']:
        current_phase = 'baseline'
        from baseline import create_baseline as create_wkfl
    elif 'longitudinal' in master_config['components']:
        current_phase = 'longitudinal'
        from longitudinal import create_longitudinal as create_wkfl

    for session in sessions:  # TODO (future): Replace with iterable inputSpec node and add Function node for getAllFiles()
        project = database.getProjFromSession(session)
        pname = "{0}_{1}".format(session, current_phase)  # Long node names make graphs a pain to read/print
        # pname = GenerateWFName(project, subject, session, current_phase)
        print "Building session pipeline for {0}".format(session)
        inputsSpec[session] = pe.Node(name='inputspec_{0}'.format(session),
                                      interface=IdentityInterface(fields=['T1s', 'T2s', 'PDs', 'FLs', 'OTs']))
        inputsSpec[session].inputs.T1s = database.getFilenamesByScantype(session, ['T1-15', 'T1-30'])
        inputsSpec[session].inputs.T2s = database.getFilenamesByScantype(session, ['T2-15', 'T2-30'])
        inputsSpec[session].inputs.PDs = database.getFilenamesByScantype(session, ['PD-15', 'PD-30'])
        inputsSpec[session].inputs.FLs = database.getFilenamesByScantype(session, ['FL-15', 'FL-30'])
        inputsSpec[session].inputs.OTs = database.getFilenamesByScantype(session, ['OTHER-15', 'OTHER-30'])

        sessionWorkflow[session] = create_wkfl(project, subject, session, master_config,
                                               interpMode='Linear', pipeline_name=pname)

        subjectWorkflow.connect([(inputsSpec[session], sessionWorkflow[session], [('T1s', 'inputspec.T1s'),
                                                                                  ('T2s', 'inputspec.T2s'),
                                                                                  ('PDs', 'inputspec.PDs'),
                                                                                  ('FLs', 'inputspec.FLs'),
                                                                                  ('OTs', 'inputspec.OTHERs'),
                                                                                  ]),
                                 (atlasNode, sessionWorkflow[session], [('template_landmarks_50Lmks_fcsv',
                                                                         'inputspec.atlasLandmarkFilename'),
                                                                        ('template_weights_50Lmks_wts',
                                                                         'inputspec.atlasWeightFilename'),
                                                                        ('LLSModel_50Lmks_hdf5', 'inputspec.LLSModel'),
                                                                        ('T1_50Lmks_mdl', 'inputspec.inputTemplateModel')]),
                                ])
        if current_phase == 'baseline':
            subjectWorkflow.connect([(atlasNode, sessionWorkflow[session], [('template_t1', 'inputspec.template_t1'),
                                                                            ('ExtendedAtlasDefinition_xml',
                                                                             'inputspec.atlasDefinition')]),
                                 ])
        else:
            assert current_phase == 'longitudinal', "Phase value is unknown: {0}".format(current_phase)

    from utils import run_workflow, print_workflow
    if False:
        print_workflow(subjectWorkflow, plugin=master_config['execution']['plugin'], dotfilename='subjectWorkflow')
    return run_workflow(subjectWorkflow, plugin=master_config['execution']['plugin'], plugin_args=master_config['plugin_args'])
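
Both RunSubjectWorkflow variants lean on the list form of nipype's Workflow.connect(): a list of (source_node, dest_node, [(source_port, dest_port), ...]) tuples, where a source port may itself be an ('output', function) pair that transforms the value in flight, as the wrapfunc and xml_filename connections in Example #2 do. A minimal self-contained sketch with toy field names, not taken from the pipeline:

import nipype.pipeline.engine as pe
from nipype.interfaces.utility import IdentityInterface

demo = pe.Workflow(name='connect_demo')
src = pe.Node(IdentityInterface(fields=['x']), name='src')
dst = pe.Node(IdentityInterface(fields=['y']), name='dst')
src.inputs.x = 'hello'
# Route src.outputs.x into dst.inputs.y; more (out, in) pairs may share one tuple.
demo.connect([(src, dst, [('x', 'y')])])
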
Example #4
def main(args):
    subjects, master_config = args

    import os
    import sys
    import traceback

    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)
    assert config.get('execution', 'plugin') == master_config['execution']['plugin']

    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.ants as ants

    from template import MergeByExtendListElements, xml_filename
    from PipeLineFunctionHelpers import mapPosteriorList
    from atlasNode import GetAtlasNode, MakeNewAtlasTemplate
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.distributed import modify_qsub_args

    template = pe.Workflow(name='SubjectAtlas_Template')
    template.base_dir = master_config['logging']['log_directory']

    if 'previouscache' in master_config:
        # Running off previous baseline experiment
        BAtlas = GetAtlasNode(master_config['previouscache'], 'BAtlas')
    else:
        # Running after previous baseline experiment
        BAtlas = GetAtlasNode(os.path.dirname(master_config['atlascache']), 'BAtlas')
    inputspec = pe.Node(interface=IdentityInterface(fields=['subject']), name='inputspec')
    inputspec.iterables = ('subject', subjects)

    baselineDG = pe.Node(nio.DataGrabber(infields=['subject'], outfields=['t1_average', 't2_average', 'pd_average',
                                                                            'fl_average', 'outputLabels', 'posteriorImages']),
                         name='Baseline_DG')
    if 'previousresult' in master_config:
        baselineDG.inputs.base_directory = master_config['previousresult']
    else:
        baselineDG.inputs.base_directory = master_config['resultdir']
    baselineDG.inputs.sort_filelist = True
    baselineDG.inputs.raise_on_empty = False
    baselineDG.inputs.template = '*/%s/*/Baseline/%s.nii.gz'
    baselineDG.inputs.template_args['t1_average'] = [['subject', 't1_average_BRAINSABC']]
    baselineDG.inputs.template_args['t2_average'] = [['subject', 't2_average_BRAINSABC']]
    baselineDG.inputs.template_args['pd_average'] = [['subject', 'pd_average_BRAINSABC']]
    baselineDG.inputs.template_args['fl_average'] = [['subject', 'fl_average_BRAINSABC']]
    baselineDG.inputs.template_args['outputLabels'] = [['subject', 'brain_label_seg']]
    baselineDG.inputs.field_template = {'posteriorImages': '*/%s/*/TissueClassify/POSTERIOR_%s.nii.gz'}
    posterior_files = ['AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS', 'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM',
                       'SURFGM', 'THALAMUS', 'VB', 'WM']
    baselineDG.inputs.template_args['posteriorImages'] = [['subject', posterior_files]]

    MergeByExtendListElementsNode = pe.Node(Function(function=MergeByExtendListElements,
                                                     input_names=['t1s', 't2s',
                                                                  'pds', 'fls',
                                                                  'labels', 'posteriors'],
                                                     output_names=['ListOfImagesDictionaries', 'registrationImageTypes',
                                                                   'interpolationMapping']),
                                            run_without_submitting=True, name="99_MergeByExtendListElements")
    from PipeLineFunctionHelpers import WrapPosteriorImagesFromDictionaryFunction as wrapfunc
    template.connect([(inputspec, baselineDG, [('subject', 'subject')]),
                      (baselineDG, MergeByExtendListElementsNode, [('t1_average', 't1s'),
                                                                   ('t2_average', 't2s'),
                                                                   ('pd_average', 'pds'),
                                                                   ('fl_average', 'fls'),
                                                                   ('outputLabels', 'labels'),
                                                                   (('posteriorImages', wrapfunc), 'posteriors')])
                    ])

    myInitAvgWF = pe.Node(interface=ants.AverageImages(), name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
    myInitAvgWF.inputs.dimension = 3
    myInitAvgWF.inputs.normalize = True
    template.connect(baselineDG, 't1_average', myInitAvgWF, "images")
    ####################################################################################################
    # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
    # if numSessions == 1:
    #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
    ####################################################################################################
    from BAWantsRegistrationBuildTemplate import BAWantsRegistrationTemplateBuildSingleIterationWF as registrationWF
    buildTemplateIteration1 = registrationWF('iteration01')
    # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
    buildTemplateIteration2 = registrationWF('Iteration02')

    MakeNewAtlasTemplateNode = pe.Node(interface=Function(function=MakeNewAtlasTemplate,
                                                          input_names=['t1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'],
                                                          output_names=['outAtlasFullPath', 'clean_deformed_list']),
                                       # This is a lot of work, so submit it run_without_submitting=True,
                                       run_without_submitting=True,  # HACK:  THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
                                       name='99_MakeNewAtlasTemplate')

    if master_config['execution']['plugin'] == 'SGE':  # for some nodes, the qsub call needs to be modified on the cluster

        MakeNewAtlasTemplateNode.plugin_args = {'template': master_config['plugin_args']['template'],
                                                'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1, 1),
                                                'overwrite': True}
        for bt in [buildTemplateIteration1, buildTemplateIteration2]:
            ##################################################
            # *** Hans, is this TODO already addressed? ***  #
            # ---->  # TODO:  Change these parameters  <---- #
            ##################################################
            BeginANTS = bt.get_node("BeginANTS")
            BeginANTS.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                     'qsub_args': modify_qsub_args(master_config['queue'], '9000M', 4, hard=False)}
            wimtdeformed = bt.get_node("wimtdeformed")
            wimtdeformed.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                        'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)}
            AvgAffineTransform = bt.get_node("AvgAffineTransform")
            AvgAffineTransform.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                              'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1)}
            wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
            wimtPassivedeformed.plugin_args = {'template': master_config['plugin_args']['template'], 'overwrite': True,
                                                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)}

    template.connect([(myInitAvgWF, buildTemplateIteration1, [('output_average_image', 'inputspec.fixed_image')]),
                      (MergeByExtendListElementsNode, buildTemplateIteration1, [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                                                                                ('registrationImageTypes', 'inputspec.registrationImageTypes'),
                                                                                ('interpolationMapping', 'inputspec.interpolationMapping')]),
                      (buildTemplateIteration1, buildTemplateIteration2, [('outputspec.template', 'inputspec.fixed_image')]),
                      (MergeByExtendListElementsNode, buildTemplateIteration2, [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                                                                                ('registrationImageTypes', 'inputspec.registrationImageTypes'),
                                                                                ('interpolationMapping', 'inputspec.interpolationMapping')]),
                      (inputspec, MakeNewAtlasTemplateNode, [(('subject', xml_filename), 'outDefinition')]),
                      (BAtlas, MakeNewAtlasTemplateNode, [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
                      (buildTemplateIteration2, MakeNewAtlasTemplateNode, [('outputspec.template', 't1_image'),
                                                                           ('outputspec.passive_deformed_templates', 'deformed_list')]),
                      ])

    # Create DataSinks
    Atlas_DataSink = pe.Node(nio.DataSink(), name="Atlas_DS")
    Atlas_DataSink.overwrite = master_config['ds_overwrite']
    Atlas_DataSink.inputs.base_directory = master_config['resultdir']

    Subject_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
    Subject_DataSink.overwrite = master_config['ds_overwrite']
    Subject_DataSink.inputs.base_directory = master_config['resultdir']

    template.connect([(inputspec, Atlas_DataSink, [('subject', 'container')]),
                      (buildTemplateIteration1, Atlas_DataSink, [('outputspec.template', 'Atlas.iteration1')]),  # Unnecessary
                      (MakeNewAtlasTemplateNode, Atlas_DataSink, [('outAtlasFullPath', 'Atlas.definitions')]),
                      (BAtlas, Atlas_DataSink, [('template_landmarks_50Lmks_fcsv', 'Atlas.20111119_BCD.@fcsv'),
                                                ('template_weights_50Lmks_wts', 'Atlas.20111119_BCD.@wts'),
                                                ('LLSModel_50Lmks_hdf5', 'Atlas.20111119_BCD.@hdf5'),
                                                ('T1_50Lmks_mdl', 'Atlas.20111119_BCD.@mdl')]),
                      (inputspec, Subject_DataSink, [(('subject', outputPattern), 'regexp_substitutions')]),
                      (buildTemplateIteration2, Subject_DataSink, [('outputspec.template', 'ANTSTemplate.@template')]),
                      (MakeNewAtlasTemplateNode, Subject_DataSink, [('clean_deformed_list', 'ANTSTemplate.@passive_deformed_templates')]),
                     ])

    from utils import run_workflow, print_workflow
    if False:
        print_workflow(template, plugin=master_config['execution']['plugin'], dotfilename='template')
    return run_workflow(template, plugin=master_config['execution']['plugin'], plugin_args=master_config['plugin_args'])
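
The Baseline_DG grabber above is doing ordinary %-substitution plus globbing. The snippet below re-enacts what it does for the t1_average field without nipype, to make the template/template_args relationship concrete; the base directory and subject ID are placeholders.

import glob
import os

base_directory = '/prev/experiment/results'  # stands in for master_config['previousresult']
subject = '0001'                             # one value from the 'subject' iterable

# template '*/%s/*/Baseline/%s.nii.gz' + template_args [['subject', 't1_average_BRAINSABC']]
pattern = os.path.join(base_directory,
                       '*/%s/*/Baseline/%s.nii.gz' % (subject, 't1_average_BRAINSABC'))
t1_average = sorted(glob.glob(pattern))  # sort_filelist=True sorts the matches
# raise_on_empty=False means an empty match list is passed downstream instead of
# raising, which is what lets subjects without PD/FL scans keep running.
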
Example #5
def RunSubjectWorkflow(args):
    """
                           .-----------.
                       --- | Session 1 | ---> /project/subjectA/session1/phase/
                     /     *-----------*
    .-----------.   /
    | Subject A | <
    *-----------*   \
                     \     .-----------.
                       --- | Session 2 | ---> /project/subjectA/session2/phase/
                           *-----------*
    **** Replaces WorkflowT1T2.py ****
    """
    start_time, subject, master_config = args
    assert 'baseline' in master_config['components'] or 'longitudinal' in master_config['components'], "Baseline or Longitudinal is not in WORKFLOW_COMPONENTS!"
    import time

    from nipype import config, logging
    config.update_config(master_config)  # Set universal pipeline options
    assert config.get('execution', 'plugin') == master_config['execution']['plugin']
    # DEBUG
    # config.enable_debug_mode()
    # config.set('execution', 'stop_on_first_rerun', 'true')
    # END DEBUG
    logging.update_logging(config)

    import nipype.pipeline.engine as pe
    import nipype.interfaces.base as nbase
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import traits

    from baw_exp import OpenSubjectDatabase
    from SessionDB import SessionDB
    from PipeLineFunctionHelpers import convertToList
    from atlasNode import MakeAtlasNode, GetAtlasNode
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.misc import GenerateWFName
    from utils import run_workflow, print_workflow

    while time.time() < start_time:
        time.sleep(start_time - time.time() + 1)
        print "Delaying start for {subject}".format(subject=subject)
    print("===================== SUBJECT: {0} ===========================".format(subject))

    subjectWorkflow = pe.Workflow(name="BAW_StandardWorkup_subject_{0}".format(subject))
    subjectWorkflow.base_dir = config.get('logging', 'log_directory')
    # subjectWorkflow.config['execution']['plugin'] = 'Linear'  # Hardcoded in WorkupT1T2.py - why?
    # DEBUG
    # subjectWorkflow.config['execution']['stop_on_first_rerun'] = 'true'
    # END DEBUG

    sessionWorkflow = dict()
    inputsSpec = dict()
    # To avoid a "sqlite3.ProgrammingError: Base Cursor.__init__ not called" error
    #    using multiprocessing.map_async(), instantiate database here
    database = OpenSubjectDatabase(master_config['cachedir'], [subject], master_config['prefix'], master_config['dbfile'])
    # print database.getAllSessions()
    database.open_connection()

    sessions = database.getSessionsFromSubject(subject)
    # print "These are the sessions: ", sessions
    if 'baseline' in master_config['components']:
        current_phase = 'baseline'
        atlasNode = MakeAtlasNode(master_config['atlascache'], 'BAtlas')
    elif 'longitudinal' in master_config['components']:
        current_phase = 'longitudinal'
        atlasNode = GetAtlasNode(master_config['previouscache'], 'BAtlas')
    from longitudinal import create_longitudinal as create_wkfl

    for session in sessions:  # TODO (future): Replace with iterable inputSpec node and add Function node for getAllFiles()
        project = database.getProjFromSession(session)
        pname = "{0}_{1}".format(session, current_phase)  # Long node names make graphs a pain to read/print
        # pname = GenerateWFName(project, subject, session, current_phase)
        print "Building session pipeline for {0}".format(session)
        inputsSpec[session] = pe.Node(name='inputspec_{0}'.format(session),
                                      interface=IdentityInterface(fields=['T1s', 'T2s', 'PDs', 'FLs', 'OTs']))
        inputsSpec[session].inputs.T1s = database.getFilenamesByScantype(session, ['T1-15', 'T1-30'])
        inputsSpec[session].inputs.T2s = database.getFilenamesByScantype(session, ['T2-15', 'T2-30'])
        inputsSpec[session].inputs.PDs = database.getFilenamesByScantype(session, ['PD-15', 'PD-30'])
        inputsSpec[session].inputs.FLs = database.getFilenamesByScantype(session, ['FL-15', 'FL-30'])
        inputsSpec[session].inputs.OTs = database.getFilenamesByScantype(session, ['OTHER-15', 'OTHER-30'])

        sessionWorkflow[session] = create_wkfl(project, subject, session, master_config,
                                               interpMode='Linear', pipeline_name=pname)

        subjectWorkflow.connect([(inputsSpec[session], sessionWorkflow[session], [('T1s', 'inputspec.T1s'),
                                                                                  ('T2s', 'inputspec.T2s'),
                                                                                  ('PDs', 'inputspec.PDs'),
                                                                                  ('FLs', 'inputspec.FLs'),
                                                                                  ('OTs', 'inputspec.OTHERs'),
                                                                                  ]),
                                 (atlasNode, sessionWorkflow[session], [('template_landmarks_50Lmks_fcsv',
                                                                         'inputspec.atlasLandmarkFilename'),
                                                                        ('template_weights_50Lmks_wts',
                                                                         'inputspec.atlasWeightFilename'),
                                                                        ('LLSModel_50Lmks_hdf5', 'inputspec.LLSModel'),
                                                                        ('T1_50Lmks_mdl', 'inputspec.inputTemplateModel')]),
                                ])
        if 'segmentation' in master_config['components']:
            from WorkupT1T2BRAINSCut import GenerateWFName
            try:
                bCutInputName = ".".join(['segmentation', GenerateWFName(project, subject, session, 'Segmentation'), 'inputspec'])
            except Exception:
                print(project, subject, session)
                raise
            subjectWorkflow.connect([(atlasNode, sessionWorkflow[session],
                                      [('hncma-atlas', 'segmentation.inputspec.hncma-atlas'),
                                       ('template_t1', 'segmentation.inputspec.template_t1'),
                                       ('template_t1', bCutInputName + '.template_t1'),
                                       ('rho', bCutInputName + '.rho'),
                                       ('phi', bCutInputName + '.phi'),
                                       ('theta', bCutInputName + '.theta'),
                                       ('l_caudate_ProbabilityMap', bCutInputName + '.l_caudate_ProbabilityMap'),
                                       ('r_caudate_ProbabilityMap', bCutInputName + '.r_caudate_ProbabilityMap'),
                                       ('l_hippocampus_ProbabilityMap', bCutInputName + '.l_hippocampus_ProbabilityMap'),
                                       ('r_hippocampus_ProbabilityMap', bCutInputName + '.r_hippocampus_ProbabilityMap'),
                                       ('l_putamen_ProbabilityMap', bCutInputName + '.l_putamen_ProbabilityMap'),
                                       ('r_putamen_ProbabilityMap', bCutInputName + '.r_putamen_ProbabilityMap'),
                                       ('l_thalamus_ProbabilityMap', bCutInputName + '.l_thalamus_ProbabilityMap'),
                                       ('r_thalamus_ProbabilityMap', bCutInputName + '.r_thalamus_ProbabilityMap'),
                                       ('l_accumben_ProbabilityMap', bCutInputName + '.l_accumben_ProbabilityMap'),
                                       ('r_accumben_ProbabilityMap', bCutInputName + '.r_accumben_ProbabilityMap'),
                                       ('l_globus_ProbabilityMap', bCutInputName + '.l_globus_ProbabilityMap'),
                                       ('r_globus_ProbabilityMap', bCutInputName + '.r_globus_ProbabilityMap'),
                                       ('trainModelFile_txtD0060NT0060_gz',
                                        bCutInputName + '.trainModelFile_txtD0060NT0060_gz')])])
        if current_phase == 'baseline':
            subjectWorkflow.connect([(atlasNode, sessionWorkflow[session], [('template_t1', 'inputspec.template_t1'),
                                                                            ('ExtendedAtlasDefinition_xml',
                                                                             'inputspec.atlasDefinition')]),
                                 ])
        else:
            assert current_phase == 'longitudinal', "Phase value is unknown: {0}".format(current_phase)
            template_DG = pe.Node(interface=nio.DataGrabber(infields=['subject'],
                                  outfields=['template_t1', 'outAtlasFullPath']),
                                  name='Template_DG')
            template_DG.inputs.base_directory = master_config['previousresult']
            template_DG.inputs.subject = subject
            template_DG.inputs.template = 'SUBJECT_TEMPLATES/%s/AVG_%s.nii.gz'
            template_DG.inputs.template_args['template_t1'] = [['subject', 'T1']]
            template_DG.inputs.field_template = {'outAtlasFullPath': 'Atlas/definitions/AtlasDefinition_%s.xml'}
            template_DG.inputs.template_args['outAtlasFullPath'] = [['subject']]
            template_DG.inputs.sort_filelist = True
            template_DG.inputs.raise_on_empty = True

            subjectWorkflow.connect([(template_DG, sessionWorkflow[session], [('outAtlasFullPath', 'inputspec.atlasDefinition'),
                                                                              ('template_t1', 'inputspec.template_t1')]),
                                     ])
        if False:  # DEBUG switch: flip to True to print the workflow graph instead of running it
            return print_workflow(subjectWorkflow,
                                  plugin=master_config['execution']['plugin'], dotfilename='subjectWorkflow')  # , graph2use='flat')
    return run_workflow(subjectWorkflow, plugin=master_config['execution']['plugin'], plugin_args=master_config['plugin_args'])
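
Example #5's one substantive change from Examples #1/#3 is that the database no longer travels through args: OpenSubjectDatabase is called inside the worker, which is the clean fix for the sqlite3.ProgrammingError the earlier HACK comments work around. The toy below demonstrates the same pattern (a fresh connection per process) with plain sqlite3; the table, schema, and path are invented for the demo and have nothing to do with the real SessionDB.

import multiprocessing
import sqlite3

DB_PATH = '/tmp/sessions_demo.db'  # throwaway demo database

def worker(subject):
    # A fresh connection per process: sqlite3 objects must not cross the fork,
    # which is exactly why Example #5 calls OpenSubjectDatabase inside the worker.
    conn = sqlite3.connect(DB_PATH)
    rows = conn.execute('SELECT session FROM scans WHERE subject = ?', (subject,)).fetchall()
    conn.close()
    return subject, [r[0] for r in rows]

if __name__ == '__main__':
    conn = sqlite3.connect(DB_PATH)
    conn.execute('DROP TABLE IF EXISTS scans')
    conn.execute('CREATE TABLE scans (subject TEXT, session TEXT)')
    conn.executemany('INSERT INTO scans VALUES (?, ?)',
                     [('subjA', 'ses1'), ('subjA', 'ses2'), ('subjB', 'ses1')])
    conn.commit()
    conn.close()
    pool = multiprocessing.Pool(2)
    print(pool.map(worker, ['subjA', 'subjB']))
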