Exemplo n.º 1
0
def main(environment, experiment, pipeline, cluster, **kwargs):
    """
    Configure Nipype options, then write the cross-validation subset file.

    :param environment: environment configuration dictionary
    :param experiment: experiment configuration dictionary
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :param kwargs: parsed command-line arguments; reads '--sampleFile',
        '--testSamplesize' and '--header'
    :return: None
    """
    from utilities.configFileParser import nipype_options

    print("Copying Atlas directory and determining appropriate Nipype options...")
    # Merge the command-line arguments into the pipeline configuration.
    pipeline = nipype_options(kwargs, pipeline, cluster, experiment, environment)
    print("Getting session(s) from database...")

    sample_file = kwargs["--sampleFile"]
    sample_size = kwargs["--testSamplesize"]
    has_header = kwargs["--header"]
    write_cvsubset_file(
        environment,
        experiment,
        pipeline,
        cluster,
        csv_file=sample_file,
        test_size=sample_size,
        hasHeader=has_header,
    )
Exemplo n.º 2
0
def _main(environment, experiment, pipeline, cluster, **kwds):
    """
    Configure Nipype options and run all sessions found in the database.

    :param environment: environment configuration dictionary
    :param experiment: experiment configuration dictionary
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :param kwds: parsed command-line arguments; reads 'SESSIONS'
    :return: 0 on success
    """
    from utilities.configFileParser import nipype_options

    # FIX: Python 2 print statements converted to print() calls (the
    # surrounding code base uses the Python 3 form); dropped the unused
    # local import of add_dict.
    print("Copying Atlas directory and determining appropriate Nipype options...")
    pipeline = nipype_options(kwds, pipeline, cluster, experiment, environment)  # Generate Nipype options
    print("Getting session(s) from database...")
    createAndRun(kwds['SESSIONS'], environment, experiment, pipeline, cluster)
    return 0
Exemplo n.º 3
0
def _SingleSession_main(environment, experiment, pipeline, cluster, **kwds):
    """
    Configure Nipype options and run the requested sessions one at a time.

    :param environment: environment configuration dictionary
    :param experiment: experiment configuration dictionary
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :param kwds: parsed command-line arguments; reads 'SESSIONS',
        '--use-sentinal' and '--dry-run'
    :return: 0 on success
    """
    from utilities.configFileParser import nipype_options

    print("Copying Atlas directory and determining appropriate Nipype options...")
    # Merge the command-line arguments into the pipeline configuration.
    pipeline = nipype_options(kwds, pipeline, cluster, experiment, environment)
    print("Getting session(s) from database...")
    sessions = kwds['SESSIONS']
    use_sentinal = kwds['--use-sentinal']
    dry_run = kwds['--dry-run']
    createAndRun(sessions, environment, experiment, pipeline, cluster,
                 useSentinal=use_sentinal, dryRun=dry_run)
    return 0
Exemplo n.º 4
0
def _SingleSession_main(environment, experiment, pipeline, cluster, **kwds):
    """
    Configure Nipype options and run the requested sessions one at a time.

    :param environment: environment configuration dictionary
    :param experiment: experiment configuration dictionary
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :param kwds: parsed command-line arguments; reads 'SESSIONS',
        '--use-sentinal' and '--dry-run'
    :return: 0 on success
    """
    from utilities.configFileParser import nipype_options

    # FIX: Python 2 print statements converted to print() calls (the
    # surrounding code base uses the Python 3 form).
    print("Copying Atlas directory and determining appropriate Nipype options...")
    pipeline = nipype_options(kwds, pipeline, cluster, experiment, environment)  # Generate Nipype options
    print("Getting session(s) from database...")
    createAndRun(kwds['SESSIONS'], environment, experiment, pipeline, cluster, useSentinal=kwds['--use-sentinal'],
                 dryRun=kwds['--dry-run'])
    return 0
Exemplo n.º 5
0
def main(environment, experiment, pipeline, cluster, **kwargs):
    """
    Configure Nipype options, then write the cross-validation subset file.

    :param environment: environment configuration dictionary
    :param experiment: experiment configuration dictionary
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :param kwargs: parsed command-line arguments; reads '--sampleFile',
        '--testSamplesize' and '--header'
    :return: None
    """
    from utilities.configFileParser import nipype_options

    # FIX: Python 2 print statements converted to print() calls (the
    # surrounding code base uses the Python 3 form).
    print("Copying Atlas directory and determining appropriate Nipype options...")
    pipeline = nipype_options(kwargs, pipeline, cluster, experiment, environment)  # Generate Nipype options
    print("Getting session(s) from database...")

    writeCVSubsetFile(environment,
                      experiment,
                      pipeline,
                      cluster,
                      csv_file=kwargs['--sampleFile'],
                      test_size=kwargs['--testSamplesize'],
                      hasHeader=kwargs['--header'])
Exemplo n.º 6
0
def main(environment, experiment, pipeline, cluster, **kwargs):
    """
    Configure Nipype options, then write the cross-validation subset file.

    :param environment: environment configuration dictionary
    :param experiment: experiment configuration dictionary
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :param kwargs: parsed command-line arguments; reads '--sampleFile',
        '--testSamplesize' and '--header'
    :return: None
    """
    from utilities.configFileParser import nipype_options

    print("Copying Atlas directory and determining appropriate Nipype options...")
    # Merge the command-line arguments into the pipeline configuration.
    pipeline = nipype_options(kwargs, pipeline, cluster, experiment, environment)
    print("Getting session(s) from database...")

    subset_kwargs = {
        'csv_file': kwargs['--sampleFile'],
        'test_size': kwargs['--testSamplesize'],
        'hasHeader': kwargs['--header'],
    }
    writeCVSubsetFile(environment, experiment, pipeline, cluster, **subset_kwargs)
Exemplo n.º 7
0
def run(argv, environment, experiment, pipeline, cluster):
    """
    Build the subjects database, merge configuration, and dispatch jobs.

    :param argv: parsed command-line arguments
    :param environment: environment configuration dictionary; reads 'prefix'
        and 'cluster'
    :param experiment: experiment configuration dictionary; reads 'cachedir'
        and 'dbfile'
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :return: result of dispatcher()
    """
    from utilities.configFileParser import nipype_options
    from utilities.misc import add_dict
    from utilities.distributed import create_global_sge_script

    # FIX: Python 2 print statements converted to print() calls (the
    # surrounding code base uses the Python 3 form).
    print("Getting subjects from database...")
    # Build database before parallel section
    subjects = get_subjects(argv, experiment['cachedir'], environment['prefix'], experiment['dbfile'])
    if environment['cluster']:
        print("Creating SGE template string...")
        node_template = create_global_sge_script(cluster, environment)
    else:
        node_template = None
    print("Copying Atlas directory and determining appropriate Nipype options...")
    pipeline = nipype_options(argv, pipeline, cluster, node_template, experiment)  # Generate Nipype options
    # Flatten the four configuration dictionaries into one master config.
    master_config = {}
    for configDict in [environment, experiment, pipeline, cluster]:
        master_config = add_dict(master_config, configDict)
    print("Dispatching jobs to the system...")
    return dispatcher(master_config, subjects)
Exemplo n.º 8
0
def run(argv, environment, experiment, pipeline, cluster):
    """
    Build the subjects database, merge configuration, and dispatch jobs.

    :param argv: parsed command-line arguments
    :param environment: environment configuration dictionary; reads 'prefix'
        and 'cluster'
    :param experiment: experiment configuration dictionary; reads 'cachedir'
        and 'dbfile'
    :param pipeline: pipeline configuration dictionary
    :param cluster: cluster configuration dictionary
    :return: result of dispatcher()
    """
    from utilities.configFileParser import nipype_options
    from utilities.misc import add_dict
    from utilities.distributed import create_global_sge_script

    # FIX: Python 2 print statements converted to print() calls (the
    # surrounding code base uses the Python 3 form).
    print("Getting subjects from database...")
    subjects = get_subjects(
        argv, experiment['cachedir'], environment['prefix'],
        experiment['dbfile'])  # Build database before parallel section
    if environment['cluster']:
        print("Creating SGE template string...")
        node_template = create_global_sge_script(cluster, environment)
    else:
        node_template = None
    print("Copying Atlas directory and determining appropriate Nipype options...")
    pipeline = nipype_options(argv, pipeline, cluster, node_template,
                              experiment)  # Generate Nipype options
    # Flatten the four configuration dictionaries into one master config.
    master_config = {}
    for configDict in [environment, experiment, pipeline, cluster]:
        master_config = add_dict(master_config, configDict)
    print("Dispatching jobs to the system...")
    return dispatcher(master_config, subjects)
Exemplo n.º 9
0
def _template_runner(argv, environment, experiment, pipeline_options, cluster):
    """
    Build and run a subject-specific atlas/template workflow per subject.

    For each subject found in the database, this constructs a Nipype
    workflow that: grabs the required per-session images from a previous
    experiment's results, averages the T1s as an initial template, runs
    two ANTs template-building iterations, creates an atlas XML
    definition with cleaned deformed averages, averages the ACPC landmark
    files, and sinks everything into the experiment's result directory.

    :param argv: parsed command-line arguments (docopt-style dict); reads
        'SUBJECTS', '--use-sentinal', '--use-shuffle' and '--dotfilename'
    :param environment: environment configuration dictionary; reads 'prefix'
    :param experiment: experiment configuration dictionary; reads 'cachedir',
        'resultdir', 'dbfile', 'previousresult' and 'atlascache'
    :param pipeline_options: base pipeline configuration merged by
        nipype_options into per-subject options
    :param cluster: cluster configuration dictionary; reads 'queue' and
        'long_q'
    :return: None
    """
    print("Getting subjects from database...")
    # subjects = argv["--subjects"].split(',')
    subjects, subjects_sessions_dictionary = get_subjects_sessions_dictionary(argv['SUBJECTS'],
            experiment['cachedir'],
            experiment['resultdir'],
            environment['prefix'],
            experiment['dbfile'],
            argv['--use-sentinal'], argv['--use-shuffle']
            ) # Build database before parallel section
    useSentinal = argv['--use-sentinal']
    # NOTE(review): useSentinal is assigned but not referenced again in this
    # function — confirm whether it is dead or used further down.

    # Quick preliminary sanity check
    for thisSubject in subjects:
        if len(subjects_sessions_dictionary[thisSubject]) == 0:
            print("ERROR: subject {0} has no sessions found.  Did you supply a valid subject id on the command line?".format(thisSubject) )
            sys.exit(-1)

    # Construct and run (or render) one template-building workflow per subject.
    for thisSubject in subjects:
        print("Processing atlas generation for this subject: {0}".format(thisSubject))
        print("="*80)
        print("Copying Atlas directory and determining appropriate Nipype options...")
        subj_pipeline_options = nipype_options(argv, pipeline_options, cluster, experiment, environment)  # Generate Nipype options
        print("Dispatching jobs to the system...")
        ######
        ###### Now start workflow construction
        ######
        # Set universal pipeline options
        nipype_config.update_config(subj_pipeline_options)

        # Verify every session of this subject has exactly one previous-result
        # T1 average on disk; otherwise skip the subject for now.
        ready_for_template_building = True
        for thisSession in subjects_sessions_dictionary[thisSubject]:
            path_test = os.path.join(experiment['previousresult'],'*/{0}/{1}/TissueClassify/t1_average_BRAINSABC.nii.gz'.format(thisSubject,thisSession))
            t1_file_result = glob.glob(path_test)
            if len(t1_file_result) != 1:
                print("Incorrect number of t1 images found for data grabber {0}".format(t1_file_result))
                print("     at path {0}".format(path_test))
                ready_for_template_building = False
        if not ready_for_template_building:
            print("TEMPORARY SKIPPING:  Not ready to process {0}".format(thisSubject))
            continue

        base_output_directory = os.path.join(subj_pipeline_options['logging']['log_directory'],thisSubject)
        template = pe.Workflow(name='SubjectAtlas_Template_'+thisSubject)
        template.base_dir = base_output_directory

        # Identity node that feeds the subject id into the workflow graph.
        subjectNode = pe.Node(interface=IdentityInterface(fields=['subject']), run_without_submitting=True, name='99_subjectIterator')
        subjectNode.inputs.subject = thisSubject

        # Extracts this subject's session list from the sessions dictionary.
        sessionsExtractorNode = pe.Node(Function(function=getSessionsFromSubjectDictionary,
                                                          input_names=['subject_session_dictionary','subject'],
                                                          output_names=['sessions']),
                                       run_without_submitting=True, name="99_sessionsExtractor")
        sessionsExtractorNode.inputs.subject_session_dictionary = subjects_sessions_dictionary



        # DataGrabber for optional modalities (T2/PD/FL averages); missing
        # files are tolerated (raise_on_empty=False below).
        baselineOptionalDG = pe.MapNode(nio.DataGrabber(infields=['subject','session'],
                                                        outfields=[ 't2_average', 'pd_average',
                                                                   'fl_average'],
                                                       run_without_submitting=True
                                                       ),
                                        run_without_submitting=True,
                                        iterfield=['session'], name='BaselineOptional_DG')

        baselineOptionalDG.inputs.base_directory = experiment['previousresult']
        baselineOptionalDG.inputs.sort_filelist = True
        baselineOptionalDG.inputs.raise_on_empty = False
        baselineOptionalDG.inputs.template = '*'

        baselineOptionalDG.inputs.field_template = {
                                            't2_average':'*/%s/%s/TissueClassify/t2_average_BRAINSABC.nii.gz',
                                            'pd_average':'*/%s/%s/TissueClassify/pd_average_BRAINSABC.nii.gz',
                                            'fl_average':'*/%s/%s/TissueClassify/fl_average_BRAINSABC.nii.gz'
                                       }
        baselineOptionalDG.inputs.template_args  = {
                                            't2_average':[['subject','session']],
                                            'pd_average':[['subject','session']],
                                            'fl_average':[['subject','session']]
                                       }



        # DataGrabber for required inputs (T1 average, brain mask labels,
        # posteriors, warped-atlas passives, ACPC landmarks); a missing file
        # is an error here (raise_on_empty=True below).
        baselineRequiredDG = pe.MapNode(nio.DataGrabber(infields=['subject','session'],
                                                outfields=['t1_average', 'brainMaskLabels',
                                                           'posteriorImages','passive_intensities','passive_masks',
                                                           'BCD_ACPC_Landmarks_fcsv'],
                                run_without_submitting=True
                                ),
                                run_without_submitting=True,
                                iterfield=['session'], name='Baseline_DG')

        baselineRequiredDG.inputs.base_directory = experiment['previousresult']
        baselineRequiredDG.inputs.sort_filelist = True
        baselineRequiredDG.inputs.raise_on_empty = True
        baselineRequiredDG.inputs.template = '*'
        posterior_files = ['AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
                           'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM']
        passive_intensities_files = [
            'rho.nii.gz',
            'phi.nii.gz',
            'theta.nii.gz',
            'l_thalamus_ProbabilityMap.nii.gz',
            'r_accumben_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap.nii.gz',
            'l_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap.nii.gz',
            'l_putamen_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap.nii.gz',
            'r_putamen_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap.nii.gz',
            'r_hippocampus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap.nii.gz',
            'l_hippocampus_ProbabilityMap.nii.gz'
            ]
        passive_mask_files = [
            'template_WMPM2_labels.nii.gz',
            'hncma_atlas.nii.gz',
            'template_nac_labels.nii.gz',
            'template_leftHemisphere.nii.gz',
            'template_rightHemisphere.nii.gz',
            'template_ventricles.nii.gz',
            'template_headregion.nii.gz'
            ]

        baselineRequiredDG.inputs.field_template = {'t1_average':'*/%s/%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
                                       'brainMaskLabels':'*/%s/%s/TissueClassify/complete_brainlabels_seg.nii.gz',
                               'BCD_ACPC_Landmarks_fcsv':'*/%s/%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
                                       'posteriorImages':'*/%s/%s/TissueClassify/POSTERIOR_%s.nii.gz',
                                   'passive_intensities':'*/%s/%s/WarpedAtlas2Subject/%s',
                                         'passive_masks':'*/%s/%s/WarpedAtlas2Subject/%s',
                                       }
        baselineRequiredDG.inputs.template_args  = {'t1_average':[['subject','session']],
                                       'brainMaskLabels':[['subject','session']],
                               'BCD_ACPC_Landmarks_fcsv':[['subject','session']],
                                       'posteriorImages':[['subject','session', posterior_files]],
                                   'passive_intensities':[['subject','session', passive_intensities_files]],
                                         'passive_masks':[['subject','session', passive_mask_files]]
                                       }

        # Merges the per-session image lists into the dictionary structure the
        # template-building workflow expects.
        MergeByExtendListElementsNode = pe.Node(Function(function=MergeByExtendListElements,
                                                         input_names=['t1s', 't2s',
                                                                      'pds', 'fls',
                                                                      'labels', 'posteriors',
                                                                      'passive_intensities', 'passive_masks'
                                                                      ],
                                                         output_names=['ListOfImagesDictionaries', 'registrationImageTypes',
                                                                       'interpolationMapping']),
                                                run_without_submitting=True, name="99_MergeByExtendListElements")

        template.connect([(subjectNode, baselineRequiredDG, [('subject', 'subject')]),
                          (subjectNode, baselineOptionalDG, [('subject', 'subject')]),
                          (subjectNode, sessionsExtractorNode, [('subject','subject')]),
                          (sessionsExtractorNode, baselineRequiredDG, [('sessions', 'session')]),
                          (sessionsExtractorNode, baselineOptionalDG, [('sessions', 'session')]),
                          (baselineRequiredDG, MergeByExtendListElementsNode,
                                    [('t1_average', 't1s'),
                                     ('brainMaskLabels', 'labels'),
                                     (('posteriorImages',
                                        ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'posteriors')
                                     ]),
                          (baselineOptionalDG, MergeByExtendListElementsNode,
                                    [
                                     ('t2_average', 't2s'),
                                     ('pd_average', 'pds'),
                                     ('fl_average', 'fls')
                                     ]),
                          (baselineRequiredDG, MergeByExtendListElementsNode,
                                     [
                                      (('passive_intensities',
                                        ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'passive_intensities')
                                     ]),
                          (baselineRequiredDG, MergeByExtendListElementsNode,
                                     [
                                     (('passive_masks',
                                        ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists), 'passive_masks')
                                     ])
                        ])

        # Average of the per-session T1s seeds the first template iteration.
        myInitAvgWF = pe.Node(interface=ants.AverageImages(), name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
        myInitAvgWF.inputs.dimension = 3
        myInitAvgWF.inputs.normalize = True
        myInitAvgWF.inputs.num_threads = -1
        template.connect(baselineRequiredDG, 't1_average', myInitAvgWF, "images")
        ####################################################################################################
        # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
        # if numSessions == 1:
        #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
        ####################################################################################################
        CLUSTER_QUEUE=cluster['queue']
        CLUSTER_QUEUE_LONG=cluster['long_q']
        buildTemplateIteration1 = BAWantsRegistrationTemplateBuildSingleIterationWF('iteration01',CLUSTER_QUEUE,CLUSTER_QUEUE_LONG)
        # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
        buildTemplateIteration2 = BAWantsRegistrationTemplateBuildSingleIterationWF('Iteration02',CLUSTER_QUEUE,CLUSTER_QUEUE_LONG)

        CreateAtlasXMLAndCleanedDeformedAveragesNode = pe.Node(interface=Function(function=CreateAtlasXMLAndCleanedDeformedAverages,
                                                              input_names=['t1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'],
                                                              output_names=['outAtlasFullPath', 'clean_deformed_list']),
                                           # This is a lot of work, so submit it run_without_submitting=True,
                                           run_without_submitting=True,  # HACK:  THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
                                           name='99_CreateAtlasXMLAndCleanedDeformedAverages')

        # Override qsub resource requests for the heavy nodes when running
        # under an SGE-family plugin.
        if subj_pipeline_options['plugin_name'].startswith('SGE'):  # for some nodes, the qsub call needs to be modified on the cluster

            CreateAtlasXMLAndCleanedDeformedAveragesNode.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'],
                                                    'qsub_args': modify_qsub_args(cluster['queue'], 1, 1, 1),
                                                    'overwrite': True}
            for bt in [buildTemplateIteration1, buildTemplateIteration2]:
                BeginANTS = bt.get_node("BeginANTS")
                BeginANTS.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                                         'qsub_args': modify_qsub_args(cluster['queue'], 7, 4, 16)}
                wimtdeformed = bt.get_node("wimtdeformed")
                wimtdeformed.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                                            'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 2)}

                #AvgAffineTransform = bt.get_node("AvgAffineTransform")
                #AvgAffineTransform.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                #                                  'qsub_args': modify_qsub_args(cluster['queue'], 2, 1, 1)}

                wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
                wimtPassivedeformed.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                                                    'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 4)}

        # Running off previous baseline experiment
        NACCommonAtlas = MakeAtlasNode(experiment['atlascache'], 'NACCommonAtlas_{0}'.format('subject'),
                ['S_BRAINSABCSupport'] ) ## HACK : replace 'subject' with subject id once this is a loop rather than an iterable.
        template.connect([(myInitAvgWF, buildTemplateIteration1, [('output_average_image', 'inputspec.fixed_image')]),
                          (MergeByExtendListElementsNode, buildTemplateIteration1, [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                                                                                    ('registrationImageTypes', 'inputspec.registrationImageTypes'),
                                                                                    ('interpolationMapping','inputspec.interpolationMapping')]),
                          (buildTemplateIteration1, buildTemplateIteration2, [('outputspec.template', 'inputspec.fixed_image')]),
                          (MergeByExtendListElementsNode, buildTemplateIteration2, [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
                                                                                    ('registrationImageTypes','inputspec.registrationImageTypes'),
                                                                                    ('interpolationMapping', 'inputspec.interpolationMapping')]),
                          (subjectNode, CreateAtlasXMLAndCleanedDeformedAveragesNode, [(('subject', xml_filename), 'outDefinition')]),
                          (NACCommonAtlas, CreateAtlasXMLAndCleanedDeformedAveragesNode, [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
                          (buildTemplateIteration2, CreateAtlasXMLAndCleanedDeformedAveragesNode, [('outputspec.template', 't1_image'),
                                                                               ('outputspec.passive_deformed_templates', 'deformed_list')]),
                          ])


        ## Generate an average landmarks (lmks) file from the per-session files.
        myAverageLmk = pe.Node(interface = GenerateAverageLmkFile(), name="myAverageLmk" )
        myAverageLmk.inputs.outputLandmarkFile = "AVG_LMKS.fcsv"
        template.connect(baselineRequiredDG,'BCD_ACPC_Landmarks_fcsv',myAverageLmk,'inputLandmarkFiles')

        # Create DataSinks
        SubjectAtlas_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
        SubjectAtlas_DataSink.overwrite = subj_pipeline_options['ds_overwrite']
        SubjectAtlas_DataSink.inputs.base_directory = experiment['resultdir']

        template.connect([(subjectNode, SubjectAtlas_DataSink, [('subject', 'container')]),
                          (CreateAtlasXMLAndCleanedDeformedAveragesNode, SubjectAtlas_DataSink, [('outAtlasFullPath', 'Atlas.@definitions')]),
                          (CreateAtlasXMLAndCleanedDeformedAveragesNode, SubjectAtlas_DataSink, [('clean_deformed_list', 'Atlas.@passive_deformed_templates')]),

                          (subjectNode, SubjectAtlas_DataSink, [(('subject', outputPattern), 'regexp_substitutions')]),
                          (buildTemplateIteration2, SubjectAtlas_DataSink, [('outputspec.template', 'Atlas.@template')]),
                          (myAverageLmk,SubjectAtlas_DataSink,[('outputLandmarkFile','Atlas.@outputLandmarkFile')]),
                         ])

        # Either render the workflow graph to a dot file (no execution) or
        # actually run the pipeline.
        dotfilename = argv['--dotfilename']
        if dotfilename is not None:
            print("WARNING: Printing workflow, but not running pipeline")
            print_workflow(template, plugin=subj_pipeline_options['plugin_name'], dotfilename=dotfilename)
        else:
            run_workflow(template, plugin=subj_pipeline_options['plugin_name'], plugin_args=subj_pipeline_options['plugin_args'])
Exemplo n.º 10
0
def _template_runner(argv, environment, experiment, pipeline_options, cluster):
    print("Getting subjects from database...")
    # subjects = argv["--subjects"].split(',')
    subjects, subjects_sessions_dictionary = get_subjects_sessions_dictionary(
        argv['SUBJECTS'], experiment['cachedir'], experiment['resultdir'],
        environment['prefix'], experiment['dbfile'], argv['--use-sentinal'],
        argv['--use-shuffle'])  # Build database before parallel section
    useSentinal = argv['--use-sentinal']

    # Quick preliminary sanity check
    for thisSubject in subjects:
        if len(subjects_sessions_dictionary[thisSubject]) == 0:
            print(
                "ERROR: subject {0} has no sessions found.  Did you supply a valid subject id on the command line?"
                .format(thisSubject))
            sys.exit(-1)

    for thisSubject in subjects:
        print("Processing atlas generation for this subject: {0}".format(
            thisSubject))
        print("=" * 80)
        print(
            "Copying Atlas directory and determining appropriate Nipype options..."
        )
        subj_pipeline_options = nipype_options(
            argv, pipeline_options, cluster, experiment,
            environment)  # Generate Nipype options
        print("Dispatching jobs to the system...")
        ######
        ###### Now start workflow construction
        ######
        # Set universal pipeline options
        nipype_config.update_config(subj_pipeline_options)

        ready_for_template_building = True
        for thisSession in subjects_sessions_dictionary[thisSubject]:
            path_test = os.path.join(
                experiment['previousresult'],
                '*/{0}/{1}/TissueClassify/t1_average_BRAINSABC.nii.gz'.format(
                    thisSubject, thisSession))
            t1_file_result = glob.glob(path_test)
            if len(t1_file_result) != 1:
                print(
                    "Incorrect number of t1 images found for data grabber {0}".
                    format(t1_file_result))
                print("     at path {0}".format(path_test))
                ready_for_template_building = False
        if not ready_for_template_building:
            print("TEMPORARY SKIPPING:  Not ready to process {0}".format(
                thisSubject))
            continue

        base_output_directory = os.path.join(
            subj_pipeline_options['logging']['log_directory'], thisSubject)
        template = pe.Workflow(name='SubjectAtlas_Template_' + thisSubject)
        template.base_dir = base_output_directory

        subjectNode = pe.Node(interface=IdentityInterface(fields=['subject']),
                              run_without_submitting=True,
                              name='99_subjectIterator')
        subjectNode.inputs.subject = thisSubject

        sessionsExtractorNode = pe.Node(Function(
            function=getSessionsFromSubjectDictionary,
            input_names=['subject_session_dictionary', 'subject'],
            output_names=['sessions']),
                                        run_without_submitting=True,
                                        name="99_sessionsExtractor")
        sessionsExtractorNode.inputs.subject_session_dictionary = subjects_sessions_dictionary

        baselineOptionalDG = pe.MapNode(nio.DataGrabber(
            infields=['subject', 'session'],
            outfields=['t2_average', 'pd_average', 'fl_average'],
            run_without_submitting=True),
                                        run_without_submitting=True,
                                        iterfield=['session'],
                                        name='BaselineOptional_DG')

        baselineOptionalDG.inputs.base_directory = experiment['previousresult']
        baselineOptionalDG.inputs.sort_filelist = True
        baselineOptionalDG.inputs.raise_on_empty = False
        baselineOptionalDG.inputs.template = '*'

        baselineOptionalDG.inputs.field_template = {
            't2_average': '*/%s/%s/TissueClassify/t2_average_BRAINSABC.nii.gz',
            'pd_average': '*/%s/%s/TissueClassify/pd_average_BRAINSABC.nii.gz',
            'fl_average': '*/%s/%s/TissueClassify/fl_average_BRAINSABC.nii.gz'
        }
        baselineOptionalDG.inputs.template_args = {
            't2_average': [['subject', 'session']],
            'pd_average': [['subject', 'session']],
            'fl_average': [['subject', 'session']]
        }

        baselineRequiredDG = pe.MapNode(nio.DataGrabber(
            infields=['subject', 'session'],
            outfields=[
                't1_average', 'brainMaskLabels', 'posteriorImages',
                'passive_intensities', 'passive_masks',
                'BCD_ACPC_Landmarks_fcsv'
            ],
            run_without_submitting=True),
                                        run_without_submitting=True,
                                        iterfield=['session'],
                                        name='Baseline_DG')

        baselineRequiredDG.inputs.base_directory = experiment['previousresult']
        baselineRequiredDG.inputs.sort_filelist = True
        baselineRequiredDG.inputs.raise_on_empty = True
        baselineRequiredDG.inputs.template = '*'
        posterior_files = [
            'AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
            'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB',
            'WM'
        ]
        passive_intensities_files = [
            'rho.nii.gz', 'phi.nii.gz', 'theta.nii.gz',
            'l_thalamus_ProbabilityMap.nii.gz',
            'r_accumben_ProbabilityMap.nii.gz',
            'l_globus_ProbabilityMap.nii.gz',
            'l_accumben_ProbabilityMap.nii.gz',
            'l_caudate_ProbabilityMap.nii.gz',
            'l_putamen_ProbabilityMap.nii.gz',
            'r_thalamus_ProbabilityMap.nii.gz',
            'r_putamen_ProbabilityMap.nii.gz',
            'r_caudate_ProbabilityMap.nii.gz',
            'r_hippocampus_ProbabilityMap.nii.gz',
            'r_globus_ProbabilityMap.nii.gz',
            'l_hippocampus_ProbabilityMap.nii.gz'
        ]
        # Label/mask volumes expected under each session's WarpedAtlas2Subject
        # directory (see the 'passive_masks' field template below).
        passive_mask_files = [
            'template_WMPM2_labels.nii.gz', 'hncma_atlas.nii.gz',
            'template_nac_labels.nii.gz', 'template_leftHemisphere.nii.gz',
            'template_rightHemisphere.nii.gz', 'template_ventricles.nii.gz',
            'template_headregion.nii.gz'
        ]

        # Glob-style path templates for the DataGrabber: the %s slots are
        # filled from template_args below (subject, session, and — for the
        # multi-file fields — each filename from the lists above).
        baselineRequiredDG.inputs.field_template = {
            't1_average': '*/%s/%s/TissueClassify/t1_average_BRAINSABC.nii.gz',
            'brainMaskLabels':
            '*/%s/%s/TissueClassify/complete_brainlabels_seg.nii.gz',
            'BCD_ACPC_Landmarks_fcsv':
            '*/%s/%s/ACPCAlign/BCD_ACPC_Landmarks.fcsv',
            'posteriorImages': '*/%s/%s/TissueClassify/POSTERIOR_%s.nii.gz',
            'passive_intensities': '*/%s/%s/WarpedAtlas2Subject/%s',
            'passive_masks': '*/%s/%s/WarpedAtlas2Subject/%s',
        }
        # Argument lists substituted into the templates above; the three-item
        # entries expand the final %s over every filename in the given list.
        baselineRequiredDG.inputs.template_args = {
            't1_average': [['subject', 'session']],
            'brainMaskLabels': [['subject', 'session']],
            'BCD_ACPC_Landmarks_fcsv': [['subject', 'session']],
            'posteriorImages': [['subject', 'session', posterior_files]],
            'passive_intensities':
            [['subject', 'session', passive_intensities_files]],
            'passive_masks': [['subject', 'session', passive_mask_files]]
        }

        # Combines the per-session scans, labels, and posterior/passive image
        # dictionaries into the per-image dictionaries consumed by the
        # template-building iterations below.
        MergeByExtendListElementsNode = pe.Node(
            Function(function=MergeByExtendListElements,
                     input_names=[
                         't1s', 't2s', 'pds', 'fls', 'labels', 'posteriors',
                         'passive_intensities', 'passive_masks'
                     ],
                     output_names=[
                         'ListOfImagesDictionaries', 'registrationImageTypes',
                         'interpolationMapping'
                     ]),
            run_without_submitting=True,
            name="99_MergeByExtendListElements")

        # Wire subject/session identifiers into both DataGrabbers, then feed
        # the grabbed files into the merge node.  The ConvertSessionsList...
        # function reshapes each list-of-lists into a per-session dictionary
        # before it reaches the merge inputs.
        template.connect([
            (subjectNode, baselineRequiredDG, [('subject', 'subject')]),
            (subjectNode, baselineOptionalDG, [('subject', 'subject')]),
            (subjectNode, sessionsExtractorNode, [('subject', 'subject')]),
            (sessionsExtractorNode, baselineRequiredDG, [('sessions',
                                                          'session')]),
            (sessionsExtractorNode, baselineOptionalDG, [('sessions',
                                                          'session')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [('t1_average', 't1s'), ('brainMaskLabels', 'labels'),
              (('posteriorImages',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists),
               'posteriors')]),
            (baselineOptionalDG, MergeByExtendListElementsNode,
             [('t2_average', 't2s'), ('pd_average', 'pds'),
              ('fl_average', 'fls')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [(('passive_intensities',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists),
               'passive_intensities')]),
            (baselineRequiredDG, MergeByExtendListElementsNode,
             [(('passive_masks',
                ConvertSessionsListOfPosteriorListToDictionaryOfSessionLists),
               'passive_masks')])
        ])

        # Initial template estimate: a simple normalized voxel-wise average of
        # all session T1s, used as the fixed image for iteration 1.
        myInitAvgWF = pe.Node(
            interface=ants.AverageImages(),
            name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
        myInitAvgWF.inputs.dimension = 3
        myInitAvgWF.inputs.normalize = True
        myInitAvgWF.inputs.num_threads = -1
        template.connect(baselineRequiredDG, 't1_average', myInitAvgWF,
                         "images")
        ####################################################################################################
        # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
        # if numSessions == 1:
        #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
        ####################################################################################################
        CLUSTER_QUEUE = cluster['queue']
        CLUSTER_QUEUE_LONG = cluster['long_q']
        # Two sequential ANTs template-refinement passes; iteration 2 takes
        # iteration 1's output template as its fixed image (connected below).
        buildTemplateIteration1 = BAWantsRegistrationTemplateBuildSingleIterationWF(
            'iteration01', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)
        # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
        buildTemplateIteration2 = BAWantsRegistrationTemplateBuildSingleIterationWF(
            'Iteration02', CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)

        # Writes the subject-specific atlas definition XML and cleans up the
        # deformed average images produced by the final iteration.
        CreateAtlasXMLAndCleanedDeformedAveragesNode = pe.Node(
            interface=Function(
                function=CreateAtlasXMLAndCleanedDeformedAverages,
                input_names=[
                    't1_image', 'deformed_list', 'AtlasTemplate',
                    'outDefinition'
                ],
                output_names=['outAtlasFullPath', 'clean_deformed_list']),
            # This is a lot of work, so submit it run_without_submitting=True,
            run_without_submitting=
            True,  # HACK:  THIS NODE REALLY SHOULD RUN ON THE CLUSTER!
            name='99_CreateAtlasXMLAndCleanedDeformedAverages')

        if subj_pipeline_options['plugin_name'].startswith(
                'SGE'
        ):  # for some nodes, the qsub call needs to be modified on the cluster

            CreateAtlasXMLAndCleanedDeformedAveragesNode.plugin_args = {
                'template': subj_pipeline_options['plugin_args']['template'],
                'qsub_args': modify_qsub_args(cluster['queue'], 1, 1, 1),
                'overwrite': True
            }
            # Override qsub resource requests for the heavyweight nodes inside
            # each template-build iteration.  modify_qsub_args argument order
            # is presumably (queue, mem, min_cpus, max_cpus) — TODO confirm
            # against utilities.distributed.modify_qsub_args.
            for bt in [buildTemplateIteration1, buildTemplateIteration2]:
                BeginANTS = bt.get_node("BeginANTS")
                BeginANTS.plugin_args = {
                    'template':
                    subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 7, 4, 16)
                }
                wimtdeformed = bt.get_node("wimtdeformed")
                wimtdeformed.plugin_args = {
                    'template':
                    subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 2)
                }

                #AvgAffineTransform = bt.get_node("AvgAffineTransform")
                #AvgAffineTransform.plugin_args = {'template': subj_pipeline_options['plugin_args']['template'], 'overwrite': True,
                #                                  'qsub_args': modify_qsub_args(cluster['queue'], 2, 1, 1)}

                wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
                wimtPassivedeformed.plugin_args = {
                    'template':
                    subj_pipeline_options['plugin_args']['template'],
                    'overwrite': True,
                    'qsub_args': modify_qsub_args(cluster['queue'], 2, 2, 4)
                }

        # Running off previous baseline experiment
        NACCommonAtlas = MakeAtlasNode(
            experiment['atlascache'], 'NACCommonAtlas_{0}'.format('subject'),
            ['S_BRAINSABCSupport']
        )  ## HACK : replace 'subject' with subject id once this is a loop rather than an iterable.
        # Chain the two template iterations together and route the final
        # template plus its passive deformed images into the XML/cleanup node.
        template.connect([
            (myInitAvgWF, buildTemplateIteration1,
             [('output_average_image', 'inputspec.fixed_image')]),
            (MergeByExtendListElementsNode, buildTemplateIteration1,
             [('ListOfImagesDictionaries',
               'inputspec.ListOfImagesDictionaries'),
              ('registrationImageTypes', 'inputspec.registrationImageTypes'),
              ('interpolationMapping', 'inputspec.interpolationMapping')]),
            (buildTemplateIteration1, buildTemplateIteration2,
             [('outputspec.template', 'inputspec.fixed_image')]),
            (MergeByExtendListElementsNode, buildTemplateIteration2,
             [('ListOfImagesDictionaries',
               'inputspec.ListOfImagesDictionaries'),
              ('registrationImageTypes', 'inputspec.registrationImageTypes'),
              ('interpolationMapping', 'inputspec.interpolationMapping')]),
            (subjectNode, CreateAtlasXMLAndCleanedDeformedAveragesNode,
             [(('subject', xml_filename), 'outDefinition')]),
            (NACCommonAtlas, CreateAtlasXMLAndCleanedDeformedAveragesNode,
             [('ExtendedAtlasDefinition_xml_in', 'AtlasTemplate')]),
            (buildTemplateIteration2,
             CreateAtlasXMLAndCleanedDeformedAveragesNode, [
                 ('outputspec.template', 't1_image'),
                 ('outputspec.passive_deformed_templates', 'deformed_list')
             ]),
        ])

        ## Generate an average landmarks (.fcsv) file from every session's
        ## BCD ACPC landmark file.
        myAverageLmk = pe.Node(interface=GenerateAverageLmkFile(),
                               name="myAverageLmk")
        myAverageLmk.inputs.outputLandmarkFile = "AVG_LMKS.fcsv"
        template.connect(baselineRequiredDG, 'BCD_ACPC_Landmarks_fcsv',
                         myAverageLmk, 'inputLandmarkFiles')

        # Create DataSinks
        SubjectAtlas_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
        SubjectAtlas_DataSink.overwrite = subj_pipeline_options['ds_overwrite']
        SubjectAtlas_DataSink.inputs.base_directory = experiment['resultdir']

        # Sink the final outputs under <resultdir>/<subject>/Atlas/...,
        # applying the regexp substitutions produced by outputPattern(subject).
        template.connect([
            (subjectNode, SubjectAtlas_DataSink, [('subject', 'container')]),
            (CreateAtlasXMLAndCleanedDeformedAveragesNode,
             SubjectAtlas_DataSink, [('outAtlasFullPath', 'Atlas.@definitions')
                                     ]),
            (CreateAtlasXMLAndCleanedDeformedAveragesNode,
             SubjectAtlas_DataSink, [('clean_deformed_list',
                                      'Atlas.@passive_deformed_templates')]),
            (subjectNode, SubjectAtlas_DataSink, [(('subject', outputPattern),
                                                   'regexp_substitutions')]),
            (buildTemplateIteration2, SubjectAtlas_DataSink,
             [('outputspec.template', 'Atlas.@template')]),
            (myAverageLmk, SubjectAtlas_DataSink,
             [('outputLandmarkFile', 'Atlas.@outputLandmarkFile')]),
        ])

        # If --dotfilename was given, only render the workflow graph;
        # otherwise execute the workflow with the configured plugin.
        dotfilename = argv['--dotfilename']
        if dotfilename is not None:
            print("WARNING: Printing workflow, but not running pipeline")
            print_workflow(template,
                           plugin=subj_pipeline_options['plugin_name'],
                           dotfilename=dotfilename)
        else:
            run_workflow(template,
                         plugin=subj_pipeline_options['plugin_name'],
                         plugin_args=subj_pipeline_options['plugin_args'])