Example #1
def _create_singleSession(dataDict, master_config, interpMode, pipeline_name):
    """
    create singleSession workflow on a single session

    This is the main function to call when processing a data set with T1 & T2
    data.  ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images
    are the lists of images to be used in the auto-workup. atlas_fname_wpath is
    the path and filename of the atlas to use.
    """
    assert 'tissue_classify' in master_config['components'] or \
           'auxlmk' in master_config['components'] or \
           'denoise' in master_config['components'] or \
           'landmark' in master_config['components'] or \
           'segmentation' in master_config['components'] or \
           'malf_2012_neuro' in master_config['components']

    import os

    from nipype import config, logging

    config.update_config(master_config)  # Set universal pipeline options
    logging.update_logging(config)

    from workflows.baseline import generate_single_session_template_WF

    project = dataDict['project']
    subject = dataDict['subject']
    session = dataDict['session']

    blackListFileName = dataDict['T1s'][0] + '_noDenoise'
    isBlackList = os.path.isfile(blackListFileName)

    pname = "{0}_{1}_{2}".format(master_config['workflow_phase'], subject,
                                 session)
    onlyT1 = len(dataDict['T2s']) == 0
    sessionWorkflow = generate_single_session_template_WF(
        project,
        subject,
        session,
        onlyT1,
        master_config,
        phase=master_config['workflow_phase'],
        interpMode=interpMode,
        pipeline_name=pipeline_name,
        doDenoise=(not isBlackList))
    sessionWorkflow.base_dir = master_config['cachedir']

    sessionWorkflow_inputsspec = sessionWorkflow.get_node('inputspec')
    sessionWorkflow_inputsspec.inputs.T1s = dataDict['T1s']
    sessionWorkflow_inputsspec.inputs.T2s = dataDict['T2s']
    sessionWorkflow_inputsspec.inputs.PDs = dataDict['PDs']
    sessionWorkflow_inputsspec.inputs.FLs = dataDict['FLs']
    sessionWorkflow_inputsspec.inputs.OTHERs = dataDict['OTs']
    return sessionWorkflow
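
A minimal usage sketch for this variant, under stated assumptions: the dictionary keys mirror exactly what the function reads above, but every path and value below is an illustrative placeholder, and a real master_config would also carry the nipype configuration sections (e.g. 'execution', 'logging') that config.update_config consumes.

# Hypothetical invocation -- requires the BAW 'workflows' package on the
# Python path; all paths and values below are placeholders.
example_dataDict = {
    'project': 'PHD_024',          # illustrative identifiers
    'subject': '0131',
    'session': '12345',
    'T1s': ['/data/0131/12345/T1_01.nii.gz'],
    'T2s': ['/data/0131/12345/T2_01.nii.gz'],  # empty list => T1-only path
    'PDs': [],
    'FLs': [],
    'OTs': [],
}
example_master_config = {
    'components': ['tissue_classify', 'landmark'],
    'workflow_phase': 'atlas-based-reference',  # assumed phase label
    'cachedir': '/scratch/baw_cache',
    # ... plus the nipype config sections the real pipeline supplies
}

wf = _create_singleSession(example_dataDict, example_master_config,
                           interpMode='Linear', pipeline_name='baseline')
wf.run()  # execute the assembled nipype workflow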
Example #2
def _create_singleSession(dataDict, master_config, interpMode, pipeline_name):
    """
    create singleSession workflow on a single session

    This is the main function to call when processing a data set with T1 & T2
    data.  ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images
    are the lists of images to be used in the auto-workup. atlas_fname_wpath is
    the path and filename of the atlas to use.
    """
    assert 'tissue_classify' in master_config['components'] or \
           'auxlmk' in master_config['components'] or \
           'denoise' in master_config['components'] or \
           'landmark' in master_config['components'] or \
           'segmentation' in master_config['components'] or \
           'malf_2012_neuro' in master_config['components']

    import os

    from nipype import config, logging

    config.update_config(master_config)  # Set universal pipeline options
    logging.update_logging(config)

    from workflows.baseline import generate_single_session_template_WF

    project = dataDict['project']
    subject = dataDict['subject']
    session = dataDict['session']

    blackListFileName = dataDict['T1s'][0] + '_noDenoise'
    isBlackList = os.path.isfile(blackListFileName)

    pname = "{0}_{1}_{2}".format(master_config['workflow_phase'], subject, session)
    onlyT1 = len(dataDict['T2s']) == 0
    if onlyT1:
        print("T1 Only processing starts ...")
    else:
        print("Multimodal processing starts ...")
    sessionWorkflow = generate_single_session_template_WF(project, subject, session, onlyT1, master_config,
                                                          phase=master_config['workflow_phase'],
                                                          interpMode=interpMode,
                                                          pipeline_name=pipeline_name,
                                                          doDenoise=(not isBlackList))
    sessionWorkflow.base_dir = master_config['cachedir']

    sessionWorkflow_inputsspec = sessionWorkflow.get_node('inputspec')
    sessionWorkflow_inputsspec.inputs.T1s = dataDict['T1s']
    sessionWorkflow_inputsspec.inputs.T2s = dataDict['T2s']
    sessionWorkflow_inputsspec.inputs.PDs = dataDict['PDs']
    sessionWorkflow_inputsspec.inputs.FLs = dataDict['FLs']
    sessionWorkflow_inputsspec.inputs.OTHERs = dataDict['OTs']
    return sessionWorkflow
Example #3
def _create_singleSession(dataDict, master_config, interpMode, pipeline_name):
    """
    create singleSession workflow on a single session

    This is the main function to call when processing a data set with T1 & T2
    data.  ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images
    are the lists of images to be used in the auto-workup. atlas_fname_wpath is
    the path and filename of the atlas to use.
    """
    assert 'tissue_classify' in master_config['components'] or \
           'auxlmk' in master_config['components'] or \
           'denoise' in master_config['components'] or \
           'landmark' in master_config['components'] or \
           'segmentation' in master_config['components'] or \
           'jointfusion_2015_wholebrain' in master_config['components']

    import os

    from nipype import config, logging

    config.update_config(master_config)  # Set universal pipeline options
    logging.update_logging(config)

    from workflows.baseline import generate_single_session_template_WF

    project = dataDict['project']
    subject = dataDict['subject']
    session = dataDict['session']

    blackListFileName = dataDict['T1s'][0] + '_noDenoise'
    isBlackList = os.path.isfile(blackListFileName)

    pname = "{0}_{1}_{2}".format(master_config['workflow_phase'], subject,
                                 session)
    onlyT1 = len(dataDict['T2s']) == 0
    if onlyT1:
        print("T1 Only processing starts ...")
    else:
        print("Multimodal processing starts ...")

    doDenoise = False
    if 'denoise' in master_config['components']:
        if isBlackList:
            print("""
                  Denoising is skipped because this session is blacklisted:
                  there is a known issue where the landmark detection
                  algorithm may not work well after the denoising step.
                  """)
            doDenoise = False
        else:
            doDenoise = True
    useEMSP = len(dataDict['EMSP']) > 0
    sessionWorkflow = generate_single_session_template_WF(
        project,
        subject,
        session,
        onlyT1,
        master_config,
        phase=master_config['workflow_phase'],
        interpMode=interpMode,
        pipeline_name=pipeline_name,
        doDenoise=doDenoise,
        badT2=dataDict['BadT2'],
        useEMSP=useEMSP)
    sessionWorkflow.base_dir = master_config['cachedir']

    sessionWorkflow_inputsspec = sessionWorkflow.get_node('inputspec')
    sessionWorkflow_inputsspec.inputs.T1s = dataDict['T1s']
    sessionWorkflow_inputsspec.inputs.T2s = dataDict['T2s']
    sessionWorkflow_inputsspec.inputs.PDs = dataDict['PDs']
    sessionWorkflow_inputsspec.inputs.FLs = dataDict['FLs']
    if useEMSP:
        sessionWorkflow_inputsspec.inputs.EMSP = dataDict['EMSP'][0]
    sessionWorkflow_inputsspec.inputs.OTHERs = dataDict['OTHERs']
    return sessionWorkflow
Example #4
def _create_singleSession(dataDict, master_config, interpMode, pipeline_name):
    """
    create singleSession workflow on a single session

    This is the main function to call when processing a data set with T1 & T2
    data.  ExperimentBaseDirectoryPrefix is the base of the directory to place results, T1Images & T2Images
    are the lists of images to be used in the auto-workup. atlas_fname_wpath is
    the path and filename of the atlas to use.
    """
    assert 'tissue_classify' in master_config['components'] or \
           'auxlmk' in master_config['components'] or \
           'denoise' in master_config['components'] or \
           'landmark' in master_config['components'] or \
           'segmentation' in master_config['components'] or \
           'jointfusion_2015_wholebrain' in master_config['components']

    import os

    from nipype import config, logging

    config.update_config(master_config)  # Set universal pipeline options
    logging.update_logging(config)

    from workflows.baseline import generate_single_session_template_WF

    project = dataDict['project']
    subject = dataDict['subject']
    session = dataDict['session']

    blackListFileName = dataDict['T1s'][0] + '_noDenoise'
    isBlackList = os.path.isfile(blackListFileName)

    pname = "{0}_{1}_{2}".format(master_config['workflow_phase'], subject, session)
    onlyT1 = len(dataDict['T2s']) == 0
    if onlyT1:
        print("T1 Only processing starts ...")
    else:
        print("Multimodal processing starts ...")

    doDenoise = False
    if 'denoise' in master_config['components']:
        if isBlackList:
            print("""
                  Denoising is skipped because this session is blacklisted:
                  there is a known issue where the landmark detection
                  algorithm may not work well after the denoising step.
                  """)
            doDenoise = False
        else:
            doDenoise = True
    useEMSP = len(dataDict['EMSP']) > 0
    sessionWorkflow = generate_single_session_template_WF(project, subject, session, onlyT1, master_config,
                                                          phase=master_config['workflow_phase'],
                                                          interpMode=interpMode,
                                                          pipeline_name=pipeline_name,
                                                          doDenoise=doDenoise,
                                                          badT2=dataDict['BadT2'],
                                                          useEMSP=useEMSP)
    sessionWorkflow.base_dir = master_config['cachedir']

    sessionWorkflow_inputsspec = sessionWorkflow.get_node('inputspec')
    sessionWorkflow_inputsspec.inputs.T1s = dataDict['T1s']
    sessionWorkflow_inputsspec.inputs.T2s = dataDict['T2s']
    sessionWorkflow_inputsspec.inputs.PDs = dataDict['PDs']
    sessionWorkflow_inputsspec.inputs.FLs = dataDict['FLs']
    if useEMSP:
        sessionWorkflow_inputsspec.inputs.EMSP = dataDict['EMSP'][0]
    sessionWorkflow_inputsspec.inputs.OTHERs = dataDict['OTHERs']
    return sessionWorkflow
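
Relative to Examples #1 and #2, these later variants read three additional or renamed keys from the session dictionary. A hedged sketch of the extra fields, continuing the example_dataDict from the sketch after Example #1; the EMSP path is a placeholder (the actual file format depends on the pipeline):

# Extra keys consumed by the EMSP-aware variant (Examples #3 and #4);
# all values are illustrative placeholders.
example_dataDict['EMSP'] = ['/data/0131/12345/EMSP_landmarks.fcsv']  # non-empty list enables useEMSP
example_dataDict['BadT2'] = False                          # forwarded verbatim as badT2=...
example_dataDict['OTHERs'] = example_dataDict.pop('OTs')   # key renamed from 'OTs'

Independently of the configuration, denoising is also vetoed per session whenever a file named <first T1 path>_noDenoise exists next to the data, which is what the isBlackList check in each variant tests.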