Example No. 1
def test_vmhc_ants():

    test_name = 'test_vmhc_ants'

    # get the config and strat for the mock
    pipeline_config, strat = configuration_strategy_mock(method='ANTS')
    num_strat = 0

    workflow = pe.Workflow(name=test_name)
    workflow.base_dir = pipeline_config.workingDirectory
    workflow.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(pipeline_config.crashLogDirectory)
    }

    nodes = strat.get_nodes_names()

    print('nodes {0}'.format(nodes))

    workflow, strat = create_vmhc(workflow,
                                  num_strat,
                                  strat,
                                  pipeline_config,
                                  output_name='vmhc_{0}'.format(num_strat))

    workflow.run()
Example No. 2
def test_registration():
    from ..registration import create_nonlinear_register
    from ..registration import create_register_func_to_mni

    from CPAC.pipeline import nipype_pipeline_engine as pe
    import nipype.interfaces.fsl as fsl

    func_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz'
    anat_skull_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_reorient/mprage_anonymized_RPI.nii.gz'
    anat_bet_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz'
    mni_brain_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz'
    mni_skull_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm.nii.gz'

    mni_workflow = pe.Workflow(name='mni_workflow')

    nr = create_nonlinear_register()
    nr.inputs.inputspec.input_brain = anat_bet_file
    nr.inputs.inputspec.input_skull = anat_skull_file
    nr.inputs.inputspec.reference_brain = mni_brain_file
    nr.inputs.inputspec.reference_skull = mni_skull_file
    nr.inputs.inputspec.fnirt_config = '/usr/share/fsl/4.1/etc/flirtsch/T1_2_MNI152_3mm.cnf'
    func2mni = create_register_func_to_mni()
    func2mni.inputs.inputspec.func = func_file
    func2mni.inputs.inputspec.mni = mni_brain_file
    func2mni.inputs.inputspec.anat = anat_bet_file

    mni_workflow.connect(nr, 'outputspec.nonlinear_xfm', func2mni,
                         'inputspec.anat_to_mni_xfm')
    mni_workflow.base_dir = './mni_05676_3'
    mni_workflow.run()
Example No. 3
def create_grp_analysis_dataflow(wf_name='gp_dataflow'):
    from CPAC.pipeline import nipype_pipeline_engine as pe
    import nipype.interfaces.utility as util
    from CPAC.utils.datasource import select_model_files

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['ftest', 'grp_model', 'model_name'], mandatory_inputs=True),
                        name='inputspec')

    selectmodel = pe.Node(function.Function(
        input_names=['model', 'ftest', 'model_name'],
        output_names=['fts_file', 'con_file', 'grp_file', 'mat_file'],
        function=select_model_files,
        as_module=True),
                          name='selectnode')

    wf.connect(inputnode, 'ftest', selectmodel, 'ftest')
    wf.connect(inputnode, 'grp_model', selectmodel, 'model')
    wf.connect(inputnode, 'model_name', selectmodel, 'model_name')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['fts', 'grp', 'mat', 'con'], mandatory_inputs=True),
                         name='outputspec')

    wf.connect(selectmodel, 'mat_file', outputnode, 'mat')
    wf.connect(selectmodel, 'grp_file', outputnode, 'grp')
    wf.connect(selectmodel, 'fts_file', outputnode, 'fts')
    wf.connect(selectmodel, 'con_file', outputnode, 'con')

    return wf
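
A minimal usage sketch for the dataflow above; the base directory, model path, and model name are hypothetical placeholders, not real C-PAC outputs:

gp_flow = create_grp_analysis_dataflow('gp_dataflow')
gp_flow.base_dir = '/tmp/gp_dataflow_work'  # hypothetical scratch dir
gp_flow.inputs.inputspec.ftest = True  # also pick up the .fts file
gp_flow.inputs.inputspec.grp_model = '/path/to/group_models/my_model'  # hypothetical
gp_flow.inputs.inputspec.model_name = 'my_model'  # hypothetical
gp_flow.run()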
Example No. 4
def test_fsl_apply_transform_func_to_mni_linear_mapnode():

    c, strat = configuration_strategy_mock(method='FSL')

    strat.append_name('anat_mni_flirt_register_0')

    # build the workflow
    workflow = pe.Workflow(
        name='test_fsl_apply_transform_func_to_mni_linear_mapnode')
    workflow.base_dir = c.workingDirectory
    workflow.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(c.crashLogDirectory)
    }

    workflow = fsl_apply_transform_func_to_mni(
        workflow,
        'dr_tempreg_to_standard',
        'dr_tempreg_maps_files',
        'template_brain_for_func_preproc',
        0,
        strat,
        c.funcRegFSLinterpolation,
        map_node=True)

    workflow.run()
Example No. 5
def create_connectome(name='connectome'):

    wf = pe.Workflow(name=name)

    inputspec = pe.Node(
        util.IdentityInterface(fields=[
            'time_series',
            'method'
        ]),
        name='inputspec'
    )

    outputspec = pe.Node(
        util.IdentityInterface(fields=[
            'connectome',
        ]),
        name='outputspec'
    )

    node = pe.Node(Function(input_names=['time_series', 'method'],
                            output_names=['connectome'],
                            function=compute_correlation,
                            as_module=True),
                   name='connectome')

    wf.connect([
        (inputspec, node, [('time_series', 'time_series')]),
        (inputspec, node, [('method', 'method')]),
        (node, outputspec, [('connectome', 'connectome')]),
    ])

    return wf
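
A minimal usage sketch for create_connectome; the time-series path and the method label are hypothetical, since the values accepted by compute_correlation are not shown here:

conn_wf = create_connectome('connectome')
conn_wf.base_dir = '/tmp/connectome_work'  # hypothetical
conn_wf.inputs.inputspec.time_series = '/path/to/roi_timeseries.csv'  # hypothetical
conn_wf.inputs.inputspec.method = 'Pearson'  # assumed method label
conn_wf.run()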
    
Example No. 6
def create_anat_datasource(wf_name='anat_datasource'):
    from CPAC.pipeline import nipype_pipeline_engine as pe
    import nipype.interfaces.utility as util

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(
        fields=['subject', 'anat', 'creds_path', 'dl_dir', 'img_type'],
        mandatory_inputs=True),
                        name='inputnode')

    check_s3_node = pe.Node(function.Function(
        input_names=['file_path', 'creds_path', 'dl_dir', 'img_type'],
        output_names=['local_path'],
        function=check_for_s3,
        as_module=True),
                            name='check_for_s3')

    wf.connect(inputnode, 'anat', check_s3_node, 'file_path')
    wf.connect(inputnode, 'creds_path', check_s3_node, 'creds_path')
    wf.connect(inputnode, 'dl_dir', check_s3_node, 'dl_dir')
    wf.connect(inputnode, 'img_type', check_s3_node, 'img_type')

    outputnode = pe.Node(util.IdentityInterface(fields=['subject', 'anat']),
                         name='outputspec')

    wf.connect(inputnode, 'subject', outputnode, 'subject')
    wf.connect(check_s3_node, 'local_path', outputnode, 'anat')

    # Return the workflow
    return wf
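
A minimal usage sketch for create_anat_datasource; check_for_s3 resolves either a local path or an S3 URI, and every value below is a hypothetical placeholder:

anat_ds = create_anat_datasource('anat_gather')
anat_ds.base_dir = '/tmp/anat_ds_work'  # hypothetical
anat_ds.inputs.inputnode.subject = 'sub-0001'  # hypothetical
anat_ds.inputs.inputnode.anat = 's3://my-bucket/sub-0001/anat/T1w.nii.gz'  # hypothetical
anat_ds.inputs.inputnode.creds_path = None  # assumes a public bucket
anat_ds.inputs.inputnode.dl_dir = '/tmp/s3_downloads'  # hypothetical
anat_ds.inputs.inputnode.img_type = 'anat'
anat_ds.run()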
Example No. 7
def prep_cwas_workflow(c, subject_infos):
    print('Preparing CWAS workflow')
    p_id, s_ids, scan_ids, s_paths = (list(tup) for tup in zip(*subject_infos))
    print('Subjects', s_ids)

    wf = pe.Workflow(name='cwas_workflow')
    wf.base_dir = c.pipeline_setup['working_directory']['path']

    from CPAC.cwas import create_cwas
    import numpy as np
    regressor = np.loadtxt(c.cwasRegressorFile)

    cw = create_cwas()
    cw.inputs.inputspec.roi = c.cwasROIFile
    cw.inputs.inputspec.subjects = s_paths
    cw.inputs.inputspec.regressor = regressor
    cw.inputs.inputspec.cols = c.cwasRegressorCols
    cw.inputs.inputspec.f_samples = c.cwasFSamples
    cw.inputs.inputspec.strata = c.cwasRegressorStrata  # will stay None?
    cw.inputs.inputspec.parallel_nodes = c.cwasParallelNodes

    ds = pe.Node(nio.DataSink(), name='cwas_sink')
    out_dir = os.path.dirname(s_paths[0]).replace(s_ids[0], 'cwas_results')
    ds.inputs.base_directory = out_dir
    ds.inputs.container = ''

    wf.connect(cw, 'outputspec.F_map', ds, 'F_map')
    wf.connect(cw, 'outputspec.p_map', ds, 'p_map')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': c.numCoresPerSubject})
Example No. 8
File: qc.py Project: gkiar/C-PAC
def create_qc_skullstrip(wf_name='qc_skullstrip'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(
        fields=['anatomical_brain', 'anatomical_reorient']),
                         name='inputspec')

    output_node = pe.Node(
        util.IdentityInterface(fields=['axial_image', 'sagittal_image']),
        name='outputspec')

    skull_edge = pe.Node(Function(input_names=['in_file'],
                                  output_names=['out_file'],
                                  function=afni_Edge3,
                                  as_module=True),
                         name='skull_edge')

    montage_skull = create_montage('montage_skull',
                                   'red',
                                   'skull_vis',
                                   mapnode=False)

    wf.connect(input_node, 'anatomical_reorient', skull_edge, 'in_file')
    wf.connect(input_node, 'anatomical_brain', montage_skull,
               'inputspec.underlay')
    wf.connect(skull_edge, 'out_file', montage_skull, 'inputspec.overlay')

    wf.connect(montage_skull, 'outputspec.axial_png', output_node,
               'axial_image')
    wf.connect(montage_skull, 'outputspec.sagittal_png', output_node,
               'sagittal_image')

    return wf
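
A minimal usage sketch for create_qc_skullstrip; both input paths are hypothetical (the skull edge overlay is computed from the reoriented head, the underlay is the extracted brain):

qc_ss = create_qc_skullstrip('qc_skullstrip')
qc_ss.base_dir = '/tmp/qc_work'  # hypothetical
qc_ss.inputs.inputspec.anatomical_brain = '/path/to/anat_brain.nii.gz'  # hypothetical
qc_ss.inputs.inputspec.anatomical_reorient = '/path/to/anat_RPI.nii.gz'  # hypothetical
qc_ss.run()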
Example No. 9
def fisher_z_score_standardize(wf_name, label,
                               input_image_type='func_derivative', opt=None):

    wf = pe.Workflow(name=wf_name)

    map_node = False
    if input_image_type == 'func_derivative_multi':
        map_node = True

    inputnode = pe.Node(util.IdentityInterface(fields=['correlation_file',
                                                       'timeseries_oned']),
                        name='inputspec')

    fisher_z_score_std = get_fisher_zscore(label, map_node,
                                           'fisher_z_score_std')
    wf.connect(inputnode, 'correlation_file',
               fisher_z_score_std, 'inputspec.correlation_file')

    wf.connect(inputnode, 'timeseries_oned',
               fisher_z_score_std, 'inputspec.timeseries_one_d')

    outputnode = pe.Node(util.IdentityInterface(fields=['out_file']),
                         name='outputspec')

    wf.connect(fisher_z_score_std, 'outputspec.fisher_z_score_img',
               outputnode, 'out_file')

    return wf
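
A minimal usage sketch for fisher_z_score_standardize; the label and both input paths are hypothetical:

fz_wf = fisher_z_score_standardize('fisher_z_std', 'sca_roi')  # hypothetical label
fz_wf.base_dir = '/tmp/fisher_z_work'  # hypothetical
fz_wf.inputs.inputspec.correlation_file = '/path/to/sca_roi_corr.nii.gz'  # hypothetical
fz_wf.inputs.inputspec.timeseries_oned = '/path/to/roi_timeseries.1D'  # hypothetical
fz_wf.run()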
Example No. 10
def test_nonlinear_register():
    from ..registration import create_nonlinear_register

    from CPAC.pipeline import nipype_pipeline_engine as pe
    import nipype.interfaces.fsl as fsl

    ## necessary inputs
    ## -input_brain
    ## -input_skull
    ## -reference_brain
    ## -reference_skull
    ## -fnirt_config
    ## -fnirt_warp_res

    ## input_brain
    anat_bet_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/anatpreproc/_session_id_NYU_TRT_session1_subject_id_sub05676/anat_skullstrip/mprage_anonymized_RPI_3dT.nii.gz'

    ## input_skull

    ## reference_brain
    mni_file = '/usr/share/fsl/4.1/data/standard/MNI152_T1_3mm_brain.nii.gz'

    ## reference_skull

    ## fnirt_config
    fnirt_config = 'T1_2_MNI152_3mm'

    ## fnirt_warp_res
    fnirt_warp_res = None

    #?? what is this for?:
    func_file = '/home/data/Projects/nuisance_reliability_paper/working_dir_CPAC_order/resting_preproc/nuisance_preproc/_session_id_NYU_TRT_session1_subject_id_sub05676/_csf_threshold_0.4/_gm_threshold_0.2/_wm_threshold_0.66/_run_scrubbing_False/_nc_5/_selector_6.7/regress_nuisance/mapflow/_regress_nuisance0/residual.nii.gz'

    mni_workflow = pe.Workflow(name='mni_workflow')

    linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0')
    linear_reg.inputs.cost = 'corratio'
    linear_reg.inputs.dof = 6
    linear_reg.inputs.interp = 'nearestneighbour'

    linear_reg.inputs.in_file = func_file
    linear_reg.inputs.reference = anat_bet_file

    #T1 to MNI Node
    c = create_nonlinear_register()
    c.inputs.inputspec.input_brain = anat_bet_file
    c.inputs.inputspec.reference_brain = mni_file
    c.inputs.inputspec.fnirt_config = fnirt_config

    #EPI to MNI warp Node
    mni_warp = pe.Node(interface=fsl.ApplyWarp(), name='mni_warp')
    mni_warp.inputs.ref_file = mni_file
    mni_warp.inputs.in_file = func_file

    mni_workflow.connect(c, 'outputspec.nonlinear_xfm', mni_warp, 'field_file')
    mni_workflow.connect(linear_reg, 'out_matrix_file', mni_warp, 'premat')

    mni_workflow.base_dir = './'
    mni_workflow.run()
Example No. 11
def test_ants_apply_warp_func_mni_symm():

    test_name = 'test_ants_apply_warps_func_mni_symm'

    # get the config and strat for the mock
    c, strat = configuration_strategy_mock()
    num_strat = 0

    node, out = strat['mean_functional']
    mean_functional = node.inputs.file

    # build the workflow
    workflow = pe.Workflow(name=test_name)
    workflow.base_dir = c.workingDirectory
    workflow.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(c.crashLogDirectory)
    }

    workflow = ants_apply_warps_func_mni(
        workflow,
        'mean_functional_to_standard_symm',
        'mean_functional',
        'template_brain_for_func_preproc',
        num_strat,
        strat,
        interpolation_method=c.funcRegANTSinterpolation,
        distcor=False,
        map_node=False,
        inverse=False,
        symmetry='symmetric',
        input_image_type=0,
        num_ants_cores=8)

    workflow = ants_apply_warps_func_mni(
        workflow,
        'mean_functional_standard_to_original_symm',
        'mean_functional_to_standard_symm',
        'mean_functional',
        num_strat,
        strat,
        interpolation_method=c.funcRegANTSinterpolation,
        distcor=False,
        map_node=False,
        inverse=True,
        symmetry='symmetric',
        input_image_type=0,
        num_ants_cores=1)

    workflow.run()

    mean_functional_after_transform = os.path.join(
        c.workingDirectory, test_name,
        'apply_ants_warp_mean_functional_standard_to_original_symm_inverse_0',
        'sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_antswarp_antswarp.nii.gz'
    )

    assert (test_utils.pearson_correlation(
        mean_functional, mean_functional_after_transform) > .93)
Example No. 12
def test_qc():

    outputs = Outputs()

    c = Configuration({
        "workingDirectory": "",
        "crashLogDirectory": "",
        "outputDirectory":""
    })

    workflow = pe.Workflow(name='workflow_name')
    workflow.base_dir = c.workingDirectory
    workflow.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(c.crashLogDirectory)
    }

    strat_initial = Strategy()
    strat_list = [strat_initial]

    output_df_dct = gather_outputs(
        c.outputDirectory,
        [
            "functional_brain_mask",
            "functional_to_anat_linear_xfm",
            "anatomical_brain",
            "anatomical_reorient",
            "mean_functional_in_anat",
            "motion_params",
            "frame_wise_displacement_power",
            "frame_wise_displacement_jenkinson",
        ],
        None,
        get_motion=False,
        get_raw_score=False,
        get_func=True,
        derivatives=[
            "functional_brain_mask",
            "functional_to_anat_linear_xfm",
            "anatomical_brain",
            "anatomical_reorient",
            "mean_functional_in_anat",
            "motion_params",
            "frame_wise_displacement_power",
            "frame_wise_displacement_jenkinson",
        ],
        exts=['nii', 'nii.gz', '1D', 'mat', 'txt']
    )

    for (resource, _), df in output_df_dct.items():
        strat_initial.update_resource_pool({
            resource: file_node(df.Filepath[0])
        })

    qc_montage_id_a, qc_montage_id_s, qc_hist_id, qc_plot_id = \
        create_qc_workflow(workflow, c, strat_list, outputs.qc)
Example No. 13
def get_normalized_moments(wf_name='normalized_moments'):
    """
    Workflow to calculate the normalized moments for skewness calculations

    Parameters
    ----------
    wf_name : string
        name of the workflow

    Returns
    -------
    wflow : workflow object
        workflow object

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/timeseries/timeseries_analysis.py>`_

    Workflow Inputs::

        inputspec.spatial_timeseries : string (nifti file)
            spatial map timeseries

    Workflow Outputs::

        outputspec.moments: list
            list of moment values

    Example
    -------
    >>> import CPAC.timeseries.timeseries_analysis as t
    >>> wf = t.get_normalized_moments()
    >>> wf.inputs.inputspec.spatial_timeseries = '/home/data/outputs/SurfaceRegistration/lh_surface_file.nii.gz'
    >>> wf.base_dir = './'
    >>> wf.run()
    """

    wflow = pe.Workflow(name=wf_name)

    inputNode = pe.Node(util.IdentityInterface(fields=['spatial_timeseries']),
                        name='inputspec')

    # calculate normalized moments
    # output of this node is a list, 'moments'
    norm_moments = pe.Node(util.CalculateNormalizedMoments(moment='3'),
                           name='norm_moments')

    outputNode = pe.Node(util.IdentityInterface(fields=['moments_outputs']),
                         name='outputspec')

    wflow.connect(inputNode, 'spatial_timeseries', norm_moments,
                  'timeseries_file')
    wflow.connect(norm_moments, 'moments', outputNode, 'moments_outputs')

    return wflow
Example No. 14
def run_warp_nipype(inputs, output_dir=None, run=True):
    import EPI_DistCorr
    warp_workflow = pe.Workflow(name='preproc')
    if output_dir is None:
        output_dir = '/home/nrajamani'

    workflow_dir = os.path.join(output_dir,
                                "workflow_output_with_aroma_with_change")
    warp_workflow.base_dir = workflow_dir
    # taken from QAP files
    #resource_pool = {}

    num_of_cores = 1
    #resource_pool({'epireg': (warp_nipype2.warp_nipype, 'outputspec.epireg')})
    t_node = EPI_DistCorr.create_EPI_DistCorr()
    t_node.inputs.inputspec.anat_file = '/Users/nanditharajamani/Downloads/ExBox19/T1.nii.gz'
    t_node.inputs.inputspec.func_file = '/Users/nanditharajamani/Downloads/ExBox19/func.nii.gz'
    t_node.inputs.inputspec.fmap_pha = '/Users/nanditharajamani/Downloads/ExBox19/fmap_phase.nii.gz'
    t_node.inputs.inputspec.fmap_mag = '/Users/nanditharajamani/Downloads/ExBox19/fmap_mag.nii.gz'
    t_node.inputs.inputspec.bbr_schedule = '/usr/local/fsl/etc/flirtsch/bbr.sch'
    t_node.inputs.inputspec.deltaTE = 2.46
    t_node.inputs.inputspec.dwellT = 0.0005
    t_node.inputs.inputspec.dwell_asym_ratio = 0.93902439
    t_node.inputs.inputspec.bet_frac = 0.5
    #'home/nrajamani/FieldMap_SubjectExampleData/SubjectData/epi_run2/fMT0160-0015-00003-000003-01_BRAIN.nii.gz',
    #for image in inputs:
    #    if not (image.endswith('.nii') or image.endswith('.nii.gz')):
    #        raise ValueError('The input image is not the right format')
    #try:
    #    for image in inputs:
    #        size = image.get_shape()
    #        assert len(size) == 3
    #except AssertionError:
    #    if len(size) < 3:
    #        raise ValueError('input image is not 3D')
    #intensity = ImageStats(in_file=t_node.inputs.inputspec.fmap_pha,
    #                       op_string='-p 90')
    #if intensity < 3686:
    #    raise ValueError('input phase image does not have the correct '
    #                     'range values')
    dataSink = pe.Node(nio.DataSink(), name='dataSink_file')
    dataSink.inputs.base_directory = workflow_dir
    #node, out_file = resource_pool["epireg"]
    #warp_workflow.connect(t_node, 'outputspec.roi_file', dataSink, 'roi_file')
    warp_workflow.connect(t_node, 'outputspec.fieldmap',
                          dataSink, 'fieldmap_file')
    warp_workflow.connect(t_node, 'outputspec.fmapmagbrain',
                          dataSink, 'fmapmagbrain')
    warp_workflow.connect(t_node, 'outputspec.fieldmapmask',
                          dataSink, 'fieldmapmask')
    warp_workflow.connect(t_node, 'outputspec.fmap_despiked',
                          dataSink, 'fmap_despiked')
    warp_workflow.connect(t_node, 'outputspec.struct',
                          dataSink, 'epi2struct')
    warp_workflow.connect(t_node, 'outputspec.anat_func',
                          dataSink, 'anat_func')
    if run:
        warp_workflow.run(plugin='MultiProc',
                          plugin_args={'n_procs': num_of_cores})
        #outpath = glob.glob(os.path.join(workflow_dir, "EPI_DistCorr", "*"))[0]
        #return outpath
    else:
        return warp_workflow, warp_workflow.base_dir
Example No. 15
def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None):
    """Set up a basic template Nipype workflow for testing single nodes or
    small sub-workflows.
    """

    import os
    import shutil
    from CPAC.pipeline import nipype_pipeline_engine as pe
    from CPAC.utils.datasource import check_for_s3
    from CPAC.utils.interfaces.datasink import DataSink

    test_dir = os.path.join(os.getcwd(), test_name)
    work_dir = os.path.join(test_dir, "workdir")
    out_dir = os.path.join(test_dir, "output")

    if os.path.exists(out_dir):
        try:
            shutil.rmtree(out_dir)
        except OSError:
            pass

    if os.path.exists(work_dir):
        for dirname in os.listdir(work_dir):
            # keep any working sub-directory that matches a keep-pattern
            if workdirs_to_keep and any(keepdir in dirname
                                        for keepdir in workdirs_to_keep):
                print("keeping {0}\n".format(dirname))
                continue
            try:
                shutil.rmtree(os.path.join(work_dir, dirname))
            except OSError:
                pass

    local_paths = {}
    for subpath in paths_list:
        s3_path = os.path.join(s3_prefix, subpath)
        local_path = check_for_s3(s3_path, dl_dir=test_dir)
        local_paths[subpath] = local_path

    wf = pe.Workflow(name=test_name)
    wf.base_dir = work_dir
    wf.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(test_dir)
    }

    ds = pe.Node(DataSink(), name='sinker_{0}'.format(test_name))
    ds.inputs.base_directory = out_dir
    ds.inputs.parameterization = True

    return (wf, ds, local_paths)
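
A minimal usage sketch for setup_test_wf: fetch one input, attach a stand-in for the node under test, and wire it into the returned DataSink (the S3 prefix and subpath are hypothetical):

import nipype.interfaces.utility as util
from CPAC.pipeline import nipype_pipeline_engine as pe

wf, ds, local_paths = setup_test_wf(
    's3://my-bucket/test_data',        # hypothetical prefix
    ['sub-0001/anat/T1w.nii.gz'],      # hypothetical subpath
    'test_my_node')

# stand-in node that just forwards the downloaded file
node = pe.Node(util.IdentityInterface(fields=['in_file']),
               name='node_under_test')
node.inputs.in_file = local_paths['sub-0001/anat/T1w.nii.gz']

wf.connect(node, 'in_file', ds, 'node_under_test')
wf.run()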
Example No. 16
def test_smooth():

    test_name = 'test_smooth_nodes'

    c, strat = configuration_strategy_mock(method='FSL')
    num_strat = 0

    # build the workflow
    workflow = pe.Workflow(name=test_name)
    workflow.base_dir = c.workingDirectory
    workflow.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(c.crashLogDirectory)
    }

    spatial_smooth(workflow, 'mean_functional', 'functional_brain_mask',
                   'mean_functional_smooth', strat, num_strat, c)

    func_node, func_output = strat['mean_functional']
    mask_node, mask_output = strat['functional_brain_mask']

    spatial_smooth(workflow, (func_node, func_output),
                   (mask_node, mask_output),
                   'mean_functional_smooth_nodes', strat, num_strat, c)

    print(workflow.list_node_names())
    workflow.run()

    correlations = []

    for fwhm in c.fwhm:

        out_name1 = os.path.join(
            c.workingDirectory, test_name,
            '_fwhm_{0}/mean_functional_smooth_0/'.format(fwhm),
            'sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_maths.nii.gz'
        )

        out_name2 = os.path.join(
            c.workingDirectory, test_name,
            '_fwhm_{0}/mean_functional_smooth_nodes_0/'.format(fwhm),
            'sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_maths.nii.gz'
        )

        correlations.append(
            test_utils.pearson_correlation(out_name1, out_name2) > 0.99)

    assert all(correlations)
Example No. 17
def initialize_nipype_wf(cfg, sub_data_dct, name=""):

    if name:
        name = f'_{name}'

    workflow_name = f'cpac{name}_{sub_data_dct["subject_id"]}_{sub_data_dct["unique_id"]}'
    wf = pe.Workflow(name=workflow_name)
    wf.base_dir = cfg.pipeline_setup['working_directory']['path']
    wf.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(cfg.pipeline_setup['log_directory'][
                                         'path'])
    }

    return wf
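
A minimal usage sketch for initialize_nipype_wf; cfg is assumed to be an already-loaded C-PAC Configuration whose pipeline_setup defines working_directory/path and log_directory/path, and the subject dict is a hypothetical stand-in for what C-PAC builds from its data config:

# cfg: an already-loaded C-PAC Configuration (assumed)
sub_data_dct = {'subject_id': 'sub-0001', 'unique_id': 'ses-1'}  # hypothetical
wf = initialize_nipype_wf(cfg, sub_data_dct, name='anat_preproc')
print(wf.name)  # cpac_anat_preproc_sub-0001_ses-1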
Example No. 18
def run_randomize(inputs, output_dir=None, run=True):
    from . import pipeline
    randomise_workflow = pe.Workflow(name='preproc')
    if output_dir is None:
        output_dir = ''

    workflow_dir = os.path.join(output_dir, "randomise_results")
    randomise_workflow.base_dir = workflow_dir

    #resource_pool = {}

    num_of_cores = 1

    t_node = pipeline.create_randomise()
    t_node.inputs.inputspec.subjects = ''
    t_node.inputs.inputspec.design_matrix_file = ''
    t_node.inputs.inputspec.constrast_file = ''
    #t_node.inputs.inputspec.f_constrast_file= ''
    t_node.inputs.inputspec.permutations = 5000
    #t_node.inputs.inputspec.mask = ''#

    dataSink = pe.Node(nio.DataSink(), name='dataSink_file')
    dataSink.inputs.base_directory = workflow_dir
    randomise_workflow.connect(t_node, 'outputspec.index_file', dataSink,
                               'index_file')
    #randomise_workflow.connect(t_node,'outputspec.thresh_out',dataSink,'threshold_file')
    randomise_workflow.connect(t_node, 'outputspec.localmax_txt_file',
                               dataSink, 'localmax_txt_file')
    randomise_workflow.connect(t_node, 'outputspec.localmax_vol_file',
                               dataSink, 'localmax_vol_file')
    randomise_workflow.connect(t_node, 'outputspec.max_file', dataSink,
                               'max_file')
    randomise_workflow.connect(t_node, 'outputspec.mean_file', dataSink,
                               'mean_file')
    randomise_workflow.connect(t_node, 'outputspec.pval_file', dataSink,
                               'pval_file')
    randomise_workflow.connect(t_node, 'outputspec.size_file', dataSink,
                               'size_file')
    randomise_workflow.connect(t_node, 'outputspec.tstat_files', dataSink,
                               'tstat_files')
    randomise_workflow.connect(t_node, 'outputspec.t_corrected_p_files',
                               dataSink, 't_corrected_p_files')
    if run:
        randomise_workflow.run(plugin='MultiProc',
                               plugin_args={'n_procs': num_of_cores})
    else:
        return randomise_workflow, randomise_workflow.base_dir
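
A usage sketch for run_randomize: with run=False it only builds the workflow, so the empty inputspec fields can be filled in before executing (the output directory is hypothetical):

randomise_wf, randomise_dir = run_randomize([], output_dir='/tmp/randomise',
                                            run=False)
# fill in the subjects/design/contrast inputs on the sub-workflow,
# then execute:
# randomise_wf.run(plugin='MultiProc', plugin_args={'n_procs': 1})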
Example No. 19
def prep_randomise_workflow(c, subject_infos):
    print('Preparing Randomise workflow')
    p_id, s_ids, scan_ids, s_paths = (list(tup) for tup in zip(*subject_infos))
    print('Subjects', s_ids)

    wf = pe.Workflow(name='randomise_workflow')
    wf.base_dir = c.pipeline_setup['working_directory']['path']

    from CPAC.randomise import create_randomise

    rw = create_randomise()

    rw.inputs.inputspec.permutations = c.randopermutations
    rw.inputs.inputspec.subjects = s_paths
    #rw.inputs.inputspec.pipeline_ouput_folder = c.os.path.join(c.outputDirectory,
    #                                         'pipeline_{0}'.format(c.pipelineName))
    rw.inputs.inputspec.mask_boolean = c.mask_boolean  #TODO pipe from output dir, not the user input
    rw.inputs.inputspec.tfce = c.tfce  # will stay None?
    rw.inputs.inputspec.demean = c.demean
    rw.inputs.inputspec.c_thresh = c.c_thresh

    ds = pe.Node(nio.DataSink(), name='randomise_sink')
    out_dir = os.path.dirname(s_paths[0]).replace(s_ids[0],
                                                  'randomise_results')
    ds.inputs.base_directory = out_dir
    ds.inputs.container = ''
    #'tstat_files' ,'t_corrected_p_files','index_file','threshold_file','localmax_txt_file','localmax_vol_file','max_file','mean_file','pval_file','size_file'
    wf.connect(rw, 'outputspec.tstat_files', ds, 'tstat_files')
    wf.connect(rw, 'outputspec.t_corrected_p_files', ds, 't_corrected_p_files')
    wf.connect(rw, 'outputspec.index_file', ds, 'index_file')
    wf.connect(rw, 'outputspec.threshold_file', ds, 'threshold_file')
    wf.connect(rw, 'outputspec.localmax_vol_file', ds, 'localmax_vol_file')
    wf.connect(rw, 'outputspec.localmax_txt_file', ds, 'localmax_txt_file')
    wf.connect(rw, 'outputspec.max_file', ds, 'max_file')
    wf.connect(rw, 'outputspec.mean_file', ds, 'mean_file')
    wf.connect(rw, 'outputspec.pval_file', ds, 'pval_file')
    wf.connect(rw, 'outputspec.size_file', ds, 'size_file')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': c.numCoresPerSubject})

    return wf
Example No. 20
def test_output_func_to_standard_FSL_nonlinear():

    test_name = 'test_output_func_to_standard_FSL_nonlinear'

    # get the config and strat for the mock
    c, strat = configuration_strategy_mock(method='FSL')
    strat.append_name('anat_mni_fnirt_register_0')
    num_strat = 0

    # build the workflow
    workflow = pe.Workflow(name='test_output_func_to_standard_FSL_nonlinear')
    workflow.base_dir = c.workingDirectory
    workflow.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(c.crashLogDirectory)
    }

    output_func_to_standard(workflow,
            'mean_functional',
            'template_brain_for_func_preproc',
            'mean_functional_to_standard',
            strat, num_strat, c, input_image_type='func_derivative')

    out1_name = os.path.join(c.workingDirectory, test_name, 
            'func_mni_fsl_warp_mean_functional_to_standard_0',
            'sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_warp.nii.gz')

    node, out_file = strat['mean_functional']
    output_func_to_standard(workflow,
            (node, out_file),
            'template_brain_for_func_preproc',
            'mean_functional_to_standard_node',
            strat, num_strat, c, input_image_type='func_derivative')

    out2_name = os.path.join(c.workingDirectory, test_name, 
            'func_mni_fsl_warp_mean_functional_to_standard_node_0',
            'sub-M10978008_ses-NFB3_task-test_bold_calc_tshift_resample_volreg_calc_tstat_warp.nii.gz')

    workflow.run()

    assert(test_utils.pearson_correlation(out1_name, out2_name) > .99)
Example No. 21
def prep_basc_workflow(c, subject_infos):
    print('Preparing BASC workflow')
    p_id, s_ids, scan_ids, s_paths = (list(tup) for tup in zip(*subject_infos))
    print('Subjects', s_ids)

    wf = pe.Workflow(name='basc_workflow')
    wf.base_dir = c.pipeline_setup['working_directory']['path']

    from CPAC.basc import create_basc

    b = create_basc()
    b.inputs.inputspec.roi = c.bascROIFile
    b.inputs.inputspec.subjects = s_paths
    b.inputs.inputspec.k_clusters = c.bascClusters
    b.inputs.inputspec.dataset_bootstraps = c.bascDatasetBootstraps
    b.inputs.inputspec.timeseries_bootstraps = c.bascTimeseriesBootstraps

    with open(c.bascAffinityThresholdFile, 'r') as aff_file:
        aff_list = [float(aff.rstrip('\r\n')) for aff in aff_file]

    b.inputs.inputspec.affinity_threshold = aff_list

    ds = pe.Node(nio.DataSink(), name='basc_sink')
    out_dir = os.path.dirname(s_paths[0]).replace(s_ids[0], 'basc_results')
    ds.inputs.base_directory = out_dir
    ds.inputs.container = ''

    #    wf.connect(b, 'outputspec.gsm',
    #               ds, 'gsm')
    #    wf.connect(b, 'outputspec.gsclusters',
    #               ds, 'gsclusters')
    #    wf.connect(b, 'outputspec.gsmap',
    #               ds, 'gsmap')
    wf.connect(b, 'outputspec.gsclusters_img', ds, 'gsclusters_img')
    wf.connect(b, 'outputspec.ismap_imgs', ds, 'ismap_imgs')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': c.numCoresPerSubject})
Example No. 22
File: qc.py Project: gkiar/C-PAC
def create_qc_fd(wf_name='qc_fd'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(
        util.IdentityInterface(fields=['fd', 'excluded_volumes']),
        name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=['fd_histogram_plot']),
                          name='outputspec')

    fd_plot = pe.Node(Function(input_names=['arr', 'measure', 'ex_vol'],
                               output_names=['hist_path'],
                               function=gen_plot_png,
                               as_module=True),
                      name='fd_plot')

    fd_plot.inputs.measure = 'FD'

    wf.connect(input_node, 'fd', fd_plot, 'arr')
    wf.connect(input_node, 'excluded_volumes', fd_plot, 'ex_vol')
    wf.connect(fd_plot, 'hist_path', output_node, 'fd_histogram_plot')

    return wf
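
A minimal usage sketch for create_qc_fd; both input paths are hypothetical (fd is a 1D framewise-displacement series, and excluded_volumes presumably lists the censored volumes):

qc_fd = create_qc_fd('qc_fd')
qc_fd.base_dir = '/tmp/qc_work'  # hypothetical
qc_fd.inputs.inputspec.fd = '/path/to/FD.1D'  # hypothetical
qc_fd.inputs.inputspec.excluded_volumes = '/path/to/excluded_volumes.1D'  # hypothetical
qc_fd.run()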
Example No. 23
File: qc.py Project: gkiar/C-PAC
def create_qc_motion(wf_name='qc_motion'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=['motion_parameters']),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(
        fields=['motion_translation_plot', 'motion_rotation_plot']),
                          name='outputspec')

    mov_plot = pe.Node(Function(
        input_names=['motion_parameters'],
        output_names=['translation_plot', 'rotation_plot'],
        function=gen_motion_plt,
        as_module=True),
                       name='motion_plot')

    wf.connect(input_node, 'motion_parameters', mov_plot, 'motion_parameters')
    wf.connect(mov_plot, 'translation_plot', output_node,
               'motion_translation_plot')
    wf.connect(mov_plot, 'rotation_plot', output_node, 'motion_rotation_plot')

    return wf
Example No. 24
def z_score_standardize(wf_name, input_image_type='func_derivative',
                        opt=None):

    wf = pe.Workflow(name=wf_name)

    map_node = False
    if input_image_type == 'func_derivative_multi':
        map_node = True

    inputnode = pe.Node(util.IdentityInterface(fields=['in_file',
                                                       'mask']),
                        name='inputspec')

    z_score_std = get_zscore(map_node, 'z_score_std')

    wf.connect(inputnode, 'in_file', z_score_std, 'inputspec.input_file')
    wf.connect(inputnode, 'mask', z_score_std, 'inputspec.mask_file')

    outputnode = pe.Node(util.IdentityInterface(fields=['out_file']),
                         name='outputspec')

    wf.connect(z_score_std, 'outputspec.z_score_img', outputnode, 'out_file')

    return wf
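
A minimal usage sketch for z_score_standardize; the derivative and mask paths are hypothetical:

z_wf = z_score_standardize('z_std')
z_wf.base_dir = '/tmp/z_std_work'  # hypothetical
z_wf.inputs.inputspec.in_file = '/path/to/reho.nii.gz'  # hypothetical derivative
z_wf.inputs.inputspec.mask = '/path/to/functional_brain_mask.nii.gz'  # hypothetical
z_wf.run()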
Example No. 25
def create_fsl_flame_wf(ftest=False, wf_name='groupAnalysis'):
    """
    FSL `FEAT <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FEAT>`_-based
    group analysis.

    Parameters
    ----------
    ftest : boolean, optional (default=False)
        An F-test helps investigate several contrasts at the same time,
        for example to see whether any of them (or any combination of them)
        is significantly non-zero. The F-test also allows you to compare the
        contribution of each contrast to the model and decide on significant
        and non-significant ones.
 
    wf_name : string 
        Workflow name
    
    Returns 
    -------
    grp_analysis : workflow object
        Group Analysis workflow object
    
    Notes
    -----
    `Source <https://github.com/openconnectome/C-PAC/blob/master/CPAC/group_analysis/group_analysis_preproc.py>`_
 
    Workflow Inputs::
        
        inputspec.mat_file : string (existing file)
           Mat file containing  matrix for design 
        
        inputspec.con_file : string (existing file)
           Contrast file containing contrast vectors 
        
        inputspec.grp_file : string (existing file)
           file containing matrix specifying the groups the covariance is split into
        
        inputspec.zmap_files : string (existing nifti file)
           derivative or the zmap file for which the group analysis is to be run
        
        inputspec.z_threshold : float
            Z Statistic threshold value for cluster thresholding. It is used to 
            determine what level of activation would be statistically significant. 
            Increasing this will result in higher estimates of required effect.
        
        inputspec.p_threshold : float
            Probability threshold for cluster thresholding.
            
        inputspec.fts_file : string (existing file)
           file containing matrix specifying f-contrasts
           
        inputspec.parameters : tuple
            tuple containing the FSLDIR path and the MNI template name
                      
    Workflow Outputs::
    
        outputspec.merged : string (nifti file)
            4D volume file after merging all the derivative 
            files from each specified subject.
            
        outputspec.zstats : list (nifti files)
            Z statistic image for each t contrast
            
        outputspec.zfstats : list (nifti files)
            Z statistic image for each f contrast
        
        outputspec.fstats : list (nifti files)
            F statistic for each contrast  
        
        outputspec.cluster_threshold : list (nifti files)
           the thresholded Z statistic image for each t contrast
        
        outputspec.cluster_index : list (nifti files)
            image of clusters for each t contrast; the values 
            in the clusters are the index numbers as used 
            in the cluster list.
        
        outputspec.cluster_localmax_txt : list (text files)
            local maxima text file for each t contrast, 
            defines the coordinates of maximum value in the cluster
        
        outputspec.overlay_threshold : list (nifti files)
            3D color rendered stats overlay image for t contrast
            After reloading this image, use the Statistics Color 
            Rendering GUI to reload the color look-up-table
        
        outputspec.overlay_rendered_image : list (nifti files)
           2D color rendered stats overlay picture for each t contrast
            
        outputspec.cluster_threshold_zf : list (nifti files)
           the thresholded Z statistic image for each f contrast
        
        outputspec.cluster_index_zf : list (nifti files)
            image of clusters for each f contrast; the values 
            in the clusters are the index numbers as used 
            in the cluster list.
            
        outputspec.cluster_localmax_txt_zf : list (text files)
            local maxima text file for each f contrast, 
            defines the coordinates of maximum value in the cluster
        
        outputspec.overlay_threshold_zf : list (nifti files)
            3D color rendered stats overlay image for f contrast
            After reloading this image, use the Statistics Color 
            Rendering GUI to reload the color look-up-table
        
        outputspec.overlay_rendered_image_zf : list (nifti files)
           2D color rendered stats overlay picture for each f contrast
    
    Order of commands:

    - Merge all the Z-map 3D images into 4D image file.  For details see `fslmerge <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Fslutils>`_::
    
        fslmerge -t sub01/sca/seed1/sca_Z_FWHM_merged.nii 
                    sub02/sca/seed1/sca_Z_FWHM.nii.gz ....  
                    merge.nii.gz
                    
        arguments 
            -t : concatenate images in time
            
    - Create mask specific for analysis. For details see `fslmaths <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Fslutils>`_::
    
        fslmaths merged.nii.gz 
                -abs -Tmin -bin mean_mask.nii.gz
        
        arguments 
             -Tmin  : min across time
             -abs   : absolute value
             -bin   : use (current image>0) to binarise
    
    - FSL FLAMEO to perform higher level analysis.  For details see `flameo <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FEAT>`_::
        
        flameo --copefile = merged.nii.gz --covsplitfile = anova_with_meanFD.grp --designfile = anova_with_meanFD.mat 
               --fcontrastsfile = anova_with_meanFD.fts --ld=stats --maskfile = mean_mask.nii.gz --runmode=ols 
               --tcontrastsfile = anova_with_meanFD.con
           
        arguments
            --copefile        : cope regressor data file
            --designfile      : design matrix file
            --maskfile        : mask file
            --tcontrastsfile  : file containing an ASCII matrix specifying the t contrasts
            --fcontrastsfile  : file containing an ASCII matrix specifying the f contrasts
            --runmode         : Inference to perform (mixed effects - OLS)
            
    - Run FSL Easy thresh 
        
      Easy thresh is a simple script for carrying out cluster-based thresholding and colour activation overlaying::
        
        easythresh <raw_zstat> <brain_mask> <z_thresh> <prob_thresh> <background_image> <output_root> [--mm]
      
      A separate workflow called easythresh is used to run the easythresh steps.
      
    .. exec::
        from CPAC.group_analysis import create_fsl_flame_wf
        wf = create_fsl_flame_wf()
        wf.write_graph(
            graph2use='orig',
            dotfilename='./images/generated/group_analysis.dot'
        )

    High Level Workflow Graph:
    
    .. image:: ../../images/generated/group_analysis.png
       :width: 800
    
    
    Detailed Workflow Graph:
    
    .. image:: ../../images/generated/group_analysis_detailed.png
       :width: 800

    Examples
    --------
    
    >>> from CPAC.group_analysis import create_fsl_flame_wf
    >>> preproc = create_fsl_flame_wf()
    >>> preproc.inputs.inputspec.mat_file = '../group_models/anova_with_meanFD/anova_with_meanFD.mat'
    >>> preproc.inputs.inputspec.con_file = '../group_models/anova_with_meanFD/anova_with_meanFD.con'
    >>> preproc.inputs.inputspec.grp_file = '../group_models/anova_with_meanFD/anova_with_meanFD.grp'
    >>> preproc.inputs.inputspec.zmap_files = ['subjects/sub01/seeds_rest_Dickstein_DLPFC/sca_Z_FWHM.nii.gz', 
                                               'subjects/sub02/seeds_rest_Dickstein_DLPFC/sca_Z_FWHM.nii.gz']
    >>> preproc.inputs.inputspec.z_threshold = 2.3
    >>> preproc.inputs.inputspec.p_threshold = 0.05
    >>> preproc.inputs.inputspec.parameters = ('/usr/local/fsl/', 'MNI152')
    >>> preproc.run()  -- SKIP doctest
            
    """
    grp_analysis = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=[
        'merged_file', 'merge_mask', 'mat_file', 'con_file', 'grp_file',
        'fts_file', 'z_threshold', 'p_threshold', 'parameters'
    ]),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'merged', 'zstats', 'zfstats', 'fstats', 'cluster_threshold',
        'cluster_index', 'cluster_localmax_txt', 'overlay_threshold',
        'rendered_image', 'cluster_localmax_txt_zf', 'cluster_threshold_zf',
        'cluster_index_zf', 'overlay_threshold_zf', 'rendered_image_zf'
    ]),
                         name='outputspec')
    '''
    merge_to_4d = pe.Node(interface=fsl.Merge(),
                          name='merge_to_4d')
    merge_to_4d.inputs.dimension = 't'

    ### create analysis specific mask
    #-Tmin: min across time
    # -abs: absolute value
    #-bin: use (current image>0) to binarise
    merge_mask = pe.Node(interface=fsl.ImageMaths(),
                         name='merge_mask')
    merge_mask.inputs.op_string = '-abs -Tmin -bin'
    '''

    fsl_flameo = pe.Node(interface=fsl.FLAMEO(), name='fsl_flameo')
    fsl_flameo.inputs.run_mode = 'ols'

    # rename the FLAME zstat outputs after the contrast string labels for
    # easier interpretation
    label_zstat_imports = ["import os"]
    label_zstat = pe.Node(util.Function(input_names=['zstat_list', 'con_file'],
                                        output_names=['new_zstat_list'],
                                        function=label_zstat_files,
                                        imports=label_zstat_imports),
                          name='label_zstat')

    rename_zstats = pe.MapNode(interface=util.Rename(),
                               name='rename_zstats',
                               iterfield=['in_file', 'format_string'])
    rename_zstats.inputs.keep_ext = True

    # create analysis specific mask
    # fslmaths merged.nii.gz -abs -bin -Tmean -mul volume out.nii.gz
    # -Tmean: mean across time
    # create group_reg file
    # this file can provide an idea of how well the subjects
    # in our analysis overlay with each other and the MNI brain.
    # e.g., maybe there is one subject with limited coverage.
    # not attached to sink currently
    merge_mean_mask = pe.Node(interface=fsl.ImageMaths(),
                              name='merge_mean_mask')

    # function node to get the operation string for fslmaths command
    get_opstring = pe.Node(util.Function(input_names=['in_file'],
                                         output_names=['out_file'],
                                         function=get_operation),
                           name='get_opstring')

    # connections
    '''
    grp_analysis.connect(inputnode, 'zmap_files',
                         merge_to_4d, 'in_files')
    grp_analysis.connect(merge_to_4d, 'merged_file',
                         merge_mask, 'in_file')
    '''
    grp_analysis.connect(inputnode, 'merged_file', fsl_flameo, 'cope_file')
    grp_analysis.connect(inputnode, 'merge_mask', fsl_flameo, 'mask_file')
    grp_analysis.connect(inputnode, 'mat_file', fsl_flameo, 'design_file')
    grp_analysis.connect(inputnode, 'con_file', fsl_flameo, 't_con_file')
    grp_analysis.connect(inputnode, 'grp_file', fsl_flameo, 'cov_split_file')

    grp_analysis.connect(fsl_flameo, 'zstats', label_zstat, 'zstat_list')
    grp_analysis.connect(inputnode, 'con_file', label_zstat, 'con_file')

    grp_analysis.connect(fsl_flameo, 'zstats', rename_zstats, 'in_file')

    grp_analysis.connect(label_zstat, 'new_zstat_list', rename_zstats,
                         'format_string')

    if ftest:
        grp_analysis.connect(inputnode, 'fts_file', fsl_flameo, 'f_con_file')

        easy_thresh_zf = easy_thresh('easy_thresh_zf')

        grp_analysis.connect(fsl_flameo, 'zfstats', easy_thresh_zf,
                             'inputspec.z_stats')
        grp_analysis.connect(inputnode, 'merge_mask', easy_thresh_zf,
                             'inputspec.merge_mask')
        grp_analysis.connect(inputnode, 'z_threshold', easy_thresh_zf,
                             'inputspec.z_threshold')
        grp_analysis.connect(inputnode, 'p_threshold', easy_thresh_zf,
                             'inputspec.p_threshold')
        grp_analysis.connect(inputnode, 'parameters', easy_thresh_zf,
                             'inputspec.parameters')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.cluster_threshold',
                             outputnode, 'cluster_threshold_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.cluster_index',
                             outputnode, 'cluster_index_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.cluster_localmax_txt',
                             outputnode, 'cluster_localmax_txt_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.overlay_threshold',
                             outputnode, 'overlay_threshold_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.rendered_image',
                             outputnode, 'rendered_image_zf')

    # calling easythresh for zstats files
    easy_thresh_z = easy_thresh('easy_thresh_z')
    grp_analysis.connect(rename_zstats, 'out_file', easy_thresh_z,
                         'inputspec.z_stats')
    grp_analysis.connect(inputnode, 'merge_mask', easy_thresh_z,
                         'inputspec.merge_mask')
    grp_analysis.connect(inputnode, 'z_threshold', easy_thresh_z,
                         'inputspec.z_threshold')
    grp_analysis.connect(inputnode, 'p_threshold', easy_thresh_z,
                         'inputspec.p_threshold')
    grp_analysis.connect(inputnode, 'parameters', easy_thresh_z,
                         'inputspec.parameters')

    grp_analysis.connect(inputnode, 'merged_file', get_opstring, 'in_file')
    grp_analysis.connect(inputnode, 'merged_file', merge_mean_mask, 'in_file')
    grp_analysis.connect(get_opstring, 'out_file', merge_mean_mask,
                         'op_string')

    grp_analysis.connect(fsl_flameo, 'zfstats', outputnode, 'zfstats')
    grp_analysis.connect(fsl_flameo, 'fstats', outputnode, 'fstats')
    grp_analysis.connect(inputnode, 'merged_file', outputnode, 'merged')

    grp_analysis.connect(rename_zstats, 'out_file', outputnode, 'zstats')

    grp_analysis.connect(easy_thresh_z, 'outputspec.cluster_threshold',
                         outputnode, 'cluster_threshold')
    grp_analysis.connect(easy_thresh_z, 'outputspec.cluster_index', outputnode,
                         'cluster_index')
    grp_analysis.connect(easy_thresh_z, 'outputspec.cluster_localmax_txt',
                         outputnode, 'cluster_localmax_txt')
    grp_analysis.connect(easy_thresh_z, 'outputspec.overlay_threshold',
                         outputnode, 'overlay_threshold')
    grp_analysis.connect(easy_thresh_z, 'outputspec.rendered_image',
                         outputnode, 'rendered_image')

    return grp_analysis
Example No. 26
def run_feat_pipeline(group_config,
                      merge_file,
                      merge_mask,
                      f_test,
                      mat_file,
                      con_file,
                      grp_file,
                      out_dir,
                      work_dir,
                      log_dir,
                      model_name,
                      fts_file=None):
    '''
    Needed:
      - z threshold, p threshold
      - output directory
      - working, crash, and log directories
    '''

    import nipype.interfaces.io as nio

    # get thresholds
    z_threshold = float(group_config.z_threshold[0])
    p_threshold = float(group_config.p_threshold[0])

    # workflow time
    wf_name = "fsl-feat_".format(model_name)
    wf = pe.Workflow(name=wf_name)

    wf.base_dir = work_dir

    wf.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': log_dir
    }

    gpa_wf = create_fsl_flame_wf(f_test, "fsl-flame")

    gpa_wf.inputs.inputspec.merged_file = merge_file
    gpa_wf.inputs.inputspec.merge_mask = merge_mask

    gpa_wf.inputs.inputspec.z_threshold = z_threshold
    gpa_wf.inputs.inputspec.p_threshold = p_threshold
    gpa_wf.inputs.inputspec.parameters = (group_config.FSLDIR, 'MNI152')

    gpa_wf.inputs.inputspec.mat_file = mat_file
    gpa_wf.inputs.inputspec.con_file = con_file
    gpa_wf.inputs.inputspec.grp_file = grp_file

    if f_test:
        gpa_wf.inputs.inputspec.fts_file = fts_file

    ds = pe.Node(nio.DataSink(), name='gpa_sink')

    ds.inputs.base_directory = str(out_dir)
    ds.inputs.container = ''

    ds.inputs.regexp_substitutions = [(r'(?<=rendered)(.)*[/]', '/'),
                                      (r'(?<=model_files)(.)*[/]', '/'),
                                      (r'(?<=merged)(.)*[/]', '/'),
                                      (r'(?<=stats/clusterMap)(.)*[/]', '/'),
                                      (r'(?<=stats/unthreshold)(.)*[/]', '/'),
                                      (r'(?<=stats/threshold)(.)*[/]', '/'),
                                      (r'_cluster(.)*[/]', ''),
                                      (r'_slicer(.)*[/]', ''),
                                      (r'_overlay(.)*[/]', '')]

    wf.connect(gpa_wf, 'outputspec.merged', ds, 'merged')
    wf.connect(gpa_wf, 'outputspec.zstats', ds, 'stats.unthreshold')
    wf.connect(gpa_wf, 'outputspec.zfstats', ds, 'stats.unthreshold.@01')
    wf.connect(gpa_wf, 'outputspec.fstats', ds, 'stats.unthreshold.@02')
    wf.connect(gpa_wf, 'outputspec.cluster_threshold_zf', ds,
               'stats.threshold')
    wf.connect(gpa_wf, 'outputspec.cluster_index_zf', ds, 'stats.clusterMap')
    wf.connect(gpa_wf, 'outputspec.cluster_localmax_txt_zf', ds,
               'stats.clusterMap.@01')
    wf.connect(gpa_wf, 'outputspec.overlay_threshold_zf', ds, 'rendered')
    wf.connect(gpa_wf, 'outputspec.rendered_image_zf', ds, 'rendered.@01')
    wf.connect(gpa_wf, 'outputspec.cluster_threshold', ds,
               'stats.threshold.@01')
    wf.connect(gpa_wf, 'outputspec.cluster_index', ds, 'stats.clusterMap.@02')
    wf.connect(gpa_wf, 'outputspec.cluster_localmax_txt', ds,
               'stats.clusterMap.@03')
    wf.connect(gpa_wf, 'outputspec.overlay_threshold', ds, 'rendered.@02')
    wf.connect(gpa_wf, 'outputspec.rendered_image', ds, 'rendered.@03')

    # Run the actual group analysis workflow
    wf.run()
Example No. 27
File: ants.py Project: gkiar/C-PAC
def init_brain_extraction_wf(tpl_target_path,
                             tpl_mask_path,
                             tpl_regmask_path,
                             name='brain_extraction_wf',
                             template_spec=None,
                             use_float=True,
                             normalization_quality='precise',
                             omp_nthreads=None,
                             mem_gb=3.0,
                             bids_suffix='T1w',
                             atropos_refine=True,
                             atropos_use_random_seed=True,
                             atropos_model=None,
                             use_laplacian=True,
                             bspline_fitting_distance=200):
    """
    A Nipype implementation of the official ANTs' ``antsBrainExtraction.sh``
    workflow (only for 3D images).
    The official workflow is built as follows (and this implementation
    follows the same organization):
      1. Performs several clerical tasks (adding padding, calculating
         the Laplacian of inputs, affine initialization) and the core
         spatial normalization.
      2. Maps the brain mask into target space using the normalization
         calculated in 1.
      3. Superstep 1b: smart binarization of the brain mask
      4. Superstep 6: apply ATROPOS and massage its outputs
      5. Superstep 7: use results from 4 to refine the brain mask
    .. workflow::
        :graph2use: orig
        :simple_form: yes
        from niworkflows.anat import init_brain_extraction_wf
        wf = init_brain_extraction_wf()
    **Parameters**
        in_template : str
            Name of the skull-stripping template ('OASIS30ANTs', 'NKI', or
            path).
            The brain template from which regions will be projected
            Anatomical template created using e.g. LPBA40 data set with
            ``buildtemplateparallel.sh`` in ANTs.
            The workflow will automatically search for a brain probability
            mask created using e.g. LPBA40 data set which have brain masks
            defined, and warped to anatomical template and
            averaged resulting in a probability image.
        use_float : bool
            Whether single precision should be used
        normalization_quality : str
            Use more precise or faster registration parameters
            (default: ``precise``, other possible values: ``testing``)
        omp_nthreads : int
            Maximum number of threads an individual process may use
        mem_gb : float
            Estimated peak memory consumption of the most hungry nodes
            in the workflow
        bids_suffix : str
            Sequence type of the first input image. For a list of acceptable values
            see https://bids-specification.readthedocs.io/en/latest/\
04-modality-specific-files/01-magnetic-resonance-imaging-data.html#anatomy-imaging-data
        atropos_refine : bool
            Enables or disables the whole ATROPOS sub-workflow
        atropos_use_random_seed : bool
            Whether ATROPOS should generate a random seed based on the
            system's clock
        atropos_model : tuple or None
            Allows specifying a particular segmentation model, overriding
            the defaults based on ``bids_suffix``
        use_laplacian : bool
            Enables or disables alignment of the Laplacian as an additional
            criterion for image registration quality (default: True)
        bspline_fitting_distance : float
            The size of the b-spline mesh grid elements, in mm (default: 200)
        name : str, optional
            Workflow name (default: antsBrainExtraction)
    **Inputs**
        in_files
            List of input anatomical images to be brain-extracted,
            typically T1-weighted.
            If a list of anatomical images is provided, images after the
            first are only used during the segmentation process, while the
            first image alone drives the registration of priors;
            we therefore suggest listing the T1w image first.
        in_mask
            (optional) Mask used for registration to limit the metric
            computation to a specific region.
    **Outputs**
        out_file
            Skull-stripped and :abbr:`INU (intensity non-uniformity)`-corrected ``in_files``
        out_mask
            Calculated brain mask
        bias_corrected
            The ``in_files`` input images, after :abbr:`INU (intensity non-uniformity)`
            correction, before skull-stripping.
        bias_image
            The :abbr:`INU (intensity non-uniformity)` field estimated for each
            input in ``in_files``
        out_segm
            Output segmentation by ATROPOS
        out_tpms
            Output :abbr:`TPMs (tissue probability maps)` by ATROPOS
    """
    # from templateflow.api import get as get_template
    wf = pe.Workflow(name)

    template_spec = template_spec or {}

    # suffix passed via spec takes precedence
    template_spec['suffix'] = template_spec.get('suffix', bids_suffix)

    # Get probabilistic brain mask if available
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_files', 'in_mask']),
                        name='inputnode')

    # Try to find a registration mask, set if available
    if tpl_regmask_path:
        inputnode.inputs.in_mask = str(tpl_regmask_path)

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'out_file', 'out_mask', 'bias_corrected', 'bias_image', 'out_segm',
        'out_tpms'
    ]),
                         name='outputnode')

    copy_xform = pe.Node(CopyXForm(
        fields=['out_file', 'out_mask', 'bias_corrected', 'bias_image']),
                         name='copy_xform',
                         run_without_submitting=True,
                         mem_gb=2.5)

    trunc = pe.MapNode(ImageMath(operation='TruncateImageIntensity',
                                 op2='0.01 0.999 256'),
                       name='truncate_images',
                       iterfield=['op1'])
    inu_n4 = pe.MapNode(N4BiasFieldCorrection(
        dimension=3,
        save_bias=False,
        copy_header=True,
        n_iterations=[50] * 4,
        convergence_threshold=1e-7,
        shrink_factor=4,
        bspline_fitting_distance=bspline_fitting_distance),
                        n_procs=omp_nthreads,
                        name='inu_n4',
                        iterfield=['input_image'])

    res_tmpl = pe.Node(ResampleImageBySpacing(out_spacing=(4, 4, 4),
                                              apply_smoothing=True),
                       name='res_tmpl')
    res_tmpl.inputs.input_image = tpl_target_path
    res_target = pe.Node(ResampleImageBySpacing(out_spacing=(4, 4, 4),
                                                apply_smoothing=True),
                         name='res_target')

    # Initialize transforms with antsAI
    init_aff = pe.Node(AI(metric=('Mattes', 32, 'Regular', 0.25),
                          transform=('Affine', 0.1),
                          search_factor=(15, 0.1),
                          principal_axes=False,
                          convergence=(10, 1e-6, 10),
                          verbose=True),
                       name='init_aff',
                       n_procs=omp_nthreads)

    # Tolerate missing ANTs at construction time
    _ants_version = Registration().version
    if _ants_version and parseversion(_ants_version) >= Version('2.3.0'):
        init_aff.inputs.search_grid = (40, (0, 40, 40))

    # Set up spatial normalization
    settings_file = 'antsBrainExtraction_%s.json' if use_laplacian \
        else 'antsBrainExtractionNoLaplacian_%s.json'
    norm = pe.Node(Registration(
        from_file=pkgr_fn('CPAC.anat_preproc', 'data/' +
                          settings_file % normalization_quality)),
                   name='norm',
                   n_procs=omp_nthreads,
                   mem_gb=mem_gb)
    norm.inputs.float = use_float
    fixed_mask_trait = 'fixed_image_mask'
    if _ants_version and parseversion(_ants_version) >= Version('2.2.0'):
        fixed_mask_trait += 's'

    map_brainmask = pe.Node(ApplyTransforms(interpolation='Gaussian',
                                            float=True),
                            name='map_brainmask',
                            mem_gb=1)
    map_brainmask.inputs.input_image = str(tpl_mask_path)

    thr_brainmask = pe.Node(ThresholdImage(dimension=3,
                                           th_low=0.5,
                                           th_high=1.0,
                                           inside_value=1,
                                           outside_value=0),
                            name='thr_brainmask')

    # Morphological dilation, radius=2
    dil_brainmask = pe.Node(ImageMath(operation='MD', op2='2'),
                            name='dil_brainmask')
    # Get largest connected component
    get_brainmask = pe.Node(ImageMath(operation='GetLargestComponent'),
                            name='get_brainmask')

    # Refine INU correction
    inu_n4_final = pe.MapNode(N4BiasFieldCorrection(
        dimension=3,
        save_bias=True,
        copy_header=True,
        n_iterations=[50] * 5,
        convergence_threshold=1e-7,
        shrink_factor=4,
        bspline_fitting_distance=bspline_fitting_distance),
                              n_procs=omp_nthreads,
                              name='inu_n4_final',
                              iterfield=['input_image'])

    # Apply mask
    apply_mask = pe.MapNode(ApplyMask(),
                            iterfield=['in_file'],
                            name='apply_mask')

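    # Wire the main pathway: truncate intensities -> N4 bias field correction
    # -> low-resolution affine initialization (antsAI) -> full spatial
    # normalization -> map and binarize the template brain mask -> final N4
    # pass and masking.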
    wf.connect([
        (inputnode, trunc, [('in_files', 'op1')]),
        (inputnode, copy_xform, [(('in_files', _pop), 'hdr_file')]),
        (inputnode, inu_n4_final, [('in_files', 'input_image')]),
        (inputnode, init_aff, [('in_mask', 'fixed_image_mask')]),
        (inputnode, norm, [('in_mask', fixed_mask_trait)]),
        (inputnode, map_brainmask, [(('in_files', _pop), 'reference_image')]),
        (trunc, inu_n4, [('output_image', 'input_image')]),
        (inu_n4, res_target, [(('output_image', _pop), 'input_image')]),
        (res_tmpl, init_aff, [('output_image', 'fixed_image')]),
        (res_target, init_aff, [('output_image', 'moving_image')]),
        (init_aff, norm, [('output_transform', 'initial_moving_transform')]),
        (norm, map_brainmask, [('reverse_transforms', 'transforms'),
                               ('reverse_invert_flags',
                                'invert_transform_flags')]),
        (map_brainmask, thr_brainmask, [('output_image', 'input_image')]),
        (thr_brainmask, dil_brainmask, [('output_image', 'op1')]),
        (dil_brainmask, get_brainmask, [('output_image', 'op1')]),
        (inu_n4_final, apply_mask, [('output_image', 'in_file')]),
        (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
        (get_brainmask, copy_xform, [('output_image', 'out_mask')]),
        (apply_mask, copy_xform, [('out_file', 'out_file')]),
        (inu_n4_final, copy_xform, [('output_image', 'bias_corrected'),
                                    ('bias_image', 'bias_image')]),
        (copy_xform, outputnode, [('out_file', 'out_file'),
                                  ('out_mask', 'out_mask'),
                                  ('bias_corrected', 'bias_corrected'),
                                  ('bias_image', 'bias_image')]),
    ])

    if use_laplacian:
        lap_tmpl = pe.Node(ImageMath(operation='Laplacian', op2='1.5 1'),
                           name='lap_tmpl')
        lap_tmpl.inputs.op1 = tpl_target_path
        lap_target = pe.Node(ImageMath(operation='Laplacian', op2='1.5 1'),
                             name='lap_target')
        mrg_tmpl = pe.Node(niu.Merge(2), name='mrg_tmpl')
        mrg_tmpl.inputs.in1 = tpl_target_path
        mrg_target = pe.Node(niu.Merge(2), name='mrg_target')
        wf.connect([
            (inu_n4, lap_target, [(('output_image', _pop), 'op1')]),
            (lap_tmpl, mrg_tmpl, [('output_image', 'in2')]),
            (inu_n4, mrg_target, [('output_image', 'in1')]),
            (lap_target, mrg_target, [('output_image', 'in2')]),
            (mrg_tmpl, norm, [('out', 'fixed_image')]),
            (mrg_target, norm, [('out', 'moving_image')]),
        ])
    else:
        norm.inputs.fixed_image = tpl_target_path
        wf.connect([
            (inu_n4, norm, [(('output_image', _pop), 'moving_image')]),
        ])

    if atropos_refine:
        atropos_model = atropos_model or list(
            ATROPOS_MODELS[bids_suffix].values())
        atropos_wf = init_atropos_wf(
            use_random_seed=atropos_use_random_seed,
            omp_nthreads=omp_nthreads,
            mem_gb=mem_gb,
            in_segmentation_model=atropos_model,
        )
        sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1),
                         name='sel_wm',
                         run_without_submitting=True)

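        # Swap the registration-derived mask for the ATROPOS-refined one, and
        # use the white-matter TPM to weight the final N4 pass.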
        wf.disconnect([
            (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
            (copy_xform, outputnode, [('out_mask', 'out_mask')]),
        ])
        wf.connect([
            (inu_n4, atropos_wf, [('output_image', 'inputnode.in_files')]),
            (thr_brainmask, atropos_wf, [('output_image', 'inputnode.in_mask')
                                         ]),
            (get_brainmask, atropos_wf, [('output_image',
                                          'inputnode.in_mask_dilated')]),
            (atropos_wf, sel_wm, [('outputnode.out_tpms', 'inlist')]),
            (sel_wm, inu_n4_final, [('out', 'weight_image')]),
            (atropos_wf, apply_mask, [('outputnode.out_mask', 'mask_file')]),
            (atropos_wf, outputnode, [('outputnode.out_mask', 'out_mask'),
                                      ('outputnode.out_segm', 'out_segm'),
                                      ('outputnode.out_tpms', 'out_tpms')]),
        ])
    return wf
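
A minimal usage sketch for the brain-extraction workflow above, assuming (as
the body suggests) that ``tpl_target_path``, ``tpl_mask_path`` and
``tpl_regmask_path`` are its leading parameters; the module path and all file
paths are hypothetical:

    from CPAC.anat_preproc.ants import init_brain_extraction_wf

    bex_wf = init_brain_extraction_wf(
        '/templates/tpl-OASIS30ANTs_T1w.nii.gz',      # hypothetical template
        '/templates/tpl-OASIS30ANTs_probseg.nii.gz',  # hypothetical brain-probability mask
        None,                                         # no registration mask
        omp_nthreads=4)
    bex_wf.inputs.inputnode.in_files = ['/data/sub-01_T1w.nii.gz']  # hypothetical
    bex_wf.run()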
Example No. 28
File: ants.py  Project: gkiar/C-PAC
def init_atropos_wf(name='atropos_wf',
                    use_random_seed=True,
                    omp_nthreads=None,
                    mem_gb=3.0,
                    padding=10,
                    in_segmentation_model=list(
                        ATROPOS_MODELS['T1w'].values())):
    """
    Implements supersteps 6 and 7 of ``antsBrainExtraction.sh``,
    which refine the mask previously computed with the spatial
    normalization to the template.
    **Parameters**
        use_random_seed : bool
            Whether ATROPOS should generate a random seed based on the
            system's clock
        omp_nthreads : int
            Maximum number of threads an individual process may use
        mem_gb : float
            Estimated peak memory consumption of the most hungry nodes
            in the workflow
        padding : int
            Pad images with zeros before processing
        in_segmentation_model : tuple
            A k-means segmentation is run to find gray or white matter
            around the edge of the initial brain mask warped from the
            template.
            This produces a segmentation image with :math:`K` classes,
            ordered by mean intensity in increasing order.
            With this option, you can control :math:`K` and tell
            the script which classes represent CSF, gray and white matter.
            Format: (K, csfLabel, gmLabel, wmLabel).
            Examples:
              - ``(3,1,2,3)`` for T1 with K=3, CSF=1, GM=2, WM=3 (default)
              - ``(3,3,2,1)`` for T2 with K=3, CSF=3, GM=2, WM=1
              - ``(3,1,3,2)`` for FLAIR with K=3, CSF=1, GM=3, WM=2
              - ``(4,4,2,3)`` uses K=4, CSF=4, GM=2, WM=3
        name : str, optional
            Workflow name (default: atropos_wf)
    **Inputs**
        in_files
            :abbr:`INU (intensity non-uniformity)`-corrected files.
        in_mask
            Brain mask calculated previously
        in_mask_dilated
            Dilated brain mask, used to constrain the ATROPOS segmentation
    **Outputs**
        out_mask
            Refined brain mask
        out_segm
            Output segmentation
        out_tpms
            Output :abbr:`TPMs (tissue probability maps)`
    """
    wf = pe.Workflow(name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_files', 'in_mask', 'in_mask_dilated']),
                        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['out_mask', 'out_segm', 'out_tpms']),
        name='outputnode')

    copy_xform = pe.Node(
        CopyXForm(fields=['out_mask', 'out_segm', 'out_tpms']),
        name='copy_xform',
        run_without_submitting=True,
        mem_gb=2.5)

    # Run atropos (core node)
    atropos = pe.Node(Atropos(
        dimension=3,
        initialization='KMeans',
        number_of_tissue_classes=in_segmentation_model[0],
        n_iterations=3,
        convergence_threshold=0.0,
        mrf_radius=[1, 1, 1],
        mrf_smoothing_factor=0.1,
        likelihood_model='Gaussian',
        use_random_seed=use_random_seed),
                      name='01_atropos',
                      n_procs=omp_nthreads,
                      mem_gb=mem_gb)

    # massage outputs
    pad_segm = pe.Node(ImageMath(operation='PadImage', op2='%d' % padding),
                       name='02_pad_segm')
    pad_mask = pe.Node(ImageMath(operation='PadImage', op2='%d' % padding),
                       name='03_pad_mask')

    # Split segmentation in binary masks
    sel_labels = pe.Node(niu.Function(
        function=_select_labels, output_names=['out_wm', 'out_gm', 'out_csf']),
                         name='04_sel_labels')
    sel_labels.inputs.labels = list(reversed(in_segmentation_model[1:]))

    # Select largest components (GM, WM)
    # ImageMath ${DIMENSION} ${EXTRACTION_WM} GetLargestComponent ${EXTRACTION_WM}
    get_wm = pe.Node(ImageMath(operation='GetLargestComponent'),
                     name='05_get_wm')
    get_gm = pe.Node(ImageMath(operation='GetLargestComponent'),
                     name='06_get_gm')

    # Fill holes and calculate intersection
    # ImageMath ${DIMENSION} ${EXTRACTION_TMP} FillHoles ${EXTRACTION_GM} 2
    # MultiplyImages ${DIMENSION} ${EXTRACTION_GM} ${EXTRACTION_TMP} ${EXTRACTION_GM}
    fill_gm = pe.Node(ImageMath(operation='FillHoles', op2='2'),
                      name='07_fill_gm')
    mult_gm = pe.Node(MultiplyImages(dimension=3,
                                     output_product_image='08_mult_gm.nii.gz'),
                      name='08_mult_gm')

    # MultiplyImages ${DIMENSION} ${EXTRACTION_WM} ${ATROPOS_WM_CLASS_LABEL} ${EXTRACTION_WM}
    # ImageMath ${DIMENSION} ${EXTRACTION_TMP} ME ${EXTRACTION_CSF} 10
    relabel_wm = pe.Node(MultiplyImages(
        dimension=3,
        second_input=in_segmentation_model[-1],
        output_product_image='09_relabel_wm.nii.gz'),
                         name='09_relabel_wm')
    me_csf = pe.Node(ImageMath(operation='ME', op2='10'), name='10_me_csf')

    # ImageMath ${DIMENSION} ${EXTRACTION_GM} addtozero ${EXTRACTION_GM} ${EXTRACTION_TMP}
    # MultiplyImages ${DIMENSION} ${EXTRACTION_GM} ${ATROPOS_GM_CLASS_LABEL} ${EXTRACTION_GM}
    # ImageMath ${DIMENSION} ${EXTRACTION_SEGMENTATION} addtozero ${EXTRACTION_WM} ${EXTRACTION_GM}
    add_gm = pe.Node(ImageMath(operation='addtozero'), name='11_add_gm')
    relabel_gm = pe.Node(MultiplyImages(
        dimension=3,
        second_input=in_segmentation_model[-2],
        output_product_image='12_relabel_gm.nii.gz'),
                         name='12_relabel_gm')
    add_gm_wm = pe.Node(ImageMath(operation='addtozero'), name='13_add_gm_wm')

    # Superstep 7
    # Split segmentation in binary masks
    sel_labels2 = pe.Node(niu.Function(function=_select_labels,
                                       output_names=['out_gm', 'out_wm']),
                          name='14_sel_labels2')
    sel_labels2.inputs.labels = in_segmentation_model[2:]

    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} addtozero ${EXTRACTION_MASK} ${EXTRACTION_TMP}
    add_7 = pe.Node(ImageMath(operation='addtozero'), name='15_add_7')
    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} ME ${EXTRACTION_MASK} 2
    me_7 = pe.Node(ImageMath(operation='ME', op2='2'), name='16_me_7')
    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} GetLargestComponent ${EXTRACTION_MASK}
    comp_7 = pe.Node(ImageMath(operation='GetLargestComponent'),
                     name='17_comp_7')
    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} MD ${EXTRACTION_MASK} 4
    md_7 = pe.Node(ImageMath(operation='MD', op2='4'), name='18_md_7')
    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} FillHoles ${EXTRACTION_MASK} 2
    fill_7 = pe.Node(ImageMath(operation='FillHoles', op2='2'),
                     name='19_fill_7')
    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} addtozero ${EXTRACTION_MASK} \
    # ${EXTRACTION_MASK_PRIOR_WARPED}
    add_7_2 = pe.Node(ImageMath(operation='addtozero'), name='20_add_7_2')
    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} MD ${EXTRACTION_MASK} 5
    md_7_2 = pe.Node(ImageMath(operation='MD', op2='5'), name='21_md_7_2')
    # ImageMath ${DIMENSION} ${EXTRACTION_MASK} ME ${EXTRACTION_MASK} 5
    me_7_2 = pe.Node(ImageMath(operation='ME', op2='5'), name='22_me_7_2')

    # De-pad
    depad_mask = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding),
                         name='23_depad_mask')
    depad_segm = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding),
                         name='24_depad_segm')
    depad_gm = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding),
                       name='25_depad_gm')
    depad_wm = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding),
                       name='26_depad_wm')
    depad_csf = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding),
                        name='27_depad_csf')

    msk_conform = pe.Node(niu.Function(function=_conform_mask),
                          name='msk_conform')
    merge_tpms = pe.Node(niu.Merge(in_segmentation_model[0]),
                         name='merge_tpms')
    wf.connect([
        (inputnode, copy_xform, [(('in_files', _pop), 'hdr_file')]),
        (inputnode, pad_mask, [('in_mask', 'op1')]),
        (inputnode, atropos, [('in_files', 'intensity_images'),
                              ('in_mask_dilated', 'mask_image')]),
        (inputnode, msk_conform, [(('in_files', _pop), 'in_reference')]),
        (atropos, pad_segm, [('classified_image', 'op1')]),
        (pad_segm, sel_labels, [('output_image', 'in_segm')]),
        (sel_labels, get_wm, [('out_wm', 'op1')]),
        (sel_labels, get_gm, [('out_gm', 'op1')]),
        (get_gm, fill_gm, [('output_image', 'op1')]),
        (get_gm, mult_gm, [('output_image', 'first_input')]),
        (fill_gm, mult_gm, [('output_image', 'second_input')]),
        (get_wm, relabel_wm, [('output_image', 'first_input')]),
        (sel_labels, me_csf, [('out_csf', 'op1')]),
        (mult_gm, add_gm, [('output_product_image', 'op1')]),
        (me_csf, add_gm, [('output_image', 'op2')]),
        (add_gm, relabel_gm, [('output_image', 'first_input')]),
        (relabel_wm, add_gm_wm, [('output_product_image', 'op1')]),
        (relabel_gm, add_gm_wm, [('output_product_image', 'op2')]),
        (add_gm_wm, sel_labels2, [('output_image', 'in_segm')]),
        (sel_labels2, add_7, [('out_wm', 'op1'), ('out_gm', 'op2')]),
        (add_7, me_7, [('output_image', 'op1')]),
        (me_7, comp_7, [('output_image', 'op1')]),
        (comp_7, md_7, [('output_image', 'op1')]),
        (md_7, fill_7, [('output_image', 'op1')]),
        (fill_7, add_7_2, [('output_image', 'op1')]),
        (pad_mask, add_7_2, [('output_image', 'op2')]),
        (add_7_2, md_7_2, [('output_image', 'op1')]),
        (md_7_2, me_7_2, [('output_image', 'op1')]),
        (me_7_2, depad_mask, [('output_image', 'op1')]),
        (add_gm_wm, depad_segm, [('output_image', 'op1')]),
        (relabel_wm, depad_wm, [('output_product_image', 'op1')]),
        (relabel_gm, depad_gm, [('output_product_image', 'op1')]),
        (sel_labels, depad_csf, [('out_csf', 'op1')]),
        (depad_csf, merge_tpms, [('output_image', 'in1')]),
        (depad_gm, merge_tpms, [('output_image', 'in2')]),
        (depad_wm, merge_tpms, [('output_image', 'in3')]),
        (depad_mask, msk_conform, [('output_image', 'in_mask')]),
        (msk_conform, copy_xform, [('out', 'out_mask')]),
        (depad_segm, copy_xform, [('output_image', 'out_segm')]),
        (merge_tpms, copy_xform, [('out', 'out_tpms')]),
        (copy_xform, outputnode, [('out_mask', 'out_mask'),
                                  ('out_segm', 'out_segm'),
                                  ('out_tpms', 'out_tpms')]),
    ])
    return wf
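
A short, hedged sketch of invoking this refinement sub-workflow directly with
the T2-weighted model from the docstring (K=3, with CSF as the brightest
class):

    atropos_wf = init_atropos_wf(name='atropos_t2_wf',
                                 use_random_seed=False,
                                 omp_nthreads=4,
                                 in_segmentation_model=(3, 3, 2, 1))

In ``init_brain_extraction_wf`` above, the same model is selected through the
``atropos_model`` parameter (or the ``ATROPOS_MODELS[bids_suffix]`` defaults)
when ``atropos_refine`` is enabled.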
Example No. 29
def test_registration_lesion():
    import os
    from CPAC.pipeline import nipype_pipeline_engine as pe
    from ..registration import create_wf_calculate_ants_warp
    from CPAC.anat_preproc.anat_preproc import create_anat_preproc
    from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc

    # Skull stripped anat image
    anat_file = '/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w.nii.gz'
    lesion_file = '/bids_dataset/sub-0027228/ses-1/anat/sub-0027228_ses-1_run-1_T1w_lesion-mask.nii.gz'
    mni_brain_file = '/usr/share/fsl/5.0/data/standard/MNI152_T1_3mm_brain.nii.gz'

    if not os.path.exists(anat_file):
        raise IOError(anat_file + ' not found')
    if not os.path.exists(lesion_file):
        raise IOError(lesion_file + ' not found')
    if not os.path.exists(mni_brain_file):
        raise IOError(mni_brain_file + ' not found')

    wf = pe.Workflow(name='test_reg_lesion')

    anat_preproc = create_anat_preproc(method='mask',
                                       already_skullstripped=True,
                                       wf_name='anat_preproc')

    anat_preproc.inputs.inputspec.anat = anat_file

    lesion_preproc = create_lesion_preproc(wf_name='lesion_preproc')

    lesion_preproc.inputs.inputspec.lesion = lesion_file

    ants_reg_anat_mni = \
        create_wf_calculate_ants_warp(
            'anat_mni_ants_register',
            0,
            num_threads=4
        )

    # pass the reference file
    ants_reg_anat_mni.inputs.inputspec.reference_brain = mni_brain_file

    wf.connect(anat_preproc, 'outputspec.reorient', ants_reg_anat_mni,
               'inputspec.moving_brain')

    wf.connect(lesion_preproc, 'outputspec.reorient', ants_reg_anat_mni,
               'inputspec.fixed_image_mask')

    ants_reg_anat_mni.inputs.inputspec.set(
        dimension=3,
        use_histogram_matching=True,
        winsorize_lower_quantile=0.01,
        winsorize_upper_quantile=0.99,
        metric=['MI', 'MI', 'CC'],
        metric_weight=[1, 1, 1],
        radius_or_number_of_bins=[32, 32, 4],
        sampling_strategy=['Regular', 'Regular', None],
        sampling_percentage=[0.25, 0.25, None],
        number_of_iterations=[[1000, 500, 250, 100], [1000, 500, 250, 100],
                              [100, 100, 70, 20]],
        convergence_threshold=[1e-8, 1e-8, 1e-9],
        convergence_window_size=[10, 10, 15],
        transforms=['Rigid', 'Affine', 'SyN'],
        transform_parameters=[[0.1], [0.1], [0.1, 3, 0]],
        shrink_factors=[[8, 4, 2, 1], [8, 4, 2, 1], [6, 4, 2, 1]],
        smoothing_sigmas=[[3, 2, 1, 0], [3, 2, 1, 0], [3, 2, 1, 0]])

    wf.run()
Example No. 30
def spatial_smoothing(wf_name,
                      fwhm,
                      input_image_type='func_derivative',
                      opt=None):

    # Imports needed to run this snippet standalone; ``set_gauss`` is assumed
    # to be defined in the enclosing C-PAC module.
    from CPAC.pipeline import nipype_pipeline_engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.afni as afni

    wf = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['in_file', 'mask']),
                        name='inputspec')

    inputnode_fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']),
                             name='fwhm_input')
    inputnode_fwhm.iterables = ("fwhm", fwhm)

    image_types = [
        'func_derivative', 'func_derivative_multi', 'func_4d', 'func_mask'
    ]

    if input_image_type not in image_types:
        raise ValueError('Input image type {0} should be one of '
                         '{1}'.format(input_image_type,
                                      ', '.join(image_types)))

    if opt == 'FSL':
        output_smooth_mem_gb = 4.0
        if input_image_type == 'func_derivative_multi':
            output_smooth = pe.MapNode(interface=fsl.MultiImageMaths(),
                                       name='smooth_multi',
                                       iterfield=['in_file'],
                                       mem_gb=output_smooth_mem_gb)
        else:
            output_smooth = pe.Node(interface=fsl.MultiImageMaths(),
                                    name='smooth',
                                    mem_gb=output_smooth_mem_gb)

    elif opt == 'AFNI':
        if input_image_type == 'func_derivative_multi':
            output_smooth = pe.MapNode(interface=afni.BlurToFWHM(),
                                       name='smooth_multi',
                                       iterfield=['in_file'])
        else:
            output_smooth = pe.Node(interface=afni.BlurToFWHM(),
                                    name='smooth')
        output_smooth.inputs.outputtype = 'NIFTI_GZ'
    else:
        raise ValueError("smoothing option '{0}' should be one of "
                         "'FSL' or 'AFNI'".format(opt))

    if opt == 'FSL':
        # wire in the resource to be smoothed
        wf.connect(inputnode, 'in_file', output_smooth, 'in_file')
        # get the parameters for fwhm
        wf.connect(inputnode_fwhm, ('fwhm', set_gauss), output_smooth,
                   'op_string')
        wf.connect(inputnode, 'mask', output_smooth, 'operand_files')
    elif opt == 'AFNI':
        wf.connect(inputnode, 'in_file', output_smooth, 'in_file')
        wf.connect(inputnode_fwhm, 'fwhm', output_smooth, 'fwhm')
        wf.connect(inputnode, 'mask', output_smooth, 'mask')

    outputnode = pe.Node(util.IdentityInterface(fields=['out_file', 'fwhm']),
                         name='outputspec')

    wf.connect(output_smooth, 'out_file', outputnode, 'out_file')
    wf.connect(inputnode_fwhm, 'fwhm', outputnode, 'fwhm')

    return wf
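
For reference, a minimal sketch of the ``set_gauss`` helper assumed by the FSL
branch above (C-PAC's actual helper may differ in detail): it converts a FWHM
in mm into the Gaussian sigma that ``fslmaths`` expects, leaving a ``%s``
placeholder for the mask passed via ``operand_files``:

    def set_gauss(fwhm):
        # FWHM -> sigma: fwhm / (2 * sqrt(2 * ln 2)) ~= fwhm / 2.3548
        sigma = float(fwhm) / 2.3548
        return "-kernel gauss %f -fmean -mas " % sigma + "%s"

And a hedged usage sketch of the workflow itself (file paths are hypothetical):

    smooth_wf = spatial_smoothing('smooth_reho', fwhm=[4, 6], opt='FSL')
    smooth_wf.inputs.inputspec.in_file = '/data/reho.nii.gz'
    smooth_wf.inputs.inputspec.mask = '/data/brain_mask.nii.gz'
    smooth_wf.run()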