Example #1
    def _run_interface(self, runtime):
        distortion_groups, assignments = get_distortion_grouping(
            self.inputs.bids_dwi_files)
        num_distortion_groups = len(distortion_groups)
        if num_distortion_groups != 2:
            raise Exception("Unable to merge using strategy 'average': exactly"
                            " two distortion groups must be present in data."
                            " Found %d" % num_distortion_groups)

        # Get the gradient info for each PE group
        original_bvecs = combined_bvec_array(self.inputs.original_bvec_files)
        rotated_bvecs = combined_bvec_array(self.inputs.bvec_files)
        bvals = combined_bval_array(self.inputs.bval_files)

        # Find which images should be averaged together in the output
        image_pairs, averaged_raw_bvec = find_image_pairs(
            original_bvecs, bvals, assignments)
        combined_images, combined_raw_images, combined_bvals, combined_bvecs, error_report = \
            average_image_pairs(image_pairs, self.inputs.dwi_files, rotated_bvecs,
                                bvals, self.inputs.denoising_confounds,
                                self.inputs.raw_concatenated_files,
                                verbose=self.inputs.verbose)

        # Save the averaged outputs
        out_dwi_path = op.join(runtime.cwd, "averaged_pairs.nii.gz")
        combined_images.to_filename(out_dwi_path)
        self._results['out_dwi'] = out_dwi_path
        out_bval_path = op.join(runtime.cwd, "averaged_pairs.bval")
        self._results['out_bval'] = combine_bvals(combined_bvals,
                                                  out_bval_path)
        out_bvec_path = op.join(runtime.cwd, "averaged_pairs.bvec")
        self._results['out_bvec'] = combine_bvecs(combined_bvecs,
                                                  out_bvec_path)
        out_confounds_path = op.join(runtime.cwd,
                                     "averaged_pairs_confounds.tsv")
        error_report.to_csv(out_confounds_path, index=False, sep="\t")
        self._results['merged_denoising_confounds'] = out_confounds_path
        self._results['original_images'] = self.inputs.bids_dwi_files

        # write the averaged raw data
        out_raw_concatenated = op.join(runtime.cwd, 'merged_raw.nii.gz')
        self._results['merged_raw_dwi'] = out_raw_concatenated
        combined_raw_images.to_filename(out_raw_concatenated)
        out_raw_bvec = op.join(runtime.cwd, 'merged_raw.bvec')
        self._results['merged_raw_bvec'] = combine_bvecs(
            averaged_raw_bvec, out_raw_bvec)

        # Make a new b=0 template
        b0_indices = np.flatnonzero(bvals < self.inputs.b0_threshold)
        b0_ref = ants.AverageImages(
            dimension=3,
            normalize=True,
            images=[self.inputs.dwi_files[idx] for idx in b0_indices])
        result = b0_ref.run()
        self._results['merged_b0_ref'] = result.outputs.output_average_image

        return runtime
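For reference, a minimal standalone sketch of the b=0 averaging step above; the file names are hypothetical, and normalize=True matches the template-building convention used throughout these examples:

from nipype.interfaces import ants

b0_files = ["b0_1.nii.gz", "b0_2.nii.gz"]  # hypothetical b=0 volumes
avg = ants.AverageImages(dimension=3, normalize=True, images=b0_files,
                         output_average_image="b0_template.nii.gz")
result = avg.run()  # calls the AverageImages executable
print(result.outputs.output_average_image)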
Example #2
def get_workflow(parameters, name=0):
    wf = pe.Workflow(name="%04d" % name + "regionGrowing")
    wf.base_dir = "/scratch/henry_temp/keshavan/region_growing_test"
    n = pe.Node(niu.Function(input_names=[
        "inputv", "seeds", "multiplier", "nbhd", "iterations", "timestep",
        "smoothingiterations"
    ],
                             output_names=["outfile"],
                             function=getSRGS),
                name="srgs")
    inputspec = pe.Node(niu.IdentityInterface(fields=["seeds", "in_file"]),
                        name="inputspec")
    n.iterables = [(q, parameters[q].tolist()) for q in [
        "multiplier", "nbhd", "iterations", "timestep", "smoothingiterations"
    ]]
    n.synchronize = True
    wf.connect(inputspec, "seeds", n, "seeds")
    wf.connect(inputspec, "in_file", n, "inputv")

    dt = pe.Node(fsl.ChangeDataType(output_datatype="short"), name="changedt")
    wf.connect(n, "outfile", dt, "in_file")

    stats = pe.Node(fsl.ImageStats(op_string="-c -w"), name="stats")
    wf.connect(dt, "out_file", stats, "in_file")

    avg = pe.JoinNode(ants.AverageImages(dimension=3, normalize=False),
                      name="average",
                      joinsource="srgs",
                      joinfield=["images"])
    wf.connect(dt, "out_file", avg, "images")

    st = pe.JoinNode(niu.Function(input_names=["out_stats", "parameters"],
                                  output_names=["outfile"],
                                  function=combine_stats),
                     name="combiner",
                     joinsource="srgs",
                     joinfield=["out_stats"])
    #wf.connect(dt, "out_file", st, "out_files")
    wf.connect(stats, "out_stat", st, "out_stats")
    st.inputs.parameters = parameters

    outputspec = pe.Node(niu.IdentityInterface(fields=["avg_image", "stats"]),
                         name="outputspec")
    wf.connect(avg, "output_average_image", outputspec, "avg_image")
    wf.connect(st, "outfile", outputspec, "stats")
    return wf, inputspec, outputspec
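A hedged usage sketch for get_workflow, assuming `parameters` is a dict of equal-length numpy arrays keyed by the iterable names (synchronize=True requires equal lengths; all values below are illustrative):

import numpy as np

parameters = {
    "multiplier": np.array([2.0, 2.5]),
    "nbhd": np.array([1, 2]),
    "iterations": np.array([5, 10]),
    "timestep": np.array([0.05, 0.05]),
    "smoothingiterations": np.array([5, 5]),
}
wf, inputspec, outputspec = get_workflow(parameters, name=1)
inputspec.inputs.in_file = "t1.nii.gz"   # hypothetical input volume
inputspec.inputs.seeds = [[10, 20, 30]]  # hypothetical seed voxel
# wf.run()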
Example #3
def init_commonspace_wf(name="antsRegistrationTemplateBuilder"):
    # from nipype.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['file_list']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'PrimaryTemplate', 'PassiveTemplate', 'Transforms',
        'PreRegisterAverage'
    ]),
                         name='outputnode')

    datasource = pe.Node(Function(input_names=['InitialTemplateInputs'],
                                  output_names=[
                                      'InitialTemplateInputs',
                                      'ListOfImagesDictionaries',
                                      'registrationImageTypes',
                                      'interpolationMapping'
                                  ],
                                  function=prep_data),
                         name='datasource')

    # creates an average from the input images as initial target template
    initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg')
    initAvg.inputs.dimension = 3
    initAvg.inputs.normalize = True

    # Define the iterations for template building
    buildTemplateIteration1 = antsRegistrationTemplateBuildSingleIterationWF(
        'iteration01')
    buildTemplateIteration2 = antsRegistrationTemplateBuildSingleIterationWF(
        'iteration02')
    buildTemplateIteration3 = antsRegistrationTemplateBuildSingleIterationWF(
        'iteration03')

    workflow.connect(inputnode, "file_list", datasource,
                     "InitialTemplateInputs")
    workflow.connect(datasource, "InitialTemplateInputs", initAvg, "images")

    workflow.connect(initAvg, 'output_average_image', buildTemplateIteration1,
                     'inputspec.fixed_image')
    workflow.connect(datasource, 'ListOfImagesDictionaries',
                     buildTemplateIteration1,
                     'inputspec.ListOfImagesDictionaries')
    workflow.connect(datasource, 'registrationImageTypes',
                     buildTemplateIteration1,
                     'inputspec.registrationImageTypes')
    workflow.connect(datasource, 'interpolationMapping',
                     buildTemplateIteration1, 'inputspec.interpolationMapping')
    '''
    #the template created from the previous iteration becomes the new target template
    workflow.connect(buildTemplateIteration1, 'outputspec.template',
                     buildTemplateIteration2, 'inputspec.fixed_image')
    workflow.connect(datasource, 'ListOfImagesDictionaries',
                     buildTemplateIteration2, 'inputspec.ListOfImagesDictionaries')
    workflow.connect(datasource, 'registrationImageTypes', buildTemplateIteration2,
                     'inputspec.registrationImageTypes')
    workflow.connect(datasource, 'interpolationMapping', buildTemplateIteration2,
                     'inputspec.interpolationMapping')
    #the template created from the previous iteration becomes the new target template
    workflow.connect(buildTemplateIteration2, 'outputspec.template',
                     buildTemplateIteration3, 'inputspec.fixed_image')
    workflow.connect(datasource, 'ListOfImagesDictionaries',
                     buildTemplateIteration3, 'inputspec.ListOfImagesDictionaries')
    workflow.connect(datasource, 'registrationImageTypes', buildTemplateIteration3,
                     'inputspec.registrationImageTypes')
    workflow.connect(datasource, 'interpolationMapping', buildTemplateIteration3,
                     'inputspec.interpolationMapping')
    '''

    workflow.connect(buildTemplateIteration1, 'outputspec.template',
                     outputnode, 'PrimaryTemplate')
    workflow.connect(buildTemplateIteration1,
                     'outputspec.passive_deformed_templates', outputnode,
                     'PassiveTemplate')
    workflow.connect(buildTemplateIteration1, 'outputspec.transforms_list',
                     outputnode, 'Transforms')
    workflow.connect(initAvg, 'output_average_image', outputnode,
                     'PreRegisterAverage')

    return workflow
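A hedged instantiation sketch; file_list would be the anatomical images to build the template from (the paths are placeholders):

wf = init_commonspace_wf()
wf.inputs.inputnode.file_list = ["sub-01_T1w.nii.gz", "sub-02_T1w.nii.gz"]
# wf.run()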
Example #4
import nipype.pipeline.engine as pe
from nipype import Node, JoinNode, Workflow
from nipype.interfaces.utility import IdentityInterface
from nipype.interfaces import (ants, dcm2nii, fsl)

#make workflow object
wf = Workflow(name='preprocess')
#inputspec is a node that provides the subject identity; this IdentityInterface is what Subject.py was trying to accomplish
inputspec = Node(IdentityInterface(fields=['image']),
                    name='inputspec')
#these are the images that will be iterated over by this node; Subject.seq might work here
inputspec.iterables = [('image',
                       ['img1.nii', 'img2.nii', 'img3.nii'])]
#img2flt converts each image to float datatype (fslmaths ... -odt float)
img2flt = Node(fsl.ImageMaths(out_data_type='float'),
                  name='img2flt')
#connects the first node with the second node
wf.connect(inputspec, 'image', img2flt, 'in_file')
#3rd node
average = JoinNode(ants.AverageImages(dimension=3, normalize=True),
                   joinsource='inputspec', joinfield='images', name='average')
#add new node to wf
wf.connect(img2flt, 'out_file', average, 'images')
#new node
realign = Node(fsl.FLIRT(), name='realign')
#add to wf after img2flt & after average
wf.connect(img2flt, 'out_file', realign, 'in_file')
wf.connect(average, 'output_average_image', realign, 'reference')
strip = Node(fsl.BET(), name='strip')
wf.connect(realign, 'out_file', strip, 'in_file')
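The script above only assembles the graph; executing it would look like this sketch (the plugin choice is an assumption):

# wf.run()                                                # serial execution
# wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})  # parallel execution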
Example #5
def nonlinear_alignment_iteration(iternum=0, gradient_step=0.2):
    """
    Takes a template image and a set of input images, does
    a linear alignment to the template and updates it with the
    inverse of the average affine transform to the new template

    Returns a workflow

    """
    iteration_wf = Workflow(name="nl_iterative_alignment_%03d" % iternum)
    input_node_fields = ["image_paths", "template_image", "iteration_num"]
    inputnode = pe.Node(
        niu.IdentityInterface(fields=input_node_fields), name='inputnode')
    inputnode.inputs.iteration_num = iternum
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["registered_image_paths", "affine_transforms",
                                      "warp_transforms", "composite_transforms",
                                      "updated_template"]), name='outputnode')
    ants_settings = pkgrf("qsiprep", "data/intramodal_nonlinear.json")
    reg = ants.Registration(from_file=ants_settings)
    iter_reg = pe.MapNode(
        reg, name="nlreg_%03d" % iternum, iterfield=["moving_image"])

    # Average the images
    averaged_images = pe.Node(
        ants.AverageImages(normalize=True, dimension=3),
        name="averaged_images")

    # Make an automask
    mask_average = pe.Node(afni.Automask(), name='mask_average')

    # Shape update to template:
    # Average the affines so that the inverse can be applied to the template
    affines_to_list = pe.Node(niu.Merge(1), name="affines_to_list")
    warps_to_list = pe.Node(niu.Merge(1), name="warps_to_list")
    avg_affines = pe.Node(
        ants.AverageAffineTransform(dimension=3,
                                    output_affine_transform="AveragedAffines.mat"),
        name="avg_affines")

    # Average the warps:
    average_warps = pe.Node(
        ants.AverageImages(dimension=3, normalize=False), name="average_warps")
    # Scale by the gradient step
    scale_warp = pe.Node(
        ants.MultiplyImages(dimension=3, second_input=gradient_step,
                            output_product_image="scaled_warp.nii.gz"),
        name="scale_warp")
    # Align the warps to the template image
    align_warp = pe.Node(
        ants.ApplyTransforms(
            input_image_type=1, invert_transform_flags=[True]),
        name="align_warp")

    # transform the template for the shape update
    shape_update_template = pe.Node(
        ants.ApplyTransforms(interpolation="LanczosWindowedSinc",
                             invert_transform_flags=[True, False, False, False, False]),
        name="shape_update_template")
    shape_update_merge = pe.Node(niu.Merge(5), name="shape_update_merge")

    # Run the images through antsRegistration
    def get_first(input_pairs):
        return [input_pair[0] for input_pair in input_pairs]

    def get_second(input_pairs):
        return [input_pair[1] for input_pair in input_pairs]

    iteration_wf.connect([
        (inputnode, iter_reg, [
            ('image_paths', 'moving_image'),
            ('template_image', 'fixed_image')]),
        (iter_reg, affines_to_list, [(('forward_transforms', get_first), 'in1')]),
        (affines_to_list, avg_affines, [('out', 'transforms')]),
        (iter_reg, warps_to_list, [(('forward_transforms', get_second), 'in1')]),
        (iter_reg, averaged_images, [('warped_image', 'images')]),

        # Average the warps, scale them, and transform to be aligned with the template
        (warps_to_list, average_warps, [('out', 'images')]),
        (average_warps, scale_warp, [('output_average_image', 'first_input')]),
        (scale_warp, align_warp, [
            ('output_product_image', 'input_image')]),
        (avg_affines, align_warp, [('affine_transform', 'transforms')]),
        (inputnode, align_warp, [('template_image', 'reference_image')]),
        (avg_affines, shape_update_merge, [('affine_transform', 'in1')]),
        (align_warp, shape_update_merge, [
            ('output_image', 'in2'), ('output_image', 'in3'),
            ('output_image', 'in4'), ('output_image', 'in5')]),
        (shape_update_merge, shape_update_template, [('out', 'transforms')]),
        (averaged_images, shape_update_template, [
            ('output_average_image', 'input_image'),
            ('output_average_image', 'reference_image')]),
        (shape_update_template, outputnode, [('output_image', 'updated_template')]),
        (iter_reg, outputnode, [
            ('forward_transforms', 'affine_transforms'),
            ('warped_image', 'registered_image_paths')])
    ])

    return iteration_wf
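A hedged usage sketch for one nonlinear iteration (file names are hypothetical):

wf = nonlinear_alignment_iteration(iternum=0, gradient_step=0.2)
wf.inputs.inputnode.image_paths = ["b0_0.nii.gz", "b0_1.nii.gz"]
wf.inputs.inputnode.template_image = "initial_template.nii.gz"
# wf.run()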
Example #6
datasource = pe.Node(interface=util.IdentityInterface(fields=[
    'InitialTemplateInputs', 'ListOfImagesDictionaries',
    'registrationImageTypes', 'interpolationMapping'
]),
                     run_without_submitting=True,
                     name='InputImages')
datasource.inputs.InitialTemplateInputs = InitialTemplateInputs
datasource.inputs.ListOfImagesDictionaries = ListOfImagesDictionaries
datasource.inputs.registrationImageTypes = registrationImageTypes
datasource.inputs.interpolationMapping = interpolationMapping
"""
5. Template is initialized by a simple average in this simple example,
   any reference image could be used (i.e. a previously created template)
"""

initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg')
initAvg.inputs.dimension = 3
initAvg.inputs.normalize = True

tbuilder.connect(datasource, "InitialTemplateInputs", initAvg, "images")
"""
6. Define the first iteration of template building
"""

buildTemplateIteration1 = antsRegistrationTemplateBuildSingleIterationWF(
    'iteration01')
"""
Here we fine-tune parameters of the SGE job (memory limit, number of cores, etc.)
"""

BeginANTS = buildTemplateIteration1.get_node("BeginANTS")
Example #7
def main(args):
    subjects, master_config = args

    import os
    import sys
    import traceback

    # Set universal pipeline options
    from nipype import config
    config.update_config(master_config)
    assert config.get('execution',
                      'plugin') == master_config['execution']['plugin']

    import nipype.pipeline.engine as pe
    import nipype.interfaces.io as nio
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.ants as ants

    from template import MergeByExtendListElements, xml_filename
    from PipeLineFunctionHelpers import mapPosteriorList
    from atlasNode import GetAtlasNode, MakeNewAtlasTemplate
    from utilities.misc import GenerateSubjectOutputPattern as outputPattern
    from utilities.distributed import modify_qsub_args

    template = pe.Workflow(name='SubjectAtlas_Template')
    template.base_dir = master_config['logging']['log_directory']

    BAtlas = GetAtlasNode(master_config['previouscache'], 'BAtlas')

    inputspec = pe.Node(interface=IdentityInterface(fields=['subject']),
                        name='inputspec')
    inputspec.iterables = ('subject', subjects)

    baselineDG = pe.Node(nio.DataGrabber(infields=['subject'],
                                         outfields=[
                                             't1_average', 't2_average',
                                             'pd_average', 'fl_average',
                                             'outputLabels', 'posteriorImages'
                                         ]),
                         name='Baseline_DG')
    baselineDG.inputs.base_directory = master_config['previousresult']
    baselineDG.inputs.sort_filelist = True
    baselineDG.inputs.raise_on_empty = False
    baselineDG.inputs.template = '*/%s/*/Baseline/%s.nii.gz'
    baselineDG.inputs.template_args['t1_average'] = [[
        'subject', 't1_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['t2_average'] = [[
        'subject', 't2_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['pd_average'] = [[
        'subject', 'pd_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['fl_average'] = [[
        'subject', 'fl_average_BRAINSABC'
    ]]
    baselineDG.inputs.template_args['outputLabels'] = [[
        'subject', 'brain_label_seg'
    ]]
    baselineDG.inputs.field_template = {
        'posteriorImages': '*/%s/*/TissueClassify/POSTERIOR_%s.nii.gz'
    }
    posterior_files = [
        'AIR', 'BASAL', 'CRBLGM', 'CRBLWM', 'CSF', 'GLOBUS', 'HIPPOCAMPUS',
        'NOTCSF', 'NOTGM', 'NOTVB', 'NOTWM', 'SURFGM', 'THALAMUS', 'VB', 'WM'
    ]
    baselineDG.inputs.template_args['posteriorImages'] = [[
        'subject', posterior_files
    ]]

    MergeByExtendListElementsNode = pe.Node(
        Function(
            function=MergeByExtendListElements,
            input_names=['t1s', 't2s', 'pds', 'fls', 'labels', 'posteriors'],
            output_names=[
                'ListOfImagesDictionaries', 'registrationImageTypes',
                'interpolationMapping'
            ]),
        run_without_submitting=True,
        name="99_MergeByExtendListElements")
    from PipeLineFunctionHelpers import WrapPosteriorImagesFromDictionaryFunction as wrapfunc
    template.connect([(inputspec, baselineDG, [('subject', 'subject')]),
                      (baselineDG, MergeByExtendListElementsNode,
                       [('t1_average', 't1s'), ('t2_average', 't2s'),
                        ('pd_average', 'pds'), ('fl_average', 'fls'),
                        ('outputLabels', 'labels'),
                        (('posteriorImages', wrapfunc), 'posteriors')])])

    myInitAvgWF = pe.Node(
        interface=ants.AverageImages(),
        name='Atlas_antsSimpleAverage')  # was 'Phase1_antsSimpleAverage'
    myInitAvgWF.inputs.dimension = 3
    myInitAvgWF.inputs.normalize = True
    template.connect(baselineDG, 't1_average', myInitAvgWF, "images")
    ####################################################################################################
    # TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE'
    # if numSessions == 1:
    #     TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE'
    ####################################################################################################
    from BAWantsRegistrationBuildTemplate import BAWantsRegistrationTemplateBuildSingleIterationWF as registrationWF
    buildTemplateIteration1 = registrationWF('iteration01')
    # buildTemplateIteration2 = buildTemplateIteration1.clone(name='buildTemplateIteration2')
    buildTemplateIteration2 = registrationWF('Iteration02')

    MakeNewAtlasTemplateNode = pe.Node(
        interface=Function(
            function=MakeNewAtlasTemplate,
            input_names=[
                't1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'
            ],
            output_names=['outAtlasFullPath', 'clean_deformed_list']),
        # This node does a lot of work, so ideally it would be submitted to
        # the cluster rather than run locally.
        run_without_submitting=True,  # HACK: this node really should run on the cluster
        name='99_MakeNewAtlasTemplate')

    if master_config['execution']['plugin'] == 'SGE':
        # for some nodes, the qsub call needs to be modified on the cluster

        MakeNewAtlasTemplateNode.plugin_args = {
            'template': master_config['plugin_args']['template'],
            'qsub_args': modify_qsub_args(master_config['queue'], '1000M', 1,
                                          1),
            'overwrite': True
        }
        for bt in [buildTemplateIteration1, buildTemplateIteration2]:
            ##################################################
            # *** Hans, is this TODO already addressed? ***  #
            # ---->  # TODO:  Change these parameters  <---- #
            ##################################################
            BeginANTS = bt.get_node("BeginANTS")
            BeginANTS.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '9000M',
                                              4, hard=False)
            }
            wimtdeformed = bt.get_node("wimtdeformed")
            wimtdeformed.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }
            AvgAffineTransform = bt.get_node("AvgAffineTransform")
            AvgAffineTransform.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M',
                                              1)
            }
            wimtPassivedeformed = bt.get_node("wimtPassivedeformed")
            wimtPassivedeformed.plugin_args = {
                'template': master_config['plugin_args']['template'],
                'overwrite': True,
                'qsub_args': modify_qsub_args(master_config['queue'], '2000M', 1, 2)
            }

    template.connect([
        (myInitAvgWF, buildTemplateIteration1, [('output_average_image',
                                                 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration1,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (buildTemplateIteration1, buildTemplateIteration2,
         [('outputspec.template', 'inputspec.fixed_image')]),
        (MergeByExtendListElementsNode, buildTemplateIteration2,
         [('ListOfImagesDictionaries', 'inputspec.ListOfImagesDictionaries'),
          ('registrationImageTypes', 'inputspec.registrationImageTypes'),
          ('interpolationMapping', 'inputspec.interpolationMapping')]),
        (inputspec, MakeNewAtlasTemplateNode, [(('subject', xml_filename),
                                                'outDefinition')]),
        (BAtlas, MakeNewAtlasTemplateNode, [('ExtendedAtlasDefinition_xml_in',
                                             'AtlasTemplate')]),
        (buildTemplateIteration2, MakeNewAtlasTemplateNode,
         [('outputspec.template', 't1_image'),
          ('outputspec.passive_deformed_templates', 'deformed_list')]),
    ])

    # Create DataSinks
    Atlas_DataSink = pe.Node(nio.DataSink(), name="Atlas_DS")
    Atlas_DataSink.overwrite = master_config['ds_overwrite']
    Atlas_DataSink.inputs.base_directory = master_config['resultdir']

    Subject_DataSink = pe.Node(nio.DataSink(), name="Subject_DS")
    Subject_DataSink.overwrite = master_config['ds_overwrite']
    Subject_DataSink.inputs.base_directory = master_config['resultdir']

    template.connect([
        (inputspec, Atlas_DataSink, [('subject', 'container')]),
        (buildTemplateIteration1, Atlas_DataSink,
         [('outputspec.template', 'Atlas.iteration1')]),  # Unnecessary
        (MakeNewAtlasTemplateNode, Atlas_DataSink, [('outAtlasFullPath',
                                                     'Atlas.definitions')]),
        (BAtlas, Atlas_DataSink,
         [('template_landmarks_50Lmks_fcsv', 'Atlas.20111119_BCD.@fcsv'),
          ('template_weights_50Lmks_wts', 'Atlas.20111119_BCD.@wts'),
          ('LLSModel_50Lmks_hdf5', 'Atlas.20111119_BCD.@hdf5'),
          ('T1_50Lmks_mdl', 'Atlas.20111119_BCD.@mdl')]),
        (inputspec, Subject_DataSink, [(('subject', outputPattern),
                                        'regexp_substitutions')]),
        (buildTemplateIteration2, Subject_DataSink,
         [('outputspec.template', 'ANTSTemplate.@template')]),
        (MakeNewAtlasTemplateNode, Subject_DataSink, [
            ('clean_deformed_list', 'ANTSTemplate.@passive_deformed_templates')
        ]),
    ])

    from utils import run_workflow, print_workflow
    if False:
        print_workflow(template,
                       plugin=master_config['execution']['plugin'],
                       dotfilename='template')
    return run_workflow(template,
                        plugin=master_config['execution']['plugin'],
                        plugin_args=master_config['plugin_args'])
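A hedged sketch of how this entry point could be invoked; the master_config keys mirror those the function reads, and every value is a placeholder:

subjects = ['sub-01', 'sub-02']
master_config = {
    'execution': {'plugin': 'Linear'},
    'logging': {'log_directory': '/tmp/logs'},
    'previouscache': '/data/atlas_cache',
    'previousresult': '/data/previous_results',
    'resultdir': '/data/results',
    'ds_overwrite': True,
    'plugin_args': {},
    'queue': 'all.q',
}
# main((subjects, master_config))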
Example #8
def init_b0_hmc_wf(align_to="iterative",
                   transform="Rigid",
                   spatial_bias_correct=False,
                   sloppy=False,
                   metric="Mattes",
                   num_iters=3,
                   name="b0_hmc_wf"):

    if align_to == "iterative" and num_iters < 2:
        raise ValueError("Iterative alignment requires at least 2 iterations")

    alignment_wf = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['b0_images']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        "final_template", "forward_transforms", "iteration_templates",
        "motion_params", "aligned_images"
    ]),
                         name='outputnode')

    desc = "Initial motion correction was performed using only the b=0 images. "

    # Iteratively create a template
    if align_to == "iterative":
        desc += "An unbiased b=0 template was constructed over {num_iters} iterations "\
                "of {transform} registrations. ".format(num_iters=num_iters,
                                                        transform=transform)
        initial_template = pe.Node(ants.AverageImages(normalize=True,
                                                      dimension=3),
                                   name="initial_template")
        alignment_wf.connect(inputnode, "b0_images", initial_template,
                             "images")
        # Store the registration targets
        iter_templates = pe.Node(niu.Merge(num_iters),
                                 name="iteration_templates")
        alignment_wf.connect(initial_template, "output_average_image",
                             iter_templates, "in1")

        initial_reg = linear_alignment_workflow(transform=transform,
                                                metric=metric,
                                                precision="coarse",
                                                iternum=0)
        alignment_wf.connect(initial_template, "output_average_image",
                             initial_reg, "inputnode.template_image")
        alignment_wf.connect(inputnode, "b0_images", initial_reg,
                             "inputnode.image_paths")
        reg_iters = [initial_reg]
        for iternum in range(1, num_iters):
            reg_iters.append(
                linear_alignment_workflow(transform=transform,
                                          metric=metric,
                                          precision="precise",
                                          iternum=iternum))
            alignment_wf.connect(reg_iters[-2], "outputnode.updated_template",
                                 reg_iters[-1], "inputnode.template_image")
            alignment_wf.connect(inputnode, "b0_images", reg_iters[-1],
                                 "inputnode.image_paths")
            alignment_wf.connect(reg_iters[-1], "outputnode.updated_template",
                                 iter_templates, "in%d" % (iternum + 1))

        # Attach to outputs
        # The last iteration aligned to the output from the second-to-last
        alignment_wf.connect(reg_iters[-2], "outputnode.updated_template",
                             outputnode, "final_template")
        alignment_wf.connect(reg_iters[-1], "outputnode.affine_transforms",
                             outputnode, "forward_transforms")
        alignment_wf.connect(reg_iters[-1],
                             "outputnode.registered_image_paths", outputnode,
                             "aligned_images")
        alignment_wf.connect(iter_templates, "out", outputnode,
                             "iteration_templates")
    elif align_to == 'first':
        desc += "Each b=0 image was registered to the first b=0 image using " \
                "a {transform} registration. ".format(transform=transform)
        reg_to_first = linear_alignment_workflow(transform=transform,
                                                 metric=metric,
                                                 precision="coarse",
                                                 iternum=0)

        alignment_wf.connect([
            (inputnode, reg_to_first,
             [(('b0_images', first_image), 'inputnode.template_image'),
              ('b0_images', 'inputnode.image_paths')]),
            (reg_to_first, outputnode,
             [('averaged_images.output_average_image', 'final_template'),
              ('outputnode.affine_transforms', 'forward_transforms'),
              ('outputnode.registered_image_paths', 'aligned_images')])
        ])
    alignment_wf.__desc__ = desc
    return alignment_wf
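A hedged usage sketch for the iterative b=0 alignment (paths are placeholders):

hmc_wf = init_b0_hmc_wf(align_to="iterative", transform="Rigid", num_iters=3)
hmc_wf.inputs.inputnode.b0_images = ["b0_0.nii.gz", "b0_1.nii.gz", "b0_2.nii.gz"]
# hmc_wf.run()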
Example #9
def linear_alignment_workflow(transform="Rigid",
                              metric="Mattes",
                              iternum=0,
                              precision="precise"):
    """
    Takes a template image and a set of input images, does
    a linear alignment to the template and updates it with the
    inverse of the average affine transform to the new template

    Returns a workflow

    """
    iteration_wf = Workflow(name="iterative_alignment_%03d" % iternum)
    input_node_fields = ["image_paths", "template_image", "iteration_num"]
    inputnode = pe.Node(niu.IdentityInterface(fields=input_node_fields),
                        name='inputnode')
    inputnode.inputs.iteration_num = iternum
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        "registered_image_paths", "affine_transforms", "updated_template"
    ]),
                         name='outputnode')
    ants_settings = pkgrf(
        "qsiprep", "data/shoreline_{precision}_{transform}.json".format(
            precision=precision, transform=transform))
    reg = ants.Registration(from_file=ants_settings)
    iter_reg = pe.MapNode(reg,
                          name="reg_%03d" % iternum,
                          iterfield=["moving_image"])

    # Run the images through antsRegistration
    iteration_wf.connect(inputnode, "image_paths", iter_reg, "moving_image")
    iteration_wf.connect(inputnode, "template_image", iter_reg, "fixed_image")

    # Average the images
    averaged_images = pe.Node(ants.AverageImages(normalize=True, dimension=3),
                              name="averaged_images")
    iteration_wf.connect(iter_reg, "warped_image", averaged_images, "images")

    # Apply the inverse to the average image
    transforms_to_list = pe.Node(niu.Merge(1), name="transforms_to_list")
    transforms_to_list.inputs.ravel_inputs = True
    iteration_wf.connect(iter_reg, "forward_transforms", transforms_to_list,
                         "in1")
    avg_affines = pe.Node(ants.AverageAffineTransform(), name="avg_affine")
    avg_affines.inputs.dimension = 3
    avg_affines.inputs.output_affine_transform = "AveragedAffines.mat"
    iteration_wf.connect(transforms_to_list, "out", avg_affines, "transforms")

    invert_average = pe.Node(ants.ApplyTransforms(), name="invert_average")
    invert_average.inputs.interpolation = "HammingWindowedSinc"
    invert_average.inputs.invert_transform_flags = [True]

    avg_to_list = pe.Node(niu.Merge(1), name="to_list")
    iteration_wf.connect(avg_affines, "affine_transform", avg_to_list, "in1")
    iteration_wf.connect(avg_to_list, "out", invert_average, "transforms")
    iteration_wf.connect(averaged_images, "output_average_image",
                         invert_average, "input_image")
    iteration_wf.connect(averaged_images, "output_average_image",
                         invert_average, "reference_image")
    iteration_wf.connect(invert_average, "output_image", outputnode,
                         "updated_template")
    iteration_wf.connect(iter_reg, "forward_transforms", outputnode,
                         "affine_transforms")
    iteration_wf.connect(iter_reg, "warped_image", outputnode,
                         "registered_image_paths")

    return iteration_wf
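A hedged usage sketch for a single linear iteration (paths are placeholders):

reg_wf = linear_alignment_workflow(transform="Rigid", metric="Mattes",
                                   iternum=0, precision="coarse")
reg_wf.inputs.inputnode.image_paths = ["b0_0.nii.gz", "b0_1.nii.gz"]
reg_wf.inputs.inputnode.template_image = "b0_template.nii.gz"
# reg_wf.run()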
Example #10
def main():
    """Entry point"""
    from niflow.nipype1.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF
    """1. input images"""
    # folders conf
    homeDir = os.getenv("HOME")
    BASE_DIR = "/home/vincent/my_study/Abbas_dataset_raw_BIDS"
    out_base_dir = "~/my_study/codes/sMRI"
    #out_base_dir="/output"

    out_dir = os.path.join(out_base_dir, 'nipypeTestPath')
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    print(out_dir)
    # file import
    sub_list1 = [
        'FMR-001', 'FMR-096', 'VT-004', 'VT-005', 'VT-007', 'VT-008', 'VT-009',
        'VT-010', 'VT-011', 'VT-015', 'VT-041'
    ]
    sub_list1 = [''.join(x.split('-')) for x in sub_list1]
    sub_list3 = ['VT006']
    RUN1 = 1
    RUN3 = 3
    TMPT = 'sub-%s/anat/*_run-%d_T1w.nii.gz'
    res_T1_1 = DataGraberRun(sub_list1, RUN1, BASE_DIR, TMPT)
    res_T1_3 = DataGraberRun(sub_list3, RUN3, BASE_DIR, TMPT)
    T1_file_list = res_T1_1.outputs.outfiles + [res_T1_3.outputs.outfiles]
    print(T1_file_list, len(T1_file_list))
    # configure workflow
    registrationImageTypes = ['T1']
    interpolationMapping = {
        'labelmap': 'NearestNeighbor',
        'FLAIR': 'WindowedSinc',
        'T1': 'Linear'
    }
    tbuilder = pe.Workflow(name="antsRegistrationTemplateBuilder")
    tbuilder.base_dir = out_dir
    InitialTemplateInputs = T1_file_list
    ListOfImagesDictionaries = [{'T1': FP} for FP in InitialTemplateInputs]

    datasource = pe.Node(interface=util.IdentityInterface(fields=[
        'InitialTemplateInputs', 'ListOfImagesDictionaries',
        'registrationImageTypes', 'interpolationMapping'
    ]),
                         run_without_submitting=True,
                         name='InputImages')
    datasource.inputs.InitialTemplateInputs = InitialTemplateInputs
    datasource.inputs.ListOfImagesDictionaries = ListOfImagesDictionaries
    datasource.inputs.registrationImageTypes = registrationImageTypes
    datasource.inputs.interpolationMapping = interpolationMapping
    # note: sort_filelist is a DataGrabber input and is not valid on IdentityInterface
    # ave as the first image
    initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg')
    initAvg.inputs.dimension = 3
    initAvg.inputs.normalize = True
    tbuilder.connect(datasource, "InitialTemplateInputs", initAvg, "images")
    # iter1
    buildTemplateIteration1 = antsRegistrationTemplateBuildSingleIterationWF(
        'iteration01')
    BeginANTS = buildTemplateIteration1.get_node("BeginANTS")
    tbuilder.connect(initAvg, 'output_average_image', buildTemplateIteration1,
                     'inputspec.fixed_image')
    tbuilder.connect(datasource, 'ListOfImagesDictionaries',
                     buildTemplateIteration1,
                     'inputspec.ListOfImagesDictionaries')
    tbuilder.connect(datasource, 'registrationImageTypes',
                     buildTemplateIteration1,
                     'inputspec.registrationImageTypes')
    tbuilder.connect(datasource, 'interpolationMapping',
                     buildTemplateIteration1, 'inputspec.interpolationMapping')
    # iter2
    buildTemplateIteration2 = antsRegistrationTemplateBuildSingleIterationWF(
        'iteration02')
    BeginANTS = buildTemplateIteration2.get_node("BeginANTS")
    tbuilder.connect(buildTemplateIteration1, 'outputspec.template',
                     buildTemplateIteration2, 'inputspec.fixed_image')
    tbuilder.connect(datasource, 'ListOfImagesDictionaries',
                     buildTemplateIteration2,
                     'inputspec.ListOfImagesDictionaries')
    tbuilder.connect(datasource, 'registrationImageTypes',
                     buildTemplateIteration2,
                     'inputspec.registrationImageTypes')
    tbuilder.connect(datasource, 'interpolationMapping',
                     buildTemplateIteration2, 'inputspec.interpolationMapping')
    #
    datasink = pe.Node(nio.DataSink(), name="datasink")
    datasink.inputs.base_directory = os.path.join(out_dir, "results")
    tbuilder.connect(buildTemplateIteration2, 'outputspec.template', datasink,
                     'PrimaryTemplate')
    tbuilder.connect(buildTemplateIteration2,
                     'outputspec.passive_deformed_templates', datasink,
                     'PassiveTemplate')
    tbuilder.connect(initAvg, 'output_average_image', datasink,
                     'PreRegisterAverage')
    tbuilder.run(plugin='MultiProc', plugin_args={'n_procs': 13})
Example #11
def init_fsl_hmc_wf(scan_groups,
                    b0_threshold,
                    impute_slice_threshold,
                    fmap_demean,
                    fmap_bspline,
                    eddy_config,
                    mem_gb=3,
                    omp_nthreads=1,
                    dwi_metadata=None,
                    slice_quality='outlier_n_sqr_stdev_map',
                    sloppy=False,
                    name="fsl_hmc_wf"):
    """
    This workflow controls the dwi preprocessing stages using FSL tools.

    I couldn't get this to work reliably unless everything was oriented in LAS+ before going to
    TOPUP and eddy. For this reason, if TOPUP is going to be used (for an epi fieldmap or an
    RPE series) or there is no fieldmap correction, operations occurring before eddy are done in
    LAS+. The field coefficients are applied during eddy's run and the corrected series comes out.
    This is finally converted to LPS+ and sent to the rest of the pipeline.

    If a GRE fieldmap is available, the correction is applied to eddy's outputs after they have
    been converted back to LPS+.

    Finally, if SyN is chosen, it is applied to the LPS+ converted, eddy-resampled data.


    **Parameters**

        scan_groups: dict
            dictionary with fieldmaps and warp space information for the dwis
        impute_slice_threshold: float
            threshold for a slice to be replaced with imputed values. Overrides the
            parameter in ``eddy_config`` if set to a number > 0.
        do_topup: bool
            Should topup be performed before eddy? Requires an rpe series or an
            rpe_b0.
        eddy_config: str
            Path to a JSON file containing settings for the call to ``eddy``.


    **Inputs**

        dwi_files: list
            List of single-volume files across all DWI series
        b0_indices: list
            Indexes into ``dwi_files`` that correspond to b=0 volumes
        bvecs: list
            List of paths to single-line bvec files
        bvals: list
            List of paths to single-line bval files
        b0_images: list
            List of single b=0 volumes
        original_files: list
            List of the files from which each DWI volume came.


    """

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=['dwi_files', 'b0_indices', 'bvec_files', 'bval_files', 'b0_images',
                    'original_files', 't1_brain', 't1_2_mni_reverse_transform']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=["b0_template", "b0_template_mask", "pre_sdc_template",
                    "hmc_optimization_data", "sdc_method", 'slice_quality', 'motion_params',
                    "cnr_map", "bvec_files_to_transform", "dwi_files_to_transform", "b0_indices",
                    "to_dwi_ref_affines", "to_dwi_ref_warps", "rpe_b0_info"]),
        name='outputnode')

    workflow = Workflow(name=name)
    gather_inputs = pe.Node(GatherEddyInputs(), name="gather_inputs")
    if eddy_config is None:
        # load from the defaults
        eddy_cfg_file = pkgr_fn('qsiprep.data', 'eddy_params.json')
    else:
        eddy_cfg_file = eddy_config

    with open(eddy_cfg_file, "r") as f:
        eddy_args = json.load(f)

    # Run eddy in parallel if possible
    LOGGER.info("Using %d threads in eddy", omp_nthreads)
    eddy_args["num_threads"] = omp_nthreads
    eddy = pe.Node(ExtendedEddy(**eddy_args), name="eddy")
    # These should be in LAS+
    dwi_merge = pe.Node(MergeDWIs(), name="dwi_merge")
    spm_motion = pe.Node(Eddy2SPMMotion(), name="spm_motion")
    # Convert eddy outputs back to LPS+, split them
    pre_topup_lps = pe.Node(ConformDwi(orientation="LPS"), name='pre_topup_lps')
    pre_topup_enhance = init_enhance_and_skullstrip_dwi_wf(name='pre_topup_enhance')
    back_to_lps = pe.Node(ConformDwi(orientation="LPS"), name='back_to_lps')
    cnr_lps = pe.Node(ConformDwi(orientation="LPS"), name='cnr_lps')
    split_eddy_lps = pe.Node(SplitDWIs(b0_threshold=b0_threshold), name="split_eddy_lps")
    mean_b0_lps = pe.Node(ants.AverageImages(dimension=3, normalize=True), name='mean_b0_lps')
    lps_b0_enhance = init_enhance_and_skullstrip_dwi_wf(name='lps_b0_enhance')

    workflow.connect([
        # These images and gradients should be in LAS+
        (inputnode, gather_inputs, [
            ('dwi_files', 'dwi_files'),
            ('bval_files', 'bval_files'),
            ('bvec_files', 'bvec_files'),
            ('b0_indices', 'b0_indices'),
            ('b0_images', 'b0_images'),
            ('original_files', 'original_files')]),
        # Re-concatenate
        (inputnode, dwi_merge, [
            ('dwi_files', 'dwi_files'),
            ('bval_files', 'bval_files'),
            ('bvec_files', 'bvec_files'),
            ('original_files', 'bids_dwi_files')]),
        (gather_inputs, eddy, [
            ('eddy_indices', 'in_index'),
            ('eddy_acqp', 'in_acqp')]),
        (dwi_merge, eddy, [
            ('out_dwi', 'in_file'),
            ('out_bval', 'in_bval'),
            ('out_bvec', 'in_bvec')]),
        (gather_inputs, pre_topup_lps, [
            ('pre_topup_image', 'dwi_file')]),
        (gather_inputs, outputnode, [('forward_transforms', 'to_dwi_ref_affines')]),
        (pre_topup_lps, pre_topup_enhance, [
            ('dwi_file', 'inputnode.in_file')]),
        (pre_topup_enhance, outputnode, [
            ('outputnode.bias_corrected_file', 'pre_sdc_template')]),
        (eddy, back_to_lps, [
            ('out_corrected', 'dwi_file'),
            ('out_rotated_bvecs', 'bvec_file')]),
        (dwi_merge, back_to_lps, [('out_bval', 'bval_file')]),
        (back_to_lps, split_eddy_lps, [
            ('dwi_file', 'dwi_file'),
            ('bval_file', 'bval_file'),
            ('bvec_file', 'bvec_file')]),
        (dwi_merge, outputnode, [
            ('original_images', 'original_files')]),
        (split_eddy_lps, outputnode, [
            ('dwi_files', 'dwi_files_to_transform'),
            ('bvec_files', 'bvec_files_to_transform')]),
        (split_eddy_lps, mean_b0_lps, [('b0_images', 'images')]),
        (mean_b0_lps, lps_b0_enhance, [('output_average_image', 'inputnode.in_file')]),
        (eddy, cnr_lps, [('out_cnr_maps', 'dwi_file')]),
        (cnr_lps, outputnode, [('dwi_file', 'cnr_map')]),
        (eddy, outputnode, [
            (slice_quality, 'slice_quality'),
            (slice_quality, 'hmc_optimization_data')]),
        (eddy, spm_motion, [('out_parameter', 'eddy_motion')]),
        (spm_motion, outputnode, [('spm_motion_file', 'motion_params')])
    ])

    # Fieldmap correction to be done in LAS+: TOPUP for rpe series or epi fieldmap
    # If a topupref is provided, use it for TOPUP
    rpe_b0 = None
    fieldmap_type = scan_groups['fieldmap_info']['suffix']
    if fieldmap_type == 'epi':
        rpe_b0 = scan_groups['fieldmap_info']['epi']
    elif fieldmap_type == 'rpe_series':
        rpe_b0 = scan_groups['fieldmap_info']['rpe_series']
    using_topup = rpe_b0 is not None

    if using_topup:
        outputnode.inputs.sdc_method = "TOPUP"
        # Whether from an rpe series (in dwi/) or an epi fieldmap (in fmap/),
        # extract just the b=0s for topup
        prepare_rpe_b0 = pe.Node(
            B0RPEFieldmap(b0_file=rpe_b0, orientation='LAS', output_3d_images=False),
            name="prepare_rpe_b0")
        topup = pe.Node(fsl.TOPUP(out_field="fieldmap_HZ.nii.gz"), name="topup")
        # Enhance and skullstrip the TOPUP output to get a mask for eddy
        unwarped_mean = pe.Node(afni.TStat(outputtype='NIFTI_GZ'), name='unwarped_mean')
        unwarped_enhance = init_enhance_and_skullstrip_dwi_wf(name='unwarped_enhance')

        workflow.connect([
            (prepare_rpe_b0, outputnode, [('fmap_info', 'rpe_b0_info')]),
            (prepare_rpe_b0, gather_inputs, [('fmap_file', 'rpe_b0')]),
            (gather_inputs, topup, [
                ('topup_datain', 'encoding_file'),
                ('topup_imain', 'in_file'),
                ('topup_config', 'config')]),
            (gather_inputs, outputnode, [('forward_warps', 'to_dwi_ref_warps')]),
            (topup, unwarped_mean, [('out_corrected', 'in_file')]),
            (unwarped_mean, unwarped_enhance, [('out_file', 'inputnode.in_file')]),
            (unwarped_enhance, eddy, [('outputnode.mask_file', 'in_mask')]),
            (topup, eddy, [
                ('out_field', 'field')]),
            (lps_b0_enhance, outputnode, [
                ('outputnode.bias_corrected_file', 'b0_template'),
                ('outputnode.mask_file', 'b0_template_mask')]),
            ])
        return workflow

    # Enhance and skullstrip the TOPUP output to get a mask for eddy
    distorted_enhance = init_enhance_and_skullstrip_dwi_wf(name='distorted_enhance')
    workflow.connect([
        # Use the distorted mask for eddy
        (gather_inputs, distorted_enhance, [('pre_topup_image', 'inputnode.in_file')]),
        (distorted_enhance, eddy, [('outputnode.mask_file', 'in_mask')]),
    ])
    if fieldmap_type in ('fieldmap', 'phasediff', 'phase', 'syn'):

        outputnode.inputs.sdc_method = fieldmap_type
        b0_sdc_wf = init_sdc_wf(
            scan_groups['fieldmap_info'], dwi_metadata, omp_nthreads=omp_nthreads,
            fmap_demean=fmap_demean, fmap_bspline=fmap_bspline)

        workflow.connect([
            # Calculate distortion correction on eddy-corrected data
            (lps_b0_enhance, b0_sdc_wf, [
                ('outputnode.bias_corrected_file', 'inputnode.b0_ref'),
                ('outputnode.skull_stripped_file', 'inputnode.b0_ref_brain'),
                ('outputnode.mask_file', 'inputnode.b0_mask')]),
            (inputnode, b0_sdc_wf, [
                ('t1_brain', 'inputnode.t1_brain'),
                ('t1_2_mni_reverse_transform',
                 'inputnode.t1_2_mni_reverse_transform')]),

            # These deformations will be applied later, use the unwarped image now
            (b0_sdc_wf, outputnode, [
                ('outputnode.out_warp', 'to_dwi_ref_warps'),
                ('outputnode.method', 'sdc_method'),
                ('outputnode.b0_ref', 'b0_template'),
                ('outputnode.b0_mask', 'b0_template_mask')])])

    else:
        outputnode.inputs.sdc_method = "None"
        workflow.connect([
            (lps_b0_enhance, outputnode, [
                ('outputnode.skull_stripped_file', 'b0_template'),
                ('outputnode.mask_file', 'b0_template_mask')]),
            ])

    return workflow
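A hedged instantiation sketch; this scan_groups shape triggers the no-fieldmap branch at the end of the function (all values are placeholders):

scan_groups = {'fieldmap_info': {'suffix': None}}  # no fieldmap available
wf = init_fsl_hmc_wf(scan_groups=scan_groups, b0_threshold=100,
                     impute_slice_threshold=0, fmap_demean=True,
                     fmap_bspline=False, eddy_config=None,
                     omp_nthreads=4)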
Example #12
def init_bidirectional_b0_unwarping_wf(template_plus_pe, omp_nthreads=1,
                                       name="bidirectional_pepolar_unwarping_wf"):
    """
    This workflow takes in a set of b0 files with opposite phase encoding
    direction and calculates displacement fields
    (in other words, an ANTs-compatible warp file). This is intended to be run
    in the case where there are two dwi series in the same session with reverse
    phase encoding directions.

    The warp field correcting for the distortions is estimated using AFNI's
    3dQwarp, with displacement estimation limited to the target file phase
    encoding direction.

    It also calculates a new mask for the input dataset that takes into
    account the distortions.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from qsiprep.workflows.fieldmap.pepolar import \
            init_bidirectional_b0_unwarping_wf
        wf = init_bidirectional_b0_unwarping_wf(
            template_plus_pe='j',
            omp_nthreads=8)


    Inputs

        template_plus
            b0 template in one PE
        template_minus
            b0_template in the other PE

    Outputs

        out_reference
            the ``in_reference`` after unwarping
        out_reference_brain
            the ``in_reference`` after unwarping and skullstripping
        out_warp_plus
            the corresponding :abbr:`DFM (displacements field map)` to correct
            ``template_plus``
        out_warp_minus
            the corresponding :abbr:`DFM (displacements field map)` to correct
            ``template_minus``
        out_mask
            mask of the unwarped input file

    """
    args = '-noXdis -noYdis -noZdis'
    rm_arg = {'i': '-noXdis',
              'j': '-noYdis',
              'k': '-noZdis'}[template_plus_pe[0]]
    args = args.replace(rm_arg, '')

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on two b0 templates created from dwi series with opposing phase-encoding
directions, using `3dQwarp` @afni (AFNI {afni_ver}).
""".format(afni_ver=''.join(['%02d' % v for v in afni.Info().version() or []]))

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['template_plus', 'template_minus', 't1w_brain']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_reference', 'out_reference_brain', 'out_affine_plus', 'out_warp_plus',
                'out_affine_minus', 'out_warp_minus', 'out_mask']), name='outputnode')
    # Create high-contrast ref images
    plus_ref_wf = init_dwi_reference_wf(name='plus_ref_wf')
    minus_ref_wf = init_dwi_reference_wf(name='minus_ref_wf')

    # Align the two reference images to the midpoint
    inputs_to_list = pe.Node(niu.Merge(2), name='inputs_to_list')
    align_reverse_pe_wf = init_b0_hmc_wf(align_to='iterative', transform='Rigid')
    get_midpoint_transforms = pe.Node(niu.Split(splits=[1, 1], squeeze=True),
                                      name="get_midpoint_transforms")
    plus_to_midpoint = pe.Node(ants.ApplyTransforms(float=True,
                                                    interpolation='LanczosWindowedSinc',
                                                    dimension=3),
                               name='plus_to_midpoint')
    minus_to_midpoint = pe.Node(ants.ApplyTransforms(float=True,
                                                     interpolation='LanczosWindowedSinc',
                                                     dimension=3),
                                name='minus_to_midpoint')

    qwarp = pe.Node(afni.QwarpPlusMinus(pblur=[0.05, 0.05],
                                        blur=[-1, -1],
                                        noweight=True,
                                        minpatch=9,
                                        nopadWARP=True,
                                        environ={'OMP_NUM_THREADS': '%d' % omp_nthreads},
                                        args=args),
                    name='qwarp', n_procs=omp_nthreads)

    to_ants_plus = pe.Node(niu.Function(function=_fix_hdr), name='to_ants_plus',
                           mem_gb=0.01)
    to_ants_minus = pe.Node(niu.Function(function=_fix_hdr), name='to_ants_minus',
                            mem_gb=0.01)

    cphdr_plus_warp = pe.Node(CopyHeader(), name='cphdr_plus_warp', mem_gb=0.01)
    cphdr_minus_warp = pe.Node(CopyHeader(), name='cphdr_minus_warp', mem_gb=0.01)

    unwarp_plus_reference = pe.Node(ants.ApplyTransforms(dimension=3,
                                                         float=True,
                                                         interpolation='LanczosWindowedSinc'),
                                    name='unwarp_plus_reference')
    unwarp_minus_reference = pe.Node(ants.ApplyTransforms(dimension=3,
                                                          float=True,
                                                          interpolation='LanczosWindowedSinc'),
                                     name='unwarp_minus_reference')
    unwarped_to_list = pe.Node(niu.Merge(2), name="unwarped_to_list")
    merge_unwarped = pe.Node(ants.AverageImages(dimension=3, normalize=True),
                             name="merge_unwarped")

    final_ref = init_dwi_reference_wf(name="final_ref")

    workflow.connect([
        (inputnode, plus_ref_wf, [('template_plus', 'inputnode.b0_template')]),
        (plus_ref_wf, inputs_to_list, [('outputnode.ref_image', 'in1')]),
        (inputnode, minus_ref_wf, [('template_minus', 'inputnode.b0_template')]),
        (minus_ref_wf, inputs_to_list, [('outputnode.ref_image', 'in2')]),
        (inputs_to_list, align_reverse_pe_wf, [('out', 'inputnode.b0_images')]),
        (align_reverse_pe_wf, get_midpoint_transforms, [('outputnode.forward_transforms',
                                                         'inlist')]),
        (get_midpoint_transforms, outputnode, [('out1', 'out_affine_plus'),
                                               ('out2', 'out_affine_minus')]),
        (plus_ref_wf, plus_to_midpoint, [('outputnode.ref_image', 'input_image')]),
        (minus_ref_wf, minus_to_midpoint, [('outputnode.ref_image', 'input_image')]),
        (get_midpoint_transforms, plus_to_midpoint, [('out1', 'transforms')]),
        (align_reverse_pe_wf, plus_to_midpoint, [('outputnode.final_template',
                                                  'reference_image')]),
        (get_midpoint_transforms, minus_to_midpoint, [('out2', 'transforms')]),
        (align_reverse_pe_wf, minus_to_midpoint, [('outputnode.final_template',
                                                  'reference_image')]),
        (plus_to_midpoint, qwarp, [('output_image', 'in_file')]),
        (minus_to_midpoint, qwarp, [('output_image', 'base_file')]),
        (align_reverse_pe_wf, cphdr_plus_warp, [('outputnode.final_template', 'hdr_file')]),
        (align_reverse_pe_wf, cphdr_minus_warp, [('outputnode.final_template', 'hdr_file')]),
        (qwarp, cphdr_plus_warp, [('source_warp', 'in_file')]),
        (qwarp, cphdr_minus_warp, [('base_warp', 'in_file')]),
        (cphdr_plus_warp, to_ants_plus, [('out_file', 'in_file')]),
        (cphdr_minus_warp, to_ants_minus, [('out_file', 'in_file')]),

        (to_ants_minus, unwarp_minus_reference, [('out', 'transforms')]),
        (minus_to_midpoint, unwarp_minus_reference, [('output_image', 'reference_image'),
                                                     ('output_image', 'input_image')]),
        (to_ants_minus, outputnode, [('out', 'out_warp_minus')]),

        (to_ants_plus, unwarp_plus_reference, [('out', 'transforms')]),
        (plus_to_midpoint, unwarp_plus_reference, [('output_image', 'reference_image'),
                                                   ('output_image', 'input_image')]),
        (to_ants_plus, outputnode, [('out', 'out_warp_plus')]),

        (unwarp_plus_reference, unwarped_to_list, [('output_image', 'in1')]),
        (unwarp_minus_reference, unwarped_to_list, [('output_image', 'in2')]),
        (unwarped_to_list, merge_unwarped, [('out', 'images')]),

        (merge_unwarped, final_ref, [('output_average_image', 'inputnode.b0_template')]),
        (final_ref, outputnode, [('outputnode.ref_image', 'out_reference'),
                                 ('outputnode.ref_image_brain', 'out_reference_brain'),
                                 ('outputnode.dwi_mask', 'out_mask')])
    ])

    return workflow
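
# Usage sketch (hypothetical): the factory name, its signature, and the file
# names below are assumptions for illustration; only the workflow body above
# comes from the source.
#
#     wf = init_bidirectional_b0_unwarping_wf(omp_nthreads=4)  # assumed name
#     wf.inputs.inputnode.template_plus = 'b0_template_plus.nii.gz'
#     wf.inputs.inputnode.template_minus = 'b0_template_minus.nii.gz'
#     wf.base_dir = '/tmp/unwarp_work'
#     wf.run()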
Example #13
# Generic datasink module to store structured outputs
io_DataSink_4 = pe.Node(interface=io.DataSink(), name='io_DataSink_4')

# Generic datasink module to store structured outputs
io_DataSink_5 = pe.Node(interface=io.DataSink(), name='io_DataSink_5')

# Wraps the executable command ``antsRegistration``.
ants_Registration = pe.Node(interface=ants.Registration(),
                            name='ants_Registration')

# Wraps the executable command ``Atropos``.
ants_Atropos = pe.Node(interface=ants.Atropos(), name='ants_Atropos')

# Wraps the executable command ``AverageImages``.
ants_AverageImages = pe.Node(interface=ants.AverageImages(),
                             name='ants_AverageImages')

# Wraps the executable command ``modelfit``.
camino_ModelFit = pe.Node(interface=camino.ModelFit(), name='camino_ModelFit')

# Wraps the executable command ``vtkstreamlines``.
camino_VtkStreamlines = pe.Node(interface=camino.VtkStreamlines(),
                                name='camino_VtkStreamlines')

# Wraps the executable command ``dt2nii``.
camino_DT2NIfTI = pe.Node(interface=camino.DT2NIfTI(), name='camino_DT2NIfTI')

# Wraps the executable command ``erode``.
mrtrix_Erode = pe.Node(interface=mrtrix.Erode(), name='mrtrix_Erode')
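
# A minimal wiring sketch (illustrative, not from the original): the nodes above
# are declared in isolation, so running them requires a Workflow. The trait names
# ('fitted_data' -> 'in_file') follow the nipype Camino interfaces; the pairing
# itself is an assumed intent, not taken from the source.
analysisflow = pe.Workflow(name='analysisflow')
analysisflow.connect([
    # convert the Camino tensor fit to NIfTI (assumed downstream step)
    (camino_ModelFit, camino_DT2NIfTI, [('fitted_data', 'in_file')]),
])
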
def main(DATA_DIR, OUT_DIR, SUB_LIST, N_PROCESS):
    """Entry point"""
    from niflow.nipype1.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF
    """1. input images"""
    out_dir = OUT_DIR
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    with open(SUB_LIST, 'r') as f_sub:
        sub_list = f_sub.readlines()
    sub_list = [x.strip() for x in sub_list if x.strip()]  # drop newlines and blank lines
    print(len(sub_list), ' subjects included: ', sub_list)

    TMPT = '%s/anat/%s_desc-preproc_T1w.nii.gz'
    res_T1 = DataGraberRun(sub_list, DATA_DIR, TMPT)
    T1_file_list = res_T1.outputs.outfiles
    print('Subject T1 files: ', T1_file_list)
    # configure workflow
    registrationImageTypes = ['T1']
    interpolationMapping = {
        'labelmap': 'NearestNeighbor',
        'FLAIR': 'WindowedSinc',
        'T1': 'Linear'
    }
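    # interpolationMapping chooses the resampler per image type when the
    # iteration workflow warps passive images into the evolving template space.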
    tbuilder = pe.Workflow(name="antsRegistrationTemplateBuilder")
    tbuilder.base_dir = out_dir
    InitialTemplateInputs = [x_file if isinstance(x_file, str) else x_file[0]
                             for x_file in T1_file_list]
    ListOfImagesDictionaries = [{'T1': FP} for FP in InitialTemplateInputs]
    print("T1 files: ", ListOfImagesDictionaries,
          len(ListOfImagesDictionaries))
    datasource = pe.Node(interface=util.IdentityInterface(fields=[
        'InitialTemplateInputs', 'ListOfImagesDictionaries',
        'registrationImageTypes', 'interpolationMapping'
    ]),
                         run_without_submitting=True,
                         name='InputImages')
    datasource.inputs.InitialTemplateInputs = InitialTemplateInputs
    datasource.inputs.ListOfImagesDictionaries = ListOfImagesDictionaries
    datasource.inputs.registrationImageTypes = registrationImageTypes
    datasource.inputs.interpolationMapping = interpolationMapping
    # use the simple average of the inputs as the initial template image
    initAvg = pe.Node(interface=ants.AverageImages(), name='initAvg')
    initAvg.inputs.dimension = 3
    initAvg.inputs.normalize = True
    tbuilder.connect(datasource, "InitialTemplateInputs", initAvg, "images")
    # iter1
    buildTemplateIteration1 = antsRegistrationTemplateBuildSingleIterationWF(
        'iteration01')
    BeginANTS = buildTemplateIteration1.get_node("BeginANTS")  # registration node handle (unused below)
    tbuilder.connect(initAvg, 'output_average_image', buildTemplateIteration1,
                     'inputspec.fixed_image')
    tbuilder.connect(datasource, 'ListOfImagesDictionaries',
                     buildTemplateIteration1,
                     'inputspec.ListOfImagesDictionaries')
    tbuilder.connect(datasource, 'registrationImageTypes',
                     buildTemplateIteration1,
                     'inputspec.registrationImageTypes')
    tbuilder.connect(datasource, 'interpolationMapping',
                     buildTemplateIteration1, 'inputspec.interpolationMapping')

    # iter2
    buildTemplateIteration2 = antsRegistrationTemplateBuildSingleIterationWF(
        'iteration02')
    BeginANTS = buildTemplateIteration2.get_node("BeginANTS")  # registration node handle (unused below)
    tbuilder.connect(buildTemplateIteration1, 'outputspec.template',
                     buildTemplateIteration2, 'inputspec.fixed_image')
    tbuilder.connect(datasource, 'ListOfImagesDictionaries',
                     buildTemplateIteration2,
                     'inputspec.ListOfImagesDictionaries')
    tbuilder.connect(datasource, 'registrationImageTypes',
                     buildTemplateIteration2,
                     'inputspec.registrationImageTypes')
    tbuilder.connect(datasource, 'interpolationMapping',
                     buildTemplateIteration2, 'inputspec.interpolationMapping')

    # data Sink
    datasink = pe.Node(nio.DataSink(), name="datasink")
    datasink.inputs.base_directory = os.path.join(out_dir, "results")
    tbuilder.connect(buildTemplateIteration2, 'outputspec.template', datasink,
                     'PrimaryTemplate')
    tbuilder.connect(buildTemplateIteration2,
                     'outputspec.passive_deformed_templates', datasink,
                     'PassiveTemplate')
    tbuilder.connect(initAvg, 'output_average_image', datasink,
                     'PreRegisterAverage')
    tbuilder.config['execution']['crashfile_format'] = 'txt'
    # run; the original placed ``raise`` in the ``else`` branch, which re-raises
    # on *success* -- re-raise inside the handler instead
    try:
        tbuilder.run(plugin='MultiProc', plugin_args={'n_procs': N_PROCESS})
    except RuntimeError as err:
        print("RuntimeError:", err)
        raise
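
# Example invocation (hypothetical paths and values, for illustration only):
# main(DATA_DIR='/data/bids/derivatives', OUT_DIR='/data/template_out',
#      SUB_LIST='subjects.txt', N_PROCESS=8)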