Example #1
def biasfieldcorr(input_file,outputPath):
    #output_file = os.path.join(os.path.dirname(input_file),os.path.basename(input_file).split('.')[0] + 'Bias.nii.gz')
    output_file = os.path.join(outputPath, os.path.basename(input_file).split('.')[0] + 'Bias.nii.gz')
    myAnts = ants.N4BiasFieldCorrection(input_image=input_file,output_image=output_file,shrink_factor=4,dimension=3)
    myAnts.run()
    print('Bias correction DONE!')
    return output_file
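
A minimal usage sketch for the function above, assuming nipype's ants interface and os are imported at module level and using hypothetical paths:

import os
from nipype.interfaces import ants

corrected = biasfieldcorr('/data/sub-01_T1w.nii.gz', '/data/derivatives')  # hypothetical paths
print(corrected)  # /data/derivatives/sub-01_T1wBias.nii.gz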
Example #2
def epi_sbref_registration(name='EPI_SBrefRegistration'):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['epi_brain', 'sbref_brain']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['epi_registered', 'out_mat']),
        name='outputnode')

    mean = pe.Node(fsl.MeanImage(dimension='T'), name='EPImean')
    inu = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='EPImeanBias')
    epi_sbref = pe.Node(fsl.FLIRT(dof=6, out_matrix_file='init.mat'),
                        name='EPI2SBRefRegistration')

    epi_split = pe.Node(fsl.Split(dimension='t'), name='EPIsplit')
    epi_xfm = pe.MapNode(fsl.ApplyXfm(),
                         name='EPIapplyxfm',
                         iterfield=['in_file'])
    epi_merge = pe.Node(fsl.Merge(dimension='t'), name='EPImergeback')
    workflow.connect([
        (inputnode, epi_split, [('epi_brain', 'in_file')]),
        (inputnode, epi_sbref, [('sbref_brain', 'reference')]),
        (inputnode, epi_xfm, [('sbref_brain', 'reference')]),
        (inputnode, mean, [('epi_brain', 'in_file')]),
        (mean, inu, [('out_file', 'input_image')]),
        (inu, epi_sbref, [('output_image', 'in_file')]),
        (epi_split, epi_xfm, [('out_files', 'in_file')]),
        (epi_sbref, epi_xfm, [('out_matrix_file', 'in_matrix_file')]),
        (epi_xfm, epi_merge, [('out_file', 'in_files')]),
        (epi_sbref, outputnode, [('out_matrix_file', 'out_mat')]),
        (epi_merge, outputnode, [('merged_file', 'epi_registered')])
    ])
    return workflow
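
A brief usage sketch, assuming the workflow factory above is importable and the two brain-extracted input files (hypothetical names) exist:

wf = epi_sbref_registration()
wf.inputs.inputnode.epi_brain = 'epi_brain.nii.gz'      # hypothetical file
wf.inputs.inputnode.sbref_brain = 'sbref_brain.nii.gz'  # hypothetical file
wf.base_dir = '/tmp/work'
wf.run()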
Example #3
    def bet_T1(self, **name_maps):

        pipeline = self.new_pipeline(name='BET_T1',
                                     name_maps=name_maps,
                                     desc=("python implementation of BET"),
                                     references=[fsl_cite])

        bias = pipeline.add('n4_bias_correction',
                            ants.N4BiasFieldCorrection(),
                            inputs={'input_image': ('t1', nifti_gz_format)},
                            requirements=[ants_req.v('1.9')],
                            wall_time=60,
                            mem_gb=12)

        pipeline.add('bet',
                     fsl.BET(frac=0.15, reduce_bias=True),
                     connections={'in_file': (bias, 'output_image')},
                     outputs={
                         'out_file': ('betted_T1', nifti_gz_format),
                         'mask_file': ('betted_T1_mask', nifti_gz_format)
                     },
                     requirements=[fsl_req.v('5.0.8')],
                     mem_gb=8,
                     wall_time=45)

        return pipeline
Example #4
    def bet_T1(self, **name_maps):

        pipeline = self.new_pipeline(
            name='BET_T1',
            name_maps=name_maps,
            desc=("Brain extraction pipeline using FSL's BET"),
            citations=[fsl_cite])

        bias = pipeline.add('n4_bias_correction',
                            ants.N4BiasFieldCorrection(),
                            inputs={'input_image': ('t1', nifti_gz_format)},
                            requirements=[ants_req.v('1.9')],
                            wall_time=60,
                            mem_gb=12)

        pipeline.add('bet',
                     fsl.BET(frac=0.15,
                             reduce_bias=True,
                             output_type='NIFTI_GZ'),
                     inputs={'in_file': (bias, 'output_image')},
                     outputs={
                         'betted_T1': ('out_file', nifti_gz_format),
                         'betted_T1_mask': ('mask_file', nifti_gz_format)
                     },
                     requirements=[fsl_req.v('5.0.8')],
                     mem_gb=8,
                     wall_time=45)

        return pipeline
Example #5
def ants_n4_bf_correction(ffVOL,
                          ffOUT,
                          run=False,
                          bspline_fitting_distance=300,
                          shrink_factor=3,
                          n_iterations=[50, 50, 30, 20]):
    """
    Run N4 bias field correction on given MRI volume.
    Parameters
    ----------
    ffVOL: str
    ffOUT: str
    run: bool
    bspline_fitting_distance: int
    shrink_factor: int
    n_iterations: list

    Returns
    -------
    n4: nipype.interfaces.ants.N4BiasFieldCorrection
        Will write output to desire ffout path.
    """
    n4 = ants.N4BiasFieldCorrection()  # nipype interface wrapping the ANTs N4 executable
    n4.inputs.dimension = 3
    n4.inputs.bspline_fitting_distance = bspline_fitting_distance
    n4.inputs.shrink_factor = shrink_factor
    n4.inputs.n_iterations = n_iterations
    n4.inputs.num_threads = 4
    # N4 Bias Field correction for all volumes
    n4.inputs.input_image = ffVOL
    n4.inputs.output_image = ffOUT
    if run:
        n4.run()
    return n4
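
A usage sketch with hypothetical paths; with run=False the function only returns the configured interface, which can be inspected or run later:

n4 = ants_n4_bf_correction('/data/t2w.nii.gz', '/data/t2w_n4.nii.gz', run=False)
print(n4.cmdline)  # show the underlying N4BiasFieldCorrection command
n4.run()           # execute when ready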
Example #6
def remove_bias(name='bias_correct'):
    """
    This workflow estimates a single multiplicative bias field from the
    averaged *b0* image, as suggested in [Jeurissen2014]_.

    .. admonition:: References

      .. [Jeurissen2014] Jeurissen B. et al., `Multi-tissue constrained
        spherical deconvolution for improved analysis of multi-shell diffusion
        MRI data <https://doi.org/10.1016/j.neuroimage.2014.07.061>`_.
        NeuroImage (2014). doi: 10.1016/j.neuroimage.2014.07.061


    Example
    -------

    >>> from nipype.workflows.dmri.fsl.artifacts import remove_bias
    >>> bias = remove_bias()
    >>> bias.inputs.inputnode.in_file = 'epi.nii'
    >>> bias.inputs.inputnode.in_bval = 'diffusion.bval'
    >>> bias.inputs.inputnode.in_mask = 'mask.nii'
    >>> bias.run() # doctest: +SKIP

    """
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['in_file', 'in_bval', 'in_mask']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file']),
                         name='outputnode')

    avg_b0 = pe.Node(niu.Function(input_names=['in_dwi', 'in_bval'],
                                  output_names=['out_file'],
                                  function=b0_average),
                     name='b0_avg')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                            save_bias=True,
                                            bspline_fitting_distance=600),
                 name='Bias_b0')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    mult = pe.MapNode(fsl.MultiImageMaths(op_string='-div %s'),
                      iterfield=['in_file'],
                      name='RemoveBiasOfDWIs')
    thres = pe.MapNode(fsl.Threshold(thresh=0.0),
                       iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.utils.Merge(dimension='t'), name='MergeDWIs')

    wf = pe.Workflow(name=name)
    wf.connect([(inputnode, avg_b0, [('in_file', 'in_dwi'),
                                     ('in_bval', 'in_bval')]),
                (avg_b0, n4, [('out_file', 'input_image')]),
                (inputnode, n4, [('in_mask', 'mask_image')]),
                (inputnode, split, [('in_file', 'in_file')]),
                (n4, mult, [('bias_image', 'operand_files')]),
                (split, mult, [('out_files', 'in_file')]),
                (mult, thres, [('out_file', 'in_file')]),
                (thres, merge, [('out_file', 'in_files')]),
                (merge, outputnode, [('merged_file', 'out_file')])])
    return wf
Example #7
def doN4BiasFieldCorrection(infile, outfile):  # Doing N4 Bias-Field Correction
    '''
    Parameters
    ----------
    infile : str
        path to the input image.
    outfile : str
        path to save the image after N4 bias-field correction (N4 is from ANTS).

    Returns
    -------
    None
        The bias-field-corrected image is written to outfile.
    '''
    print('doing bias correction using N4 for', infile)
    n4 = ants.N4BiasFieldCorrection()
    n4.inputs.dimension = 3
    n4.inputs.input_image = infile
    n4.inputs.save_bias = False
    n4.inputs.output_image = outfile
    n4.inputs.bspline_fitting_distance = 100
    n4.inputs.rescale_intensities = True
    n4.inputs.convergence_threshold = 0
    n4.inputs.shrink_factor = 2
    n4.inputs.n_iterations = [50, 50, 50, 50]
    n4.inputs.histogram_sharpening = (0.14, 0.01, 200)
    n4.run()
    print('bias correction done', outfile, '\n')
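
A usage sketch with hypothetical inputs; the function returns nothing and simply writes each corrected image to the given output path:

for infile in ['/data/sub-01_T1w.nii.gz', '/data/sub-02_T1w.nii.gz']:  # hypothetical files
    doN4BiasFieldCorrection(infile, infile.replace('.nii.gz', '_N4.nii.gz'))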
Example #8
def inflated_size_nodes():
	s_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_biascorrect")
	s_biascorrect.inputs.dimension = 3
	s_biascorrect.inputs.bspline_fitting_distance = 100
	s_biascorrect.inputs.shrink_factor = 2
	s_biascorrect.inputs.n_iterations = [200,200,200,200]
	s_biascorrect.inputs.convergence_threshold = 1e-11

	f_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="f_biascorrect")
	f_biascorrect.inputs.dimension = 3
	f_biascorrect.inputs.bspline_fitting_distance = 100
	f_biascorrect.inputs.shrink_factor = 2
	f_biascorrect.inputs.n_iterations = [200,200,200,200]
	f_biascorrect.inputs.convergence_threshold = 1e-11

	return s_biascorrect, f_biascorrect
Example #9
def main(sourcedata,
         derivatives,
         subject,
         session,
         run,
         wf_dir):

    layout = BIDSLayout(sourcedata)

    bolds = layout.get(subject=subject,
                       session=session,
                       run=run,
                       suffix='bold',
                       return_type='file')

    
    for bold in bolds:
        print('Making reference image of {}'.format(bold))

    inputnode = pe.Node(niu.IdentityInterface(fields=['bold']),
                        name='inputnode')
    inputnode.inputs.bold = bolds

    wf = pe.Workflow(name='make_ref_{}_{}_{}'.format(subject,
                                                     session,
                                                     run))

    wf.base_dir = wf_dir

    mc_wf_bold = create_motion_correction_workflow(name='mc_wf_bold',
                                                   method='FSL',
                                                   lightweight=True)

    wf.connect(inputnode, 'bold', mc_wf_bold, 'inputspec.in_files')
    wf.connect(inputnode, ('bold', pickfirst), mc_wf_bold, 'inputspec.which_file_is_EPI_space')

    mean_bold = pe.MapNode(fsl.MeanImage(dimension='T'), 
                                 iterfield=['in_file'],
                                 name='mean_bold1')

    n4_correct = pe.MapNode(ants.N4BiasFieldCorrection(), 
                            iterfield=['input_image'],
                            name='n4_correct')
    wf.connect(mean_bold, 'out_file', n4_correct, 'input_image')
    
    ds = pe.MapNode(DerivativesDataSink(out_path_base='simple_bold_ref',
                                        suffix='reference',
                                        base_directory=derivatives),
                                iterfield=['in_file', 'source_file'],
                                name='ds_reg_report')
    
    wf.connect(mc_wf_bold, 'outputspec.motion_corrected_files', mean_bold, 'in_file')
    wf.connect(n4_correct, 'output_image', ds, 'in_file')
    wf.connect(inputnode, 'bold', ds, 'source_file')
    

    wf.run()
Example #10
def create_workflow(config: AttrDict, resource_pool: ResourcePool,
                    context: Context):
    for _, rp in resource_pool[['T1w']]:
        anat_image = rp[R('T1w')]
        anat_deoblique = NipypeJob(interface=afni.Refit(deoblique=True),
                                   reference='anat_deoblique')
        anat_deoblique.in_file = anat_image
        output_node = anat_deoblique.out_file

        if config.non_local_means_filtering:
            denoise = NipypeJob(interface=ants.DenoiseImage(),
                                reference='anat_denoise')
            denoise.input_image = output_node
            output_node = denoise.output_image

        if config.n4_bias_field_correction:
            n4 = NipypeJob(interface=ants.N4BiasFieldCorrection(
                dimension=3, shrink_factor=2, copy_header=True),
                           reference='anat_n4')
            n4.input_image = output_node
            output_node = n4.output_image

        anat_reorient = NipypeJob(interface=afni.Resample(
            orientation='RPI', outputtype='NIFTI_GZ'),
                                  reference='anat_reorient')
        anat_reorient.in_file = output_node
        rp[R('T1w', label='reorient')] = anat_reorient.out_file
Example #11
def real_size_nodes():
	s_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_biascorrect")
	s_biascorrect.inputs.dimension = 3
	s_biascorrect.inputs.bspline_fitting_distance = 10
	s_biascorrect.inputs.bspline_order = 4
	s_biascorrect.inputs.shrink_factor = 2
	s_biascorrect.inputs.n_iterations = [150,100,50,30]
	s_biascorrect.inputs.convergence_threshold = 1e-16

	f_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="f_biascorrect")
	f_biascorrect.inputs.dimension = 3
	f_biascorrect.inputs.bspline_fitting_distance = 10
	f_biascorrect.inputs.bspline_order = 4
	f_biascorrect.inputs.shrink_factor = 2
	f_biascorrect.inputs.n_iterations = [150,100,50,30]
	f_biascorrect.inputs.convergence_threshold = 1e-11

	return s_biascorrect, f_biascorrect
Example #12
def n4_correction(in_file):
    n4 = ants.N4BiasFieldCorrection()
    n4.inputs.dimension=3
    n4.inputs.input_image = in_file
    n4.inputs.bspline_fitting_distance = 300
    n4.inputs.shrink_factor = 3
    n4.inputs.n_iterations = [50, 50, 30, 20]
    n4.inputs.output_image = in_file.replace('.nii.gz', '_corrected.nii.gz')
    n4.run()
    return n4.inputs.output_image
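
A usage sketch with a hypothetical input; the corrected image is written next to the input file and its path is returned:

corrected = n4_correction('/data/dwi_b0.nii.gz')  # hypothetical file
print(corrected)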
Example #13
def dwi_flirt(name='DWICoregistration', excl_nodiff=False,
              flirt_param={}):
    """
    Generates a workflow for linear registration of dwi volumes
    """
    inputnode = pe.Node(niu.IdentityInterface(fields=['reference',
                        'in_file', 'ref_mask', 'in_xfms', 'in_bval']),
                        name='inputnode')

    initmat = pe.Node(niu.Function(input_names=['in_bval', 'in_xfms',
                      'excl_nodiff'], output_names=['init_xfms'],
                                   function=_checkinitxfm), name='InitXforms')
    initmat.inputs.excl_nodiff = excl_nodiff
    dilate = pe.Node(fsl.maths.MathsCommand(nan2zeros=True,
                     args='-kernel sphere 5 -dilM'), name='MskDilate')
    split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs')
    pick_ref = pe.Node(niu.Select(), name='Pick_b0')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias')
    enhb0 = pe.Node(niu.Function(input_names=['in_file', 'in_mask',
                    'clip_limit'], output_names=['out_file'],
                                 function=enhance), name='B0Equalize')
    enhb0.inputs.clip_limit = 0.015
    enhdw = pe.MapNode(niu.Function(input_names=['in_file', 'in_mask'],
                       output_names=['out_file'], function=enhance),
                       name='DWEqualize', iterfield=['in_file'])
    flirt = pe.MapNode(fsl.FLIRT(**flirt_param), name='CoRegistration',
                       iterfield=['in_file', 'in_matrix_file'])
    thres = pe.MapNode(fsl.Threshold(thresh=0.0), iterfield=['in_file'],
                       name='RemoveNegative')
    merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_file',
                         'out_xfms']), name='outputnode')
    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode,  split,      [('in_file', 'in_file')]),
        (inputnode,  dilate,     [('ref_mask', 'in_file')]),
        (inputnode,  enhb0,      [('ref_mask', 'in_mask')]),
        (inputnode,  initmat,    [('in_xfms', 'in_xfms'),
                                  ('in_bval', 'in_bval')]),
        (inputnode,  n4,         [('reference', 'input_image'),
                                  ('ref_mask', 'mask_image')]),
        (dilate,     flirt,      [('out_file', 'ref_weight'),
                                  ('out_file', 'in_weight')]),
        (n4,         enhb0,      [('output_image', 'in_file')]),
        (split,      enhdw,      [('out_files', 'in_file')]),
        (dilate,     enhdw,      [('out_file', 'in_mask')]),
        (enhb0,      flirt,      [('out_file', 'reference')]),
        (enhdw,      flirt,      [('out_file', 'in_file')]),
        (initmat,    flirt,      [('init_xfms', 'in_matrix_file')]),
        (flirt,      thres,      [('out_file', 'in_file')]),
        (thres,      merge,      [('out_file', 'in_files')]),
        (merge,      outputnode, [('merged_file', 'out_file')]),
        (flirt,      outputnode, [('out_matrix_file', 'out_xfms')])
    ])
    return wf
Example #14
def additional_s_biascorrect(node_name='_s_biascorrect'):
    s_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(),
                            name=node_name)
    s_biascorrect.inputs.dimension = 3
    s_biascorrect.inputs.bspline_fitting_distance = 10
    s_biascorrect.inputs.bspline_order = 4
    s_biascorrect.inputs.shrink_factor = 2
    s_biascorrect.inputs.n_iterations = [150, 100, 50, 30]
    s_biascorrect.inputs.convergence_threshold = 1e-16

    return s_biascorrect
Example #15
def bias_corr(images, brain_mask, output_dir):
    '''
    Uses N4 bias correction to remove intensity inhomogeneities

    Input:
    images = List of images (w/ paths) to be corrected
    brain_mask = Brain mask image passed to N4 as mask_image
    output_dir = Directory where corrected images and bias fields are written
    '''

    subj = os.path.split(output_dir)[-1]
    dir_files = os.listdir(output_dir)

    if fnmatch.filter(dir_files, '*bias*'):
        print("Bias correction already run. Not re-running for subj {}".format(
            subj))

        bias_output = glob.glob(os.path.join(output_dir, '*_bias_corr.nii.gz'))
        bias_output.sort(
            key=len)  #Assumes T2 image has been rigidly transformed to t1

    else:
        bias_output = []

        for n, image in enumerate(images):
            image_file = os.path.split(image)[1]
            image_name = image_file.split('.')[0]
            print(image, image_name)

            n4 = ants.N4BiasFieldCorrection()
            n4.inputs.dimension = 3
            n4.inputs.input_image = image
            n4.inputs.bspline_fitting_distance = 300
            n4.inputs.shrink_factor = 2
            n4.inputs.n_iterations = [50, 50, 50, 50]
            n4.inputs.save_bias = True
            n4.inputs.bias_image = os.path.join(
                output_dir, image_name + '_bias_field.nii.gz')
            n4.inputs.output_image = os.path.join(
                output_dir, image_name + '_bias_corr.nii.gz')
            n4.inputs.num_threads = os.cpu_count() - 1
            n4.inputs.mask_image = brain_mask
            n4_results = n4.run()

            output_image = n4_results.outputs.get()['output_image']

            bias_output.append(output_image)

    return bias_output
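
A usage sketch with hypothetical paths, assuming os, glob, fnmatch and nipype's ants interface are imported:

images = ['/study/sub-01/T1w.nii.gz', '/study/sub-01/T2w_to_T1w.nii.gz']  # hypothetical files
corrected = bias_corr(images, '/study/sub-01/brain_mask.nii.gz', '/study/derivatives/sub-01')
print(corrected)  # paths to the *_bias_corr.nii.gz outputs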
Example #16
def ants_getmask(name):
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.ants as ants
    import nipype.interfaces.freesurfer as fs

    wf = pe.Workflow(name=name)
    inputspec = pe.Node(
        niu.IdentityInterface(fields=['functional', 'structural']),
        name='inputspec')
    bet = pe.Node(fsl.BET(mask=True, remove_eyes=True), name='bet')
    applymask = pe.Node(fs.ApplyMask(), name='applymask')
    #flirt = pe.Node(fsl.FLIRT(),name='flirt')
    #applyxfm_mask = pe.Node(fsl.ApplyXfm(interp='nearestneighbour',apply_xfm=True),name='applyxfm_mask')
    #applyxfm_seg = pe.MapNode(fsl.ApplyXfm(interp='nearestneighbour',apply_xfm=True),name='applyxfm_seg',iterfield=['in_file'])
    dilate = pe.Node(fsl.DilateImage(operation='mean'), name='dilate')
    atropos = pe.Node(ants.Atropos(initialization='KMeans',
                                   number_of_tissue_classes=3,
                                   dimension=3),
                      name='atropos')
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='n4corrections')
    outputspec = pe.Node(niu.IdentityInterface(
        fields=['mask', 'reg_file', 'segments', 'warped_struct']),
                         name='outputspec')

    #create brain mask
    wf.connect(inputspec, "structural", bet, "in_file")

    #dilate bets mask a bit for atropos
    wf.connect(bet, "mask_file", dilate, "in_file")

    # apply dilated mask to img

    wf.connect(dilate, 'out_file', applymask, 'mask_file')
    wf.connect(inputspec, 'structural', applymask, 'in_file')

    #N4 bias correction

    wf.connect(applymask, "out_file", n4, 'input_image')

    # atropos it

    wf.connect(n4, "output_image", atropos, 'intensity_images')
    wf.connect(dilate, 'out_file', atropos, 'mask_image')

    return wf
Example #17
def init_enhance_and_skullstrip_epi_wf(name='enhance_and_skullstrip_epi_wf'):
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'mask_file', 'skull_stripped_file', 'bias_corrected_file', 'out_report'
    ]),
                         name='outputnode')
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                                    copy_header=True),
                         name='n4_correct')
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    unifize = pe.Node(afni.Unifize(t2=True,
                                   outputtype='NIFTI_GZ',
                                   args='-clfrac 0.4',
                                   out_file="uni.nii.gz"),
                      name='unifize')
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')
    mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')

    workflow.connect([
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, unifize, [('out_file', 'in_file')]),
        (unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, combine_masks, [('out_file', 'operand_file')
                                                 ]),
        (unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (n4_correct, mask_reportlet, [('output_image', 'background_file')]),
        (combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (mask_reportlet, outputnode, [('out_report', 'out_report')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])

    return workflow
Example #18
def ants_n4(in_file,
            write_dir=None,
            caching=False,
            terminal_output='allatonce',
            environ=None,
            copy_geometry=True):
    if write_dir is None:
        write_dir = os.path.dirname(in_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        copy = memory.cache(afni.Copy)
        copy_geom = memory.cache(fsl.CopyGeom)
        bias_correct.interface().set_default_terminal_output(terminal_output)
        copy.interface().set_default_terminal_output(terminal_output)
    else:
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        copy = afni.Copy(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run

    unbiased_file = fname_presuffix(in_file, suffix='_n4', newpath=write_dir)
    if copy_geometry:
        output_image = fname_presuffix(in_file,
                                       suffix='_n4_rough_geom',
                                       newpath=write_dir)
    else:
        output_image = unbiased_file

    out_bias_correct = bias_correct(
        input_image=in_file,
        shrink_factor=_compute_n4_max_shrink(in_file),
        output_image=output_image)

    if copy_geometry:
        out_copy = copy(in_file=out_bias_correct.outputs.output_image,
                        out_file=unbiased_file,
                        environ=environ)
        out_copy_geom = copy_geom(dest_file=out_copy.outputs.out_file,
                                  in_file=in_file)
    return unbiased_file
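
A usage sketch with a hypothetical input; without caching, the N4 correction, copy, and geometry-copy steps run directly (the _compute_n4_max_shrink helper is assumed to be defined elsewhere in the module):

unbiased = ants_n4('/data/anat.nii.gz', write_dir='/tmp', caching=False)
print(unbiased)  # /tmp/anat_n4.nii.gz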
Example #19
def _multiple_pe_hmc(in_files, in_movpar, in_ref=None):
    """
    This function interprets that we are dealing with a
    multiple PE (phase encoding) input if it finds several
    files in in_files.

    If we have several images with various PE directions,
    it will compute the HMC parameters between them using
    an embedded workflow.

    It just forwards the two inputs otherwise.
    """
    import os
    from nipype.interfaces import fsl
    from nipype.interfaces import ants

    if len(in_files) == 1:
        out_file = in_files[0]
        out_movpar = in_movpar
    else:
        if in_ref is None:
            in_ref = 0

        # Head motion correction
        fslmerge = fsl.Merge(dimension='t', in_files=in_files)
        hmc = fsl.MCFLIRT(ref_vol=in_ref, save_mats=True, save_plots=True)
        hmc.inputs.in_file = fslmerge.run().outputs.merged_file
        hmc_res = hmc.run()
        out_file = hmc_res.outputs.out_file
        out_movpar = hmc_res.outputs.par_file

    mean = fsl.MeanImage(
        dimension='T', in_file=out_file)
    inu = ants.N4BiasFieldCorrection(
        dimension=3, input_image=mean.run().outputs.out_file)
    inu_res = inu.run()
    out_ref = inu_res.outputs.output_image
    bet = fsl.BET(
        frac=0.6, mask=True, in_file=out_ref)
    out_mask = bet.run().outputs.mask_file

    return (out_file, out_ref, out_mask, out_movpar)
Example #20
def afni_wf(name='AFNISkullStripWorkflow'):
    """
    Skull-stripping workflow

    Derived from the codebase of the QAP:
    https://github.com/preprocessed-connectomes-project/\
quality-assessment-protocol/blob/master/qap/anatomical_preproc.py#L105


    """

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bias_corrected', 'out_file', 'out_mask', 'bias_image']),
                         name='outputnode')

    inu_n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, save_bias=True),
                     name='CorrectINU')

    sstrip = pe.Node(afni.SkullStrip(outputtype='NIFTI_GZ'), name='skullstrip')
    sstrip_orig_vol = pe.Node(afni.Calc(expr='a*step(b)',
                                        outputtype='NIFTI_GZ'),
                              name='sstrip_orig_vol')
    binarize = pe.Node(fsl.Threshold(args='-bin', thresh=1.e-3),
                       name='binarize')

    workflow.connect([(inputnode, sstrip_orig_vol, [('in_file', 'in_file_a')]),
                      (inputnode, inu_n4, [('in_file', 'input_image')]),
                      (inu_n4, sstrip, [('output_image', 'in_file')]),
                      (sstrip, sstrip_orig_vol, [('out_file', 'in_file_b')]),
                      (sstrip_orig_vol, binarize, [('out_file', 'in_file')]),
                      (sstrip_orig_vol, outputnode, [('out_file', 'out_file')
                                                     ]),
                      (binarize, outputnode, [('out_file', 'out_mask')]),
                      (inu_n4, outputnode, [('output_image', 'bias_corrected'),
                                            ('bias_image', 'bias_image')])])
    return workflow
Example #21
def n4_bias_correction(wf, cfg, strat_pool, pipe_num, opt=None):
    '''
    {"name": "n4_bias_correction",
     "config": ["anatomical_preproc"],
     "switch": ["n4_bias_field_correction"],
     "option_key": "None",
     "option_val": "None",
     "inputs": [["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]],
     "outputs": ["desc-preproc_T1w"]}
    '''

    n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                      shrink_factor=2,
                                                      copy_header=True),
                 name=f'anat_n4_{pipe_num}')

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, n4, 'input_image')

    outputs = {'desc-preproc_T1w': (n4, 'output_image')}

    return (wf, outputs)
Example #22
def bruker(
    measurements_base,
    template,
    DEBUG=False,
    exclude={},
    functional_match={},
    structural_match={},
    sessions=[],
    subjects=[],
    actual_size=True,
    functional_blur_xy=False,
    functional_registration_method="structural",
    highpass_sigma=225,
    lowpass_sigma=None,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    realign="time",
    registration_mask=False,
    tr=1,
    very_nasty_bruker_delay_hack=False,
    workflow_name="generic",
    keep_work=False,
    autorotate=False,
    strict=False,
    verbose=False,
):
    '''

    realign: {"space","time","spacetime",""}
        Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is the default, since it works safely. Use the others only with caution!

    '''
    if template:
        if template == "mouse":
            template = fetch_mouse_DSURQE()['template']
            registration_mask = fetch_mouse_DSURQE()['mask']
        elif template == "rat":
            template = fetch_rat_waxholm()['template']
            registration_mask = fetch_rat_waxholm()['mask']
        else:
            pass
    else:
        raise ValueError("No species or template specified")

    measurements_base = path.abspath(path.expanduser(measurements_base))

    # add subject and session filters if present
    if subjects:
        structural_scan_types['subject'] = subjects
    if sessions:
        structural_scan_types['session'] = sessions

    # define measurement directories to be processed, and populate the list either with the given include_measurements, or with an intelligent selection
    data_selection = pd.DataFrame([])
    if structural_match:
        s_data_selection = get_data_selection(
            measurements_base,
            match=structural_match,
            exclude=exclude,
        )
        structural_scan_types = s_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, s_data_selection])
    if functional_match:
        f_data_selection = get_data_selection(
            measurements_base,
            match=functional_match,
            exclude=exclude,
        )
        functional_scan_types = f_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, f_data_selection])

    # we currently only support one structural scan type per session
    #if functional_registration_method in ("structural", "composite") and structural_scan_types:
    #	structural_scan_types = [structural_scan_types[0]]

    # we start to define nipype workflow elements (nodes, connections, meta)
    subjects_sessions = data_selection[["subject", "session"
                                        ]].drop_duplicates().values.tolist()
    if DEBUG:
        print('Data selection:')
        print(data_selection)
        print('Iterating over:')
        print(subjects_sessions)
    infosource = pe.Node(interface=util.IdentityInterface(
        fields=['subject_session'], mandatory_inputs=False),
                         name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_scan,
                             input_names=inspect.getargspec(get_scan)[0],
                             output_names=['scan_path', 'scan_type', 'trial']))
    if not strict:
        get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    bandpass = pe.Node(interface=fsl.maths.TemporalFilter(), name="bandpass")
    bandpass.inputs.highpass_sigma = highpass_sigma
    if lowpass_sigma:
        bandpass.inputs.lowpass_sigma = lowpass_sigma
    else:
        bandpass.inputs.lowpass_sigma = tr

    #bids_filename = pe.Node(name='bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
    bids_filename = pe.Node(name='bids_filename',
                            interface=util.Function(
                                function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    bids_filename.inputs.metadata = data_selection

    #bids_stim_filename = pe.Node(name='bids_stim_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
    bids_stim_filename = pe.Node(
        name='bids_stim_filename',
        interface=util.Function(function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    bids_stim_filename.inputs.suffix = "events"
    bids_stim_filename.inputs.extension = ".tsv"
    bids_stim_filename.inputs.metadata = data_selection

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_events_file,
            input_names=inspect.getargspec(write_events_file)[0],
            output_names=['out_file']))
    events_file.inputs.dummy_scans_ms = DUMMY_SCANS * tr * 1000
    events_file.inputs.stim_protocol_dictionary = STIM_PROTOCOL_DICTIONARY
    events_file.inputs.very_nasty_bruker_delay_hack = very_nasty_bruker_delay_hack
    if not (strict or verbose):
        events_file.inputs.ignore_exception = True

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base,
                                               "preprocessing", workflow_name)
    datasink.inputs.parameterization = False
    if not (strict or verbose):
        datasink.inputs.ignore_exception = True

    workflow_connections = [
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (infosource, bids_stim_filename, [('subject_session',
                                           'subject_session')]),
        (get_f_scan, bids_stim_filename, [('scan_type', 'scan_type')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (get_f_scan, events_file, [('trial', 'trial'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (bids_stim_filename, events_file, [('filename', 'out_file')]),
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (infosource, bids_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_filename, [('scan_type', 'scan_type')]),
        (bids_filename, bandpass, [('filename', 'out_file')]),
        (bandpass, datasink, [('out_file', 'func')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if actual_size:
        s_biascorrect, f_biascorrect = real_size_nodes()
    else:
        s_biascorrect, f_biascorrect = inflated_size_nodes()

    if structural_scan_types.any():
        get_s_scan = pe.Node(
            name='get_s_scan',
            interface=util.Function(
                function=get_scan,
                input_names=inspect.getargspec(get_scan)[0],
                output_names=['scan_path', 'scan_type', 'trial']))
        if not strict:
            get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.measurements_base = measurements_base
        get_s_scan.iterables = ("scan_type", structural_scan_types)

        s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
        s_bru2nii.inputs.force_conversion = True
        s_bru2nii.inputs.actual_size = actual_size

        #s_bids_filename = pe.Node(name='s_bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
        s_bids_filename = pe.Node(
            name='s_bids_filename',
            interface=util.Function(
                function=bids_naming,
                input_names=inspect.getargspec(bids_naming)[0],
                output_names=['filename']))
        s_bids_filename.inputs.metadata = data_selection

        if actual_size:
            s_register, s_warp, _, _ = DSURQEc_structural_registration(
                template, registration_mask)
            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_biascorrect, s_register, [('output_image',
                                                  'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])
        else:
            s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(),
                                        name="s_reg_biascorrect")
            s_reg_biascorrect.inputs.dimension = 3
            s_reg_biascorrect.inputs.bspline_fitting_distance = 95
            s_reg_biascorrect.inputs.shrink_factor = 2
            s_reg_biascorrect.inputs.n_iterations = [500, 500, 500, 500]
            s_reg_biascorrect.inputs.convergence_threshold = 1e-14

            s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
            s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

            s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
            s_BET.inputs.mask = True
            s_BET.inputs.frac = 0.3
            s_BET.inputs.robust = True

            s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
            s_register, s_warp, f_warp = structural_registration(template)

            workflow_connections.extend([
                (s_bru2nii, s_reg_biascorrect, [('nii_file', 'input_image')]),
                (s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
                (s_cutoff, s_BET, [('out_file', 'in_file')]),
                (s_biascorrect, s_mask, [('output_image', 'in_file')]),
                (s_BET, s_mask, [('mask_file', 'mask_file')]),
            ])

            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_mask, s_rotated, [('out_file', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_mask, s_register, [('out_file', 'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])

        if autorotate:
            s_rotated = autorotate(template)

        workflow_connections.extend([
            (infosource, get_s_scan, [('subject_session', 'selector')]),
            (infosource, s_bids_filename, [('subject_session',
                                            'subject_session')]),
            (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
            (get_s_scan, s_bids_filename, [('scan_type', 'scan_type')]),
            (s_bids_filename, s_warp, [('filename', 'output_image')]),
            (s_bru2nii, s_biascorrect, [('nii_file', 'input_image')]),
        ])

    if functional_registration_method == "structural":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="structural"` requires there to be a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="composite"` requires there to be a structural scan type.'
            )
        _, _, f_register, f_warp = DSURQEc_structural_registration(
            template, registration_mask)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (f_register, merge, [('composite_transform', 'in1')]),
            (s_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    if functional_blur_xy and negative_contrast_agent:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    elif functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, bandpass, [('out_file', 'in_file')]),
        ])
    elif negative_contrast_agent:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, invert, [(('output_image', fslmaths_invert_values),
                               'op_string')]),
            (f_warp, invert, [('output_image', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    else:
        workflow_connections.extend([
            (f_warp, bandpass, [('output_image', 'in_file')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir':
            path.join(measurements_base, 'preprocessing/crashdump'),
        }
    }
    if DEBUG:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base, "preprocessing")
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
Example #23
def anat_qc_workflow(dataset, settings, name='anatMRIQC'):
    """
    One-subject-one-session-one-run pipeline to extract the NR-IQMs from
    anatomical images
    """

    workflow = pe.Workflow(name=name)
    WFLOGGER.info(
        'Building anatomical MRI QC workflow, datasets list: %s',
        sorted([d.replace(settings['bids_dir'] + '/', '') for d in dataset]))

    # Define workflow, inputs and outputs
    # 0. Get data
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    inputnode.iterables = [('in_file', dataset)]

    outputnode = pe.Node(niu.IdentityInterface(fields=['out_json']),
                         name='outputnode')

    meta = pe.Node(ReadSidecarJSON(), name='metadata')

    # 1a. Reorient anatomical image
    to_ras = pe.Node(ConformImage(), name='conform')
    # 1b. Estimate bias
    n4itk = pe.Node(ants.N4BiasFieldCorrection(dimension=3, save_bias=True),
                    name='Bias')
    # 2. Skull-stripping (afni)
    asw = skullstrip_wf()
    # 3. Head mask (including nasion-cerebellum mask)
    hmsk = headmsk_wf()
    # 4. Spatial Normalization, using ANTs
    norm = pe.Node(RobustMNINormalization(
        num_threads=settings.get('ants_nthreads', 6),
        template='mni_icbm152_nlin_asym_09c',
        testing=settings.get('testing', False),
        generate_report=True),
                   name='SpatialNormalization')
    # 5. Air mask (with and without artifacts)
    amw = airmsk_wf()
    # 6. Brain tissue segmentation
    segment = pe.Node(fsl.FAST(img_type=1,
                               segments=True,
                               out_basename='segment'),
                      name='segmentation')
    # 7. Compute IQMs
    iqmswf = compute_iqms(settings)
    # Reports
    repwf = individual_reports(settings)

    # Connect all nodes
    workflow.connect([
        (inputnode, to_ras, [('in_file', 'in_file')]),
        (inputnode, meta, [('in_file', 'in_file')]),
        (to_ras, n4itk, [('out_file', 'input_image')]),
        (meta, iqmswf, [('subject_id', 'inputnode.subject_id'),
                        ('session_id', 'inputnode.session_id'),
                        ('run_id', 'inputnode.run_id')]),
        (n4itk, asw, [('output_image', 'inputnode.in_file')]),
        (asw, segment, [('outputnode.out_file', 'in_files')]),
        (n4itk, hmsk, [('output_image', 'inputnode.in_file')]),
        (segment, hmsk, [('tissue_class_map', 'inputnode.in_segm')]),
        (n4itk, norm, [('output_image', 'moving_image')]),
        (asw, norm, [('outputnode.out_mask', 'moving_mask')]),
        (to_ras, amw, [('out_file', 'inputnode.in_file')]),
        (norm, amw, [('reverse_transforms', 'inputnode.reverse_transforms'),
                     ('reverse_invert_flags', 'inputnode.reverse_invert_flags')
                     ]),
        (norm, iqmswf, [('reverse_transforms', 'inputnode.reverse_transforms'),
                        ('reverse_invert_flags',
                         'inputnode.reverse_invert_flags')]),
        (norm, repwf, ([('out_report', 'inputnode.mni_report')])),
        (asw, amw, [('outputnode.out_mask', 'inputnode.in_mask')]),
        (hmsk, amw, [('outputnode.out_file', 'inputnode.head_mask')]),
        (to_ras, iqmswf, [('out_file', 'inputnode.orig')]),
        (n4itk, iqmswf, [('output_image', 'inputnode.inu_corrected'),
                         ('bias_image', 'inputnode.in_inu')]),
        (asw, iqmswf, [('outputnode.out_mask', 'inputnode.brainmask')]),
        (amw, iqmswf, [('outputnode.out_file', 'inputnode.airmask'),
                       ('outputnode.artifact_msk', 'inputnode.artmask')]),
        (segment, iqmswf, [('tissue_class_map', 'inputnode.segmentation'),
                           ('partial_volume_files', 'inputnode.pvms')]),
        (meta, iqmswf, [('out_dict', 'inputnode.metadata')]),
        (hmsk, iqmswf, [('outputnode.out_file', 'inputnode.headmask')]),
        (to_ras, repwf, [('out_file', 'inputnode.orig')]),
        (n4itk, repwf, [('output_image', 'inputnode.inu_corrected')]),
        (asw, repwf, [('outputnode.out_mask', 'inputnode.brainmask')]),
        (hmsk, repwf, [('outputnode.out_file', 'inputnode.headmask')]),
        (amw, repwf, [('outputnode.out_file', 'inputnode.airmask'),
                      ('outputnode.artifact_msk', 'inputnode.artmask')]),
        (segment, repwf, [('tissue_class_map', 'inputnode.segmentation')]),
        (iqmswf, repwf, [('outputnode.out_noisefit', 'inputnode.noisefit')]),
        (iqmswf, repwf, [('outputnode.out_file', 'inputnode.in_iqms')]),
        (iqmswf, outputnode, [('outputnode.out_file', 'out_json')])
    ])

    return workflow
Example #24
def create_extract_noT1_pipe(params_template, params={},
                             name="extract_noT1_pipe"):
    """
    Description: Extract T1 brain using AtlasBREX

    Inputs:

        inputnode:
            restore_T1: preprocessed (debiased/denoised) T1 file name

            indiv_params: dictionary of individual (per-subject) node parameters

        arguments:
            params_template: dictionary of info about template

            params: dictionary of node sub-parameters (from a json file)

            name: pipeline name (default = "extract_noT1_pipe")

    Outputs:

        smooth_mask.out_file:
            Computed mask (after some smoothing)

    """

    # creating pipeline
    extract_pipe = pe.Workflow(name=name)

    # creating inputnode
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['restore_T1',
                                      "indiv_params"]),
        name='inputnode')

    # N4 intensity normalization with parameters from json
    norm_intensity = NodeParams(ants.N4BiasFieldCorrection(),
                                params=parse_key(params, "norm_intensity"),
                                name='norm_intensity')

    extract_pipe.connect(inputnode, 'restore_T1',
                         norm_intensity, "input_image")

    # atlas_brex
    atlas_brex = NodeParams(AtlasBREX(),
                            params=parse_key(params, "atlas_brex"),
                            name='atlas_brex')

    extract_pipe.connect(norm_intensity, "output_image",
                         atlas_brex, 't1_restored_file')

    atlas_brex.inputs.NMT_file = params_template["template_head"]
    atlas_brex.inputs.NMT_SS_file = params_template["template_brain"]

    extract_pipe.connect(
            inputnode, ("indiv_params", parse_key, "atlas_brex"),
            atlas_brex, 'indiv_params')

    # mask_brex
    mask_brex = pe.Node(fsl.UnaryMaths(), name='mask_brex')
    mask_brex.inputs.operation = 'bin'

    extract_pipe.connect(atlas_brex, 'brain_file', mask_brex, 'in_file')

    # smooth_mask
    smooth_mask = pe.Node(fsl.UnaryMaths(), name='smooth_mask')
    smooth_mask.inputs.operation = "bin"
    smooth_mask.inputs.args = "-s 1 -thr 0.5 -bin"

    extract_pipe.connect(mask_brex, 'out_file', smooth_mask, 'in_file')

    return extract_pipe
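
# Hedged usage sketch (not from the original source): instantiating the pipeline
# defined above and feeding it a debiased T1. The template paths, the parameter
# dict layout and the file names below are hypothetical placeholders.
params_template = {
    "template_head": "/data/templates/NMT_head.nii.gz",    # hypothetical path
    "template_brain": "/data/templates/NMT_brain.nii.gz",  # hypothetical path
}
params = {"norm_intensity": {"dimension": 3}}  # assumed sub-parameter layout

extract_pipe = create_extract_noT1_pipe(params_template, params=params)
extract_pipe.inputs.inputnode.restore_T1 = "/data/sub-01/restore_T1w.nii.gz"
extract_pipe.inputs.inputnode.indiv_params = {}  # no subject-specific overrides
# extract_pipe.run()  # requires ANTs, FSL and AtlasBREX to be installed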
Ejemplo n.º 25
0
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['T1', 'orig', 'brainmask'])
        self.set_output(['T1_skullstrip', 'allineate_freesurfer2anat'])

        # define datasink substitutions
        self.set_subs([('_maskop40', ''), ('_calc_calc_calc_calc_calc', '')])

        # 3dAllineate (FSorig)
        self.allineate_orig = MapNode(afni.Allineate(
            out_matrix='FSorig2MPR.aff12.1D',
            overwrite=True,
            outputtype='NIFTI_GZ'),
                                      iterfield=['in_file', 'reference'],
                                      name='3dallineate_orig')
        # 3dAllineate (FSbrainmask)
        self.allineate_bm = MapNode(
            afni.Allineate(overwrite=True, no_pad=True, outputtype='NIFTI_GZ'),
            iterfield=['in_file', 'reference', 'in_matrix'],
            name='3dallineate_brainmask')

        # skullstrip mprage (afni)
        self.afni_skullstrip = MapNode(afni.SkullStrip(args="-orig_vol",
                                                       outputtype="NIFTI_GZ"),
                                       iterfield=['in_file'],
                                       name='afni_skullstrip')
        # 3dcalc operations for achieving final mask
        self.maskop1 = MapNode(afni.Calc(expr='step(a)',
                                         overwrite=True,
                                         outputtype='NIFTI_GZ'),
                               iterfield=['in_file_a'],
                               name='maskop1')
        self.maskop2 = []
        for n in range(3):
            self.maskop2.append(
                MapNode(afni.Calc(
                    args='-b a+i -c a-i -d a+j -e a-j -f a+k -g a-k',
                    expr='ispositive(a+b+c+d+e+f+g)',
                    overwrite=True,
                    outputtype='NIFTI_GZ'),
                        iterfield=['in_file_a'],
                        name='maskop2_{}'.format(n)))
        # Inline function pairing each skull-stripped T1 with the IJK_TO_DICOM_REAL attribute to copy via 3dRefit
        self.refit_setup = MapNode(Function(input_names=['noskull_T1'],
                                            output_names=['refit_input'],
                                            function=lambda noskull_T1:
                                            (noskull_T1, 'IJK_TO_DICOM_REAL')),
                                   iterfield=['noskull_T1'],
                                   name='refitsetup')
        # 3dRefit
        self.refit = MapNode(afni.Refit(),
                             iterfield=['in_file', 'atrcopy'],
                             name='3drefit')
        # 3dcalc for uniform intensity
        self.uniform = MapNode(afni.Calc(expr='a*and(b,b)',
                                         overwrite=True,
                                         outputtype='NIFTI_GZ'),
                               iterfield=['in_file_a', 'in_file_b'],
                               name='uniformintensity')

        # skullstrip mprage (fsl)
        self.fsl_skullstrip = MapNode(fsl.BET(),
                                      iterfield=['in_file'],
                                      name='fsl_skullstrip')
        self.maskop3 = MapNode(
            afni.Calc(expr='or(a,b,c)', overwrite=True, outputtype='NIFTI_GZ'),
            iterfield=['in_file_a', 'in_file_b', 'in_file_c'],
            name='maskop3')
        self.maskop4 = MapNode(
            afni.Calc(expr='c*and(a,b)', overwrite=True,
                      outputtype='NIFTI_GZ'),
            iterfield=['in_file_a', 'in_file_b', 'in_file_c'],
            name='maskop4')

        # Select the first T1 from the list input (list -> single file)
        self.select0T1 = Node(Function(input_names=['T1_list'],
                                       output_names=['T1_0'],
                                       function=lambda T1_list: T1_list[0]),
                              name='select0T1')

        # apply bias field correction
        self.biasfieldcorrect = Node(ants.N4BiasFieldCorrection(
            num_threads=settings['num_threads'], copy_header=True),
                                     name='biasfieldcorrect')
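
        # Hedged illustration (not part of the original class): run standalone,
        # the node above corresponds roughly to
        #   n4 = ants.N4BiasFieldCorrection(copy_header=True, num_threads=4)
        #   n4.inputs.input_image = 'sub-01_T1w.nii.gz'  # placeholder path
        #   n4.inputs.dimension = 3
        #   result = n4.run()
        # and would write the bias-corrected image to the working directory.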
Ejemplo n.º 26
0
def init_phdiff_wf(omp_nthreads, phasetype='phasediff', name='phdiff_wf'):
    """
    Estimates the fieldmap using a phase-difference image and one or more
    magnitude images corresponding to two or more :abbr:`GRE (Gradient Echo sequence)`
    acquisitions. The `original code was taken from nipype
    <https://github.com/nipy/nipype/blob/master/nipype/workflows/dmri/fsl/artifacts.py#L514>`_.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from qsiprep.workflows.fieldmap.phdiff import init_phdiff_wf
        wf = init_phdiff_wf(omp_nthreads=1)


    Outputs::

      outputnode.fmap_ref - The average magnitude image, skull-stripped
      outputnode.fmap_mask - The brain mask applied to the fieldmap
      outputnode.fmap - The estimated fieldmap in Hz


    """

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
A deformation field to correct for susceptibility distortions was estimated
based on a field map that was co-registered to the BOLD reference,
using a custom workflow of *fMRIPrep* derived from D. Greve's `epidewarp.fsl`
[script](http://www.nmr.mgh.harvard.edu/~greve/fbirn/b0/epidewarp.fsl) and
further improvements of HCP Pipelines [@hcppipelines].
"""

    inputnode = pe.Node(
        niu.IdentityInterface(fields=['magnitude', 'phasediff']),
        name='inputnode')

    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
        name='outputnode')

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, copy_header=True),
                 name='n4',
                 n_procs=omp_nthreads)
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_report_fmap_mask = pe.Node(DerivativesDataSink(desc='brain',
                                                      suffix='mask'),
                                  name='ds_report_fmap_mask',
                                  mem_gb=0.01,
                                  run_without_submitting=True)
    # uses mask from bet; outputs a mask
    # dilate = pe.Node(fsl.maths.MathsCommand(
    #     nan2zeros=True, args='-kernel sphere 5 -dilM'), name='MskDilate')

    # FSL PRELUDE will perform phase-unwrapping
    prelude = pe.Node(fsl.PRELUDE(), name='prelude')

    denoise = pe.Node(fsl.SpatialFilter(operation='median',
                                        kernel_shape='sphere',
                                        kernel_size=5),
                      name='denoise')

    demean = pe.Node(niu.Function(function=demean_image), name='demean')

    cleanup_wf = cleanup_edge_pipeline(name="cleanup_wf")

    compfmap = pe.Node(Phasediff2Fieldmap(), name='compfmap')

    # The phdiff2fmap interface is equivalent to:
    # rad2rsec (using rads2radsec from nipype.workflows.dmri.fsl.utils)
    # pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='ComputeFieldmapFUGUE')
    # rsec2hz (divide by 2pi)
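
    # Hedged illustration (not part of the original workflow): the conversion the
    # comments above describe boils down to
    #   fmap_hz = delta_phase_rad / (2.0 * math.pi * delta_te_sec)
    # where delta_phase_rad is the unwrapped phase difference and delta_te_sec is
    # the echo-time difference read from the sidecar metadata; Phasediff2Fieldmap
    # performs this step in the actual pipeline.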

    if phasetype == "phasediff":
        # Read phasediff echo times
        meta = pe.Node(ReadSidecarJSON(), name='meta', mem_gb=0.01)

        # phase diff -> radians
        pha2rads = pe.Node(niu.Function(function=siemens2rads),
                           name='pha2rads')
        # Read phasediff echo times
        meta = pe.Node(ReadSidecarJSON(),
                       name='meta',
                       mem_gb=0.01,
                       run_without_submitting=True)
        workflow.connect([
            (meta, compfmap, [('out_dict', 'metadata')]),
            (inputnode, pha2rads, [('phasediff', 'in_file')]),
            (pha2rads, prelude, [('out', 'phase_file')]),
            (inputnode, ds_report_fmap_mask, [('phasediff', 'source_file')]),
        ])

    elif phasetype == "phase":
        workflow.__desc__ += """\
The phase difference used for unwarping was calculated using two separate phase measurements
 [@pncprocessing].
    """
        # Special case for phase1, phase2 images
        meta = pe.MapNode(ReadSidecarJSON(),
                          name='meta',
                          mem_gb=0.01,
                          run_without_submitting=True,
                          iterfield=['in_file'])
        phases2fmap = pe.Node(Phases2Fieldmap(), name='phases2fmap')
        workflow.connect([
            (meta, phases2fmap, [('out_dict', 'metadatas')]),
            (inputnode, phases2fmap, [('phasediff', 'phase_files')]),
            (phases2fmap, prelude, [('out_file', 'phase_file')]),
            (phases2fmap, compfmap, [('phasediff_metadata', 'metadata')]),
            (phases2fmap, ds_report_fmap_mask, [('out_file', 'source_file')])
        ])

    workflow.connect([
        (inputnode, meta, [('phasediff', 'in_file')]),
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (magmrg, n4, [('out_avg', 'input_image')]),
        (n4, prelude, [('output_image', 'magnitude_file')]),
        (n4, bet, [('output_image', 'in_file')]),
        (bet, prelude, [('mask_file', 'mask_file')]),
        (prelude, denoise, [('unwrapped_phase_file', 'in_file')]),
        (denoise, demean, [('out_file', 'in_file')]),
        (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
        (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
        (cleanup_wf, compfmap, [('outputnode.out_file', 'in_file')]),
        (compfmap, outputnode, [('out_file', 'fmap')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (bet, ds_report_fmap_mask, [('out_report', 'in_file')]),
    ])

    return workflow
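
# Hedged usage sketch (assumption, not from the source): building the
# phase-difference workflow for a standard GRE fieldmap acquisition.
# File names are placeholders; the phasediff JSON sidecar must sit next to
# the NIfTI so ReadSidecarJSON can find the echo times.
phdiff_wf = init_phdiff_wf(omp_nthreads=2, phasetype='phasediff')
phdiff_wf.inputs.inputnode.magnitude = ['sub-01_magnitude1.nii.gz',
                                        'sub-01_magnitude2.nii.gz']
phdiff_wf.inputs.inputnode.phasediff = 'sub-01_phasediff.nii.gz'
# phdiff_wf.run()  # requires FSL (PRELUDE, BET) and ANTs (N4) on the PATH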
Ejemplo n.º 27
0
def init_fmap_wf(omp_nthreads, fmap_bspline, name='fmap_wf'):
    """
    Fieldmap workflow - when we have a sequence that directly measures the fieldmap
    we just need to mask it (using the corresponding magnitude image) to remove the
    noise in the surrounding air region, and ensure that units are Hz.

    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from fmriprep.workflows.fieldmap.fmap import init_fmap_wf
        wf = init_fmap_wf(omp_nthreads=6, fmap_bspline=False)

    """

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['magnitude', 'fieldmap']),
        name='inputnode')
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fmap', 'fmap_ref', 'fmap_mask']),
        name='outputnode')

    # Merge input magnitude images
    magmrg = pe.Node(IntraModalMerge(), name='magmrg')
    # Merge input fieldmap images
    fmapmrg = pe.Node(IntraModalMerge(zero_based_avg=False, hmc=False),
                      name='fmapmrg')

    # de-gradient the fields ("bias/illumination artifact")
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(dimension=3,
                                                    copy_header=True),
                         name='n4_correct',
                         n_procs=omp_nthreads)
    bet = pe.Node(BETRPT(generate_report=True, frac=0.6, mask=True),
                  name='bet')
    ds_fmap_mask = pe.Node(DerivativesDataSink(suffix='fmap_mask'),
                           name='ds_report_fmap_mask',
                           run_without_submitting=True)

    workflow.connect([
        (inputnode, magmrg, [('magnitude', 'in_files')]),
        (inputnode, fmapmrg, [('fieldmap', 'in_files')]),
        (magmrg, n4_correct, [('out_file', 'input_image')]),
        (n4_correct, bet, [('output_image', 'in_file')]),
        (bet, outputnode, [('mask_file', 'fmap_mask'),
                           ('out_file', 'fmap_ref')]),
        (inputnode, ds_fmap_mask, [('fieldmap', 'source_file')]),
        (bet, ds_fmap_mask, [('out_report', 'in_file')]),
    ])

    if fmap_bspline:
        # despike_threshold=1.0, mask_erode=1),
        fmapenh = pe.Node(FieldEnhance(unwrap=False, despike=False),
                          name='fmapenh',
                          mem_gb=4,
                          n_procs=omp_nthreads)

        workflow.connect([
            (bet, fmapenh, [('mask_file', 'in_mask'),
                            ('out_file', 'in_magnitude')]),
            (fmapmrg, fmapenh, [('out_file', 'in_file')]),
            (fmapenh, outputnode, [('out_file', 'fmap')]),
        ])

    else:
        torads = pe.Node(FieldToRadS(), name='torads')
        prelude = pe.Node(fsl.PRELUDE(), name='prelude')
        tohz = pe.Node(FieldToHz(), name='tohz')

        denoise = pe.Node(fsl.SpatialFilter(operation='median',
                                            kernel_shape='sphere',
                                            kernel_size=3),
                          name='denoise')
        demean = pe.Node(niu.Function(function=demean_image), name='demean')
        cleanup_wf = cleanup_edge_pipeline(name='cleanup_wf')

        applymsk = pe.Node(fsl.ApplyMask(), name='applymsk')

        workflow.connect([
            (bet, prelude, [('mask_file', 'mask_file'),
                            ('out_file', 'magnitude_file')]),
            (fmapmrg, torads, [('out_file', 'in_file')]),
            (torads, tohz, [('fmap_range', 'range_hz')]),
            (torads, prelude, [('out_file', 'phase_file')]),
            (prelude, tohz, [('unwrapped_phase_file', 'in_file')]),
            (tohz, denoise, [('out_file', 'in_file')]),
            (denoise, demean, [('out_file', 'in_file')]),
            (demean, cleanup_wf, [('out', 'inputnode.in_file')]),
            (bet, cleanup_wf, [('mask_file', 'inputnode.in_mask')]),
            (cleanup_wf, applymsk, [('outputnode.out_file', 'in_file')]),
            (bet, applymsk, [('mask_file', 'mask_file')]),
            (applymsk, outputnode, [('out_file', 'fmap')]),
        ])

    return workflow
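
# Hedged usage sketch (assumption, not from the source): masking a directly
# measured fieldmap without the B-spline enhancement branch. Paths are placeholders.
fmap_wf = init_fmap_wf(omp_nthreads=2, fmap_bspline=False)
fmap_wf.inputs.inputnode.magnitude = 'sub-01_magnitude.nii.gz'
fmap_wf.inputs.inputnode.fieldmap = 'sub-01_fieldmap.nii.gz'
# fmap_wf.run()  # requires FSL and ANTs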
Ejemplo n.º 28
0
datasink_anat.inputs.substitutions = substitutions

# ========================================================================================================
# In[6]:

template_brain = '/media/amr/Amr_4TB/Work/October_Acquistion/anat_temp_enhanced_3.nii.gz'
template_mask = '/media/amr/Amr_4TB/Work/October_Acquistion/anat_template_enhanced_mask_2.nii.gz'

TR = 2.0

# =======================================================================================================
# In[7]:

# Remove the skull using antsBrainExtraction.sh. I am using the study-based template that I built,
# from which the skull was removed manually with ITK-SNAP.
biasfield_correction_anat = Node(ants.N4BiasFieldCorrection(),
                                 name='biasfield_correction_anat')
biasfield_correction_anat.inputs.dimension = 3
biasfield_correction_anat.inputs.save_bias = True
# biasfield_correction_anat.inputs.output_image = 'anat_bet_biasfield_corrected.nii.gz'
# better not to set output_image explicitly; it confuses the registration step

# ======================================================================================================
# In[8]:
# Extract one fMRI volume to use for brain extraction, the same volume that will serve as the MCFLIRT reference
roi = Node(fsl.ExtractROI(), name='extract_one_fMRI_volume')

roi.inputs.t_min = 75
roi.inputs.t_size = 1
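
# Hedged continuation (assumption, not in the original script): the extracted
# volume would typically feed both brain extraction and motion correction, e.g.
#   mc = Node(fsl.MCFLIRT(save_plots=True), name='McFlirt')
# connecting the 'roi_file' output of the node above to the 'ref_file' input of mc.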

# ======================================================================================================
Ejemplo n.º 29
0
def init_enhance_and_skullstrip_bold_wf(name='enhance_and_skullstrip_bold_wf',
                                        pre_mask=False,
                                        omp_nthreads=1):
    """
    This workflow takes in a :abbr:`BOLD (blood-oxygen-level dependent)`
    :abbr:`fMRI (functional MRI)` average/summary (e.g., a reference image
    averaging non-steady-state timepoints), sharpens the histogram
    by applying the N4 algorithm to remove the
    :abbr:`INU (intensity non-uniformity)` bias field, and calculates a signal
    mask.

    Steps of this workflow are:

      1. Calculate a tentative mask by registering (9 parameters) to *fMRIPrep*'s
         :abbr:`EPI (echo-planar imaging)` -*boldref* template, which
         is in MNI space.
         The tentative mask is obtained by resampling the MNI template's
         brainmask into *boldref*-space.
      2. Binary dilation of the tentative mask with a sphere of 3mm diameter.
      3. Run ANTs' ``N4BiasFieldCorrection`` on the input
         :abbr:`BOLD (blood-oxygen-level dependent)` average, using the
         mask generated in 2) instead of the internal Otsu thresholding.
      4. Calculate a loose mask using FSL's ``bet``, followed by one binary
         dilation with a sphere of 6mm as structuring element.
      5. Mask the :abbr:`INU (intensity non-uniformity)`-corrected image
         with the mask calculated in 4), then use AFNI's ``3dUnifize``
         to *standardize* the T2* contrast distribution.
      6. Calculate a mask using AFNI's ``3dAutomask`` after the contrast
         enhancement of 5).
      7. Calculate a final mask as the intersection of 4) and 6).
      8. Apply the final mask to the enhanced reference.

    Step 1 can be skipped if the ``pre_mask`` argument is set to ``True`` and
    a tentative mask is passed to the workflow through the ``pre_mask``
    Nipype input.


    .. workflow ::
        :graph2use: orig
        :simple_form: yes

        from niworkflows.func.util import init_enhance_and_skullstrip_bold_wf
        wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=1)

    **Parameters**
        name : str
            Name of workflow (default: ``enhance_and_skullstrip_bold_wf``)
        pre_mask : bool
            Indicates whether the ``pre_mask`` input will be set (and thus, step 1
            should be skipped).
        omp_nthreads : int
            number of threads available to parallel nodes

    **Inputs**

        in_file
            BOLD image (single volume)
        pre_mask
            A tentative brain mask to initialize the workflow (requires the
            ``pre_mask`` parameter to be set ``True``).


    **Outputs**

        bias_corrected_file
            the ``in_file`` after `N4BiasFieldCorrection`_
        skull_stripped_file
            the ``bias_corrected_file`` after skull-stripping
        mask_file
            mask of the skull-stripped input file
        out_report
            reportlet for the skull-stripping

    .. _N4BiasFieldCorrection: https://hdl.handle.net/10380/3053
    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['mask_file', 'skull_stripped_file', 'bias_corrected_file']),
                         name='outputnode')

    # Dilate pre_mask
    pre_dilate = pe.Node(fsl.DilateImage(operation='max',
                                         kernel_shape='sphere',
                                         kernel_size=3.0,
                                         internal_datatype='char'),
                         name='pre_mask_dilate')

    # Ensure mask's header matches reference's
    check_hdr = pe.Node(MatchHeader(),
                        name='check_hdr',
                        run_without_submitting=True)

    # Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
    n4_correct = pe.Node(ants.N4BiasFieldCorrection(
        dimension=3, copy_header=True, bspline_fitting_distance=200),
                         name='n4_correct',
                         n_procs=1)

    # Create a generous BET mask out of the bias-corrected EPI
    skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True),
                                    name='skullstrip_first_pass')
    bet_dilate = pe.Node(fsl.DilateImage(operation='max',
                                         kernel_shape='sphere',
                                         kernel_size=6.0,
                                         internal_datatype='char'),
                         name='skullstrip_first_dilate')
    bet_mask = pe.Node(fsl.ApplyMask(), name='skullstrip_first_mask')

    # Use AFNI's unifize for T2 contrast & fix header
    unifize = pe.Node(
        afni.Unifize(
            t2=True,
            outputtype='NIFTI_GZ',
            # Default -clfrac is 0.1, 0.4 was too conservative
            # -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
            args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
            out_file="uni.nii.gz"),
        name='unifize')
    fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)

    # Run AFNI's 3dAutomask to extract a refined brain mask
    skullstrip_second_pass = pe.Node(afni.Automask(dilate=1,
                                                   outputtype='NIFTI_GZ'),
                                     name='skullstrip_second_pass')
    fixhdr_skullstrip2 = pe.Node(CopyXForm(),
                                 name='fixhdr_skullstrip2',
                                 mem_gb=0.1)

    # Take intersection of both masks
    combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'),
                            name='combine_masks')

    # Compute masked brain
    apply_mask = pe.Node(fsl.ApplyMask(), name='apply_mask')

    if not pre_mask:
        bold_template = get_template('MNI152NLin2009cAsym',
                                     resolution=2,
                                     desc='fMRIPrep',
                                     suffix='boldref')
        brain_mask = get_template('MNI152NLin2009cAsym',
                                  resolution=2,
                                  desc='brain',
                                  suffix='mask')

        # Initialize transforms with antsAI
        init_aff = pe.Node(AI(fixed_image=str(bold_template),
                              fixed_image_mask=str(brain_mask),
                              metric=('Mattes', 32, 'Regular', 0.2),
                              transform=('Affine', 0.1),
                              search_factor=(20, 0.12),
                              principal_axes=False,
                              convergence=(10, 1e-6, 10),
                              verbose=True),
                           name='init_aff',
                           n_procs=omp_nthreads)

        # Registration().version may be None
        if parseversion(Registration().version or '0.0.0') > Version('2.2.0'):
            init_aff.inputs.search_grid = (40, (0, 40, 40))

        # Set up spatial normalization
        norm = pe.Node(Registration(from_file=pkgr_fn(
            'fmriprep.data', 'epi_atlasbased_brainmask.json')),
                       name='norm',
                       n_procs=omp_nthreads)
        norm.inputs.fixed_image = str(bold_template)
        map_brainmask = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                                float=True,
                                                input_image=str(brain_mask)),
                                name='map_brainmask')
        workflow.connect([
            (inputnode, init_aff, [('in_file', 'moving_image')]),
            (inputnode, map_brainmask, [('in_file', 'reference_image')]),
            (inputnode, norm, [('in_file', 'moving_image')]),
            (init_aff, norm, [('output_transform', 'initial_moving_transform')
                              ]),
            (norm, map_brainmask, [('reverse_invert_flags',
                                    'invert_transform_flags'),
                                   ('reverse_transforms', 'transforms')]),
            (map_brainmask, pre_dilate, [('output_image', 'in_file')]),
        ])
    else:
        workflow.connect([
            (inputnode, pre_dilate, [('pre_mask', 'in_file')]),
        ])

    workflow.connect([
        (inputnode, check_hdr, [('in_file', 'reference')]),
        (pre_dilate, check_hdr, [('out_file', 'in_file')]),
        (check_hdr, n4_correct, [('out_file', 'mask_image')]),
        (inputnode, n4_correct, [('in_file', 'input_image')]),
        (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
        (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
        (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
        (skullstrip_first_pass, bet_dilate, [('mask_file', 'in_file')]),
        (bet_dilate, bet_mask, [('out_file', 'mask_file')]),
        (skullstrip_first_pass, bet_mask, [('out_file', 'in_file')]),
        (bet_mask, unifize, [('out_file', 'in_file')]),
        (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
        (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
        (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
        (skullstrip_second_pass, fixhdr_skullstrip2, [('out_file', 'in_file')
                                                      ]),
        (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
        (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
        (combine_masks, apply_mask, [('out_file', 'mask_file')]),
        (combine_masks, outputnode, [('out_file', 'mask_file')]),
        (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
        (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
    ])

    return workflow
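
# Hedged usage sketch (assumption, not from the source): running the workflow on
# a precomputed BOLD reference without a tentative mask, so the atlas-based
# initialization (step 1) is executed. The file name is a placeholder.
skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf(omp_nthreads=4,
                                                         pre_mask=False)
skullstrip_bold_wf.inputs.inputnode.in_file = 'sub-01_boldref.nii.gz'
# skullstrip_bold_wf.run()  # requires ANTs, FSL and AFNI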
Ejemplo n.º 30
0
def create_anat_preproc(template_path=None,
                        mask_path=None,
                        regmask_path=None,
                        method='afni',
                        already_skullstripped=False,
                        non_local_means_filtering=True,
                        n4_correction=True,
                        wf_name='anat_preproc'):
    """ 
    The main purpose of this workflow is to process T1 scans. Raw mprage file is deobliqued, reoriented
    into RPI and skullstripped. Also, a whole brain only mask is generated from the skull stripped image
    for later use in registration.

    Returns
    -------
    anat_preproc : workflow
        Anatomical Preprocessing Workflow

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/anat_preproc/anat_preproc.py>`_
    
    Workflow Inputs::
        inputspec.anat : string
            User input anatomical (T1) Image, in any of the 8 orientations
    
    Workflow Outputs::

        outputspec.refit : string
            Path to deobliqued anatomical image
    
        outputspec.reorient : string
            Path to RPI oriented anatomical image
    
        outputspec.skullstrip : string
            Path to skull stripped RPI oriented mprage file with normalized intensities.
    
        outputspec.brain : string
            Path to skull stripped RPI brain image with original intensity values and not normalized or scaled.
    
    Order of commands:
    - Deobliqing the scans. ::
        3drefit -deoblique mprage.nii.gz

    - Re-orienting the image into Right-to-Left, Posterior-to-Anterior, Inferior-to-Superior (RPI) orientation ::
        3dresample -orient RPI
                   -prefix mprage_RPI.nii.gz
                   -inset mprage.nii.gz
                   
    - Skull-Stripping the image ::
        Using AFNI ::
            3dSkullStrip -input mprage_RPI.nii.gz
                         -o_ply mprage_RPI_3dT.nii.gz
        or using BET ::
            bet mprage_RPI.nii.gz

    - The skull-stripping step modifies the intensity values. To get back the original intensity values, we take an element-wise product of the RPI data with a step function of the skull-stripped data ::
        3dcalc -a mprage_RPI.nii.gz
               -b mprage_RPI_3dT.nii.gz
               -expr 'a*step(b)'
               -prefix mprage_RPI_3dc.nii.gz
    
    High Level Workflow Graph:
    .. image:: ../images/anatpreproc_graph.dot.png
       :width: 500
    
    Detailed Workflow Graph:
    .. image:: ../images/anatpreproc_graph_detailed.dot.png
       :width: 500

    Examples
    --------
    >>> from CPAC.anat_preproc import create_anat_preproc
    >>> preproc = create_anat_preproc()
    >>> preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
    >>> preproc.run() #doctest: +SKIP
    """

    preproc = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['anat', 'brain_mask']),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['refit', 'reorient', 'skullstrip', 'brain', 'brain_mask']),
                         name='outputspec')

    anat_deoblique = pe.Node(interface=afni.Refit(), name='anat_deoblique')

    anat_deoblique.inputs.deoblique = True
    preproc.connect(inputnode, 'anat', anat_deoblique, 'in_file')
    preproc.connect(anat_deoblique, 'out_file', outputnode, 'refit')
    # Disable non_local_means_filtering and n4_correction when running niworkflows-ants
    if method == 'niworkflows-ants':
        non_local_means_filtering = False
        n4_correction = False

    if non_local_means_filtering and n4_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(denoise, 'output_image', n4, 'input_image')
    elif non_local_means_filtering and not n4_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
    elif not non_local_means_filtering and n4_correction:
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(anat_deoblique, 'out_file', n4, 'input_image')

    # Anatomical reorientation
    anat_reorient = pe.Node(interface=afni.Resample(), name='anat_reorient')

    anat_reorient.inputs.orientation = 'RPI'
    anat_reorient.inputs.outputtype = 'NIFTI_GZ'

    if n4_correction:
        preproc.connect(n4, 'output_image', anat_reorient, 'in_file')
    elif non_local_means_filtering and not n4_correction:
        preproc.connect(denoise, 'output_image', anat_reorient, 'in_file')
    else:
        preproc.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file')

    preproc.connect(anat_reorient, 'out_file', outputnode, 'reorient')

    if already_skullstripped:

        anat_skullstrip = pe.Node(
            interface=util.IdentityInterface(fields=['out_file']),
            name='anat_skullstrip')

        preproc.connect(anat_reorient, 'out_file', anat_skullstrip, 'out_file')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'skullstrip')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'brain')

    else:

        if method == 'afni':
            # Skull-stripping using AFNI 3dSkullStrip
            inputnode_afni = pe.Node(util.IdentityInterface(fields=[
                'shrink_factor', 'var_shrink_fac', 'shrink_fac_bot_lim',
                'avoid_vent', 'niter', 'pushout', 'touchup', 'fill_hole',
                'avoid_eyes', 'use_edge', 'exp_frac', 'smooth_final',
                'push_to_edge', 'use_skull', 'perc_int', 'max_inter_iter',
                'blur_fwhm', 'fac', 'monkey'
            ]),
                                     name='AFNI_options')

            skullstrip_args = pe.Node(util.Function(
                input_names=[
                    'spat_norm', 'spat_norm_dxyz', 'shrink_fac',
                    'var_shrink_fac', 'shrink_fac_bot_lim', 'avoid_vent',
                    'niter', 'pushout', 'touchup', 'fill_hole', 'avoid_eyes',
                    'use_edge', 'exp_frac', 'smooth_final', 'push_to_edge',
                    'use_skull', 'perc_int', 'max_inter_iter', 'blur_fwhm',
                    'fac', 'monkey'
                ],
                output_names=['expr'],
                function=create_3dskullstrip_arg_string),
                                      name='anat_skullstrip_args')

            preproc.connect([(inputnode_afni, skullstrip_args,
                              [('shrink_factor', 'shrink_fac'),
                               ('var_shrink_fac', 'var_shrink_fac'),
                               ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'),
                               ('avoid_vent', 'avoid_vent'),
                               ('niter', 'niter'), ('pushout', 'pushout'),
                               ('touchup', 'touchup'),
                               ('fill_hole', 'fill_hole'),
                               ('avoid_eyes', 'avoid_eyes'),
                               ('use_edge', 'use_edge'),
                               ('exp_frac', 'exp_frac'),
                               ('smooth_final', 'smooth_final'),
                               ('push_to_edge', 'push_to_edge'),
                               ('use_skull', 'use_skull'),
                               ('perc_int', 'perc_int'),
                               ('max_inter_iter', 'max_inter_iter'),
                               ('blur_fwhm', 'blur_fwhm'), ('fac', 'fac'),
                               ('monkey', 'monkey')])])

            anat_skullstrip = pe.Node(interface=afni.SkullStrip(),
                                      name='anat_skullstrip')

            anat_skullstrip.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')
            preproc.connect(skullstrip_args, 'expr', anat_skullstrip, 'args')

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')
            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            anat_brain_mask = pe.Node(interface=afni.Calc(),
                                      name='anat_brain_mask')

            anat_brain_mask.inputs.expr = 'step(a)'
            anat_brain_mask.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_skullstrip, 'out_file', anat_brain_mask,
                            'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_brain_mask, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'fsl':
            # Skull-stripping using FSL BET
            inputnode_bet = pe.Node(util.IdentityInterface(fields=[
                'frac', 'mask_boolean', 'mesh_boolean', 'outline', 'padding',
                'radius', 'reduce_bias', 'remove_eyes', 'robust', 'skull',
                'surfaces', 'threshold', 'vertical_gradient'
            ]),
                                    name='BET_options')

            anat_skullstrip = pe.Node(interface=fsl.BET(),
                                      name='anat_skullstrip')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')

            preproc.connect([(inputnode_bet, anat_skullstrip, [
                ('frac', 'frac'),
                ('mask_boolean', 'mask'),
                ('mesh_boolean', 'mesh'),
                ('outline', 'outline'),
                ('padding', 'padding'),
                ('radius', 'radius'),
                ('reduce_bias', 'reduce_bias'),
                ('remove_eyes', 'remove_eyes'),
                ('robust', 'robust'),
                ('skull', 'skull'),
                ('surfaces', 'surfaces'),
                ('threshold', 'threshold'),
                ('vertical_gradient', 'vertical_gradient'),
            ])])

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_skullstrip, 'mask_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'niworkflows-ants':
            # Skull-stripping using niworkflows-ants
            anat_skullstrip_ants = init_brain_extraction_wf(
                tpl_target_path=template_path,
                tpl_mask_path=mask_path,
                tpl_regmask_path=regmask_path,
                name='anat_skullstrip_ants')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip_ants,
                            'inputnode.in_files')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'skullstrip')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'brain')

            preproc.connect(anat_skullstrip_ants,
                            'atropos_wf.copy_xform.out_mask', outputnode,
                            'brain_mask')

        elif method == 'mask':

            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(inputnode, 'brain_mask', anat_skullstrip_orig_vol,
                            'in_file_b')

            preproc.connect(inputnode, 'brain_mask', outputnode, 'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

    return preproc
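
# Hedged usage sketch (assumption, not from the source): the same workflow with
# FSL BET skull-stripping instead of the AFNI default shown in the docstring.
# The anatomical path and the BET fraction are placeholders.
preproc_fsl = create_anat_preproc(method='fsl',
                                  non_local_means_filtering=True,
                                  n4_correction=True,
                                  wf_name='anat_preproc_fsl')
preproc_fsl.inputs.inputspec.anat = 'sub-01/anat/mprage.nii.gz'
preproc_fsl.inputs.BET_options.frac = 0.3
preproc_fsl.inputs.BET_options.mask_boolean = True
# preproc_fsl.run()  # requires AFNI, FSL and ANTs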