    def calc(self,
             in_file_a,
             expr,
             in_file_b=None,
             in_file_c=None,
             output=None,
             suffix=None):

        in_file_a, output = self.FuncHandler(in_file_a, output, suffix)
        in_file_b, _ = self.FuncHandler(in_file_b, output, suffix)
        in_file_c, _ = self.FuncHandler(in_file_c, output, suffix)

        # https://nipype.readthedocs.io/en/latest/interfaces/generated/interfaces.afni/utils.html#calc
        mycalc = afni.Calc(in_file_a=in_file_a,
                           in_file_b=in_file_b,
                           expr=expr,
                           out_file=output)
        if in_file_c is not None:
            mycalc.inputs.in_file_c = in_file_c

        mycalc.inputs.outputtype = "NIFTI_GZ"
        mycalc.inputs.num_threads = cpu_count()
        mycalc.run()

        # remove temporary intermediates created by FuncHandler
        for in_file in [in_file_a, in_file_b, in_file_c]:
            if in_file is None:
                continue
            if isinstance(in_file, models.BIDSImageFile):
                in_file = os.path.join(self._output_dir, in_file.filename)
            if "_desc-temp" in in_file:
                os.remove(in_file)
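# For reference, a minimal standalone sketch of the nipype afni.Calc interface
# that the wrapper above drives (file names below are placeholders):
from nipype.interfaces import afni

mask_calc = afni.Calc()
mask_calc.inputs.in_file_a = 'functional.nii.gz'      # placeholder input
mask_calc.inputs.in_file_b = 'brain_mask.nii.gz'      # placeholder input
mask_calc.inputs.expr = 'a*b'                         # voxelwise product
mask_calc.inputs.out_file = 'functional_masked.nii.gz'
mask_calc.inputs.outputtype = 'NIFTI_GZ'
mask_calc.run()  # requires AFNI's 3dcalc on the PATH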
Example #2
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['refimg', 'T1_skullstrip'])
        self.set_output(['affine_func_2_anat', 'warp_func_2_anat'])

        # define datasink substitutions
        self.set_subs([
            ('_calc_calc_calc_calc_calc', ''),
            ('_roi', '_reference'),
            ('_unwarped_Warped', '_unwarped'),
            ('_masked_calc', '_skullstrip'),
            ('_Warped', '_anat'),
        ])

        # Skullstrip the EPI image
        self.epi_skullstrip = Node(fsl.BET(), name='epi_skullstrip')
        self.epi_automask = Node(afni.Automask(args='-overwrite',
                                               outputtype='NIFTI_GZ'),
                                 name='epi_automask')
        self.epi_3dcalc = Node(afni.Calc(expr='c*or(a,b)',
                                         overwrite=True,
                                         outputtype='NIFTI_GZ'),
                               name='epi_3dcalc')

        # create the output name for the registration
        self.create_prefix = Node(Function(input_names=['filename'],
                                           output_names=['basename'],
                                           function=get_prefix),
                                  name='create_prefix')

        # align func to anat
        self.align_func_2_anat = Node(ants.Registration(
            num_threads=settings['num_threads'],
            collapse_output_transforms=False,
            initial_moving_transform_com=1,
            write_composite_transform=True,
            initialize_transforms_per_stage=True,
            transforms=['Rigid', 'Affine'],
            transform_parameters=[(0.1, ), (0.1, )],
            metric=['MI', 'MI'],
            metric_weight=[1, 1],
            radius_or_number_of_bins=[32, 32],
            sampling_strategy=['Regular', 'Regular'],
            sampling_percentage=[0.25, 0.25],
            convergence_threshold=[1.e-6, 1.e-8],
            convergence_window_size=[10, 10],
            smoothing_sigmas=[[3, 2, 1, 0], [2, 1, 0]],
            sigma_units=['vox', 'vox'],
            shrink_factors=[[8, 4, 2, 1], [4, 2, 1]],
            number_of_iterations=[[1000, 500, 250, 100], [500, 250, 100]],
            use_estimate_learning_rate_once=[False, True],
            use_histogram_matching=False,
            verbose=True,
            output_warped_image=True),
                                      name='align_func_2_anat')
        self.align_func_2_anat.n_procs = settings['num_threads']
Example #3
def create_workflow(config: AttrDict, resource_pool: ResourcePool,
                    context: Context):
    for _, rp in resource_pool[['label-reorient_T1w']]:
        anat_image = rp[R('T1w', label='reorient')]
        anat_skullstrip = NipypeJob(interface=afni.SkullStrip(
            outputtype='NIFTI_GZ',
            args=create_3dskullstrip_arg_string(**config)),
                                    reference='anat_skullstrip')
        anat_skullstrip.in_file = anat_image
        anat_brain_mask = NipypeJob(interface=afni.Calc(expr='step(a)',
                                                        outputtype='NIFTI_GZ'),
                                    reference='anat_brain_mask')
        anat_skullstrip_orig_vol = NipypeJob(
            interface=afni.Calc(expr='a*step(b)', outputtype='NIFTI_GZ'),
            reference='anat_skullstrip_orig_vol')
        anat_brain_mask.in_file_a = anat_skullstrip.out_file
        anat_skullstrip_orig_vol.in_file_a = anat_image
        anat_skullstrip_orig_vol.in_file_b = anat_brain_mask.out_file
        rp[R('T1w', desc='skullstrip-afni',
             suffix='mask')] = anat_brain_mask.out_file
        rp[R('T1w', desc='skullstrip-afni',
             suffix='brain')] = anat_skullstrip_orig_vol.out_file
Example #4
def create_corr_ts(name='corr_ts'):

    corr_ts = Workflow(name=name)
    # Define nodes
    inputnode = Node(util.IdentityInterface(fields=[
        'ts',
        'hc_mask',
    ]),
                     name='inputnode')

    outputnode = Node(interface=util.IdentityInterface(
        fields=['corrmap', 'corrmap_z', 'hc_ts']),
                      name='outputnode')

    #extract mean time series of mask
    mean_TS = MapNode(interface=fsl.ImageMeants(),
                      name="mean_TS",
                      iterfield='mask')
    #iterate over using Eigenvalues or mean
    #mean_TS.iterables = ("eig", [True, False])
    #mean_TS.inputs.order = 1
    #mean_TS.inputs.show_all = True
    mean_TS.inputs.eig = False  #use only mean of ROI
    mean_TS.inputs.out_file = "TS.1D"

    #calculate correlation of all voxels with seed voxel
    corr_TS = MapNode(interface=afni.Fim(),
                      name='corr_TS',
                      iterfield='ideal_file')
    corr_TS.inputs.out = 'Correlation'
    corr_TS.inputs.out_file = "corr.nii.gz"

    apply_FisherZ = MapNode(interface=afni.Calc(),
                            name="apply_FisherZ",
                            iterfield='in_file_a')
    apply_FisherZ.inputs.expr = 'log((1+a)/(1-a))/2'  #log = ln
    apply_FisherZ.inputs.out_file = 'corr_Z.nii.gz'
    apply_FisherZ.inputs.outputtype = "NIFTI"

    corr_ts.connect([(inputnode, mean_TS, [('hc_mask', 'mask')]),
                     (inputnode, mean_TS, [('ts', 'in_file')]),
                     (mean_TS, outputnode, [('out_file', 'hc_ts')]),
                     (inputnode, corr_TS, [('ts', 'in_file')]),
                     (mean_TS, corr_TS, [('out_file', 'ideal_file')]),
                     (corr_TS, apply_FisherZ, [('out_file', 'in_file_a')]),
                     (corr_TS, outputnode, [('out_file', 'corrmap')]),
                     (apply_FisherZ, outputnode, [('out_file', 'corrmap_z')])])

    return corr_ts
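# A minimal usage sketch for the workflow above (paths are placeholders; the
# hippocampus masks are the list iterated over by the MapNodes):
corr_wf = create_corr_ts()
corr_wf.base_dir = '/tmp/corr_ts_work'
corr_wf.inputs.inputnode.ts = 'sub-01_task-rest_bold.nii.gz'
corr_wf.inputs.inputnode.hc_mask = ['lh_hc_mask.nii.gz', 'rh_hc_mask.nii.gz']
corr_wf.run()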
Example #5
def create_workflow(config: AttrDict, resource_pool: ResourcePool,
                    context: Context):
    for _, rp in resource_pool[['label-reorient_T1w']]:
        anat = rp[R('T1w', label='reorient')]
        anat_skullstrip = NipypeJob(interface=fsl.BET(output_type='NIFTI_GZ',
                                                      **config),
                                    reference='anat_skullstrip')
        anat_skullstrip.in_file = anat
        anat_skullstrip_orig_vol = NipypeJob(
            interface=afni.Calc(expr='a*step(b)', outputtype='NIFTI_GZ'),
            reference='anat_skullstrip_orig_vol')
        anat_skullstrip_orig_vol.in_file_a = anat
        anat_skullstrip_orig_vol.in_file_b = anat_skullstrip.out_file
        rp[R('T1w', desc='skullstrip-fsl',
             suffix='mask')] = anat_skullstrip.mask_file
        rp[R('T1w', desc='skullstrip-fsl',
             suffix='brain')] = anat_skullstrip_orig_vol.out_file
Example #6
def brain_extraction(wf, cfg, strat_pool, pipe_num, opt=None):
    '''
    {"name": "brain_extraction",
     "config": "None",
     "switch": "None",
     "option_key": "None",
     "option_val": "None",
     "inputs": [(["desc-preproc_T1w", "desc-reorient_T1w", "T1w"],
                 ["space-T1w_desc-brain_mask", "space-T1w_desc-acpcbrain_mask"])],
     "outputs": ["desc-brain_T1w"]}
    '''
    '''
    brain_mask_deoblique = pe.Node(interface=afni.Refit(),
                                   name='brain_mask_deoblique')
    brain_mask_deoblique.inputs.deoblique = True
    wf.connect(inputnode, 'brain_mask',
                    brain_mask_deoblique, 'in_file')

    brain_mask_reorient = pe.Node(interface=afni.Resample(),
                                  name='brain_mask_reorient')
    brain_mask_reorient.inputs.orientation = 'RPI'
    brain_mask_reorient.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(brain_mask_deoblique, 'out_file',
                    brain_mask_reorient, 'in_file')
    '''

    anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                       name=f'brain_extraction_{pipe_num}')

    anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
    anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_a')

    node, out = strat_pool.get_data(
        ['space-T1w_desc-brain_mask', 'space-T1w_desc-acpcbrain_mask'])
    wf.connect(node, out, anat_skullstrip_orig_vol, 'in_file_b')

    outputs = {'desc-brain_T1w': (anat_skullstrip_orig_vol, 'out_file')}

    return (wf, outputs)
Example #7
def extract_label(path, label_id, output_name):
    files2extract = glob.glob(path)
    for subj, subj_aparc in enumerate(files2extract):
        print('extracting', output_name, 'for subject', subj)

        # convert the FreeSurfer volume (e.g. .mgz) to NIfTI
        converted_file = os.path.splitext(subj_aparc)[0] + '.nii.gz'
        mc = freesurfer.MRIConvert()
        mc.inputs.in_file = subj_aparc
        mc.inputs.out_file = converted_file
        mc.run()

        # binarize the label of interest: amongst(a, id) is 1 where a == id
        calc = afni.Calc()
        calc.inputs.in_file_a = converted_file
        calc.inputs.expr = 'amongst(a,{0})'.format(label_id)
        calc.inputs.out_file = os.path.join(os.path.dirname(subj_aparc),
                                            output_name + '.nii.gz')
        calc.run()
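# A minimal usage sketch (the glob pattern is a placeholder; 17 is the
# FreeSurfer aseg label for Left-Hippocampus):
extract_label('/data/subjects/*/mri/aparc+aseg.mgz', 17, 'left_hippocampus')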
Example #8
def afni_wf(name='AFNISkullStripWorkflow'):
    """
    Skull-stripping workflow

    Derived from the codebase of the QAP:
    https://github.com/preprocessed-connectomes-project/\
quality-assessment-protocol/blob/master/qap/anatomical_preproc.py#L105


    """

    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(
        fields=['bias_corrected', 'out_file', 'out_mask', 'bias_image']),
                         name='outputnode')

    inu_n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3, save_bias=True),
                     name='CorrectINU')

    sstrip = pe.Node(afni.SkullStrip(outputtype='NIFTI_GZ'), name='skullstrip')
    sstrip_orig_vol = pe.Node(afni.Calc(expr='a*step(b)',
                                        outputtype='NIFTI_GZ'),
                              name='sstrip_orig_vol')
    binarize = pe.Node(fsl.Threshold(args='-bin', thresh=1.e-3),
                       name='binarize')

    workflow.connect([(inputnode, sstrip_orig_vol, [('in_file', 'in_file_a')]),
                      (inputnode, inu_n4, [('in_file', 'input_image')]),
                      (inu_n4, sstrip, [('output_image', 'in_file')]),
                      (sstrip, sstrip_orig_vol, [('out_file', 'in_file_b')]),
                      (sstrip_orig_vol, binarize, [('out_file', 'in_file')]),
                      (sstrip_orig_vol, outputnode, [('out_file', 'out_file')
                                                     ]),
                      (binarize, outputnode, [('out_file', 'out_mask')]),
                      (inu_n4, outputnode, [('output_image', 'bias_corrected'),
                                            ('bias_image', 'bias_image')])])
    return workflow
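# A minimal usage sketch (the input file name is a placeholder):
skullstrip_wf = afni_wf()
skullstrip_wf.base_dir = '/tmp/skullstrip_work'
skullstrip_wf.inputs.inputnode.in_file = 'sub-01_T1w.nii.gz'
skullstrip_wf.run()  # requires AFNI, ANTs and FSL on the PATH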
Example #9
def test_calc():
    input_map = dict(
        args=dict(argstr='%s', ),
        environ=dict(usedefault=True, ),
        expr=dict(
            argstr='-expr %s',
            mandatory=True,
        ),
        ignore_exception=dict(usedefault=True, ),
        in_file_a=dict(
            argstr='-a %s',
            mandatory=True,
        ),
        in_file_b=dict(argstr=' -b %s', ),
        out_file=dict(argstr='-prefix %s', ),
        single_idx=dict(),
        start_idx=dict(requires=['stop_idx'], ),
        stop_idx=dict(requires=['start_idx'], ),
    )
    instance = afni.Calc()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(instance.inputs.traits()[key],
                                        metakey), value
Example #10
def init_mask_finalize_wf(name="mask_finalize_wf"):
    """Creates a final mask using a combination of the t1 mask and dwi2mask
    """
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['t1_mask', 'resampled_b0s']),
        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['mask_file']),
                         name='outputnode')
    workflow = Workflow(name=name)
    resample_t1_mask = pe.Node(afni.Resample(outputtype='NIFTI_GZ',
                                             resample_mode="NN"),
                               name='resample_t1_mask')
    b0mask = pe.Node(afni.Automask(outputtype='NIFTI_GZ'), name='b0mask')
    or_mask = pe.Node(afni.Calc(outputtype='NIFTI_GZ', expr='step(a+b)'),
                      name='or_mask')
    workflow.connect([(inputnode, resample_t1_mask, [('t1_mask', 'in_file'),
                                                     ('resampled_b0s',
                                                      'master')]),
                      (inputnode, b0mask, [('resampled_b0s', 'in_file')]),
                      (b0mask, or_mask, [('out_file', 'in_file_a')]),
                      (resample_t1_mask, or_mask, [('out_file', 'in_file_b')]),
                      (or_mask, outputnode, [('out_file', 'mask_file')])])

    return workflow
Example #11
def connect_func_ingress(workflow,
                         strat_list,
                         c,
                         sub_dict,
                         subject_id,
                         input_creds_path,
                         unique_id=None):

    for num_strat, strat in enumerate(strat_list):

        if 'func' in sub_dict:
            func_paths_dict = sub_dict['func']
        else:
            func_paths_dict = sub_dict['rest']

        if unique_id is None:
            workflow_name = f'func_gather_{num_strat}'
        else:
            workflow_name = f'func_gather_{unique_id}_{num_strat}'

        func_wf = create_func_datasource(func_paths_dict, workflow_name)

        func_wf.inputs.inputnode.set(subject=subject_id,
                                     creds_path=input_creds_path,
                                     dl_dir=c.workingDirectory)
        func_wf.get_node('inputnode').iterables = \
            ("scan", list(func_paths_dict.keys()))

        strat.update_resource_pool({
            'subject': (func_wf, 'outputspec.subject'),
            'scan': (func_wf, 'outputspec.scan')
        })

        # Grab field maps
        diff = False
        blip = False
        fmap_rp_list = []
        fmap_TE_list = []
        if "fmap" in sub_dict:
            for key in sub_dict["fmap"]:
                gather_fmap = create_fmap_datasource(
                    sub_dict["fmap"], "fmap_gather_"
                    "{0}".format(key))
                gather_fmap.inputs.inputnode.set(subject=subject_id,
                                                 creds_path=input_creds_path,
                                                 dl_dir=c.workingDirectory)
                gather_fmap.inputs.inputnode.scan = key
                strat.update_resource_pool({
                    key: (gather_fmap, 'outputspec.rest'),
                    "{0}_scan_params".format(key):
                    (gather_fmap, 'outputspec.scan_params')
                })

                fmap_rp_list.append(key)

                if key == "diff_phase" or key == "diff_mag_one" or \
                                key == "diff_mag_two":
                    diff = True

                    get_fmap_metadata_imports = ['import json']
                    get_fmap_metadata = pe.Node(
                        Function(input_names=['data_config_scan_params'],
                                 output_names=[
                                     'echo_time', 'dwell_time', 'pe_direction'
                                 ],
                                 function=get_fmap_phasediff_metadata,
                                 imports=get_fmap_metadata_imports),
                        name='{0}_get_metadata_{1}'.format(key, num_strat))

                    node, out_file = strat["{}_scan_params".format(key)]
                    workflow.connect(node, out_file, get_fmap_metadata,
                                     'data_config_scan_params')

                    strat.update_resource_pool({
                        "{}_TE".format(key): (get_fmap_metadata, 'echo_time'),
                        "{}_dwell".format(key):
                        (get_fmap_metadata, 'dwell_time'),
                        "{}_pedir".format(key):
                        (get_fmap_metadata, 'pe_direction')
                    })
                    fmap_TE_list.append("{}_TE".format(key))

                if key == "epi_AP" or key == "epi_PA":
                    blip = True

            if diff:
                calc_delta_ratio = pe.Node(
                    Function(input_names=[
                        'dwell_time', 'echo_time_one', 'echo_time_two',
                        'echo_time_three'
                    ],
                             output_names=['deltaTE', 'dwell_asym_ratio'],
                             function=calc_deltaTE_and_asym_ratio),
                    name='diff_distcor_calc_delta_{}'.format(num_strat))

                node, out_file = strat['diff_phase_dwell']
                workflow.connect(node, out_file, calc_delta_ratio,
                                 'dwell_time')

                node, out_file = strat[fmap_TE_list[0]]
                workflow.connect(node, out_file, calc_delta_ratio,
                                 'echo_time_one')

                node, out_file = strat[fmap_TE_list[1]]
                workflow.connect(node, out_file, calc_delta_ratio,
                                 'echo_time_two')

                if len(fmap_TE_list) > 2:
                    node, out_file = strat[fmap_TE_list[2]]
                    workflow.connect(node, out_file, calc_delta_ratio,
                                     'echo_time_three')

                strat.update_resource_pool({
                    'deltaTE': (calc_delta_ratio, 'deltaTE'),
                    'dwell_asym_ratio': (calc_delta_ratio, 'dwell_asym_ratio')
                })

        # Add in nodes to get parameters from configuration file
        # a node which checks if scan_parameters are present for each scan
        if unique_id is None:
            workflow_name = f'scan_params_{num_strat}'
        else:
            workflow_name = f'scan_params_{unique_id}_{num_strat}'

        scan_params = \
            pe.Node(Function(
                input_names=['data_config_scan_params',
                             'subject_id',
                             'scan',
                             'pipeconfig_tr',
                             'pipeconfig_tpattern',
                             'pipeconfig_start_indx',
                             'pipeconfig_stop_indx'],
                output_names=['tr',
                              'tpattern',
                              'ref_slice',
                              'start_indx',
                              'stop_indx',
                              'pe_direction'],
                function=get_scan_params,
                as_module=True
            ), name=workflow_name)

        if "Selected Functional Volume" in c.func_reg_input:
            get_func_volume = pe.Node(
                interface=afni.Calc(),
                name='get_func_volume_{0}'.format(num_strat))

            get_func_volume.inputs.set(expr='a',
                                       single_idx=c.func_reg_input_volume,
                                       outputtype='NIFTI_GZ')
            workflow.connect(func_wf, 'outputspec.rest', get_func_volume,
                             'in_file_a')

        # wire in the scan parameter workflow
        workflow.connect(func_wf, 'outputspec.scan_params', scan_params,
                         'data_config_scan_params')

        workflow.connect(func_wf, 'outputspec.subject', scan_params,
                         'subject_id')

        workflow.connect(func_wf, 'outputspec.scan', scan_params, 'scan')

        # connect in constants
        scan_params.inputs.set(pipeconfig_start_indx=c.startIdx,
                               pipeconfig_stop_indx=c.stopIdx)

        strat.update_resource_pool({
            'raw_functional': (func_wf, 'outputspec.rest'),
            'scan_id': (func_wf, 'outputspec.scan'),
            'tr': (scan_params, 'tr'),
            'tpattern': (scan_params, 'tpattern'),
            'start_idx': (scan_params, 'start_indx'),
            'stop_idx': (scan_params, 'stop_indx'),
            'pe_direction': (scan_params, 'pe_direction'),
        })

        strat.set_leaf_properties(func_wf, 'outputspec.rest')

        if "Selected Functional Volume" in c.func_reg_input:
            strat.update_resource_pool(
                {'selected_func_volume': (get_func_volume, 'out_file')})

    return (workflow, diff, blip, fmap_rp_list)
Example #12
def hmc_afni(name='fMRI_HMC_afni', st_correct=False, despike=False, deoblique=False):
    """A head motion correction (HMC) workflow for functional scans"""

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']), name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_fd']), name='outputnode')

    drop_trs = pe.Node(afni.Calc(expr='a', outputtype='NIFTI_GZ'),
                       name='drop_trs')

    reorient = pe.Node(afni.Resample(
        orientation='RPI', outputtype='NIFTI_GZ'), name='reorient')

    get_mean_RPI = pe.Node(afni.TStat(
        options='-mean', outputtype='NIFTI_GZ'), name='get_mean_RPI')

    # calculate hmc parameters
    hmc = pe.Node(
        afni.Volreg(args='-Fourier -twopass', zpad=4, outputtype='NIFTI_GZ'),
        name='motion_correct')

    get_mean_motion = get_mean_RPI.clone('get_mean_motion')
    hmc_A = hmc.clone('motion_correct_A')
    hmc_A.inputs.md1d_file = 'max_displacement.1D'

    # Compute the frame-wise displacement
    calc_fd = pe.Node(niu.Function(
        function=fd_jenkinson, input_names=['in_file', 'rmax'],
        output_names=['out_fd']), name='calc_fd')

    workflow.connect([
        (inputnode, drop_trs, [('in_file', 'in_file_a'),
                               ('start_idx', 'start_idx'),
                               ('stop_idx', 'stop_idx')]),
        (inputnode, calc_fd, [('fd_radius', 'rmax')]),
        (reorient, get_mean_RPI, [('out_file', 'in_file')]),
        (reorient, hmc, [('out_file', 'in_file')]),
        (get_mean_RPI, hmc, [('out_file', 'basefile')]),
        (hmc, get_mean_motion, [('out_file', 'in_file')]),
        (reorient, hmc_A, [('out_file', 'in_file')]),
        (get_mean_motion, hmc_A, [('out_file', 'basefile')]),
        (hmc_A, outputnode, [('out_file', 'out_file')]),
        (hmc_A, calc_fd, [('oned_matrix_save', 'in_file')]),
        (calc_fd, outputnode, [('out_fd', 'out_fd')]),
    ])

    # Slice timing correction, despiking, and deoblique

    st_corr = pe.Node(afni.TShift(outputtype='NIFTI_GZ'), name='TimeShifts')

    deoblique_node = pe.Node(afni.Refit(deoblique=True), name='deoblique')

    despike_node = pe.Node(afni.Despike(outputtype='NIFTI_GZ'), name='despike')

    if st_correct and despike and deoblique:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    elif st_correct and despike:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, reorient, [('out_file', 'in_file')]),
        ])

    elif st_correct and deoblique:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    elif st_correct:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, reorient, [('out_file', 'in_file')])
        ])

    elif despike and deoblique:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    elif despike:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, reorient, [('out_file', 'in_file')]),
        ])

    elif deoblique:

        workflow.connect([
            (drop_trs, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, reorient, [('out_file', 'in_file')])
        ])

    else:

        workflow.connect([
            (drop_trs, reorient, [('out_file', 'in_file')]),
        ])

    return workflow
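# A minimal usage sketch with slice-timing correction and despiking enabled
# (input values are placeholders; start_idx/stop_idx select the retained TRs):
hmc_wf = hmc_afni(st_correct=True, despike=True)
hmc_wf.base_dir = '/tmp/hmc_work'
hmc_wf.inputs.inputnode.in_file = 'sub-01_task-rest_bold.nii.gz'
hmc_wf.inputs.inputnode.fd_radius = 80.0  # radius in mm passed to fd_jenkinson
hmc_wf.inputs.inputnode.start_idx = 4
hmc_wf.inputs.inputnode.stop_idx = 299
hmc_wf.run()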
Example #13
        name='outputspec2')

    inputnode2.inputs.drifter_result = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'

    # Call 'fslcpgeom source dest': source is the reorient output .nii.gz file, dest is the drifter folder .nii.gz file
    reoriented_file = results_path + '/' + data + '_1/reorient/corr_epi_reoriented.nii.gz'
    drifted_file = results_path + '/' + data + '_1/drifter/drifter_corrected.nii.gz'
    call(["fslcpgeom", reoriented_file, drifted_file])

    # AFNI skullstrip and mean image skullstrip
    tstat1 = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                     name='tstat1')
    automask = pe.Node(interface=afni.Automask(dilate=1,
                                               outputtype="NIFTI_GZ"),
                       name='automask')
    skullstrip = pe.Node(interface=afni.Calc(expr='a*b',
                                             outputtype="NIFTI_GZ"),
                         name='skullstrip')
    tstat2 = pe.Node(interface=afni.TStat(args='-mean', outputtype="NIFTI_GZ"),
                     name='tstat2')

    workflow2.connect(inputnode2, 'drifter_result', tstat1, 'in_file')
    workflow2.connect(tstat1, 'out_file', automask, 'in_file')
    workflow2.connect(automask, 'out_file', skullstrip, 'in_file_b')
    workflow2.connect(inputnode2, 'drifter_result', skullstrip, 'in_file_a')
    workflow2.connect(skullstrip, 'out_file', tstat2, 'in_file')

    # Remove the first n (3) volumes
    trim = pe.Node(interface=Trim(begin_index=3), name='trim')
    workflow2.connect(skullstrip, 'out_file', trim, 'in_file')

    # Spatial smoothing, kernel sigma 2.00 mm (5 mm is too much)
Example #14
def coregister_fmri_session(session_data,
                            t_r,
                            write_dir,
                            brain_volume,
                            use_rats_tool=True,
                            slice_timing=True,
                            prior_rigid_body_registration=False,
                            caching=False,
                            voxel_size_x=.1,
                            voxel_size_y=.1,
                            verbose=True,
                            **environ_kwargs):
    """
    Coregistration of the subject's functional and anatomical images.
    The functional volume is aligned to the anatomical, first with a rigid body
    registration and then on a per-slice basis (only a fine correction, this is
    mostly for correction of EPI distortion).


    Parameters
    ----------
    session_data : sammba.registration.SessionData
        Single animal data, giving paths to its functional and anatomical
        image, as well as its identifier.

    t_r : float
        Repetition time for the EPI, in seconds.

    write_dir : str
        Directory to save the output and temporary images.

    brain_volume : int
        Volume of the brain in mm3 used for brain extraction.
        Typically 400 for mouse and 1800 for rat.

    use_rats_tool : bool, optional
        If True, brain mask is computed using RATS Mathematical Morphology.
        Otherwise, a histogram-based brain segmentation is used.

    prior_rigid_body_registration : bool, optional
        If True, a rigid-body registration of the anat to the func is performed
        prior to the warp. Useful if the image headers have missing/wrong
        information.

    voxel_size_x : float, optional
        Resampling resolution for the x-axis, in mm.

    voxel_size_y : float, optional
        Resampling resolution for the y-axis, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.

    environ_kwargs : extra arguments keywords
        Extra arguments keywords, passed to interfaces environ variable.

    Returns
    -------
    The same `session_data`, updated with the following attributes:
        - `output_dir_` : str
                          Path to the output directory.
        - `coreg_func_` : str
                          Path to the coregistered functional image.
        - `coreg_anat_` : str
                          Path to the coregistered anatomical image.
        - `coreg_transform_` : str
                               Path to the transform from anat to func.

    Notes
    -----
    If `use_rats_tool` is turned on, RATS tool is used for brain extraction
    and has to be cited. For more information, see
    `RATS <http://www.iibi.uiowa.edu/content/rats-overview/>`_
    """
    func_filename = session_data.func
    anat_filename = session_data.anat

    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    for (key, value) in environ_kwargs.items():
        environ[key] = value

    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if use_rats_tool:
        if segmentation.interfaces.Info().version() is None:
            raise ValueError('Can not locate RATS')
        else:
            ComputeMask = segmentation.MathMorphoMask
    else:
        ComputeMask = segmentation.HistogramMask

    if ants.base.Info().version is None:
        raise ValueError('Can not locate ANTS')

    if caching:
        memory = Memory(write_dir)
        tshift = memory.cache(afni.TShift)
        clip_level = memory.cache(afni.ClipLevel)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        tstat = memory.cache(afni.TStat)
        compute_mask = memory.cache(ComputeMask)
        calc = memory.cache(afni.Calc)
        allineate = memory.cache(afni.Allineate)
        allineate2 = memory.cache(afni.Allineate)
        unifize = memory.cache(afni.Unifize)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        catmatvec = memory.cache(afni.CatMatvec)
        warp = memory.cache(afni.Warp)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(afni.ZCutUp)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(afni.Zcat)
        copy_geom = memory.cache(fsl.CopyGeom)
        overwrite = False
        for step in [
                tshift, volreg, allineate, allineate2, tstat, compute_mask,
                calc, unifize, resample, slicer, warp_apply, qwarp, merge
        ]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        tshift = afni.TShift(terminal_output=terminal_output).run
        clip_level = afni.ClipLevel().run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        allineate2 = afni.Allineate(terminal_output=terminal_output
                                    ).run  # TODO: remove once the bug is fixed
        tstat = afni.TStat(terminal_output=terminal_output).run
        compute_mask = ComputeMask().run
        calc = afni.Calc(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        warp = afni.Warp().run
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = afni.ZCutUp(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = afni.Zcat(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        overwrite = True

    session_data._check_inputs()
    output_dir = os.path.join(os.path.abspath(write_dir),
                              session_data.animal_id)
    session_data._set_output_dir_(output_dir)
    current_dir = os.getcwd()
    os.chdir(output_dir)
    output_files = []

    #######################################
    # Correct functional for slice timing #
    #######################################
    if slice_timing:
        out_tshift = tshift(in_file=func_filename,
                            outputtype='NIFTI_GZ',
                            tpattern='altplus',
                            tr=str(t_r),
                            environ=environ)
        func_filename = out_tshift.outputs.out_file
        output_files.append(func_filename)

    ################################################
    # Register functional volumes to the first one #
    ################################################
    # XXX why do you need a thresholded image ?
    out_clip_level = clip_level(in_file=func_filename)
    out_calc_threshold = calc(in_file_a=func_filename,
                              expr='ispositive(a-{0}) * a'.format(
                                  out_clip_level.outputs.clip_val),
                              outputtype='NIFTI_GZ')
    thresholded_filename = out_calc_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        outputtype='NIFTI_GZ',
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='Vr.1Dfile.1D',
                                  use_ext=False),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='Vr.aff12.1D',
                                         use_ext=False))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='Av'),
                              environ=environ)

    # 3dAllineate removes the obliquity. This is not a good way to re-add it,
    # as it would remove motion-correction info from the header if this were
    # an AFNI file. As it happens it's NIfTI, which does not store that, so it
    # is irrelevant here.
    out_copy_geom = copy_geom(dest_file=out_allineate.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)

    allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=allineated_filename,
                      args='-mean',
                      outputtype='NIFTI_GZ',
                      environ=environ)

    # Update outputs
    output_files.extend([
        thresholded_filename, out_volreg.outputs.oned_matrix_save,
        out_volreg.outputs.out_file, out_volreg.outputs.md1d_file,
        allineated_filename, out_tstat.outputs.out_file
    ])

    ############################################
    # Correct anat and func for intensity bias #
    ############################################
    # Correct the functional average for intensity bias
    out_bias_correct = bias_correct(input_image=out_tstat.outputs.out_file)
    unbiased_func_filename = out_bias_correct.outputs.output_image

    # Bias correct the anatomical image
    out_unifize = unifize(in_file=anat_filename,
                          outputtype='NIFTI_GZ',
                          environ=environ)
    unbiased_anat_filename = out_unifize.outputs.out_file

    # Update outputs
    output_files.extend([unbiased_func_filename, unbiased_anat_filename])

    #############################################
    # Rigid-body registration anat -> mean func #
    #############################################
    if prior_rigid_body_registration:
        # Mask the mean functional volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_func_filename)
        out_compute_mask_func = compute_mask(
            in_file=unbiased_func_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_cacl_func = calc(in_file_a=unbiased_func_filename,
                             in_file_b=out_compute_mask_func.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Mask the anatomical volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_anat_filename)
        out_compute_mask_anat = compute_mask(
            in_file=unbiased_anat_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_cacl_anat = calc(in_file_a=unbiased_anat_filename,
                             in_file_b=out_compute_mask_anat.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Compute the transformation from functional to anatomical brain
        # XXX: why in this direction?
        out_allineate = allineate2(
            in_file=out_cacl_func.outputs.out_file,
            reference=out_cacl_anat.outputs.out_file,
            out_matrix=fname_presuffix(out_cacl_func.outputs.out_file,
                                       suffix='_shr.aff12.1D',
                                       use_ext=False),
            center_of_mass='',
            warp_type='shift_rotate',
            out_file=fname_presuffix(out_cacl_func.outputs.out_file,
                                     suffix='_shr'),
            environ=environ)
        rigid_transform_file = out_allineate.outputs.out_matrix
        output_files.extend([
            out_compute_mask_func.outputs.out_file,
            out_cacl_func.outputs.out_file,
            out_compute_mask_anat.outputs.out_file,
            out_cacl_anat.outputs.out_file, rigid_transform_file,
            out_allineate.outputs.out_file
        ])

        # apply the inverse transform to register the anatomical to the func
        catmatvec_out_file = fname_presuffix(rigid_transform_file,
                                             suffix='INV')
        out_catmatvec = catmatvec(in_file=[(rigid_transform_file, 'I')],
                                  oneline=True,
                                  out_file=catmatvec_out_file)
        output_files.append(out_catmatvec.outputs.out_file)
        out_allineate = allineate(in_file=unbiased_anat_filename,
                                  master=unbiased_func_filename,
                                  in_matrix=out_catmatvec.outputs.out_file,
                                  out_file=fname_presuffix(
                                      unbiased_anat_filename,
                                      suffix='_shr_in_func_space'),
                                  environ=environ)
        allineated_anat_filename = out_allineate.outputs.out_file
        output_files.append(allineated_anat_filename)
    else:
        allineated_anat_filename = unbiased_anat_filename

    ############################################
    # Nonlinear registration anat -> mean func #
    ############################################
    # 3dWarp doesn't put the obliquity in the header, so do it manually
    # This step generates one file per slice and per time point, so we are
    # making sure they are removed at the end
    out_warp = warp(in_file=allineated_anat_filename,
                    oblique_parent=unbiased_func_filename,
                    interp='quintic',
                    gridset=unbiased_func_filename,
                    outputtype='NIFTI_GZ',
                    verbose=True,
                    environ=environ)
    registered_anat_filename = out_warp.outputs.out_file
    registered_anat_oblique_filename = fix_obliquity(registered_anat_filename,
                                                     unbiased_func_filename,
                                                     verbose=verbose)

    # Concatenate all the anat to func transforms
    mat_filename = fname_presuffix(registered_anat_filename,
                                   suffix='_warp.mat',
                                   use_ext=False)
    # XXX Handle this correctly according to caching
    if not os.path.isfile(mat_filename):
        np.savetxt(mat_filename, [out_warp.runtime.stdout], fmt='%s')
        output_files.append(mat_filename)

    transform_filename = fname_presuffix(registered_anat_filename,
                                         suffix='_anat_to_func.aff12.1D',
                                         use_ext=False)
    if prior_rigid_body_registration:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE'),
                               (rigid_transform_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)
    else:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)

    ##################################################
    # Per-slice non-linear registration func -> anat #
    ##################################################
    # Slice anatomical image
    anat_img = nibabel.load(registered_anat_oblique_filename)
    anat_n_slices = anat_img.header.get_data_shape()[2]
    sliced_registered_anat_filenames = []
    for slice_n in range(anat_n_slices):
        out_slicer = slicer(in_file=registered_anat_oblique_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(
                                registered_anat_oblique_filename,
                                suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      registered_anat_oblique_filename,
                                      verbose=verbose)
        sliced_registered_anat_filenames.append(oblique_slice)

    # Slice mean functional
    sliced_bias_corrected_filenames = []
    img = nibabel.load(func_filename)
    n_slices = img.header.get_data_shape()[2]
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=unbiased_func_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(unbiased_func_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      unbiased_func_filename,
                                      verbose=verbose)
        sliced_bias_corrected_filenames.append(oblique_slice)

    # The resampling below also deals with slices where there is no signal
    # (for example the rostral end of some anatomical images).

    # The inverse warp frequently fails; resampling can help it work better
    # XXX why specifically .1 in voxel_size ?
    voxel_size_z = anat_img.header.get_zooms()[2]
    resampled_registered_anat_filenames = []
    for sliced_registered_anat_filename in sliced_registered_anat_filenames:
        out_resample = resample(in_file=sliced_registered_anat_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_registered_anat_filenames.append(
            out_resample.outputs.out_file)

    resampled_bias_corrected_filenames = []
    for sliced_bias_corrected_filename in sliced_bias_corrected_filenames:
        out_resample = resample(in_file=sliced_bias_corrected_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_bias_corrected_filenames.append(
            out_resample.outputs.out_file)

    # single slice non-linear functional to anatomical registration
    warped_slices = []
    warp_filenames = []
    for (resampled_bias_corrected_filename,
         resampled_registered_anat_filename) in zip(
             resampled_bias_corrected_filenames,
             resampled_registered_anat_filenames):
        warped_slice = fname_presuffix(resampled_bias_corrected_filename,
                                       suffix='_qw')
        out_qwarp = qwarp(
            in_file=resampled_bias_corrected_filename,
            base_file=resampled_registered_anat_filename,
            iwarp=True,  # XXX: is this necessary
            noneg=True,
            blur=[0],
            nmi=True,
            noXdis=True,
            allineate=True,
            allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
            '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
            '-parfix 7 0 -parfix 9 0 '
            '-parfix 10 0 -parfix 12 0',
            out_file=warped_slice,
            environ=environ)
        warped_slices.append(out_qwarp.outputs.warped_source)
        warp_filenames.append(out_qwarp.outputs.source_warp)
        output_files.append(out_qwarp.outputs.base_warp)
        # There are files generated by the allineate option
        output_files.extend([
            fname_presuffix(out_qwarp.outputs.warped_source, suffix='_Allin'),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.nii',
                            use_ext=False),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.aff12.1D',
                            use_ext=False)
        ])

    # Resample the mean volume back to the initial resolution,
    voxel_size = nibabel.load(unbiased_func_filename).header.get_zooms()
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # fix the obliquity
    resampled_warped_slices_oblique = []
    for (sliced_registered_anat_filename,
         resampled_warped_slice) in zip(sliced_registered_anat_filenames,
                                        resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_registered_anat_filename,
                                      verbose=verbose)
        resampled_warped_slices_oblique.append(oblique_slice)

    # slice functional
    sliced_func_filenames = []
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=allineated_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(allineated_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      allineated_filename,
                                      verbose=verbose)
        sliced_func_filenames.append(oblique_slice)

    # Apply the precomputed warp slice by slice
    warped_func_slices = []
    for (sliced_func_filename, warp_filename) in zip(sliced_func_filenames,
                                                     warp_filenames):
        out_warp_apply = warp_apply(in_file=sliced_func_filename,
                                    master=sliced_func_filename,
                                    warp=warp_filename,
                                    out_file=fname_presuffix(
                                        sliced_func_filename, suffix='_qw'),
                                    environ=environ)
        warped_func_slices.append(out_warp_apply.outputs.out_file)

    # Finally, merge all slices!
    out_merge_func = merge(in_files=warped_func_slices,
                           outputtype='NIFTI_GZ',
                           environ=environ)

    # Fix the obliquity
    merged_oblique = fix_obliquity(out_merge_func.outputs.out_file,
                                   allineated_filename,
                                   verbose=verbose)

    # Update the fmri data
    setattr(session_data, "coreg_func_", merged_oblique)
    setattr(session_data, "coreg_anat_", registered_anat_oblique_filename)
    setattr(session_data, "coreg_transform_", transform_filename)
    os.chdir(current_dir)

    # Collect the outputs
    output_files.extend(sliced_registered_anat_filenames +
                        sliced_bias_corrected_filenames +
                        resampled_registered_anat_filenames +
                        resampled_bias_corrected_filenames + warped_slices +
                        warp_filenames + resampled_warped_slices_oblique +
                        sliced_func_filenames + warped_func_slices)
    if not caching:
        for out_file in output_files:
            if os.path.isfile(out_file):
                os.remove(out_file)
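# A minimal usage sketch. The SessionData construction below is an assumption
# based on the docstring and on the attributes the function reads
# (func, anat, animal_id); all paths are placeholders:
from sammba.registration import SessionData

session = SessionData(func='sub-01_func.nii.gz',
                      anat='sub-01_anat.nii.gz',
                      animal_id='sub-01')
coregister_fmri_session(session, t_r=1.0, write_dir='/tmp/coreg',
                        brain_volume=400)
print(session.coreg_func_, session.coreg_anat_, session.coreg_transform_)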
Example #15
def generate_summarize_tissue_mask(nuisance_wf,
                                   pipeline_resource_pool,
                                   regressor_descriptor,
                                   regressor_selector,
                                   use_ants=True,
                                   ventricle_mask_exist=True):
    """
    Add tissue mask generation into pipeline according to the selector.

    :param nuisance_wf: Nuisance regressor workflow.
    :param pipeline_resource_pool: dictionary of available resources.
    :param regressor_descriptor: dictionary of steps to build, including keys:
        'tissue', 'resolution', 'erosion'
    :param regressor_selector: dictionary with the original selector

    :return: the updated pipeline_resource_pool and the key of the final mask
        created by this operation.
    """

    steps = [
        key for key in ['tissue', 'resolution', 'erosion']
        if key in regressor_descriptor
    ]

    full_mask_key = "_".join(regressor_descriptor[s] for s in steps)

    for step_i, step in enumerate(steps):

        mask_key = "_".join(regressor_descriptor[s]
                            for s in steps[:step_i + 1])

        if mask_key in pipeline_resource_pool:
            continue

        node_mask_key = re.sub(r"[^\w]", "_", mask_key)

        prev_mask_key = "_".join(regressor_descriptor[s]
                                 for s in steps[:step_i])

        if step == 'tissue':
            pass

        elif step == 'resolution':

            mask_to_epi = pe.Node(interface=fsl.FLIRT(),
                                  name='{}_flirt'.format(node_mask_key))

            mask_to_epi.inputs.interp = 'nearestneighbour'

            if regressor_selector['extraction_resolution'] == "Functional":
                nuisance_wf.connect(
                    *(pipeline_resource_pool['Functional_mean'] +
                      (mask_to_epi, 'reference')))
            else:
                resolution = regressor_selector['extraction_resolution']
                mask_to_epi.inputs.apply_isoxfm = resolution

                nuisance_wf.connect(*(pipeline_resource_pool[
                    'Anatomical_{}mm'.format(resolution)] +
                                      (mask_to_epi, 'reference')))

            nuisance_wf.connect(*(pipeline_resource_pool[prev_mask_key] +
                                  (mask_to_epi, 'in_file')))

            pipeline_resource_pool[mask_key] = \
                (mask_to_epi, 'out_file')

            if full_mask_key.startswith('CerebrospinalFluid'):
                pipeline_resource_pool = generate_summarize_tissue_mask_ventricles_masking(
                    nuisance_wf, pipeline_resource_pool, regressor_descriptor,
                    regressor_selector, node_mask_key, use_ants,
                    ventricle_mask_exist)

        elif step == 'erosion':

            erode_mask_node = pe.Node(afni.Calc(
                args='-b a+i -c a-i -d a+j -e a-j -f a+k -g a-k',
                expr='a*(1-amongst(0,b,c,d,e,f,g))',
                outputtype='NIFTI_GZ'),
                                      name='{}'.format(node_mask_key))

            nuisance_wf.connect(*(pipeline_resource_pool[prev_mask_key] +
                                  (erode_mask_node, 'in_file_a')))

            pipeline_resource_pool[mask_key] = \
                (erode_mask_node, 'out_file')

    return pipeline_resource_pool, full_mask_key
Example #16
def create_qc_carpet(wf_name='qc_carpet', output_image='qc_carpet'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=[
        'functional_to_standard', 'mean_functional_to_standard',
        'anatomical_gm_mask', 'anatomical_wm_mask', 'anatomical_csf_mask'
    ]),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=['carpet_plot']),
                          name='outputspec')

    gm_resample = pe.Node(afni.Resample(), name='gm_resample')
    gm_resample.inputs.outputtype = 'NIFTI'
    wf.connect(input_node, 'anatomical_gm_mask', gm_resample, 'in_file')
    wf.connect(input_node, 'mean_functional_to_standard', gm_resample,
               'master')

    gm_mask = pe.Node(afni.Calc(), name="gm_mask")
    gm_mask.inputs.expr = 'astep(a, 0.5)'
    gm_mask.inputs.outputtype = 'NIFTI'
    wf.connect(gm_resample, 'out_file', gm_mask, 'in_file_a')

    wm_resample = pe.Node(afni.Resample(), name='wm_resample')
    wm_resample.inputs.outputtype = 'NIFTI'
    wf.connect(input_node, 'anatomical_wm_mask', wm_resample, 'in_file')
    wf.connect(input_node, 'mean_functional_to_standard', wm_resample,
               'master')

    wm_mask = pe.Node(afni.Calc(), name="wm_mask")
    wm_mask.inputs.expr = 'astep(a, 0.5)'
    wm_mask.inputs.outputtype = 'NIFTI'
    wf.connect(wm_resample, 'out_file', wm_mask, 'in_file_a')

    csf_resample = pe.Node(afni.Resample(), name='csf_resample')
    csf_resample.inputs.outputtype = 'NIFTI'
    wf.connect(input_node, 'anatomical_csf_mask', csf_resample, 'in_file')
    wf.connect(input_node, 'mean_functional_to_standard', csf_resample,
               'master')

    csf_mask = pe.Node(afni.Calc(), name="csf_mask")
    csf_mask.inputs.expr = 'astep(a, 0.5)'
    csf_mask.inputs.outputtype = 'NIFTI'
    wf.connect(csf_resample, 'out_file', csf_mask, 'in_file_a')

    carpet_plot = pe.Node(Function(input_names=[
        'gm_mask', 'wm_mask', 'csf_mask', 'functional_to_standard', 'output'
    ],
                                   output_names=['carpet_plot'],
                                   function=gen_carpet_plt,
                                   as_module=True),
                          name='carpet_plot')

    carpet_plot.inputs.output = output_image
    wf.connect(gm_mask, 'out_file', carpet_plot, 'gm_mask')
    wf.connect(wm_mask, 'out_file', carpet_plot, 'wm_mask')
    wf.connect(csf_mask, 'out_file', carpet_plot, 'csf_mask')
    wf.connect(input_node, 'functional_to_standard', carpet_plot,
               'functional_to_standard')
    wf.connect(carpet_plot, 'carpet_plot', output_node, 'carpet_plot')

    return wf
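# A hedged usage sketch for create_qc_carpet(): the inputspec field names are taken from
# the workflow above, but every file path is a placeholder for a real standard-space image.
qc_wf = create_qc_carpet(wf_name='qc_carpet', output_image='qc_carpet')
qc_wf.inputs.inputspec.functional_to_standard = 'func_in_template.nii.gz'
qc_wf.inputs.inputspec.mean_functional_to_standard = 'mean_func_in_template.nii.gz'
qc_wf.inputs.inputspec.anatomical_gm_mask = 'gm_mask_in_template.nii.gz'
qc_wf.inputs.inputspec.anatomical_wm_mask = 'wm_mask_in_template.nii.gz'
qc_wf.inputs.inputspec.anatomical_csf_mask = 'csf_mask_in_template.nii.gz'
# qc_wf.run()  # requires AFNI plus the gen_carpet_plt helper imported above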
Example #17
def create_extract_pipe(params_template, params={}, name="extract_pipe"):
    """
    Description: Extract T1 brain using AtlasBrex

    Inputs:

        inputnode:
            restore_T1: preprocessed (debiased/denoised) T1 file name

            restore_T2: preprocessed (debiased/denoised) T2 file name

        arguments:
            params_template: dictionary of info about template

            params: dictionary of node sub-parameters (from a json file)

            name: pipeline name (default = "extract_pipe")

    Outputs:

        smooth_mask.out_file:
            Computed mask (after some smoothing)

    """

    # creating pipeline
    extract_pipe = pe.Workflow(name=name)

    # creating inputnode
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['restore_T1', 'restore_T2',
                                      "indiv_params"]),
        name='inputnode')

    # atlas_brex
    atlas_brex = NodeParams(AtlasBREX(),
                            params=parse_key(params, "atlas_brex"),
                            name='atlas_brex')

    extract_pipe.connect(inputnode, "restore_T1",
                         atlas_brex, 't1_restored_file')

    atlas_brex.inputs.NMT_file = params_template["template_head"]
    atlas_brex.inputs.NMT_SS_file = params_template["template_brain"]

    extract_pipe.connect(
            inputnode, ("indiv_params", parse_key, "atlas_brex"),
            atlas_brex, 'indiv_params')

    # mask_brex
    mask_brex = pe.Node(fsl.UnaryMaths(), name='mask_brex')
    mask_brex.inputs.operation = 'bin'

    extract_pipe.connect(atlas_brex, 'brain_file', mask_brex, 'in_file')

    # smooth_mask
    smooth_mask = pe.Node(fsl.UnaryMaths(), name='smooth_mask')
    smooth_mask.inputs.operation = "bin"
    smooth_mask.inputs.args = "-s 1 -thr 0.5 -bin"

    extract_pipe.connect(mask_brex, 'out_file', smooth_mask, 'in_file')

    # mult_T1
    mult_T1 = pe.Node(afni.Calc(), name='mult_T1')
    mult_T1.inputs.expr = "a*b"
    mult_T1.inputs.outputtype = 'NIFTI_GZ'

    extract_pipe.connect(inputnode, "restore_T1", mult_T1, 'in_file_a')
    extract_pipe.connect(smooth_mask, 'out_file', mult_T1, 'in_file_b')

    # mult_T2
    mult_T2 = pe.Node(afni.Calc(), name='mult_T2')
    mult_T2.inputs.expr = "a*b"
    mult_T2.inputs.outputtype = 'NIFTI_GZ'

    extract_pipe.connect(inputnode, 'restore_T2', mult_T2, 'in_file_a')
    extract_pipe.connect(smooth_mask, 'out_file', mult_T2, 'in_file_b')
    return extract_pipe
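# A hedged usage sketch for create_extract_pipe(): the params_template keys are the ones
# read above ("template_head", "template_brain"); all file paths are placeholders and
# AtlasBREX, FSL and AFNI are assumed to be installed.
params_template = {
    "template_head": 'template_head.nii.gz',
    "template_brain": 'template_brain.nii.gz',
}
extract_wf = create_extract_pipe(params_template, params={}, name="extract_pipe")
extract_wf.inputs.inputnode.restore_T1 = 'sub-01_T1w_restore.nii.gz'
extract_wf.inputs.inputnode.restore_T2 = 'sub-01_T2w_restore.nii.gz'
extract_wf.inputs.inputnode.indiv_params = {}
# extract_wf.run()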
Example #18
def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
    # Skull-stripping using AFNI 3dSkullStrip
    inputnode_afni = pe.Node(util.IdentityInterface(fields=[
        'mask_vol', 'shrink_factor', 'var_shrink_fac', 'shrink_fac_bot_lim',
        'avoid_vent', 'niter', 'pushout', 'touchup', 'fill_hole', 'NN_smooth',
        'smooth_final', 'avoid_eyes', 'use_edge', 'exp_frac', 'push_to_edge',
        'use_skull', 'perc_int', 'max_inter_iter', 'blur_fwhm', 'fac', 'monkey'
    ]),
                             name=f'AFNI_options_{pipe_num}')

    skullstrip_args = pe.Node(util.Function(
        input_names=[
            'spat_norm', 'spat_norm_dxyz', 'mask_vol', 'shrink_fac',
            'var_shrink_fac', 'shrink_fac_bot_lim', 'avoid_vent', 'niter',
            'pushout', 'touchup', 'fill_hole', 'NN_smooth', 'smooth_final',
            'avoid_eyes', 'use_edge', 'exp_frac', 'push_to_edge', 'use_skull',
            'perc_int', 'max_inter_iter', 'blur_fwhm', 'fac', 'monkey'
        ],
        output_names=['expr'],
        function=create_3dskullstrip_arg_string),
                              name=f'anat_skullstrip_args_{pipe_num}')

    inputnode_afni.inputs.set(
        mask_vol=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['mask_vol'],
        shrink_factor=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['shrink_factor'],
        var_shrink_fac=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['var_shrink_fac'],
        shrink_fac_bot_lim=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['shrink_factor_bot_lim'],
        avoid_vent=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['avoid_vent'],
        niter=cfg.anatomical_preproc['brain_extraction']['AFNI-3dSkullStrip']
        ['n_iterations'],
        pushout=cfg.anatomical_preproc['brain_extraction']['AFNI-3dSkullStrip']
        ['pushout'],
        touchup=cfg.anatomical_preproc['brain_extraction']['AFNI-3dSkullStrip']
        ['touchup'],
        fill_hole=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['fill_hole'],
        NN_smooth=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['NN_smooth'],
        smooth_final=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['smooth_final'],
        avoid_eyes=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['avoid_eyes'],
        use_edge=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['use_edge'],
        exp_frac=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['exp_frac'],
        push_to_edge=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['push_to_edge'],
        use_skull=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['use_skull'],
        perc_int=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['perc_int'],
        max_inter_iter=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['max_inter_iter'],
        fac=cfg.anatomical_preproc['brain_extraction']['AFNI-3dSkullStrip']
        ['fac'],
        blur_fwhm=cfg.anatomical_preproc['brain_extraction']
        ['AFNI-3dSkullStrip']['blur_fwhm'],
        monkey=cfg.anatomical_preproc['brain_extraction']['AFNI-3dSkullStrip']
        ['monkey'],
    )

    wf.connect([(inputnode_afni, skullstrip_args,
                 [('mask_vol', 'mask_vol'), ('shrink_factor', 'shrink_fac'),
                  ('var_shrink_fac', 'var_shrink_fac'),
                  ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'),
                  ('avoid_vent', 'avoid_vent'), ('niter', 'niter'),
                  ('pushout', 'pushout'), ('touchup', 'touchup'),
                  ('fill_hole', 'fill_hole'), ('avoid_eyes', 'avoid_eyes'),
                  ('use_edge', 'use_edge'), ('exp_frac', 'exp_frac'),
                  ('NN_smooth', 'NN_smooth'), ('smooth_final', 'smooth_final'),
                  ('push_to_edge', 'push_to_edge'), ('use_skull', 'use_skull'),
                  ('perc_int', 'perc_int'),
                  ('max_inter_iter', 'max_inter_iter'),
                  ('blur_fwhm', 'blur_fwhm'), ('fac', 'fac'),
                  ('monkey', 'monkey')])])

    anat_skullstrip = pe.Node(interface=afni.SkullStrip(),
                              name=f'anat_skullstrip_{pipe_num}')
    anat_skullstrip.inputs.outputtype = 'NIFTI_GZ'

    node, out = strat_pool.get_data(
        ['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w'])
    wf.connect(node, out, anat_skullstrip, 'in_file')
    wf.connect(skullstrip_args, 'expr', anat_skullstrip, 'args')

    # Generate anatomical brain mask
    anat_brain_mask = pe.Node(interface=afni.Calc(),
                              name=f'anat_brain_mask_{pipe_num}')

    anat_brain_mask.inputs.expr = 'step(a)'
    anat_brain_mask.inputs.outputtype = 'NIFTI_GZ'

    wf.connect(anat_skullstrip, 'out_file', anat_brain_mask, 'in_file_a')

    outputs = {'space-T1w_desc-brain_mask': (anat_brain_mask, 'out_file')}

    return (wf, outputs)
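# A minimal sketch of the 'step(a)' binarisation that afni_brain_connector() uses to turn
# the skull-stripped volume into a brain mask, shown as a standalone node with a
# placeholder input path.
from nipype.interfaces import afni

binarize = afni.Calc()
binarize.inputs.in_file_a = 'sub-01_desc-brain_T1w.nii.gz'   # hypothetical skull-stripped T1
binarize.inputs.expr = 'step(a)'                             # 1 wherever the input is non-zero
binarize.inputs.out_file = 'sub-01_space-T1w_desc-brain_mask.nii.gz'
binarize.inputs.outputtype = 'NIFTI_GZ'
# binarize.run()  # requires AFNI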
Example #19
def _create_split_hemi_pipe(params, params_template, name="split_hemi_pipe"):
    """Description: Split segmentated tissus according hemisheres after \
    removal of cortical structure

    Processing steps:

    - TODO

    Params:

        - None so far

    Inputs:

        inputnode:

            warpinv_file:
                non-linear transformation (from NMT_subject_align)

            inv_transfo_file:
                inverse transformation

            aff_file:
                affine transformation file

            t1_ref_file:
                preprocessed T1

            segmented_file:
                from atropos segmentation, with all the tissues segmented

        arguments:

            params:
                dictionary of node sub-parameters (from a json file)

            name:
                pipeline name (default = "split_hemi_pipe")

    Outputs:
    """
    split_hemi_pipe = pe.Workflow(name=name)

    # creating inputnode
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'warpinv_file', 'inv_transfo_file', 'aff_file', 't1_ref_file',
        'segmented_file'
    ]),
                        name='inputnode')

    # get values

    if "cereb_template" in params_template.keys():
        cereb_template_file = params_template["cereb_template"]

        # ### cereb
        # Binarize cerebellum
        bin_cereb = pe.Node(interface=fsl.UnaryMaths(), name='bin_cereb')
        bin_cereb.inputs.operation = "bin"

        bin_cereb.inputs.in_file = cereb_template_file

        # Warp cereb brainmask to subject space
        warp_cereb = pe.Node(interface=reg.NwarpApplyPriors(),
                             name='warp_cereb')

        warp_cereb.inputs.in_file = cereb_template_file
        warp_cereb.inputs.out_file = cereb_template_file
        warp_cereb.inputs.interp = "NN"
        warp_cereb.inputs.args = "-overwrite"

        split_hemi_pipe.connect(bin_cereb, 'out_file', warp_cereb, 'in_file')
        split_hemi_pipe.connect(inputnode, 'aff_file', warp_cereb, 'master')
        split_hemi_pipe.connect(inputnode, 'warpinv_file', warp_cereb, "warp")

        # Align cereb template
        align_cereb = pe.Node(interface=afni.Allineate(), name='align_cereb')

        align_cereb.inputs.final_interpolation = "nearestneighbour"
        align_cereb.inputs.overwrite = True
        align_cereb.inputs.outputtype = "NIFTI_GZ"

        split_hemi_pipe.connect(warp_cereb, 'out_file', align_cereb,
                                "in_file")  # -source
        split_hemi_pipe.connect(inputnode, 't1_ref_file', align_cereb,
                                "reference")  # -base
        split_hemi_pipe.connect(inputnode, 'inv_transfo_file', align_cereb,
                                "in_matrix")  # -1Dmatrix_apply

    if "L_hemi_template" in params_template.keys() and \
            "R_hemi_template" in params_template.keys():

        L_hemi_template_file = params_template["L_hemi_template"]
        R_hemi_template_file = params_template["R_hemi_template"]

        # Warp L hemi template brainmask to subject space
        warp_L_hemi = pe.Node(interface=reg.NwarpApplyPriors(),
                              name='warp_L_hemi')

        warp_L_hemi.inputs.in_file = L_hemi_template_file
        warp_L_hemi.inputs.out_file = L_hemi_template_file
        warp_L_hemi.inputs.interp = "NN"
        warp_L_hemi.inputs.args = "-overwrite"

        split_hemi_pipe.connect(inputnode, 'aff_file', warp_L_hemi, 'master')
        split_hemi_pipe.connect(inputnode, 'warpinv_file', warp_L_hemi, "warp")

        # Align L hemi template
        align_L_hemi = pe.Node(interface=afni.Allineate(), name='align_L_hemi')

        align_L_hemi.inputs.final_interpolation = "nearestneighbour"
        align_L_hemi.inputs.overwrite = True
        align_L_hemi.inputs.outputtype = "NIFTI_GZ"

        split_hemi_pipe.connect(warp_L_hemi, 'out_file', align_L_hemi,
                                "in_file")  # -source
        split_hemi_pipe.connect(inputnode, 't1_ref_file', align_L_hemi,
                                "reference")  # -base
        split_hemi_pipe.connect(inputnode, 'inv_transfo_file', align_L_hemi,
                                "in_matrix")  # -1Dmatrix_apply

        # Warp R hemi template brainmask to subject space
        warp_R_hemi = pe.Node(interface=reg.NwarpApplyPriors(),
                              name='warp_R_hemi')

        warp_R_hemi.inputs.in_file = R_hemi_template_file
        warp_R_hemi.inputs.out_file = R_hemi_template_file
        warp_R_hemi.inputs.interp = "NN"
        warp_R_hemi.inputs.args = "-overwrite"

        split_hemi_pipe.connect(inputnode, 'aff_file', warp_R_hemi, 'master')
        split_hemi_pipe.connect(inputnode, 'warpinv_file', warp_R_hemi, "warp")

        # Align R hemi template
        align_R_hemi = pe.Node(interface=afni.Allineate(), name='align_R_hemi')

        align_R_hemi.inputs.final_interpolation = "nearestneighbour"
        align_R_hemi.inputs.overwrite = True
        align_R_hemi.inputs.outputtype = "NIFTI_GZ"

        split_hemi_pipe.connect(warp_R_hemi, 'out_file', align_R_hemi,
                                "in_file")  # -source
        split_hemi_pipe.connect(inputnode, 't1_ref_file', align_R_hemi,
                                "reference")  # -base
        split_hemi_pipe.connect(inputnode, 'inv_transfo_file', align_R_hemi,
                                "in_matrix")  # -1Dmatrix_apply

    elif "LR_hemi_template" in params_template.keys():

        LR_hemi_template_file = params_template["LR_hemi_template"]

        # Warp LR hemi template brainmask to subject space
        warp_LR_hemi = pe.Node(interface=reg.NwarpApplyPriors(),
                               name='warp_LR_hemi')

        warp_LR_hemi.inputs.in_file = LR_hemi_template_file
        warp_LR_hemi.inputs.out_file = LR_hemi_template_file
        warp_LR_hemi.inputs.interp = "NN"
        warp_LR_hemi.inputs.args = "-overwrite"

        split_hemi_pipe.connect(inputnode, 'aff_file', warp_LR_hemi, 'master')
        split_hemi_pipe.connect(inputnode, 'warpinv_file', warp_LR_hemi,
                                "warp")

        # Align LR hemi template
        align_LR_hemi = pe.Node(interface=afni.Allineate(),
                                name='align_LR_hemi')

        align_LR_hemi.inputs.final_interpolation = "nearestneighbour"
        align_LR_hemi.inputs.overwrite = True
        align_LR_hemi.inputs.outputtype = "NIFTI_GZ"

        split_hemi_pipe.connect(warp_LR_hemi, 'out_file', align_LR_hemi,
                                "in_file")  # -source
        split_hemi_pipe.connect(inputnode, 't1_ref_file', align_LR_hemi,
                                "reference")  # -base
        split_hemi_pipe.connect(inputnode, 'inv_transfo_file', align_LR_hemi,
                                "in_matrix")  # -1Dmatrix_apply

        split_LR = pe.Node(interface=niu.Function(
            input_names=["LR_mask_file"],
            output_names=["L_mask_file", "R_mask_file"],
            function=split_LR_mask),
                           name="split_LR")

        split_hemi_pipe.connect(align_LR_hemi, "out_file", split_LR,
                                'LR_mask_file')

    else:
        print("Error, could not find LR_hemi_template or L_hemi_template and \
            R_hemi_template, skipping")
        print(params_template.keys())

        exit()

    # Using LH and RH masks to obtain hemisphere segmentation masks
    calc_L_hemi = pe.Node(interface=afni.Calc(), name='calc_L_hemi')
    calc_L_hemi.inputs.expr = 'a*b/b'
    calc_L_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(inputnode, 'segmented_file', calc_L_hemi,
                            "in_file_a")

    if "LR_hemi_template" in params_template.keys():
        split_hemi_pipe.connect(split_LR, 'L_mask_file', calc_L_hemi,
                                "in_file_b")
    else:
        split_hemi_pipe.connect(align_L_hemi, 'out_file', calc_L_hemi,
                                "in_file_b")

    # R_hemi
    calc_R_hemi = pe.Node(interface=afni.Calc(), name='calc_R_hemi')
    calc_R_hemi.inputs.expr = 'a*b/b'
    calc_R_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(inputnode, 'segmented_file', calc_R_hemi,
                            "in_file_a")

    if "LR_hemi_template" in params_template.keys():

        split_hemi_pipe.connect(split_LR, 'R_mask_file', calc_R_hemi,
                                "in_file_b")
    else:
        split_hemi_pipe.connect(align_R_hemi, 'out_file', calc_R_hemi,
                                "in_file_b")

    # remove cerebellum from left and right brain segmentations
    calc_nocb_L_hemi = pe.Node(interface=afni.Calc(), name='calc_nocb_L_hemi')
    calc_nocb_L_hemi.inputs.expr = '(a*(not (b)))'
    calc_nocb_L_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(calc_L_hemi, 'out_file', calc_nocb_L_hemi,
                            "in_file_a")
    split_hemi_pipe.connect(align_cereb, 'out_file', calc_nocb_L_hemi,
                            "in_file_b")

    calc_nocb_R_hemi = pe.Node(interface=afni.Calc(), name='calc_nocb_R_hemi')
    calc_nocb_R_hemi.inputs.expr = '(a*(not (b)))'
    calc_nocb_R_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(calc_R_hemi, 'out_file', calc_nocb_R_hemi,
                            "in_file_a")
    split_hemi_pipe.connect(align_cereb, 'out_file', calc_nocb_R_hemi,
                            "in_file_b")

    # create L/R GM and WM no-cerebellum masks from subject brain segmentation
    calc_GM_nocb_L_hemi = pe.Node(interface=afni.Calc(),
                                  name='calc_GM_nocb_L_hemi')
    calc_GM_nocb_L_hemi.inputs.expr = 'iszero(a-2)'
    calc_GM_nocb_L_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(calc_nocb_L_hemi, 'out_file', calc_GM_nocb_L_hemi,
                            "in_file_a")

    calc_WM_nocb_L_hemi = pe.Node(interface=afni.Calc(),
                                  name='calc_WM_nocb_L_hemi')
    calc_WM_nocb_L_hemi.inputs.expr = 'iszero(a-3)'
    calc_WM_nocb_L_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(calc_nocb_L_hemi, 'out_file', calc_WM_nocb_L_hemi,
                            "in_file_a")

    calc_GM_nocb_R_hemi = pe.Node(interface=afni.Calc(),
                                  name='calc_GM_nocb_R_hemi')
    calc_GM_nocb_R_hemi.inputs.expr = 'iszero(a-2)'
    calc_GM_nocb_R_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(calc_nocb_R_hemi, 'out_file', calc_GM_nocb_R_hemi,
                            "in_file_a")

    calc_WM_nocb_R_hemi = pe.Node(interface=afni.Calc(),
                                  name='calc_WM_nocb_R_hemi')
    calc_WM_nocb_R_hemi.inputs.expr = 'iszero(a-3)'
    calc_WM_nocb_R_hemi.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(calc_nocb_R_hemi, 'out_file', calc_WM_nocb_R_hemi,
                            "in_file_a")

    # Extract Cerebellum using template mask transformed to subject space
    extract_cereb = pe.Node(interface=afni.Calc(), name='extract_cereb')
    extract_cereb.inputs.expr = 'a*b/b'
    extract_cereb.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(inputnode, 't1_ref_file', extract_cereb,
                            "in_file_a")
    split_hemi_pipe.connect(align_cereb, 'out_file', extract_cereb,
                            "in_file_b")

    # Extract L.GM using template mask transformed to subject space
    extract_L_GM = pe.Node(interface=afni.Calc(), name='extract_L_GM')
    extract_L_GM.inputs.expr = 'a*b/b'
    extract_L_GM.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(inputnode, 't1_ref_file', extract_L_GM,
                            "in_file_a")
    split_hemi_pipe.connect(calc_GM_nocb_L_hemi, 'out_file', extract_L_GM,
                            "in_file_b")

    # Extract L.WM using template mask transformed to subject space
    extract_L_WM = pe.Node(interface=afni.Calc(), name='extract_L_WM')
    extract_L_WM.inputs.expr = 'a*b/b'
    extract_L_WM.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(inputnode, 't1_ref_file', extract_L_WM,
                            "in_file_a")
    split_hemi_pipe.connect(calc_WM_nocb_L_hemi, 'out_file', extract_L_WM,
                            "in_file_b")

    # Extract R.GM using template mask transformed to subject space
    extract_R_GM = pe.Node(interface=afni.Calc(), name='extract_R_GM')
    extract_R_GM.inputs.expr = 'a*b/b'
    extract_R_GM.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(inputnode, 't1_ref_file', extract_R_GM,
                            "in_file_a")
    split_hemi_pipe.connect(calc_GM_nocb_R_hemi, 'out_file', extract_R_GM,
                            "in_file_b")

    # Extract R.WM using template mask transformed to subject space
    extract_R_WM = pe.Node(interface=afni.Calc(), name='extract_R_WM')
    extract_R_WM.inputs.expr = 'a*b/b'
    extract_R_WM.inputs.outputtype = 'NIFTI_GZ'

    split_hemi_pipe.connect(inputnode, 't1_ref_file', extract_R_WM,
                            "in_file_a")
    split_hemi_pipe.connect(calc_WM_nocb_R_hemi, 'out_file', extract_R_WM,
                            "in_file_b")

    return split_hemi_pipe
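# A hedged usage sketch for _create_split_hemi_pipe(): params_template must provide a
# cerebellum template plus either separate L/R hemisphere templates or a combined LR
# template (see the branches above). All file paths are placeholders.
params_template = {
    "cereb_template": 'template_cerebellum.nii.gz',
    "LR_hemi_template": 'template_LR_hemispheres.nii.gz',
}
hemi_wf = _create_split_hemi_pipe({}, params_template, name="split_hemi_pipe")
hemi_wf.inputs.inputnode.t1_ref_file = 'sub-01_T1w_preproc.nii.gz'
hemi_wf.inputs.inputnode.segmented_file = 'sub-01_seg.nii.gz'
hemi_wf.inputs.inputnode.aff_file = 'sub-01_affine.nii.gz'
hemi_wf.inputs.inputnode.warpinv_file = 'sub-01_warpinv.nii.gz'
hemi_wf.inputs.inputnode.inv_transfo_file = 'sub-01_inv_transfo.1D'
# hemi_wf.run()  # requires AFNI, FSL and the NwarpApplyPriors wrapper imported as reg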
def main(args=None):

    args = arg_parser().parse_args(args)
    FLAIR = args.FLAIR
    MPRAGE = args.T1
    
    prefix = args.prefix + '.'

    if args.mask is None:
        args.temp_mask = os.path.abspath(args.temp_mask)
        args.brain_template = os.path.abspath(args.brain_template)
        args.temp_prob = os.path.abspath(args.temp_prob)
        if not os.path.isfile(args.temp_mask):
            raise Exception("template mask not found")
        if not os.path.isfile(args.brain_template):
            raise Exception("brain template mask not found")
        if not os.path.isfile(args.temp_prob):
            raise Exception("template probability mask not found")
    elif not os.path.isfile(args.mask):
        raise Exception("T1 mask file not found")

    if not os.path.isfile(MPRAGE):
        raise Exception("Input T1 file not found")
    if not os.path.isfile(FLAIR):
        raise Exception("Input FLAIR file not found")

    if args.outfolder is not None:
        abs_out = os.path.abspath(args.outfolder)
        #print(abs_out)
        if not os.path.exists(abs_out):
            #if selecting a new folder copy the files (not sure how to specify different folder under nipype when it runs sh scripts from ants)
            os.mkdir(abs_out)
        copyfile(os.path.abspath(MPRAGE), os.path.join(abs_out, os.path.basename(MPRAGE)))
        copyfile(os.path.abspath(FLAIR), os.path.join(abs_out, os.path.basename(FLAIR)))
        if args.mask is not None:
            if os.path.isfile(args.mask):
                copyfile(os.path.abspath(args.mask),
                         os.path.join(abs_out, prefix + 'MPRAGE.mask.nii.gz'))
        os.chdir(args.outfolder)
    elif args.mask is not None:
        # no output folder given: copy the mask into the working directory
        copyfile(os.path.abspath(args.mask),
                 os.path.join(os.getcwd(), prefix + 'MPRAGE.mask.nii.gz'))

    if args.mask is None:
        # T1 brain extraction
        brainextraction = BrainExtraction()
        brainextraction.inputs.dimension = 3
        brainextraction.inputs.anatomical_image = MPRAGE
        brainextraction.inputs.brain_template = args.brain_template
        brainextraction.inputs.brain_probability_mask = args.temp_prob
        brainextraction.inputs.extraction_registration_mask = args.temp_mask
        brainextraction.inputs.debug = True
        print("brain extraction")
        print(' ')
        print(brainextraction.cmdline)
        print('-'*30)
        brainextraction.run()
        os.rename('highres001_BrainExtractionMask.nii.gz',prefix +'MPRAGE.mask.nii.gz')
        os.rename('highres001_BrainExtractionBrain.nii.gz',prefix +'MPRAGE.brain.nii.gz')
        os.remove('highres001_BrainExtractionPrior0GenericAffine.mat')
        os.rmdir('highres001_')

    #two step registration with ants (step1)

    reg = Registration()
    reg.inputs.fixed_image = FLAIR
    reg.inputs.moving_image = MPRAGE
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.output_warped_image = prefix + 'output_warped_image.nii.gz'
    reg.inputs.dimension = 3
    reg.inputs.transforms = ['Rigid']
    reg.inputs.transform_parameters = [[0.1]]
    reg.inputs.radius_or_number_of_bins = [32]
    reg.inputs.metric = ['MI']
    reg.inputs.sampling_percentage = [0.1]
    reg.inputs.sampling_strategy = ['Regular']
    reg.inputs.shrink_factors = [[4,3,2,1]]
    reg.inputs.smoothing_sigmas = [[3,2,1,0]]
    reg.inputs.sigma_units = ['vox']
    reg.inputs.use_histogram_matching = [False]
    reg.inputs.number_of_iterations = [[1000,500,250,100]]
    reg.inputs.winsorize_lower_quantile = 0.025
    reg.inputs.winsorize_upper_quantile = 0.975
    print("first pass registration")
    print(' ')
    print(reg.cmdline)
    print('-'*30)
    reg.run()

    os.rename('output_0GenericAffine.mat',prefix + 'MPRAGE_to_FLAIR.firstpass.mat')

    # apply transform of MPRAGE mask to FLAIR

    at = ApplyTransforms()
    at.inputs.dimension = 3
    at.inputs.input_image = prefix + 'MPRAGE.mask.nii.gz'
    at.inputs.reference_image = FLAIR
    at.inputs.output_image = prefix + 'FLAIR.mask.nii.gz'
    at.inputs.interpolation = 'MultiLabel'
    at.inputs.default_value = 0
    at.inputs.transforms = [ prefix + 'MPRAGE_to_FLAIR.firstpass.mat']
    at.inputs.invert_transform_flags = [False]
    print("apply stranform to T1 maks")
    print(' ')
    print(at.cmdline)
    print('-'*30)    
    at.run()

    # bias correct FLAIR and MPRAGE

    n4m = N4BiasFieldCorrection()
    n4m.inputs.dimension = 3
    n4m.inputs.input_image = MPRAGE
    n4m.inputs.mask_image = prefix + 'MPRAGE.mask.nii.gz'
    n4m.inputs.bspline_fitting_distance = 300
    n4m.inputs.shrink_factor = 3
    n4m.inputs.n_iterations = [50,50,30,20]
    n4m.inputs.output_image = prefix + 'MPRAGE.N4.nii.gz'
    print("bias correcting T1")
    print(' ')
    print(n4m.cmdline)
    print('-'*30)
    n4m.run()

    n4f = copy.deepcopy(n4m)
    n4f.inputs.input_image = FLAIR
    n4f.inputs.mask_image = prefix + 'FLAIR.mask.nii.gz'
    n4f.inputs.output_image = prefix + 'FLAIR.N4.nii.gz'
    print("bias correcting FLAIR")
    print(' ')
    print(n4f.cmdline)
    print('-'*30)
    n4f.run()

    # mask bias corrected FLAIR and MPRAGE

    calc = afni.Calc()
    calc.inputs.in_file_a = prefix + 'FLAIR.N4.nii.gz'
    calc.inputs.in_file_b = prefix + 'FLAIR.mask.nii.gz'
    calc.inputs.expr='a*b'
    calc.inputs.out_file = prefix +  'FLAIR.N4.masked.nii.gz'
    calc.inputs.outputtype = 'NIFTI'
    calc.inputs.overwrite = True
    calc.run()

    calc1= copy.deepcopy(calc)
    calc1.inputs.in_file_a = prefix + 'MPRAGE.N4.nii.gz'
    calc1.inputs.in_file_b = prefix + 'MPRAGE.mask.nii.gz'
    calc1.inputs.out_file = prefix +  'MPRAGE.N4.masked.nii.gz'
    calc1.inputs.overwrite = True
    calc1.run()

    #register bias corrected

    reg1 = copy.deepcopy(reg)
    reg1.inputs.output_transform_prefix = "output_"
    reg1.inputs.output_warped_image = prefix + 'output_warped_image.nii.gz'
    reg1.inputs.initial_moving_transform = prefix +'MPRAGE_to_FLAIR.firstpass.mat'
    print("second pass registration")
    print(' ')
    print(reg1.cmdline)
    print('-'*30)
    reg1.run()
    os.rename('output_0GenericAffine.mat',prefix +'MPRAGE_to_FLAIR.secondpass.mat')
    
    
    #generate final mask in FLAIR space

    atf = ApplyTransforms()
    atf.inputs.dimension = 3
    atf.inputs.input_image = prefix + 'MPRAGE.N4.nii.gz'
    atf.inputs.reference_image = FLAIR
    atf.inputs.output_image = prefix + 'MPRAGE.N4.toFLAIR.nii.gz'
    atf.inputs.interpolation = 'BSpline'
    atf.inputs.interpolation_parameters = (3,)
    atf.inputs.default_value = 0
    atf.inputs.transforms = [prefix +  'MPRAGE_to_FLAIR.secondpass.mat']
    atf.inputs.invert_transform_flags = [False]
    print("final apply transform")
    print(' ')
    print(atf.cmdline)
    print('-'*30)
    atf.run()


    #cleanup

    os.remove(prefix + 'output_warped_image.nii.gz')

    if args.outfolder is not None:
        os.remove(os.path.join(abs_out,os.path.basename(MPRAGE)))
        os.remove(os.path.join(abs_out,os.path.basename(FLAIR)))
        
    if args.mask is None:
        os.remove(prefix + 'MPRAGE.brain.nii.gz')
        
    if not args.storetemp:
        os.remove(prefix + 'MPRAGE.mask.nii.gz')
        os.remove(prefix + 'MPRAGE_to_FLAIR.firstpass.mat')
        os.remove(prefix + 'FLAIR.N4.masked.nii.gz')
        os.remove(prefix + 'MPRAGE.N4.masked.nii.gz')
        os.remove(prefix + 'MPRAGE.N4.nii.gz')


    return
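# A standalone sketch of the interface-reuse pattern in main() above: configure one
# interface, deep-copy it, and override only the inputs that differ. The file names
# below are placeholders.
import copy
from nipype.interfaces import afni

mask_flair = afni.Calc(expr='a*b', outputtype='NIFTI', overwrite=True)
mask_flair.inputs.in_file_a = 'FLAIR.N4.nii.gz'
mask_flair.inputs.in_file_b = 'FLAIR.mask.nii.gz'
mask_flair.inputs.out_file = 'FLAIR.N4.masked.nii.gz'

mask_t1 = copy.deepcopy(mask_flair)              # inherits expr, outputtype and overwrite
mask_t1.inputs.in_file_a = 'MPRAGE.N4.nii.gz'
mask_t1.inputs.in_file_b = 'MPRAGE.mask.nii.gz'
mask_t1.inputs.out_file = 'MPRAGE.N4.masked.nii.gz'
# mask_flair.run(); mask_t1.run()  # require AFNI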
Example #21
def hmc_afni(settings, name='fMRI_HMC_afni', st_correct=False, despike=False,
             deoblique=False, start_idx=None, stop_idx=None):
    """
    A :abbr:`HMC (head motion correction)` workflow for
    functional scans

    .. workflow::

      from mriqc.workflows.functional import hmc_afni
      wf = hmc_afni({'biggest_file_size_gb': 1})

    """

    biggest_file_gb = settings.get("biggest_file_size_gb", 1)

    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'fd_radius', 'start_idx', 'stop_idx']), name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(
        fields=['out_file', 'out_fd']), name='outputnode')

    if (start_idx is not None) or (stop_idx is not None):
        drop_trs = pe.Node(afni.Calc(expr='a', outputtype='NIFTI_GZ'),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'in_file_a'),
                                   ('start_idx', 'start_idx'),
                                   ('stop_idx', 'stop_idx')]),
        ])
    else:
        drop_trs = pe.Node(niu.IdentityInterface(fields=['out_file']),
                           name='drop_trs')
        workflow.connect([
            (inputnode, drop_trs, [('in_file', 'out_file')]),
        ])

    gen_ref = pe.Node(nwr.EstimateReferenceImage(mc_method="AFNI"), name="gen_ref")

    # calculate hmc parameters
    hmc = pe.Node(
        afni.Volreg(args='-Fourier -twopass', zpad=4, outputtype='NIFTI_GZ'),
        name='motion_correct', mem_gb=biggest_file_gb * 2.5)

    # Compute the frame-wise displacement
    fdnode = pe.Node(nac.FramewiseDisplacement(normalize=False,
                                               parameter_source="AFNI"),
                     name='ComputeFD')

    workflow.connect([
        (inputnode, fdnode, [('fd_radius', 'radius')]),
        (gen_ref, hmc, [('ref_image', 'basefile')]),
        (hmc, outputnode, [('out_file', 'out_file')]),
        (hmc, fdnode, [('oned_file', 'in_file')]),
        (fdnode, outputnode, [('out_file', 'out_fd')]),
    ])

    # Slice timing correction, despiking, and deoblique

    st_corr = pe.Node(afni.TShift(outputtype='NIFTI_GZ'), name='TimeShifts')

    deoblique_node = pe.Node(afni.Refit(deoblique=True), name='deoblique')

    despike_node = pe.Node(afni.Despike(outputtype='NIFTI_GZ'), name='despike')

    if st_correct and despike and deoblique:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    elif st_correct and despike:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, despike_node, [('out_file', 'in_file')]),
            (despike_node, gen_ref, [('out_file', 'in_file')]),
            (despike_node, hmc, [('out_file', 'in_file')]),
        ])

    elif st_correct and deoblique:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    elif st_correct:

        workflow.connect([
            (drop_trs, st_corr, [('out_file', 'in_file')]),
            (st_corr, gen_ref, [('out_file', 'in_file')]),
            (st_corr, hmc, [('out_file', 'in_file')]),
        ])

    elif despike and deoblique:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    elif despike:

        workflow.connect([
            (drop_trs, despike_node, [('out_file', 'in_file')]),
            (despike_node, gen_ref, [('out_file', 'in_file')]),
            (despike_node, hmc, [('out_file', 'in_file')]),
        ])

    elif deoblique:

        workflow.connect([
            (drop_trs, deoblique_node, [('out_file', 'in_file')]),
            (deoblique_node, gen_ref, [('out_file', 'in_file')]),
            (deoblique_node, hmc, [('out_file', 'in_file')]),
        ])

    else:
        workflow.connect([
            (drop_trs, gen_ref, [('out_file', 'in_file')]),
            (drop_trs, hmc, [('out_file', 'in_file')]),
        ])

    return workflow
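# A hedged usage sketch for hmc_afni(), extending the docstring example with the required
# input fields. The BOLD file path is a placeholder; 50 mm is the conventional head-radius
# value for framewise displacement.
hmc_wf = hmc_afni({'biggest_file_size_gb': 1}, st_correct=True, despike=True)
hmc_wf.inputs.inputnode.in_file = 'sub-01_task-rest_bold.nii.gz'
hmc_wf.inputs.inputnode.fd_radius = 50
# hmc_wf.run()  # requires AFNI and niworkflows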
Example #22
def temporal_variance_mask(threshold, by_slice=False, erosion=False, degree=1):

    threshold_method = "VAR"

    if isinstance(threshold, str):
        regex_match = {
            "SD": r"([0-9]+(\.[0-9]+)?)\s*SD",
            "PCT": r"([0-9]+(\.[0-9]+)?)\s*PCT",
        }

        for method, regex in regex_match.items():
            matched = re.match(regex, threshold)
            if matched:
                threshold_method = method
                threshold_value = matched.groups()[0]

    try:
        threshold_value = float(threshold_value)
    except (TypeError, ValueError):
        raise ValueError(
            "Error converting threshold value {0} from {1} to a "
            "floating point number. The threshold value can "
            "contain SD or PCT for selecting a threshold based on "
            "the variance distribution, otherwise it should be a "
            "floating point number.".format(threshold_value, threshold))

    if threshold_value < 0:
        raise ValueError(
            "Threshold value should be positive, instead of {0}.".format(
                threshold_value))

    if threshold_method == "PCT" and threshold_value >= 100.0:
        raise ValueError(
            "Percentile should be less than 100, received {0}.".format(
                threshold_value))

    threshold = threshold_value

    wf = pe.Workflow(name='tcompcor')

    input_node = pe.Node(util.IdentityInterface(
        fields=['functional_file_path', 'mask_file_path']),
                         name='inputspec')
    output_node = pe.Node(util.IdentityInterface(fields=['mask']),
                          name='outputspec')

    # C-PAC default performs linear regression while nipype performs quadratic regression
    detrend = pe.Node(afni.Detrend(args='-polort {0}'.format(degree),
                                   outputtype='NIFTI'),
                      name='detrend')
    wf.connect(input_node, 'functional_file_path', detrend, 'in_file')

    std = pe.Node(afni.TStat(args='-nzstdev', outputtype='NIFTI'), name='std')
    wf.connect(input_node, 'mask_file_path', std, 'mask')
    wf.connect(detrend, 'out_file', std, 'in_file')

    var = pe.Node(afni.Calc(expr='a*a', outputtype='NIFTI'), name='var')
    wf.connect(std, 'out_file', var, 'in_file_a')

    if by_slice:
        slices = pe.Node(fsl.Slice(), name='slicer')
        wf.connect(var, 'out_file', slices, 'in_file')

        mask_slices = pe.Node(fsl.Slice(), name='mask_slicer')
        wf.connect(input_node, 'mask_file_path', mask_slices, 'in_file')

        mapper = pe.MapNode(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper',
            iterfield=['out_file', 'mask_file'])
        wf.connect(slices, 'out_files', mapper, 'out_file')
        wf.connect(mask_slices, 'out_files', mapper, 'mask_file')

    else:
        mapper_list = pe.Node(util.Merge(1), name='slice_mapper_list')
        wf.connect(var, 'out_file', mapper_list, 'in1')

        mask_mapper_list = pe.Node(util.Merge(1),
                                   name='slice_mask_mapper_list')
        wf.connect(input_node, 'mask_file_path', mask_mapper_list, 'in1')

        mapper = pe.Node(
            util.IdentityInterface(fields=['out_file', 'mask_file']),
            name='slice_mapper')
        wf.connect(mapper_list, 'out', mapper, 'out_file')
        wf.connect(mask_mapper_list, 'out', mapper, 'mask_file')

    if threshold_method == "PCT":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_pct'],
            output_names=['threshold'],
            function=compute_pct_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_pct = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    elif threshold_method == "SD":
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold_sd'],
            output_names=['threshold'],
            function=compute_sd_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold_sd = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    else:
        threshold_node = pe.MapNode(Function(
            input_names=['in_file', 'mask', 'threshold'],
            output_names=['threshold'],
            function=compute_threshold,
            as_module=True),
                                    name='threshold_value',
                                    iterfield=['in_file', 'mask'])
        threshold_node.inputs.threshold = threshold_value
        wf.connect(mapper, 'out_file', threshold_node, 'in_file')
        wf.connect(mapper, 'mask_file', threshold_node, 'mask')

    threshold_mask = pe.MapNode(interface=fsl.maths.Threshold(),
                                name='threshold',
                                iterfield=['in_file', 'thresh'])
    threshold_mask.inputs.args = '-bin'
    wf.connect(mapper, 'out_file', threshold_mask, 'in_file')
    wf.connect(threshold_node, 'threshold', threshold_mask, 'thresh')

    merge_slice_masks = pe.Node(interface=fsl.Merge(),
                                name='merge_slice_masks')
    merge_slice_masks.inputs.dimension = 'z'
    wf.connect(threshold_mask, 'out_file', merge_slice_masks, 'in_files')

    wf.connect(merge_slice_masks, 'merged_file', output_node, 'mask')

    return wf
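# A hedged usage sketch for temporal_variance_mask(): the threshold string forms
# ("2 SD", "5 PCT", or a plain number) match the parsing above; file paths are placeholders.
tvar_wf = temporal_variance_mask('2 SD', by_slice=False, degree=1)
tvar_wf.inputs.inputspec.functional_file_path = 'sub-01_task-rest_bold.nii.gz'
tvar_wf.inputs.inputspec.mask_file_path = 'sub-01_desc-brain_mask.nii.gz'
# tvar_wf.run()  # requires AFNI and FSL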
Example #23
def create_anat_preproc(template_path=None,
                        mask_path=None,
                        regmask_path=None,
                        method='afni',
                        already_skullstripped=False,
                        non_local_means_filtering=True,
                        n4_correction=True,
                        wf_name='anat_preproc'):
    """ 
    The main purpose of this workflow is to process T1 scans. Raw mprage file is deobliqued, reoriented
    into RPI and skullstripped. Also, a whole brain only mask is generated from the skull stripped image
    for later use in registration.

    Returns
    -------
    anat_preproc : workflow
        Anatomical Preprocessing Workflow

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/anat_preproc/anat_preproc.py>`_
    
    Workflow Inputs::
        inputspec.anat : string
            User input anatomical (T1) Image, in any of the 8 orientations
    
    Workflow Outputs::

        outputspec.refit : string
            Path to deobliqued anatomical image
    
        outputspec.reorient : string
            Path to RPI oriented anatomical image
    
        outputspec.skullstrip : string
            Path to skull stripped RPI oriented mprage file with normalized intensities.
    
        outputspec.brain : string
            Path to skull stripped RPI brain image with original intensity values and not normalized or scaled.
    
    Order of commands:
    - Deobliqing the scans. ::
        3drefit -deoblique mprage.nii.gz

    - Re-orienting the Image into Right-to-Left Posterior-to-Anterior Inferior-to-Superior  (RPI) orientation ::
        3dresample -orient RPI
                   -prefix mprage_RPI.nii.gz
                   -inset mprage.nii.gz
                   
    - Skull-Stripping the image ::
        Using AFNI ::
            3dSkullStrip -input mprage_RPI.nii.gz
                         -o_ply mprage_RPI_3dT.nii.gz
        or using BET ::
            bet mprage_RPI.nii.gz

    - The skull-stripping step modifies the intensity values. To get back the original intensity values, we do an element wise product of RPI data with step function of skull-stripped data ::
        3dcalc -a mprage_RPI.nii.gz
               -b mprage_RPI_3dT.nii.gz
               -expr 'a*step(b)'
               -prefix mprage_RPI_3dc.nii.gz
    
    High Level Workflow Graph:
    .. image:: ../images/anatpreproc_graph.dot.png
       :width: 500
    
    Detailed Workflow Graph:
    .. image:: ../images/anatpreproc_graph_detailed.dot.png
       :width: 500

    Examples
    --------
    >>> from CPAC.anat_preproc import create_anat_preproc
    >>> preproc = create_anat_preproc()
    >>> preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
    >>> preproc.run() #doctest: +SKIP
    """

    preproc = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['anat', 'brain_mask']),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['refit', 'reorient', 'skullstrip', 'brain', 'brain_mask']),
                         name='outputspec')

    anat_deoblique = pe.Node(interface=afni.Refit(), name='anat_deoblique')

    anat_deoblique.inputs.deoblique = True
    preproc.connect(inputnode, 'anat', anat_deoblique, 'in_file')
    preproc.connect(anat_deoblique, 'out_file', outputnode, 'refit')
    # Disable non_local_means_filtering and n4_correction when running niworkflows-ants
    if method == 'niworkflows-ants':
        non_local_means_filtering = False
        n4_correction = False

    if non_local_means_filtering and n4_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(denoise, 'output_image', n4, 'input_image')
    elif non_local_means_filtering and not n4_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
    elif not non_local_means_filtering and n4_correction:
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(anat_deoblique, 'out_file', n4, 'input_image')

    # Anatomical reorientation
    anat_reorient = pe.Node(interface=afni.Resample(), name='anat_reorient')

    anat_reorient.inputs.orientation = 'RPI'
    anat_reorient.inputs.outputtype = 'NIFTI_GZ'

    if n4_correction:
        preproc.connect(n4, 'output_image', anat_reorient, 'in_file')
    elif non_local_means_filtering and not n4_correction:
        preproc.connect(denoise, 'output_image', anat_reorient, 'in_file')
    else:
        preproc.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file')

    preproc.connect(anat_reorient, 'out_file', outputnode, 'reorient')

    if already_skullstripped:

        anat_skullstrip = pe.Node(
            interface=util.IdentityInterface(fields=['out_file']),
            name='anat_skullstrip')

        preproc.connect(anat_reorient, 'out_file', anat_skullstrip, 'out_file')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'skullstrip')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'brain')

    else:

        if method == 'afni':
            # Skull-stripping using AFNI 3dSkullStrip
            inputnode_afni = pe.Node(util.IdentityInterface(fields=[
                'shrink_factor', 'var_shrink_fac', 'shrink_fac_bot_lim',
                'avoid_vent', 'niter', 'pushout', 'touchup', 'fill_hole',
                'avoid_eyes', 'use_edge', 'exp_frac', 'smooth_final',
                'push_to_edge', 'use_skull', 'perc_int', 'max_inter_iter',
                'blur_fwhm', 'fac', 'monkey'
            ]),
                                     name='AFNI_options')

            skullstrip_args = pe.Node(util.Function(
                input_names=[
                    'spat_norm', 'spat_norm_dxyz', 'shrink_fac',
                    'var_shrink_fac', 'shrink_fac_bot_lim', 'avoid_vent',
                    'niter', 'pushout', 'touchup', 'fill_hole', 'avoid_eyes',
                    'use_edge', 'exp_frac', 'smooth_final', 'push_to_edge',
                    'use_skull', 'perc_int', 'max_inter_iter', 'blur_fwhm',
                    'fac', 'monkey'
                ],
                output_names=['expr'],
                function=create_3dskullstrip_arg_string),
                                      name='anat_skullstrip_args')

            preproc.connect([(inputnode_afni, skullstrip_args,
                              [('shrink_factor', 'shrink_fac'),
                               ('var_shrink_fac', 'var_shrink_fac'),
                               ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'),
                               ('avoid_vent', 'avoid_vent'),
                               ('niter', 'niter'), ('pushout', 'pushout'),
                               ('touchup', 'touchup'),
                               ('fill_hole', 'fill_hole'),
                               ('avoid_eyes', 'avoid_eyes'),
                               ('use_edge', 'use_edge'),
                               ('exp_frac', 'exp_frac'),
                               ('smooth_final', 'smooth_final'),
                               ('push_to_edge', 'push_to_edge'),
                               ('use_skull', 'use_skull'),
                               ('perc_int', 'perc_int'),
                               ('max_inter_iter', 'max_inter_iter'),
                               ('blur_fwhm', 'blur_fwhm'), ('fac', 'fac'),
                               ('monkey', 'monkey')])])

            anat_skullstrip = pe.Node(interface=afni.SkullStrip(),
                                      name='anat_skullstrip')

            anat_skullstrip.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')
            preproc.connect(skullstrip_args, 'expr', anat_skullstrip, 'args')

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')
            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            anat_brain_mask = pe.Node(interface=afni.Calc(),
                                      name='anat_brain_mask')

            anat_brain_mask.inputs.expr = 'step(a)'
            anat_brain_mask.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_skullstrip, 'out_file', anat_brain_mask,
                            'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_brain_mask, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'fsl':
            # Skull-stripping using FSL BET
            inputnode_bet = pe.Node(util.IdentityInterface(fields=[
                'frac', 'mask_boolean', 'mesh_boolean', 'outline', 'padding',
                'radius', 'reduce_bias', 'remove_eyes', 'robust', 'skull',
                'surfaces', 'threshold', 'vertical_gradient'
            ]),
                                    name='BET_options')

            anat_skullstrip = pe.Node(interface=fsl.BET(),
                                      name='anat_skullstrip')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')

            preproc.connect([(inputnode_bet, anat_skullstrip, [
                ('frac', 'frac'),
                ('mask_boolean', 'mask'),
                ('mesh_boolean', 'mesh'),
                ('outline', 'outline'),
                ('padding', 'padding'),
                ('radius', 'radius'),
                ('reduce_bias', 'reduce_bias'),
                ('remove_eyes', 'remove_eyes'),
                ('robust', 'robust'),
                ('skull', 'skull'),
                ('surfaces', 'surfaces'),
                ('threshold', 'threshold'),
                ('vertical_gradient', 'vertical_gradient'),
            ])])

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_skullstrip, 'mask_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'niworkflows-ants':
            # Skull-stripping using niworkflows-ants
            anat_skullstrip_ants = init_brain_extraction_wf(
                tpl_target_path=template_path,
                tpl_mask_path=mask_path,
                tpl_regmask_path=regmask_path,
                name='anat_skullstrip_ants')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip_ants,
                            'inputnode.in_files')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'skullstrip')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'brain')

            preproc.connect(anat_skullstrip_ants,
                            'atropos_wf.copy_xform.out_mask', outputnode,
                            'brain_mask')

        elif method == 'mask':

            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(inputnode, 'brain_mask', anat_skullstrip_orig_vol,
                            'in_file_b')

            preproc.connect(inputnode, 'brain_mask', outputnode, 'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

    return preproc
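# A hedged usage sketch for the 'mask' branch of create_anat_preproc(): skull-stripping is
# skipped and a user-supplied brain mask is applied to the reoriented T1. File paths are
# placeholders; denoising/N4 are disabled so only AFNI is needed.
anat_wf = create_anat_preproc(method='mask',
                              non_local_means_filtering=False,
                              n4_correction=False,
                              wf_name='anat_preproc_mask')
anat_wf.inputs.inputspec.anat = 'sub-01_T1w.nii.gz'
anat_wf.inputs.inputspec.brain_mask = 'sub-01_desc-brain_mask.nii.gz'
# anat_wf.run()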
Example #24
def create_anat_preproc(method='afni',
                        already_skullstripped=False,
                        c=None,
                        wf_name='anat_preproc'):
    """The main purpose of this workflow is to process T1 scans. Raw mprage file is deobliqued, reoriented
    into RPI and skullstripped. Also, a whole brain only mask is generated from the skull stripped image
    for later use in registration.

    Returns
    -------
    anat_preproc : workflow
        Anatomical Preprocessing Workflow

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/anat_preproc/anat_preproc.py>`_

    Workflow Inputs::
        inputspec.anat : string
            User input anatomical (T1) Image, in any of the 8 orientations

    Workflow Outputs::

        outputspec.refit : string
            Path to deobliqued anatomical image

        outputspec.reorient : string
            Path to RPI oriented anatomical image

        outputspec.skullstrip : string
            Path to skull stripped RPI oriented mprage file with normalized intensities.

        outputspec.brain : string
            Path to skull stripped RPI brain image with original intensity values and not normalized or scaled.

    Order of commands:
    - Deobliqing the scans. ::
        3drefit -deoblique mprage.nii.gz

    - Re-orienting the Image into Right-to-Left Posterior-to-Anterior Inferior-to-Superior  (RPI) orientation ::
        3dresample -orient RPI
                   -prefix mprage_RPI.nii.gz
                   -inset mprage.nii.gz

    - Skull-Stripping the image ::
        Using AFNI ::
            3dSkullStrip -input mprage_RPI.nii.gz
                         -o_ply mprage_RPI_3dT.nii.gz
        or using BET ::
            bet mprage_RPI.nii.gz

    - The skull-stripping step modifies the intensity values. To get back the original intensity values, we do an element wise product of RPI data with step function of skull-stripped data ::
        3dcalc -a mprage_RPI.nii.gz
               -b mprage_RPI_3dT.nii.gz
               -expr 'a*step(b)'
               -prefix mprage_RPI_3dc.nii.gz

    High Level Workflow Graph:
    .. image:: ../images/anatpreproc_graph.dot.png
       :width: 500

    Detailed Workflow Graph:
    .. image:: ../images/anatpreproc_graph_detailed.dot.png
       :width: 500

    Examples
    --------
    >>> from CPAC.anat_preproc import create_anat_preproc
    >>> preproc = create_anat_preproc()
    >>> preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
    >>> preproc.run() #doctest: +SKIP
    """

    preproc = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['anat', 'brain_mask']),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['refit', 'reorient', 'skullstrip', 'brain', 'brain_mask']),
                         name='outputspec')

    anat_deoblique = pe.Node(interface=afni.Refit(), name='anat_deoblique')
    anat_deoblique.inputs.deoblique = True
    preproc.connect(inputnode, 'anat', anat_deoblique, 'in_file')

    preproc.connect(anat_deoblique, 'out_file', outputnode, 'refit')
    # Disable non_local_means_filtering and n4_bias_field_correction when running niworkflows-ants
    if method == 'niworkflows-ants':
        c.non_local_means_filtering = False
        c.n4_bias_field_correction = False

    if c.non_local_means_filtering and c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(denoise, 'output_image', n4, 'input_image')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
    elif not c.non_local_means_filtering and c.n4_bias_field_correction:
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(anat_deoblique, 'out_file', n4, 'input_image')

    # Anatomical reorientation
    anat_reorient = pe.Node(interface=afni.Resample(), name='anat_reorient')
    anat_reorient.inputs.orientation = 'RPI'
    anat_reorient.inputs.outputtype = 'NIFTI_GZ'

    if c.n4_bias_field_correction:
        preproc.connect(n4, 'output_image', anat_reorient, 'in_file')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        preproc.connect(denoise, 'output_image', anat_reorient, 'in_file')
    else:
        preproc.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file')

    preproc.connect(anat_reorient, 'out_file', outputnode, 'reorient')

    if already_skullstripped:

        anat_skullstrip = pe.Node(
            interface=util.IdentityInterface(fields=['out_file']),
            name='anat_skullstrip')

        preproc.connect(anat_reorient, 'out_file', anat_skullstrip, 'out_file')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'skullstrip')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'brain')

    else:

        if method == 'afni':
            # Skull-stripping using AFNI 3dSkullStrip
            inputnode_afni = pe.Node(util.IdentityInterface(fields=[
                'mask_vol', 'shrink_factor', 'var_shrink_fac',
                'shrink_fac_bot_lim', 'avoid_vent', 'niter', 'pushout',
                'touchup', 'fill_hole', 'avoid_eyes', 'use_edge', 'exp_frac',
                'smooth_final', 'push_to_edge', 'use_skull', 'perc_int',
                'max_inter_iter', 'blur_fwhm', 'fac', 'monkey'
            ]),
                                     name='AFNI_options')

            skullstrip_args = pe.Node(util.Function(
                input_names=[
                    'spat_norm', 'spat_norm_dxyz', 'mask_vol', 'shrink_fac',
                    'var_shrink_fac', 'shrink_fac_bot_lim', 'avoid_vent',
                    'niter', 'pushout', 'touchup', 'fill_hole', 'avoid_eyes',
                    'use_edge', 'exp_frac', 'smooth_final', 'push_to_edge',
                    'use_skull', 'perc_int', 'max_inter_iter', 'blur_fwhm',
                    'fac', 'monkey'
                ],
                output_names=['expr'],
                function=create_3dskullstrip_arg_string),
                                      name='anat_skullstrip_args')

            preproc.connect([(inputnode_afni, skullstrip_args,
                              [('mask_vol', 'mask_vol'),
                               ('shrink_factor', 'shrink_fac'),
                               ('var_shrink_fac', 'var_shrink_fac'),
                               ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'),
                               ('avoid_vent', 'avoid_vent'),
                               ('niter', 'niter'), ('pushout', 'pushout'),
                               ('touchup', 'touchup'),
                               ('fill_hole', 'fill_hole'),
                               ('avoid_eyes', 'avoid_eyes'),
                               ('use_edge', 'use_edge'),
                               ('exp_frac', 'exp_frac'),
                               ('smooth_final', 'smooth_final'),
                               ('push_to_edge', 'push_to_edge'),
                               ('use_skull', 'use_skull'),
                               ('perc_int', 'perc_int'),
                               ('max_inter_iter', 'max_inter_iter'),
                               ('blur_fwhm', 'blur_fwhm'), ('fac', 'fac'),
                               ('monkey', 'monkey')])])

            anat_skullstrip = pe.Node(interface=afni.SkullStrip(),
                                      name='anat_skullstrip')

            anat_skullstrip.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')
            preproc.connect(skullstrip_args, 'expr', anat_skullstrip, 'args')

            # Generate anatomical brain mask

            anat_brain_mask = pe.Node(interface=afni.Calc(),
                                      name='anat_brain_mask')

            anat_brain_mask.inputs.expr = 'step(a)'
            anat_brain_mask.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_skullstrip, 'out_file', anat_brain_mask,
                            'in_file_a')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_brain_mask, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_brain_mask, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'fsl':
            # Skull-stripping using FSL BET
            inputnode_bet = pe.Node(util.IdentityInterface(fields=[
                'frac', 'mask_boolean', 'mesh_boolean', 'outline', 'padding',
                'radius', 'reduce_bias', 'remove_eyes', 'robust', 'skull',
                'surfaces', 'threshold', 'vertical_gradient'
            ]),
                                    name='BET_options')

            anat_skullstrip = pe.Node(interface=fsl.BET(),
                                      name='anat_skullstrip')
            anat_skullstrip.inputs.output_type = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')

            preproc.connect([(inputnode_bet, anat_skullstrip, [
                ('frac', 'frac'),
                ('mask_boolean', 'mask'),
                ('mesh_boolean', 'mesh'),
                ('outline', 'outline'),
                ('padding', 'padding'),
                ('radius', 'radius'),
                ('reduce_bias', 'reduce_bias'),
                ('remove_eyes', 'remove_eyes'),
                ('robust', 'robust'),
                ('skull', 'skull'),
                ('surfaces', 'surfaces'),
                ('threshold', 'threshold'),
                ('vertical_gradient', 'vertical_gradient'),
            ])])

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_skullstrip, 'mask_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'niworkflows-ants':
            # Skull-stripping using niworkflows-ants
            anat_skullstrip_ants = init_brain_extraction_wf(
                tpl_target_path=c.niworkflows_ants_template_path,
                tpl_mask_path=c.niworkflows_ants_mask_path,
                tpl_regmask_path=c.niworkflows_ants_regmask_path,
                name='anat_skullstrip_ants')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip_ants,
                            'inputnode.in_files')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'skullstrip')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'brain')

            preproc.connect(anat_skullstrip_ants,
                            'atropos_wf.copy_xform.out_mask', outputnode,
                            'brain_mask')

        elif method == 'mask':

            brain_mask_deoblique = pe.Node(interface=afni.Refit(),
                                           name='brain_mask_deoblique')
            brain_mask_deoblique.inputs.deoblique = True
            preproc.connect(inputnode, 'brain_mask', brain_mask_deoblique,
                            'in_file')

            brain_mask_reorient = pe.Node(interface=afni.Resample(),
                                          name='brain_mask_reorient')
            brain_mask_reorient.inputs.orientation = 'RPI'
            brain_mask_reorient.inputs.outputtype = 'NIFTI_GZ'
            preproc.connect(brain_mask_deoblique, 'out_file',
                            brain_mask_reorient, 'in_file')

            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')
            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(brain_mask_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(brain_mask_reorient, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'unet':
            """
            UNet
            options (the values below are the defaults):
            input_slice: 3
            conv_block: 5
            kernel_root: 16
            rescale_dim: 256
            """
            # TODO: add options to pipeline_config
            train_model = UNet2d(dim_in=3, num_conv_block=5, kernel_root=16)
            unet_path = check_for_s3(c.unet_model)
            checkpoint = torch.load(unet_path, map_location={'cuda:0': 'cpu'})
            train_model.load_state_dict(checkpoint['state_dict'])
            model = nn.Sequential(train_model, nn.Softmax2d())

            # create a node called unet_mask
            unet_mask = pe.Node(util.Function(input_names=['model', 'cimg_in'],
                                              output_names=['out_path'],
                                              function=predict_volumes),
                                name='unet_mask')

            unet_mask.inputs.model = model
            preproc.connect(anat_reorient, 'out_file', unet_mask, 'cimg_in')
            """
            Revised mask with ANTs
            """
            # fslmaths <whole head> -mul <mask> brain.nii.gz
            unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                        name='unet_masked_brain')
            unet_masked_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', unet_masked_brain,
                            'in_file')
            preproc.connect(unet_mask, 'out_path', unet_masked_brain,
                            'operand_files')

            # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
            # TODO change it to ANTs linear transform
            native_brain_to_template_brain = pe.Node(
                interface=fsl.FLIRT(), name='native_brain_to_template_brain')
            native_brain_to_template_brain.inputs.reference = c.template_brain_only_for_anat
            native_brain_to_template_brain.inputs.dof = 6
            native_brain_to_template_brain.inputs.interp = 'sinc'
            preproc.connect(unet_masked_brain, 'out_file',
                            native_brain_to_template_brain, 'in_file')

            # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
            # TODO change it to ANTs linear transform
            native_head_to_template_head = pe.Node(
                interface=fsl.FLIRT(), name='native_head_to_template_head')
            native_head_to_template_head.inputs.reference = c.template_skull_for_anat
            native_head_to_template_head.inputs.apply_xfm = True
            preproc.connect(anat_reorient, 'out_file',
                            native_head_to_template_head, 'in_file')
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            native_head_to_template_head, 'in_matrix_file')

            # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
            template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
                                          name='template_brain_mask')
            template_brain_mask.inputs.in_file = c.template_brain_only_for_anat
            template_brain_mask.inputs.args = '-bin'

            # ANTS 3 -m  CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching  --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
            ants_template_head_to_template = pe.Node(
                interface=ants.Registration(),
                name='template_head_to_template')
            ants_template_head_to_template.inputs.metric = ['CC']
            # CC[fixed, moving, 1, 5] in the shell command above: weight 1, radius 5
            ants_template_head_to_template.inputs.metric_weight = [1]
            ants_template_head_to_template.inputs.radius_or_number_of_bins = [5]
            ants_template_head_to_template.inputs.moving_image = c.template_skull_for_anat
            ants_template_head_to_template.inputs.transforms = ['SyN']
            ants_template_head_to_template.inputs.transform_parameters = [
                (0.25, )
            ]
            ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor'
            ants_template_head_to_template.inputs.number_of_iterations = [[
                60, 50, 20
            ]]
            ants_template_head_to_template.inputs.smoothing_sigmas = [[
                0.6, 0.2, 0.0
            ]]
            ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]]
            ants_template_head_to_template.inputs.convergence_threshold = [
                1.e-8
            ]
            preproc.connect(native_head_to_template_head, 'out_file',
                            ants_template_head_to_template, 'fixed_image')

            # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz
            template_head_transform_to_template = pe.Node(
                interface=ants.ApplyTransforms(),
                name='template_head_transform_to_template')
            template_head_transform_to_template.inputs.dimension = 3
            preproc.connect(template_brain_mask, 'out_file',
                            template_head_transform_to_template, 'input_image')
            preproc.connect(native_brain_to_template_brain, 'out_file',
                            template_head_transform_to_template,
                            'reference_image')
            preproc.connect(ants_template_head_to_template,
                            'forward_transforms',
                            template_head_transform_to_template, 'transforms')

            # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat
            invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm')
            invt.inputs.invert_xfm = True
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            invt, 'in_file')

            # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat
            template_brain_to_native_brain = pe.Node(
                interface=fsl.FLIRT(), name='template_brain_to_native_brain')
            template_brain_to_native_brain.inputs.apply_xfm = True
            preproc.connect(template_head_transform_to_template,
                            'output_image', template_brain_to_native_brain,
                            'in_file')
            preproc.connect(unet_masked_brain, 'out_file',
                            template_brain_to_native_brain, 'reference')
            preproc.connect(invt, 'out_file', template_brain_to_native_brain,
                            'in_matrix_file')

            # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz
            refined_mask = pe.Node(interface=fsl.Threshold(),
                                   name='refined_mask')
            refined_mask.inputs.thresh = 0.5
            preproc.connect(template_brain_to_native_brain, 'out_file',
                            refined_mask, 'in_file')

            # get a new brain with mask
            refined_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                    name='refined_brain')
            refined_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', refined_brain,
                            'in_file')
            preproc.connect(refined_mask, 'out_file', refined_brain,
                            'operand_files')

            preproc.connect(refined_mask, 'out_file', outputnode, 'brain_mask')
            preproc.connect(refined_brain, 'out_file', outputnode, 'brain')

    return preproc
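
# Hedged usage sketch (not part of the original function): build the workflow with a
# pre-computed brain mask (method='mask'). The config stand-in below only sets the two
# attributes read above; all file names are hypothetical.
from types import SimpleNamespace

cfg = SimpleNamespace(non_local_means_filtering=False,
                      n4_bias_field_correction=False)
preproc_mask = create_anat_preproc(method='mask', c=cfg,
                                   wf_name='anat_preproc_mask')
preproc_mask.inputs.inputspec.anat = 'sub-01_T1w.nii.gz'
preproc_mask.inputs.inputspec.brain_mask = 'sub-01_desc-brain_mask.nii.gz'
# preproc_mask.run()  # requires AFNI on the path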
Exemple #25
def create_qc_snr(wf_name='qc_snr'):

    wf = pe.Workflow(name=wf_name)

    input_node = pe.Node(util.IdentityInterface(fields=[
        'functional_preprocessed', 'functional_brain_mask',
        'functional_to_anat_linear_xfm', 'anatomical_brain',
        'mean_functional_in_anat'
    ]),
                         name='inputspec')

    output_node = pe.Node(util.IdentityInterface(fields=[
        'snr_axial_image', 'snr_sagittal_image', 'snr_histogram_image',
        'snr_mean'
    ]),
                          name='outputspec')

    std_dev = pe.Node(afni.TStat(args='-stdev'), name='std_dev')

    std_dev.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(input_node, 'functional_preprocessed', std_dev, 'in_file')
    wf.connect(input_node, 'functional_brain_mask', std_dev, 'mask')

    std_dev_anat = pe.Node(fsl.ApplyWarp(interp='trilinear'),
                           name='std_dev_anat')
    wf.connect(input_node, 'functional_to_anat_linear_xfm', std_dev_anat,
               'premat')
    wf.connect(std_dev, 'out_file', std_dev_anat, 'in_file')
    wf.connect(input_node, 'anatomical_brain', std_dev_anat, 'ref_file')

    snr = pe.Node(afni.Calc(expr='b/a'), name='snr')
    snr.inputs.outputtype = 'NIFTI_GZ'
    wf.connect(input_node, 'mean_functional_in_anat', snr, 'in_file_b')
    wf.connect(std_dev_anat, 'out_file', snr, 'in_file_a')

    snr_val = pe.Node(Function(input_names=['measure_file'],
                               output_names=['snr_storefl'],
                               function=cal_snr_val,
                               as_module=True),
                      name='snr_val')

    wf.connect(snr, 'out_file', snr_val, 'measure_file')

    hist_snr = pe.Node(Function(input_names=['measure_file', 'measure'],
                                output_names=['hist_path'],
                                function=gen_histogram,
                                as_module=True),
                       name='hist_snr')

    hist_snr.inputs.measure = 'snr'

    wf.connect(snr, 'out_file', hist_snr, 'measure_file')

    snr_drop_percent = pe.Node(Function(
        input_names=['measure_file', 'percent'],
        output_names=['modified_measure_file'],
        function=drop_percent,
        as_module=True),
                               name='dp_snr')

    snr_drop_percent.inputs.percent = 99

    wf.connect(snr, 'out_file', snr_drop_percent, 'measure_file')

    montage_snr = create_montage('montage_snr', 'red_to_blue', 'snr')

    wf.connect(snr_drop_percent, 'modified_measure_file', montage_snr,
               'inputspec.overlay')
    wf.connect(input_node, 'anatomical_brain', montage_snr,
               'inputspec.underlay')

    wf.connect(montage_snr, 'outputspec.axial_png', output_node,
               'snr_axial_image')
    wf.connect(montage_snr, 'outputspec.sagittal_png', output_node,
               'snr_sagittal_image')
    wf.connect(hist_snr, 'hist_path', output_node, 'snr_histogram_image')
    wf.connect(snr_val, 'snr_storefl', output_node, 'snr_mean')

    return wf
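
# Hedged usage sketch (not part of the original function): the SNR map here is
# mean(functional in anat space) / stddev(functional), computed voxel-wise by the
# afni.Calc node with expr='b/a'. All file names below are hypothetical.
qc_snr_wf = create_qc_snr('qc_snr')
qc_snr_wf.inputs.inputspec.functional_preprocessed = 'func_preproc.nii.gz'
qc_snr_wf.inputs.inputspec.functional_brain_mask = 'func_brain_mask.nii.gz'
qc_snr_wf.inputs.inputspec.functional_to_anat_linear_xfm = 'func2anat.mat'
qc_snr_wf.inputs.inputspec.anatomical_brain = 'anat_brain.nii.gz'
qc_snr_wf.inputs.inputspec.mean_functional_in_anat = 'mean_func_in_anat.nii.gz'
# qc_snr_wf.run()  # requires AFNI and FSL, plus the C-PAC helper functions above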
                     name="datasource")  #grabs data
datasource.inputs.base_directory = '/afs/cbs.mpg.de/projects/mar004_lsd-lemon-preproc/probands/'
datasource.inputs.template = '%s/preprocessed/lsd_resting/%s/rest_preprocessed2mni.nii.gz'
datasource.inputs.template_args['EPI_bandpassed'] = [['subject_id', 'sess_id']]
datasource.inputs.sort_filelist = True
wf.connect(subjects_infosource, "subject_id", datasource, "subject_id")
wf.connect(sess_infosource, "sess_id", datasource, "sess_id")

automask = pe.Node(interface=afni.Automask(), name='automask')
automask.inputs.dilate = 1
automask.inputs.outputtype = "NIFTI_GZ"
wf.connect(datasource, 'EPI_bandpassed', automask, 'in_file')
wf.connect(automask, 'out_file', ds, '@automask')

#extract rois with spheres
sphere = pe.Node(afni.Calc(), name="sphere")
sphere.inputs.in_file_a = fsl.Info.standard_image(
    '/usr/share/fsl/data/standard/MNI152_T1_2mm_brain.nii.gz')
sphere.inputs.outputtype = 'NIFTI_GZ'


def roi2exp(coord):
    # Build a 3dcalc expression that is 1 inside a sphere of `radius` mm around the
    # given MNI coordinate and 0 outside. x and y are sign-flipped because AFNI's
    # 3dcalc coordinates follow the DICOM (RAI) convention, opposite in sign to
    # MNI (RAS) along x and y.
    radius = 4
    return "step((%d*%d)-(x+%d)*(x+%d)-(y+%d)*(y+%d)-(z+%d)*(z+%d))" % (
        radius, radius, coord[0], coord[0], coord[1], coord[1], -coord[2],
        -coord[2])


def roi2name(coord):
    # Output file name encoding the seed coordinate, e.g. roi_sphere_0_-52_26.nii.gz
    return 'roi_sphere_%s_%s_%s.nii.gz' % (str(coord[0]), str(
        coord[1]), str(coord[2]))
Exemple #27
    def __init__(self, settings):
        # call base constructor
        super().__init__(settings)

        # define input/output node
        self.set_input(['T1', 'orig', 'brainmask'])
        self.set_output(['T1_skullstrip', 'allineate_freesurfer2anat'])

        # define datasink substitutions
        self.set_subs([('_maskop40', ''), ('_calc_calc_calc_calc_calc', '')])

        # 3dAllineate (FSorig)
        self.allineate_orig = MapNode(afni.Allineate(
            out_matrix='FSorig2MPR.aff12.1D',
            overwrite=True,
            outputtype='NIFTI_GZ'),
                                      iterfield=['in_file', 'reference'],
                                      name='3dallineate_orig')
        # 3dAllineate (FSbrainmask)
        self.allineate_bm = MapNode(
            afni.Allineate(overwrite=True, no_pad=True, outputtype='NIFTI_GZ'),
            iterfield=['in_file', 'reference', 'in_matrix'],
            name='3dallineate_brainmask')

        # skullstrip mprage (afni)
        self.afni_skullstrip = MapNode(afni.SkullStrip(args="-orig_vol",
                                                       outputtype="NIFTI_GZ"),
                                       iterfield=['in_file'],
                                       name='afni_skullstrip')
        # 3dcalc operations for achieving final mask
        self.maskop1 = MapNode(afni.Calc(expr='step(a)',
                                         overwrite=True,
                                         outputtype='NIFTI_GZ'),
                               iterfield=['in_file_a'],
                               name='maskop1')
        self.maskop2 = []
        for n in range(3):
            self.maskop2.append(
                MapNode(afni.Calc(
                    args='-b a+i -c a-i -d a+j -e a-j -f a+k -g a-k',
                    expr='ispositive(a+b+c+d+e+f+g)',
                    overwrite=True,
                    outputtype='NIFTI_GZ'),
                        iterfield=['in_file_a'],
                        name='maskop2_{}'.format(n)))
        # Inline function for setting up to copy IJK_TO_DICOM_REAL file attribute
        self.refit_setup = MapNode(Function(input_names=['noskull_T1'],
                                            output_names=['refit_input'],
                                            function=lambda noskull_T1:
                                            (noskull_T1, 'IJK_TO_DICOM_REAL')),
                                   iterfield=['noskull_T1'],
                                   name='refitsetup')
        # 3dRefit
        self.refit = MapNode(afni.Refit(),
                             iterfield=['in_file', 'atrcopy'],
                             name='3drefit')
        # 3dcalc for uniform intensity
        self.uniform = MapNode(afni.Calc(expr='a*and(b,b)',
                                         overwrite=True,
                                         outputtype='NIFTI_GZ'),
                               iterfield=['in_file_a', 'in_file_b'],
                               name='uniformintensity')

        # skullstrip mprage (fsl)
        self.fsl_skullstrip = MapNode(fsl.BET(),
                                      iterfield=['in_file'],
                                      name='fsl_skullstrip')
        self.maskop3 = MapNode(
            afni.Calc(expr='or(a,b,c)', overwrite=True, outputtype='NIFTI_GZ'),
            iterfield=['in_file_a', 'in_file_b', 'in_file_c'],
            name='maskop3')
        self.maskop4 = MapNode(
            afni.Calc(expr='c*and(a,b)', overwrite=True,
                      outputtype='NIFTI_GZ'),
            iterfield=['in_file_a', 'in_file_b', 'in_file_c'],
            name='maskop4')

        # Convert from list to string input
        self.select0T1 = Node(Function(input_names=['T1_list'],
                                       output_names=['T1_0'],
                                       function=lambda T1_list: T1_list[0]),
                              name='select0T1')

        # apply bias field correction
        self.biasfieldcorrect = Node(ants.N4BiasFieldCorrection(
            num_threads=settings['num_threads'], copy_header=True),
                                     name='biasfieldcorrect')
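
# Hedged illustration (not part of the original class): the maskop2 expression above,
# '-b a+i -c a-i -d a+j -e a-j -f a+k -g a-k' with 'ispositive(a+b+c+d+e+f+g)',
# dilates a binary mask by one voxel along each axis; the three chained maskop2 nodes
# presumably give a 3-voxel dilation. A rough numpy equivalent of one pass
# (ignoring edge handling, since np.roll wraps around while 3dcalc zero-pads):
import numpy as np

def dilate_once(mask):
    """One 6-neighbourhood dilation pass, analogous to a single maskop2 node."""
    mask = mask.astype(bool)
    out = mask.copy()
    for axis in range(3):
        out |= np.roll(mask, 1, axis=axis)
        out |= np.roll(mask, -1, axis=axis)
    return out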
Exemple #28
def generate_summarize_tissue_mask_ventricles_masking(
        nuisance_wf,
        pipeline_resource_pool,
        regressor_descriptor,
        regressor_selector,
        mask_key,
        use_ants=True,
        ventricle_mask_exist=True):

    # Mask CSF with Ventricles
    if '{}_Unmasked'.format(mask_key) not in pipeline_resource_pool:

        # reduce CSF mask to the lateral ventricles
        mask_csf_with_lat_ven = pe.Node(
            interface=afni.Calc(outputtype='NIFTI_GZ'),
            name='{}_Ventricles'.format(mask_key))
        mask_csf_with_lat_ven.inputs.expr = 'a*b'
        mask_csf_with_lat_ven.inputs.out_file = 'csf_lat_ven_mask.nii.gz'

        if ventricle_mask_exist:
            ventricles_key = 'VentriclesToAnat'
            if 'resolution' in regressor_descriptor:
                ventricles_key += '_{}'.format(
                    regressor_descriptor['resolution'])

            if ventricles_key not in pipeline_resource_pool:

                transforms = pipeline_resource_pool['Transformations']

                if use_ants is True:

                    # perform the transform using ANTS
                    collect_linear_transforms = pe.Node(
                        util.Merge(3),
                        name='{}_ants_transforms'.format(ventricles_key))

                    nuisance_wf.connect(
                        *(transforms['anat_to_mni_initial_xfm'] +
                          (collect_linear_transforms, 'in1')))
                    nuisance_wf.connect(*(transforms['anat_to_mni_rigid_xfm'] +
                                          (collect_linear_transforms, 'in2')))
                    nuisance_wf.connect(
                        *(transforms['anat_to_mni_affine_xfm'] +
                          (collect_linear_transforms, 'in3')))

                    # check transform list to exclude Nonetype (missing) init/rig/affine
                    check_transform = pe.Node(
                        util.Function(input_names=['transform_list'],
                                      output_names=[
                                          'checked_transform_list',
                                          'list_length'
                                      ],
                                      function=check_transforms),
                        name='{0}_check_transforms'.format(ventricles_key))

                    nuisance_wf.connect(collect_linear_transforms, 'out',
                                        check_transform, 'transform_list')

                    # generate inverse transform flags, which depends on the number of transforms
                    inverse_transform_flags = pe.Node(
                        util.Function(
                            input_names=['transform_list'],
                            output_names=['inverse_transform_flags'],
                            function=generate_inverse_transform_flags),
                        name='{0}_inverse_transform_flags'.format(
                            ventricles_key))
                    nuisance_wf.connect(check_transform,
                                        'checked_transform_list',
                                        inverse_transform_flags,
                                        'transform_list')

                    lat_ven_mni_to_anat = pe.Node(
                        interface=ants.ApplyTransforms(),
                        name='{}_ants'.format(ventricles_key))
                    lat_ven_mni_to_anat.inputs.interpolation = 'NearestNeighbor'
                    lat_ven_mni_to_anat.inputs.dimension = 3

                    nuisance_wf.connect(inverse_transform_flags,
                                        'inverse_transform_flags',
                                        lat_ven_mni_to_anat,
                                        'invert_transform_flags')
                    nuisance_wf.connect(check_transform,
                                        'checked_transform_list',
                                        lat_ven_mni_to_anat, 'transforms')

                    nuisance_wf.connect(
                        *(pipeline_resource_pool['Ventricles'] +
                          (lat_ven_mni_to_anat, 'input_image')))
                    nuisance_wf.connect(
                        *(pipeline_resource_pool[mask_key] +
                          (lat_ven_mni_to_anat, 'reference_image')))

                    pipeline_resource_pool[ventricles_key] = (
                        lat_ven_mni_to_anat, 'output_image')

                else:
                    # perform the transform using FLIRT
                    lat_ven_mni_to_anat = pe.Node(
                        interface=fsl.FLIRT(),
                        name='{}_flirt'.format(ventricles_key))
                    lat_ven_mni_to_anat.inputs.interp = 'nearestneighbour'

                    nuisance_wf.connect(
                        *(transforms['mni_to_anat_linear_xfm'] +
                          (lat_ven_mni_to_anat, 'in_matrix_file')))
                    nuisance_wf.connect(
                        *(pipeline_resource_pool['Ventricles'] +
                          (lat_ven_mni_to_anat, 'in_file')))
                    nuisance_wf.connect(*(pipeline_resource_pool[mask_key] +
                                          (lat_ven_mni_to_anat, 'reference')))

                    pipeline_resource_pool[ventricles_key] = (
                        lat_ven_mni_to_anat, 'out_file')

            nuisance_wf.connect(*(pipeline_resource_pool[ventricles_key] +
                                  (mask_csf_with_lat_ven, 'in_file_a')))
            nuisance_wf.connect(*(pipeline_resource_pool[mask_key] +
                                  (mask_csf_with_lat_ven, 'in_file_b')))

            pipeline_resource_pool['{}_Unmasked'.format(
                mask_key)] = pipeline_resource_pool[mask_key]
            pipeline_resource_pool[mask_key] = (mask_csf_with_lat_ven,
                                                'out_file')
        else:
            pipeline_resource_pool['{}_Unmasked'.format(
                mask_key)] = pipeline_resource_pool[mask_key]

    # Return the (possibly updated) resource pool whether or not masking was applied.
    return pipeline_resource_pool
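

# Hedged usage sketch (not part of the original function): resource-pool entries are
# (node, output_name) tuples, which is why they are spliced into
# nuisance_wf.connect(*(...)) calls above. All node names, keys and the regressor
# descriptor below are hypothetical placeholders.
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util


def _placeholder(name):
    # stand-in node exposing a single 'out_file' output
    return pe.Node(util.IdentityInterface(fields=['out_file']), name=name)


nuisance_wf = pe.Workflow(name='nuisance_sketch')
resource_pool = {
    'CSF': (_placeholder('csf_mask'), 'out_file'),
    'Ventricles': (_placeholder('ventricles_mask'), 'out_file'),
    'Transformations': {
        'anat_to_mni_initial_xfm': (_placeholder('init_xfm'), 'out_file'),
        'anat_to_mni_rigid_xfm': (_placeholder('rigid_xfm'), 'out_file'),
        'anat_to_mni_affine_xfm': (_placeholder('affine_xfm'), 'out_file'),
    },
}
resource_pool = generate_summarize_tissue_mask_ventricles_masking(
    nuisance_wf, resource_pool, regressor_descriptor={'tissue': 'CSF'},
    regressor_selector=None, mask_key='CSF', use_ants=True,
    ventricle_mask_exist=True)
# resource_pool['CSF'] now points at the ventricle-masked CSF node, and the original
# mask is kept under 'CSF_Unmasked'.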
Exemple #29
def init_falff_wf(workdir: str | Path,
                  feature=None,
                  fwhm=None,
                  memcalc=MemoryCalculator.default()):
    """
    Calculate amplitude of low-frequency fluctuations (ALFF) and
    fractional ALFF (fALFF) maps

    Returns
    -------
    workflow : workflow object
        ALFF workflow

    Notes
    -----
    Adapted from
    <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/alff/alff.py>

    """
    if feature is not None:
        name = f"{format_workflow(feature.name)}"
    else:
        name = "falff"
    if fwhm is not None:
        name = f"{name}_{int(float(fwhm) * 1e3):d}"
    name = f"{name}_wf"
    workflow = pe.Workflow(name=name)

    # input
    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=["tags", "vals", "metadata", "bold", "mask", "fwhm"]),
        name="inputnode",
    )
    unfiltered_inputnode = pe.Node(
        niu.IdentityInterface(fields=["bold", "mask"]),
        name="unfiltered_inputnode",
    )
    outputnode = pe.Node(niu.IdentityInterface(fields=["resultdicts"]),
                         name="outputnode")

    if fwhm is not None:
        inputnode.inputs.fwhm = float(fwhm)
    elif feature is not None and hasattr(feature, "smoothing"):
        inputnode.inputs.fwhm = feature.smoothing.get("fwhm")

    #
    make_resultdicts = pe.Node(
        MakeResultdicts(tagkeys=["feature"],
                        imagekeys=["alff", "falff", "mask"]),
        name="make_resultdicts",
    )
    if feature is not None:
        make_resultdicts.inputs.feature = feature.name
    workflow.connect(inputnode, "tags", make_resultdicts, "tags")
    workflow.connect(inputnode, "vals", make_resultdicts, "vals")
    workflow.connect(inputnode, "metadata", make_resultdicts, "metadata")
    workflow.connect(inputnode, "mask", make_resultdicts, "mask")

    workflow.connect(make_resultdicts, "resultdicts", outputnode,
                     "resultdicts")

    #
    resultdict_datasink = pe.Node(ResultdictDatasink(base_directory=workdir),
                                  name="resultdict_datasink")
    workflow.connect(make_resultdicts, "resultdicts", resultdict_datasink,
                     "indicts")

    # standard deviation of the filtered image
    stddev_filtered = pe.Node(afni.TStat(),
                              name="stddev_filtered",
                              mem_gb=memcalc.series_std_gb)
    stddev_filtered.inputs.outputtype = "NIFTI_GZ"
    stddev_filtered.inputs.options = "-stdev"
    workflow.connect(inputnode, "bold", stddev_filtered, "in_file")
    workflow.connect(inputnode, "mask", stddev_filtered, "mask")

    # standard deviation of the unfiltered image
    stddev_unfiltered = pe.Node(afni.TStat(),
                                name="stddev_unfiltered",
                                mem_gb=memcalc.series_std_gb)
    stddev_unfiltered.inputs.outputtype = "NIFTI_GZ"
    stddev_unfiltered.inputs.options = "-stdev"
    workflow.connect(unfiltered_inputnode, "bold", stddev_unfiltered,
                     "in_file")
    workflow.connect(unfiltered_inputnode, "mask", stddev_unfiltered, "mask")

    falff = pe.Node(afni.Calc(), name="falff", mem_gb=memcalc.volume_std_gb)
    falff.inputs.args = "-float"
    falff.inputs.expr = "(1.0*bool(a))*((1.0*b)/(1.0*c))"
    falff.inputs.outputtype = "NIFTI_GZ"
    workflow.connect(inputnode, "mask", falff, "in_file_a")
    workflow.connect(stddev_filtered, "out_file", falff, "in_file_b")
    workflow.connect(stddev_unfiltered, "out_file", falff, "in_file_c")

    #
    merge = pe.Node(niu.Merge(2), name="merge")
    workflow.connect(stddev_filtered, "out_file", merge, "in1")
    workflow.connect(falff, "out_file", merge, "in2")

    smooth = pe.MapNode(LazyBlurToFWHM(outputtype="NIFTI_GZ"),
                        iterfield="in_file",
                        name="smooth")
    workflow.connect(merge, "out", smooth, "in_file")
    workflow.connect(inputnode, "mask", smooth, "mask")
    workflow.connect(inputnode, "fwhm", smooth, "fwhm")

    zscore = pe.MapNode(ZScore(),
                        iterfield="in_file",
                        name="zscore",
                        mem_gb=memcalc.volume_std_gb)
    workflow.connect(smooth, "out_file", zscore, "in_file")
    workflow.connect(inputnode, "mask", zscore, "mask")

    split = pe.Node(niu.Split(splits=[1, 1]), name="split")
    workflow.connect(zscore, "out_file", split, "inlist")

    workflow.connect(split, "out1", make_resultdicts, "alff")
    workflow.connect(split, "out2", make_resultdicts, "falff")

    return workflow
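
# Hedged usage sketch (not part of the original function): fALFF is the voxel-wise
# ratio of the standard deviation of the band-pass filtered series (inputnode.bold)
# to that of the unfiltered series (unfiltered_inputnode.bold), restricted to the
# brain mask -- the '(1.0*bool(a))*((1.0*b)/(1.0*c))' expression above. All paths
# below are hypothetical.
falff_wf = init_falff_wf(workdir='/tmp/halfpipe_workdir', fwhm=6.0)
falff_wf.inputs.inputnode.bold = 'bold_bandpassed.nii.gz'
falff_wf.inputs.inputnode.mask = 'brain_mask.nii.gz'
falff_wf.inputs.unfiltered_inputnode.bold = 'bold_unfiltered.nii.gz'
falff_wf.inputs.unfiltered_inputnode.mask = 'brain_mask.nii.gz'
# falff_wf.run()  # requires AFNI plus the HALFpipe interfaces imported above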
Exemple #30
def create_pipeline_graph(pipeline_name, graph_file,
                          graph_kind='hierarchical'):
    """Creates pipeline graph for a given piepline.

    Parameters
    ----------
    pipeline_name : one of {'anats_to_common_rigid', 'anats_to_common_affine',
        'anats_to_common_nonlinear'}
        Pipeline name.

    graph_file : str.
        Path to save the graph image to.

    graph_kind : one of {'orig', 'hierarchical', 'flat', 'exec', 'colored'}, optional.
        The kind of the graph, passed to
        nipype.pipeline.workflows.Workflow().write_graph
    """
    pipeline_names = ['anats_to_common_rigid', 'anats_to_common_affine',
                      'anats_to_common_nonlinear']
    if pipeline_name not in pipeline_names:
        raise NotImplementedError(
            'Pipeline name must be one of {0}, you entered {1}'.format(
                pipeline_names, pipeline_name))
    graph_kinds = ['orig', 'hierarchical', 'flat', 'exec', 'colored']
    if graph_kind not in graph_kinds:
        raise ValueError(
            'Graph kind must be one of {0}, you entered {1}'.format(
                graph_kinds, graph_kind))

    workflow = pe.Workflow(name=pipeline_name)

    #######################################################################
    # Specify rigid body registration pipeline steps
    unifize = pe.Node(interface=afni.Unifize(), name='bias_correct')
    clip_level = pe.Node(interface=afni.ClipLevel(),
                         name='compute_mask_threshold')
    compute_mask = pe.Node(interface=interfaces.MathMorphoMask(),
                           name='compute_brain_mask')
    apply_mask = pe.Node(interface=afni.Calc(), name='apply_brain_mask')
    center_mass = pe.Node(interface=afni.CenterMass(),
                          name='compute_and_set_cm_in_header')
    refit_copy = pe.Node(afni.Refit(), name='copy_cm_in_header')
    tcat1 = pe.Node(afni.TCat(), name='concatenate_across_individuals1')
    tstat1 = pe.Node(afni.TStat(), name='compute_average1')
    undump = pe.Node(afni.Undump(), name='create_empty_template')
    refit_set = pe.Node(afni.Refit(), name='set_cm_in_header')
    resample1 = pe.Node(afni.Resample(), name='resample1')
    resample2 = pe.Node(afni.Resample(), name='resample2')
    shift_rotate = pe.Node(afni.Allineate(), name='shift_rotate')
    apply_allineate1 = pe.Node(afni.Allineate(), name='apply_transform1')
    tcat2 = pe.Node(afni.TCat(), name='concatenate_across_individuals2')
    tstat2 = pe.Node(afni.TStat(), name='compute_average2')
    tcat3 = pe.Node(afni.TCat(), name='concatenate_across_individuals3')
    tstat3 = pe.Node(afni.TStat(), name='compute_average3')

    workflow.add_nodes([unifize, clip_level, compute_mask, apply_mask,
                        center_mass,
                        refit_copy, tcat1, tstat1, undump, refit_set,
                        resample1, resample2, shift_rotate, apply_allineate1,
                        tcat2, tstat2, tcat3, tstat3])

    #######################################################################
    # and connections
    workflow.connect(unifize, 'out_file', clip_level, 'in_file')
    workflow.connect(clip_level, 'clip_val',
                     compute_mask, 'intensity_threshold')
    workflow.connect(unifize, 'out_file', compute_mask, 'in_file')
    workflow.connect(compute_mask, 'out_file', apply_mask, 'in_file_a')
    workflow.connect(unifize, 'out_file', apply_mask, 'in_file_b')
    workflow.connect(apply_mask, 'out_file',
                     center_mass, 'in_file')
    workflow.connect(unifize, 'out_file', refit_copy, 'in_file')
    workflow.connect(center_mass, 'out_file',
                     refit_copy, 'duporigin_file')
    workflow.connect(center_mass, 'out_file', tcat1, 'in_files')
    workflow.connect(tcat1, 'out_file', tstat1, 'in_file')
    workflow.connect(tstat1, 'out_file', undump, 'in_file')
    workflow.connect(undump, 'out_file', refit_set, 'in_file')
    workflow.connect(refit_set, 'out_file', resample1, 'master')
    workflow.connect(refit_copy, 'out_file', resample1, 'in_file')
    workflow.connect(refit_set, 'out_file', resample2, 'master')
    workflow.connect(center_mass, 'out_file', resample2, 'in_file')
    workflow.connect(resample2, 'out_file', tcat2, 'in_files')
    workflow.connect(tcat2, 'out_file', tstat2, 'in_file')
    workflow.connect(tstat2, 'out_file', shift_rotate, 'reference')
    workflow.connect(resample2, 'out_file', shift_rotate, 'in_file')
    workflow.connect(tstat2, 'out_file', apply_allineate1, 'master')
    workflow.connect(resample1, 'out_file',
                     apply_allineate1, 'in_file')
    workflow.connect(shift_rotate, 'out_matrix',
                     apply_allineate1, 'in_matrix')
    workflow.connect(apply_allineate1, 'out_file', tcat3, 'in_files')
    workflow.connect(tcat3, 'out_file', tstat3, 'in_file')
    if pipeline_name in ['anats_to_common_affine',
                         'anats_to_common_nonlinear']:
        mask = pe.Node(afni.MaskTool(), name='generate_count_mask')
        allineate = pe.Node(afni.Allineate(), name='allineate')
        catmatvec = pe.Node(afni.CatMatvec(), name='concatenate_transforms')
        apply_allineate2 = pe.Node(afni.Allineate(), name='apply_transform2')
        tcat3 = pe.Node(
            afni.TCat(), name='concatenate_across_individuals4')
        tstat3 = pe.Node(afni.TStat(), name='compute_average4')

        workflow.add_nodes([mask, allineate, catmatvec, apply_allineate2,
                            tcat3, tstat3])

        workflow.connect(tcat2, 'out_file', mask, 'in_file')
        workflow.connect(mask, 'out_file', allineate, 'weight')
        workflow.connect(apply_allineate1, 'out_file',
                         allineate, 'in_file')
        workflow.connect(allineate, 'out_matrix',
                         catmatvec, 'in_file')
        # XXX how can we enter multiple files?
        workflow.connect(catmatvec, 'out_file',
                         apply_allineate2, 'in_matrix')
        workflow.connect(resample1, 'out_file',
                         apply_allineate2, 'in_file')
        workflow.connect(apply_allineate2, 'out_file', tcat3, 'in_files')
        workflow.connect(tcat3, 'out_file', tstat3, 'in_file')

    if pipeline_name == 'anats_to_common_nonlinear':
        pass

    graph_file_root, graph_file_ext = os.path.splitext(graph_file)
    if graph_file_ext:
        _ = workflow.write_graph(graph2use=graph_kind,
                                 format=graph_file_ext[1:],
                                 dotfilename=graph_file_root)
    else:
        _ = workflow.write_graph(graph2use=graph_kind,
                                 dotfilename=graph_file_root)