Example 1
def test_warp():
    input_map = dict(
        args=dict(argstr='%s', ),
        deoblique=dict(argstr='-deoblique', ),
        environ=dict(usedefault=True, ),
        gridset=dict(argstr='-gridset %s', ),
        ignore_exception=dict(usedefault=True, ),
        in_file=dict(
            argstr='%s',
            mandatory=True,
        ),
        interp=dict(argstr='-%s', ),
        matparent=dict(argstr='-matparent %s', ),
        mni2tta=dict(argstr='-mni2tta', ),
        out_file=dict(argstr='-prefix %s', ),
        outputtype=dict(),
        suffix=dict(),
        tta2mni=dict(argstr='-tta2mni', ),
        zpad=dict(argstr='-zpad %d', ),
    )
    instance = afni.Warp()
    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(instance.inputs.traits()[key],
                                        metakey), value
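
The test above is a nose-style yield test auto-generated by nipype; it only checks the command-line metadata attached to each input trait of afni.Warp. Below is a minimal sketch of the same trait introspection done directly, assuming nipype is installed; the expected '-deoblique' value comes from the input_map in the test.

from nipype.interfaces import afni

warp = afni.Warp()
deoblique_trait = warp.inputs.traits()['deoblique']  # one trait object per input
print(deoblique_trait.argstr)                        # expected: '-deoblique'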
Example 2
def afni_deoblique(in_file=traits.Undefined,
                   out_file=traits.Undefined,
                   out_type='NIFTI_GZ'):
    """ Return a nipype interface for AFNI '3dWarp -deoblique'.

    Parameters
    ----------
    in_file: str
        Path to the input file.

    out_file: str
        Path to the output file.

    out_type: str
        ('NIFTI_GZ' or 'AFNI' or 'NIFTI')
        AFNI output filetype

    Returns
    -------
    deob: nipype.interfaces.afni.Warp
    """
    deob = afni.Warp()
    deob.inputs.in_file = in_file
    deob.inputs.deoblique = True
    deob.inputs.out_file = out_file
    deob.inputs.outputtype = out_type

    return deob
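
A hypothetical use of the helper above; the file names are placeholders (the input file must exist on disk, since nipype validates it) and AFNI has to be on the PATH for .run() to succeed.

deob = afni_deoblique(in_file='sub-01_T1w.nii.gz',
                      out_file='sub-01_T1w_deobl.nii.gz',
                      out_type='NIFTI_GZ')
print(deob.cmdline)  # inspect the generated '3dWarp -deoblique ...' command
result = deob.run()  # runs 3dWarp and returns a result object with outputs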
Example 3
    def warp(self,
             fileobj1=None,
             fileobj2=None,
             out_file=None,
             transformation=None,
             args=None,
             saved_mat_file=None,
             suffix=None):

        #setting files
        if fileobj2 is not None:
            fileobj2, _ = self.FuncHandler(fileobj2, out_file, suffix)
        fileobj1, out_file = self.FuncHandler(fileobj1, out_file, suffix)

        ThreeDWarp = afni.Warp(in_file=fileobj1, out_file=out_file)
        #https://nipype.readthedocs.io/en/latest/interfaces/generated/interfaces.afni/preprocess.html#warp
        if args is not None:
            ThreeDWarp.inputs.args = args
        if transformation == 'card2oblique':
            ThreeDWarp.inputs.oblique_parent = fileobj2
        elif transformation == 'deoblique':
            ThreeDWarp.inputs.deoblique = True
        elif transformation == 'mni2tta':
            ThreeDWarp.inputs.mni2tta = True
        elif transformation == 'tta2mni':
            ThreeDWarp.inputs.tta2mni = True
        elif transformation == 'matrix':
            ThreeDWarp.inputs.matparent = fileobj2
        elif transformation is None:
            print("Warning: no transformation input given")
        else:
            print("Warning: the given transformation does not match any of the "
                  "supported options: card2oblique, deoblique, mni2tta, "
                  "tta2mni, and matrix")
        #ThreeDWarp.inputs.num_threads = cpu_count()

        if saved_mat_file:  # for pipelines that need the 1D matrix transformation saved
            print('saving matrix')
            ThreeDWarp.inputs.verbose = True
            ThreeDWarp.inputs.save_warp = True

        ThreeDWarp.run()

        #remove temp files
        if isinstance(fileobj1, models.BIDSImageFile):
            fileobj1 = os.path.join(self._output_dir, fileobj1.filename)
        if "_desc-temp" in fileobj1:
            os.remove(fileobj1)
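
For reference, a hedged standalone equivalent of the 'card2oblique' branch above, without the class plumbing (FuncHandler, _output_dir); the file names are placeholders and must exist for the traits to validate.

from nipype.interfaces import afni

warp = afni.Warp(in_file='func.nii.gz',
                 oblique_parent='anat_oblique.nii.gz',  # target of card2oblique
                 out_file='func_card2oblique.nii.gz',
                 verbose=True,     # print the applied transform to stdout
                 save_warp=True)   # also expose it as outputs.warp_file
warp.run()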
Example 4
def _warp(to_warp_file,
          reference_file,
          write_dir=None,
          caching=False,
          terminal_output='allatonce',
          verbose=True,
          environ=None):
    if write_dir is None:
        write_dir = os.path.dirname(to_warp_file)

    if environ is None:
        environ = {'AFNI_DECONFLICT': 'OVERWRITE'}

    if caching:
        memory = Memory(write_dir)
        warp = memory.cache(afni.Warp)
    else:
        warp = afni.Warp().run

    out_warp = warp(in_file=to_warp_file,
                    oblique_parent=reference_file,
                    interp='quintic',
                    gridset=reference_file,
                    out_file=fname_presuffix(to_warp_file,
                                             suffix='_warped',
                                             newpath=write_dir),
                    verbose=True,
                    save_warp=True,
                    environ=environ)

    # 3dWarp doesn't put the obliquity in the header, so do it manually
    warped_oblique_file = fix_obliquity(out_warp.outputs.out_file,
                                        reference_file,
                                        verbose=verbose,
                                        caching=caching,
                                        caching_dir=write_dir,
                                        environ=environ)

    return warped_oblique_file, out_warp.outputs.warp_file
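
A hypothetical call to the helper above; both file paths are placeholders, and fname_presuffix, fix_obliquity and Memory must be importable in the surrounding module for it to run.

warped_file, warp_1d = _warp('mean_func.nii.gz',
                             'anat.nii.gz',
                             write_dir='/tmp/warp_example',
                             caching=False,
                             verbose=True)
print(warped_file)  # obliquity-fixed warped image
print(warp_1d)      # text file with the applied transform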
Example 5
def run(options):
    # fix!
    out_dir = os.path.join('option', '1')
    err_dir = os.path.join('option', '2')
    data_dir = os.path.join('option', '3')
    work_dir = os.path.join('something', 'else')

    # Workflow
    merica_wf = pe.Workflow('merica_wf')
    merica_wf.base_dir = work_dir

    inputspec = pe.Node(util.IdentityInterface(fields=options.keys()),
                        name='inputspec')

    # Node: subject_iterable
    run_iterable = pe.Node(util.IdentityInterface(fields=['run'],
                                                  mandatory_inputs=True),
                           name='run_iterable')
    run_iterable.iterables = ('run', runs)

    info = dict(mri_files=[['run']])

    # Create a datasource node to get the mri files
    datasource = pe.Node(nio.DataGrabber(infields=['run'],
                                         outfields=info.keys()),
                         name='datasource')
    datasource.inputs.template = '*'
    datasource.inputs.base_directory = abspath(data_dir)
    datasource.inputs.field_template = dict(mri_files='%s/func/*.nii.gz')
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.inputs.ignore_exception = False
    datasource.inputs.raise_on_empty = True
    merica_wf.connect(run_iterable, 'run', datasource, 'run')

    # Create a Function node to rename output files
    getsubs = pe.Node(util.Function(input_names=['run', 'mri_files'],
                                    output_names=['subs'],
                                    function=get_subs),
                      name='getsubs')
    getsubs.inputs.ignore_exception = False
    merica_wf.connect(run_iterable, 'run', getsubs, 'run')
    merica_wf.connect(datasource, 'mri_files', getsubs, 'mri_files')



    get_cm = pe.Node(util.Function(input_names=['fname'],
                                   output_names=['x', 'y', 'z'],
                                   function=find_CM),
                     name='get_cm')
    get_obliquity = pe.Node(util.Function(input_names=['fname'],
                                          output_names=['angmerit'],
                                          function=check_obliquity),
                            name='get_obliquity')
    if get_obliquity.is_oblique:
        deoblique = pe.Node(afni.Warp(deoblique=True),
                            name='deoblique')
        merica_wf.connect(upstream, 't1', deoblique, 'in_file')
        warpspeed = pe.Node(afni.Warp(args='-card2oblique -newgrid 1.0'),
                            name='warpspeed')
    if not skull_stripped:
        unifeyes = pe.Node(afni.Unifize(),
                           name='unifeyes')
        if get_obliquity.is_oblique:
            merica_wf.connect(deoblique, 'out_file', unifeyes, 'in_file')
        else:
            merica_wf.connect(upstream, 't1', unifeyes, 'in_file')
        skullstrip = pe.Node(
            afni.SkullStrip(args='-shrink_fac_bot_lim 0.3 -orig_vol'),
            name='skullstrip')
        autobots = pe.Node(afni.Autobox(),
                           name='autobots')
        merica_wf.connect(skullstrip, 'out_file', autobots, 'in_file')

    # Moving on to functional preprocessing, be back later!
    if despike:
        despike = pe.Node(afni.Despike(),
                          name='despike')
        if not skull_stripped:
            merica_wf.connect(autobots, 'out_file', despike, 'in_file')
        else:
            merica_wf.connect(upstream, 't1', despike, 'in_file')

    merica_wf.connect(run_iterable, 'run', get_cm, 'fname')
    merica_wf.connect(run_iterable, 'run', get_obliquity, 'fname')
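
Since the workflow above is clearly work in progress, here is a minimal, self-contained sketch of just the deoblique wiring it attempts, assuming nipype and AFNI are installed; 't1.nii.gz' is a placeholder path.

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.afni as afni

wf = pe.Workflow(name='deoblique_wf')
inputspec = pe.Node(util.IdentityInterface(fields=['t1']), name='inputspec')
inputspec.inputs.t1 = 't1.nii.gz'
deoblique = pe.Node(afni.Warp(deoblique=True, outputtype='NIFTI_GZ'),
                    name='deoblique')
wf.connect(inputspec, 't1', deoblique, 'in_file')
# wf.run()  # uncomment to execute once the input path exists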
Example 6
def coregister_fmri_session(session_data,
                            t_r,
                            write_dir,
                            brain_volume,
                            use_rats_tool=True,
                            slice_timing=True,
                            prior_rigid_body_registration=False,
                            caching=False,
                            voxel_size_x=.1,
                            voxel_size_y=.1,
                            verbose=True,
                            **environ_kwargs):
    """
    Coregistration of the subject's functional and anatomical images.
    The functional volume is aligned to the anatomical, first with a rigid body
    registration and then on a per-slice basis (only a fine correction, this is
    mostly for correction of EPI distortion).


    Parameters
    ----------
    session_data : sammba.registration.SessionData
        Single animal data, giving paths to its functional and anatomical
        image, as well as its identifier.

    t_r : float
        Repetition time for the EPI, in seconds.

    write_dir : str
        Directory to save the output and temporary images.

    brain_volume : int
        Volume of the brain in mm3 used for brain extraction.
        Typically 400 for mouse and 1800 for rat.

    use_rats_tool : bool, optional
        If True, brain mask is computed using RATS Mathematical Morphology.
        Otherwise, a histogram-based brain segmentation is used.

    prior_rigid_body_registration : bool, optional
        If True, a rigid-body registration of the anat to the func is performed
        prior to the warp. Useful if the images headers have missing/wrong
        information.

    voxel_size_x : float, optional
        Resampling resolution for the x-axis, in mm.

    voxel_size_y : float, optional
        Resampling resolution for the y-axis, in mm.

    caching : bool, optional
        Whether or not to use caching.

    verbose : bool, optional
        If True, all steps are verbose. Note that caching implies some
        verbosity in any case.

    environ_kwargs : extra arguments keywords
        Extra arguments keywords, passed to interfaces environ variable.

    Returns
    -------
    the same session_data, updated with the following attributes added
        - `output_dir_` : str
                          Path to the output directory.
        - `coreg_func_` : str
                          Path to the coregistered functional image.
        - `coreg_anat_` : str
                          Path to the coregistered anatomical image.
        - `coreg_transform_` : str
                               Path to the transform from anat to func.

    Notes
    -----
    If `use_rats_tool` is turned on, RATS tool is used for brain extraction
    and has to be cited. For more information, see
    `RATS <http://www.iibi.uiowa.edu/content/rats-overview/>`_
    """
    func_filename = session_data.func
    anat_filename = session_data.anat

    environ = {'AFNI_DECONFLICT': 'OVERWRITE'}
    for (key, value) in environ_kwargs.items():
        environ[key] = value

    if verbose:
        terminal_output = 'allatonce'
    else:
        terminal_output = 'none'

    if use_rats_tool:
        if segmentation.interfaces.Info().version() is None:
            raise ValueError('Can not locate RATS')
        else:
            ComputeMask = segmentation.MathMorphoMask
    else:
        ComputeMask = segmentation.HistogramMask

    if ants.base.Info().version is None:
        raise ValueError('Can not locate ANTS')

    if caching:
        memory = Memory(write_dir)
        tshift = memory.cache(afni.TShift)
        clip_level = memory.cache(afni.ClipLevel)
        volreg = memory.cache(afni.Volreg)
        allineate = memory.cache(afni.Allineate)
        tstat = memory.cache(afni.TStat)
        compute_mask = memory.cache(ComputeMask)
        calc = memory.cache(afni.Calc)
        allineate = memory.cache(afni.Allineate)
        allineate2 = memory.cache(afni.Allineate)
        unifize = memory.cache(afni.Unifize)
        bias_correct = memory.cache(ants.N4BiasFieldCorrection)
        catmatvec = memory.cache(afni.CatMatvec)
        warp = memory.cache(afni.Warp)
        resample = memory.cache(afni.Resample)
        slicer = memory.cache(afni.ZCutUp)
        warp_apply = memory.cache(afni.NwarpApply)
        qwarp = memory.cache(afni.Qwarp)
        merge = memory.cache(afni.Zcat)
        copy_geom = memory.cache(fsl.CopyGeom)
        overwrite = False
        for step in [
                tshift, volreg, allineate, allineate2, tstat, compute_mask,
                calc, unifize, resample, slicer, warp_apply, qwarp, merge
        ]:
            step.interface().set_default_terminal_output(terminal_output)
    else:
        tshift = afni.TShift(terminal_output=terminal_output).run
        clip_level = afni.ClipLevel().run
        volreg = afni.Volreg(terminal_output=terminal_output).run
        allineate = afni.Allineate(terminal_output=terminal_output).run
        allineate2 = afni.Allineate(terminal_output=terminal_output
                                    ).run  # TODO: remove after fixed bug
        tstat = afni.TStat(terminal_output=terminal_output).run
        compute_mask = ComputeMask().run
        calc = afni.Calc(terminal_output=terminal_output).run
        unifize = afni.Unifize(terminal_output=terminal_output).run
        bias_correct = ants.N4BiasFieldCorrection(
            terminal_output=terminal_output).run
        catmatvec = afni.CatMatvec().run
        warp = afni.Warp().run
        resample = afni.Resample(terminal_output=terminal_output).run
        slicer = afni.ZCutUp(terminal_output=terminal_output).run
        warp_apply = afni.NwarpApply(terminal_output=terminal_output).run
        qwarp = afni.Qwarp(terminal_output=terminal_output).run
        merge = afni.Zcat(terminal_output=terminal_output).run
        copy_geom = fsl.CopyGeom(terminal_output=terminal_output).run
        overwrite = True

    session_data._check_inputs()
    output_dir = os.path.join(os.path.abspath(write_dir),
                              session_data.animal_id)
    session_data._set_output_dir_(output_dir)
    current_dir = os.getcwd()
    os.chdir(output_dir)
    output_files = []

    #######################################
    # Correct functional for slice timing #
    #######################################
    if slice_timing:
        out_tshift = tshift(in_file=func_filename,
                            outputtype='NIFTI_GZ',
                            tpattern='altplus',
                            tr=str(t_r),
                            environ=environ)
        func_filename = out_tshift.outputs.out_file
        output_files.append(func_filename)

    ################################################
    # Register functional volumes to the first one #
    ################################################
    # XXX why do you need a thresholded image ?
    out_clip_level = clip_level(in_file=func_filename)
    out_calc_threshold = calc(in_file_a=func_filename,
                              expr='ispositive(a-{0}) * a'.format(
                                  out_clip_level.outputs.clip_val),
                              outputtype='NIFTI_GZ')
    thresholded_filename = out_calc_threshold.outputs.out_file

    out_volreg = volreg(  # XXX dfile not saved
        in_file=thresholded_filename,
        outputtype='NIFTI_GZ',
        environ=environ,
        oned_file=fname_presuffix(thresholded_filename,
                                  suffix='Vr.1Dfile.1D',
                                  use_ext=False),
        oned_matrix_save=fname_presuffix(thresholded_filename,
                                         suffix='Vr.aff12.1D',
                                         use_ext=False))

    # Apply the registration to the whole head
    out_allineate = allineate(in_file=func_filename,
                              master=func_filename,
                              in_matrix=out_volreg.outputs.oned_matrix_save,
                              out_file=fname_presuffix(func_filename,
                                                       suffix='Av'),
                              environ=environ)

    # 3dAllineate removes the obliquity. This is not a good way to re-add it,
    # as it would remove motion-correction info from the header if this were an
    # AFNI file... as it happens it's NIfTI, which does not store that, so it
    # is irrelevant here!
    out_copy_geom = copy_geom(dest_file=out_allineate.outputs.out_file,
                              in_file=out_volreg.outputs.out_file)

    allineated_filename = out_copy_geom.outputs.out_file

    # Create a (hopefully) nice mean image for use in the registration
    out_tstat = tstat(in_file=allineated_filename,
                      args='-mean',
                      outputtype='NIFTI_GZ',
                      environ=environ)

    # Update outputs
    output_files.extend([
        thresholded_filename, out_volreg.outputs.oned_matrix_save,
        out_volreg.outputs.out_file, out_volreg.outputs.md1d_file,
        allineated_filename, out_tstat.outputs.out_file
    ])

    ############################################
    # Correct anat and func for intensity bias #
    ############################################
    # Correct the functional average for intensity bias
    out_bias_correct = bias_correct(input_image=out_tstat.outputs.out_file)
    unbiased_func_filename = out_bias_correct.outputs.output_image

    # Bias-correct the anatomical image
    out_unifize = unifize(in_file=anat_filename,
                          outputtype='NIFTI_GZ',
                          environ=environ)
    unbiased_anat_filename = out_unifize.outputs.out_file

    # Update outputs
    output_files.extend([unbiased_func_filename, unbiased_anat_filename])

    #############################################
    # Rigid-body registration anat -> mean func #
    #############################################
    if prior_rigid_body_registration:
        # Mask the mean functional volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_func_filename)
        out_compute_mask_func = compute_mask(
            in_file=unbiased_func_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_func = calc(in_file_a=unbiased_func_filename,
                             in_file_b=out_compute_mask_func.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Mask the anatomical volume outside the brain.
        out_clip_level = clip_level(in_file=unbiased_anat_filename)
        out_compute_mask_anat = compute_mask(
            in_file=unbiased_anat_filename,
            volume_threshold=brain_volume,
            intensity_threshold=int(out_clip_level.outputs.clip_val))
        out_calc_anat = calc(in_file_a=unbiased_anat_filename,
                             in_file_b=out_compute_mask_anat.outputs.out_file,
                             expr='a*b',
                             outputtype='NIFTI_GZ',
                             environ=environ)

        # Compute the transformation from functional to anatomical brain
        # XXX: why in this sense
        out_allineate = allineate2(
            in_file=out_calc_func.outputs.out_file,
            reference=out_calc_anat.outputs.out_file,
            out_matrix=fname_presuffix(out_calc_func.outputs.out_file,
                                       suffix='_shr.aff12.1D',
                                       use_ext=False),
            center_of_mass='',
            warp_type='shift_rotate',
            out_file=fname_presuffix(out_calc_func.outputs.out_file,
                                     suffix='_shr'),
            environ=environ)
        rigid_transform_file = out_allineate.outputs.out_matrix
        output_files.extend([
            out_compute_mask_func.outputs.out_file,
            out_calc_func.outputs.out_file,
            out_compute_mask_anat.outputs.out_file,
            out_calc_anat.outputs.out_file, rigid_transform_file,
            out_allineate.outputs.out_file
        ])

        # apply the inverse transform to register the anatomical to the func
        catmatvec_out_file = fname_presuffix(rigid_transform_file,
                                             suffix='INV')
        out_catmatvec = catmatvec(in_file=[(rigid_transform_file, 'I')],
                                  oneline=True,
                                  out_file=catmatvec_out_file)
        output_files.append(out_catmatvec.outputs.out_file)
        out_allineate = allineate(in_file=unbiased_anat_filename,
                                  master=unbiased_func_filename,
                                  in_matrix=out_catmatvec.outputs.out_file,
                                  out_file=fname_presuffix(
                                      unbiased_anat_filename,
                                      suffix='_shr_in_func_space'),
                                  environ=environ)
        allineated_anat_filename = out_allineate.outputs.out_file
        output_files.append(allineated_anat_filename)
    else:
        allineated_anat_filename = unbiased_anat_filename

    ############################################
    # Nonlinear registration anat -> mean func #
    ############################################
    # 3dWarp doesn't put the obliquity in the header, so do it manually
    # This step generates one file per slice and per time point, so we are
    # making sure they are removed at the end
    out_warp = warp(in_file=allineated_anat_filename,
                    oblique_parent=unbiased_func_filename,
                    interp='quintic',
                    gridset=unbiased_func_filename,
                    outputtype='NIFTI_GZ',
                    verbose=True,
                    environ=environ)
    registered_anat_filename = out_warp.outputs.out_file
    registered_anat_oblique_filename = fix_obliquity(registered_anat_filename,
                                                     unbiased_func_filename,
                                                     verbose=verbose)

    # Concatenate all the anat-to-func transforms
    mat_filename = fname_presuffix(registered_anat_filename,
                                   suffix='_warp.mat',
                                   use_ext=False)
    # XXX Handle this correctly according to caching
    if not os.path.isfile(mat_filename):
        np.savetxt(mat_filename, [out_warp.runtime.stdout], fmt='%s')
        output_files.append(mat_filename)

    transform_filename = fname_presuffix(registered_anat_filename,
                                         suffix='_anat_to_func.aff12.1D',
                                         use_ext=False)
    if prior_rigid_body_registration:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE'),
                               (rigid_transform_file, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)
    else:
        _ = catmatvec(in_file=[(mat_filename, 'ONELINE')],
                      oneline=True,
                      out_file=transform_filename)

    ##################################################
    # Per-slice non-linear registration func -> anat #
    ##################################################
    # Slice anatomical image
    anat_img = nibabel.load(registered_anat_oblique_filename)
    anat_n_slices = anat_img.header.get_data_shape()[2]
    sliced_registered_anat_filenames = []
    for slice_n in range(anat_n_slices):
        out_slicer = slicer(in_file=registered_anat_oblique_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(
                                registered_anat_oblique_filename,
                                suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      registered_anat_oblique_filename,
                                      verbose=verbose)
        sliced_registered_anat_filenames.append(oblique_slice)

    # Slice mean functional
    sliced_bias_corrected_filenames = []
    img = nibabel.load(func_filename)
    n_slices = img.header.get_data_shape()[2]
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=unbiased_func_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(unbiased_func_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      unbiased_func_filename,
                                      verbose=verbose)
        sliced_bias_corrected_filenames.append(oblique_slice)

    # Below line is to deal with slices where there is no signal (for example
    # rostral end of some anatomicals)

    # The inverse warp frequently fails, Resampling can help it work better
    # XXX why specifically .1 in voxel_size ?
    voxel_size_z = anat_img.header.get_zooms()[2]
    resampled_registered_anat_filenames = []
    for sliced_registered_anat_filename in sliced_registered_anat_filenames:
        out_resample = resample(in_file=sliced_registered_anat_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_registered_anat_filenames.append(
            out_resample.outputs.out_file)

    resampled_bias_corrected_filenames = []
    for sliced_bias_corrected_filename in sliced_bias_corrected_filenames:
        out_resample = resample(in_file=sliced_bias_corrected_filename,
                                voxel_size=(voxel_size_x, voxel_size_y,
                                            voxel_size_z),
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_bias_corrected_filenames.append(
            out_resample.outputs.out_file)

    # single slice non-linear functional to anatomical registration
    warped_slices = []
    warp_filenames = []
    for (resampled_bias_corrected_filename,
         resampled_registered_anat_filename) in zip(
             resampled_bias_corrected_filenames,
             resampled_registered_anat_filenames):
        warped_slice = fname_presuffix(resampled_bias_corrected_filename,
                                       suffix='_qw')
        out_qwarp = qwarp(
            in_file=resampled_bias_corrected_filename,
            base_file=resampled_registered_anat_filename,
            iwarp=True,  # XXX: is this necessary
            noneg=True,
            blur=[0],
            nmi=True,
            noXdis=True,
            allineate=True,
            allineate_opts='-parfix 1 0 -parfix 2 0 -parfix 3 0 '
            '-parfix 4 0 -parfix 5 0 -parfix 6 0 '
            '-parfix 7 0 -parfix 9 0 '
            '-parfix 10 0 -parfix 12 0',
            out_file=warped_slice,
            environ=environ)
        warped_slices.append(out_qwarp.outputs.warped_source)
        warp_filenames.append(out_qwarp.outputs.source_warp)
        output_files.append(out_qwarp.outputs.base_warp)
        # There are files generated by the allineate option
        output_files.extend([
            fname_presuffix(out_qwarp.outputs.warped_source, suffix='_Allin'),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.nii',
                            use_ext=False),
            fname_presuffix(out_qwarp.outputs.warped_source,
                            suffix='_Allin.aff12.1D',
                            use_ext=False)
        ])

    # Resample the mean volume back to the initial resolution,
    voxel_size = nibabel.load(unbiased_func_filename).header.get_zooms()
    resampled_warped_slices = []
    for warped_slice in warped_slices:
        out_resample = resample(in_file=warped_slice,
                                voxel_size=voxel_size,
                                outputtype='NIFTI_GZ',
                                environ=environ)
        resampled_warped_slices.append(out_resample.outputs.out_file)

    # fix the obliquity
    resampled_warped_slices_oblique = []
    for (sliced_registered_anat_filename,
         resampled_warped_slice) in zip(sliced_registered_anat_filenames,
                                        resampled_warped_slices):
        oblique_slice = fix_obliquity(resampled_warped_slice,
                                      sliced_registered_anat_filename,
                                      verbose=verbose)
        resampled_warped_slices_oblique.append(oblique_slice)

    # slice functional
    sliced_func_filenames = []
    for slice_n in range(n_slices):
        out_slicer = slicer(in_file=allineated_filename,
                            keep='{0} {0}'.format(slice_n),
                            out_file=fname_presuffix(allineated_filename,
                                                     suffix='Sl%d' % slice_n),
                            environ=environ)
        oblique_slice = fix_obliquity(out_slicer.outputs.out_file,
                                      allineated_filename,
                                      verbose=verbose)
        sliced_func_filenames.append(oblique_slice)

    # Apply the precomputed warp slice by slice
    warped_func_slices = []
    for (sliced_func_filename, warp_filename) in zip(sliced_func_filenames,
                                                     warp_filenames):
        out_warp_apply = warp_apply(in_file=sliced_func_filename,
                                    master=sliced_func_filename,
                                    warp=warp_filename,
                                    out_file=fname_presuffix(
                                        sliced_func_filename, suffix='_qw'),
                                    environ=environ)
        warped_func_slices.append(out_warp_apply.outputs.out_file)

    # Finally, merge all slices !
    out_merge_func = merge(in_files=warped_func_slices,
                           outputtype='NIFTI_GZ',
                           environ=environ)

    # Fix the obliquity
    merged_oblique = fix_obliquity(out_merge_func.outputs.out_file,
                                   allineated_filename,
                                   verbose=verbose)

    # Update the fmri data
    setattr(session_data, "coreg_func_", merged_oblique)
    setattr(session_data, "coreg_anat_", registered_anat_oblique_filename)
    setattr(session_data, "coreg_transform_", transform_filename)
    os.chdir(current_dir)

    # Collect the outputs
    output_files.extend(sliced_registered_anat_filenames +
                        sliced_bias_corrected_filenames +
                        resampled_registered_anat_filenames +
                        resampled_bias_corrected_filenames + warped_slices +
                        warp_filenames + resampled_warped_slices_oblique +
                        sliced_func_filenames + warped_func_slices)
    if not caching:
        for out_file in output_files:
            if os.path.isfile(out_file):
                os.remove(out_file)
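
A hypothetical invocation of coregister_fmri_session; the SessionData import path is the one given in the docstring above, but the constructor keywords and all file paths are assumptions.

from sammba.registration import SessionData  # path as stated in the docstring

session = SessionData(func='func.nii.gz', anat='anat.nii.gz',
                      animal_id='animal01')
coregister_fmri_session(session, t_r=1.0, write_dir='/tmp/coreg',
                        brain_volume=400,  # mm3, typical for mouse
                        use_rats_tool=False,
                        prior_rigid_body_registration=True)
print(session.coreg_func_, session.coreg_anat_, session.coreg_transform_)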
Example 7
	os.chdir(results_path + subj)
	print "Currently processing subject: ", subj

	anat = data_path + subj + '/anat/t1_mpr_sag_p2_iso.nii.gz'

	# Initialize workflow
	workflow = pe.Workflow(name='anat')
	workflow.base_dir = '.'

	ref_brain = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain.nii.gz'
	ref_mask = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm_brain_mask.nii.gz'
	reference_skull = '/usr/share/fsl/5.0/data/standard/MNI152_T1_2mm.nii.gz'
	fnirt_config = '/usr/share/fsl/5.0/etc/flirtsch/T1_2_MNI152_2mm.cnf'

	# Reorient to FSL standard orientation
	deoblique = pe.Node(interface=afni.Warp(in_file=anat, deoblique=True, outputtype='NIFTI_GZ'), name='deoblique')
	reorient = pe.Node(interface=fsl.Reorient2Std(output_type='NIFTI_GZ'), name='reorient')
	workflow.connect(deoblique, 'out_file', reorient, 'in_file')

	# AFNI skullstrip
	skullstrip = pe.Node(interface=afni.SkullStrip(args='-push_to_edge -ld 30', outputtype='NIFTI_GZ'), name='skullstrip')
	workflow.connect(reorient, 'out_file', skullstrip, 'in_file')

	# Segment with FSL FAST
	#tissue priors
	#tissue_path = '/usr/share/fsl/5.0/data/standard/tissuepriors/2mm/'
	#csf_prior = tissue_path + 'avg152T1_csf_bin.nii.gz'
	#white_prior = tissue_path + 'avg152T1_white_bin.nii.gz'
	#gray_prior = tissue_path + 'avg152T1_gray_bin.nii.gz'

	#segmentation = pe.Node(interface=fsl.FAST(number_classes=3, use_priors=True, img_type=1), name='segmentation')
Example 8
            inputnode.inputs.source_file = func1
        else:
            inputnode.inputs.source_file = func2

        # Motion correction + slice timing correction
        realign4d = pe.Node(interface=SpaceTimeRealigner(), name='realign4d')
        realign4d.inputs.ignore_exception = True
        realign4d.inputs.slice_times = 'asc_alt_siemens'
        realign4d.inputs.slice_info = 2
        realign4d.inputs.tr = 2.00

        workflow1.connect(inputnode, 'source_file', realign4d, 'in_file')
        workflow1.connect(realign4d, 'par_file', outputnode, 'move_par')

        # Reorient
        deoblique = pe.Node(interface=afni.Warp(deoblique=True,
                                                outputtype='NIFTI_GZ'),
                            name='deoblique')
        workflow1.connect(realign4d, 'out_file', deoblique, 'in_file')
        reorient = pe.Node(interface=fsl.Reorient2Std(output_type='NIFTI_GZ'),
                           name='reorient')
        workflow1.connect(deoblique, 'out_file', reorient, 'in_file')
        workflow1.connect(reorient, 'out_file', outputnode, 'out_file')

        # Run workflow1
        workflow1.write_graph()
        workflow1.run()

        # Initialize DRIFTER
        if data == 'func1':
            # check if physiological signals exist; if not, then None
            infile = results_path + '/' + data + '_1/reorient/corr_epi_warp_reoriented.nii.gz'
Example 9
def prepro_anat(k):
    try:

        subj = k

        for s in (['session2']):

            if (not os.path.isdir(data_path + subj + '/' + s)):
                continue

            if (os.path.isfile(
                    results_path + subj + '/' + s +
                    '/anat/nonlinear_reg/anat_HR_reoriented_warped.nii.gz')):
                print "Skipping " + subj + '/' + s
                continue
            '''
		if (os.path.isfile(pilot_path +subj +'/anat/nonlinear_reg/anat_reoriented_skullstrip_warped.nii.gz')):
			print "Skipping "+ subj +'/' + s 
			continue
		'''
            for d in (results_path, results_path + subj,
                      results_path + subj + '/' + s):
                if not os.path.isdir(d):
                    os.mkdir(d)
            os.chdir(results_path + subj + '/' + s)
            print "Currently processing subject: ", subj + '/' + s

            anat = data_path + subj + '/' + s + '/anat_HR.nii.gz'

            # Initialize workflow
            workflow = pe.Workflow(name='anat')
            workflow.base_dir = '.'

            # Reorient to FSL standard orientation
            deoblique = pe.Node(
                interface=afni.Warp(in_file=anat,
                                    deoblique=True,
                                    outputtype='NIFTI_GZ'),
                name='deoblique')  #leave out if you don't need this
            reorient = pe.Node(
                interface=fsl.Reorient2Std(output_type='NIFTI_GZ'),
                name='reorient')
            workflow.connect(deoblique, 'out_file', reorient, 'in_file')

            # AFNI skullstrip
            skullstrip = pe.Node(
                interface=afni.SkullStrip(outputtype='NIFTI_GZ'),
                name='skullstrip')
            workflow.connect(reorient, 'out_file', skullstrip, 'in_file')

            # Segment with FSL FAST
            segmentation = pe.Node(interface=fsl.FAST(number_classes=3,
                                                      use_priors=True,
                                                      img_type=1),
                                   name='segmentation')
            segmentation.inputs.segments = True
            segmentation.inputs.probability_maps = True
            workflow.connect(skullstrip, 'out_file', segmentation, 'in_files')

            # Register to HR anatomical
            hranat = results_path + subj + '/session1/anat/reorient/anat_HR_brain_reoriented.nii.gz'
            #anat2hr = pe.Node(interface=fsl.FLIRT(no_search=True, reference=hranat), name='anat2hr')
            anat2hr = pe.Node(interface=fsl.FLIRT(dof=6, reference=hranat),
                              name='anat2hr')
            workflow.connect(reorient, 'out_file', anat2hr, 'in_file')

            # Register to standard MNI template
            #1. linear
            linear_reg = pe.Node(interface=fsl.FLIRT(cost='corratio',
                                                     reference=ref_brain),
                                 name='linear_reg')

            #2.nonlinear
            nonlinear_reg = pe.Node(interface=fsl.FNIRT(fieldcoeff_file=True,
                                                        jacobian_file=True,
                                                        ref_file=ref_brain,
                                                        refmask_file=ref_mask),
                                    name='nonlinear_reg')

            inv_flirt_xfm = pe.Node(
                interface=fsl.utils.ConvertXFM(invert_xfm=True),
                name='inv_linear_xfm')

            workflow.connect(skullstrip, 'out_file', linear_reg, 'in_file')
            #workflow.connect(anat2hr, 'out_matrix_file', linear_reg, 'in_matrix_file')
            workflow.connect(linear_reg, 'out_matrix_file', nonlinear_reg,
                             'affine_file')
            workflow.connect(skullstrip, 'out_file', nonlinear_reg, 'in_file')
            workflow.connect(linear_reg, 'out_matrix_file', inv_flirt_xfm,
                             'in_file')

            # Run workflow
            workflow.write_graph()
            workflow.run()

            print "ANATOMICAL PREPROCESSING DONE! Results in ", results_path + subj + '/' + s

    except:
        print "Error with patient: ", subj
        traceback.print_exc()
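
A hypothetical driver for prepro_anat, assuming data_path, results_path and the FSL reference paths are defined at module level as in the earlier snippets; the subject IDs are placeholders.

subjects = ['subject01', 'subject02']
for k in subjects:
    prepro_anat(k)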