Example #1
def merge_tsv(
    bids_directory: str,
    output_tsv: str,
    caps_directory: Optional[str] = None,
    pipelines: Optional[List[str]] = None,
    volume_atlas_selection: Optional[List[str]] = None,
    freesurfer_atlas_selection: Optional[List[str]] = None,
    pvc_restriction: Optional[int] = None,
    pet_tracers_selection: Optional[List[str]] = None,
    group_selection: Optional[List[str]] = None,
    subjects_sessions_tsv: Optional[str] = None,
    ignore_scan_files: bool = False,
    ignore_session_scan_files: bool = False,
) -> None:
    """Merge clinical data into a single TSV file."""

    from clinica.iotools.utils.data_handling import create_merge_file
    from clinica.utils.inputs import check_bids_folder

    check_bids_folder(bids_directory)

    create_merge_file(
        bids_directory,
        output_tsv,
        caps_dir=caps_directory,
        pipelines=pipelines,
        ignore_scan_files=ignore_scan_files,
        ignore_sessions_files=ignore_session_scan_files,
        volume_atlas_selection=volume_atlas_selection,
        freesurfer_atlas_selection=freesurfer_atlas_selection,
        pvc_restriction=pvc_restriction,
        tsv_file=subjects_sessions_tsv,
        group_selection=group_selection,
        tracers_selection=pet_tracers_selection,
    )
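For reference, a minimal call to the wrapper above might look like the following sketch; all paths and the pipeline name are hypothetical placeholders, not values taken from the examples.

# Hedged usage sketch: paths and pipeline name are placeholders.
merge_tsv(
    bids_directory="/data/my_bids",
    output_tsv="/data/outputs/merged.tsv",
    caps_directory="/data/my_caps",
    pipelines=["t1-volume"],
)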
Example #2
    def run_command(self, args):
        from clinica.iotools.utils import data_handling as dt
        from clinica.utils.inputs import check_bids_folder

        check_bids_folder(args.bids_directory)
        dt.compute_missing_mods(args.bids_directory, args.out_directory,
                                args.output_prefix)
Example #3
def check_missing_modalities(
    bids_directory: str,
    output_directory: str,
    output_prefix: str = "missing_mods",
) -> None:
    """Check missing modalities in a BIDS dataset."""
    from clinica.iotools.utils.data_handling import compute_missing_mods
    from clinica.utils.inputs import check_bids_folder

    check_bids_folder(bids_directory)
    compute_missing_mods(bids_directory, output_directory, output_prefix)
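A hedged usage sketch for this wrapper (paths are placeholders):

# Usage sketch: paths are hypothetical.
check_missing_modalities(
    bids_directory="/data/my_bids",
    output_directory="/data/outputs",
    output_prefix="missing_mods",
)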
Example #4
    def run_command(self, args):
        import os

        from clinica.iotools.utils import data_handling as dt
        from clinica.utils.inputs import check_bids_folder
        from clinica.utils.stream import cprint

        check_bids_folder(args.bids_directory)
        output_directory = os.path.dirname(os.path.abspath(args.out_tsv))
        os.makedirs(output_directory, exist_ok=True)
        dt.create_subs_sess_list(args.bids_directory, output_directory,
                                 os.path.basename(args.out_tsv))
        cprint("The TSV file was saved to %s" % os.path.abspath(args.out_tsv))
Example #5
    def run_command(self, args):
        from clinica.iotools.utils import data_handling as dt
        from clinica.utils.inputs import check_bids_folder

        check_bids_folder(args.bids_directory)
        dt.create_merge_file(args.bids_directory,
                             args.out_tsv,
                             caps_dir=args.caps_directory,
                             pipelines=args.pipelines,
                             atlas_selection=args.atlas_selection,
                             pvc_restriction=args.pvc_restriction,
                             tsv_file=args.subjects_sessions_tsv,
                             group_selection=args.group_selection)
Example #6
def create_subjects_visits(bids_directory: str, output_tsv: str) -> None:
    """Export participants with their sessions."""
    from os import makedirs
    from os.path import abspath, basename, dirname

    from clinica.iotools.utils.data_handling import create_subs_sess_list
    from clinica.utils.inputs import check_bids_folder
    from clinica.utils.stream import cprint

    check_bids_folder(bids_directory)
    output_directory = dirname(abspath(output_tsv))
    makedirs(output_directory, exist_ok=True)
    create_subs_sess_list(bids_directory, output_directory,
                          basename(output_tsv))
    cprint(f"The TSV file was saved to {output_tsv}.")
Example #7
    def run_command(self, args):
        import os
        import errno
        from clinica.iotools.utils import data_handling as dt
        from clinica.utils.stream import cprint
        from clinica.utils.inputs import check_bids_folder

        check_bids_folder(args.bids_directory)
        output_directory = os.path.dirname(os.path.abspath(args.out_tsv))
        if not os.path.exists(output_directory):
            try:
                os.makedirs(output_directory)
            except OSError as exc:  # Guard against race condition
                if exc.errno != errno.EEXIST:
                    raise
        dt.create_subs_sess_list(args.bids_directory, output_directory,
                                 os.path.basename(args.out_tsv))
        cprint("The TSV file was saved to %s" % os.path.abspath(args.out_tsv))
Example #8
    def run_command(self, args):
        from clinica.iotools.utils import data_handling as dt
        from clinica.utils.inputs import check_bids_folder

        check_bids_folder(args.bids_directory)
        dt.create_merge_file(
            args.bids_directory,
            args.out_tsv,
            caps_dir=args.caps_directory,
            pipelines=args.pipelines,
            ignore_scan_files=args.ignore_scan_files,
            ignore_sessions_files=args.ignore_session_scan_files,
            volume_atlas_selection=args.volume_atlas_selection,
            freesurfer_atlas_selection=args.freesurfer_atlas_selection,
            pvc_restriction=args.pvc_restriction,
            tsv_file=args.subjects_sessions_tsv,
            group_selection=args.group_selection,
            tracers_selection=args.pet_tracers_selection,
        )
Example #9
def center_all_nifti(bids_dir, output_dir, modality, center_all_files=False):
    """
    Center all the NIfTI images of the input BIDS folder into the empty output_dir specified in argument.
    All the files from bids_dir are copied into output_dir, then all the NIfTI images we can found are replaced by their
    centered version if their center if off the origin by more than 50 mm.

    Args:
        bids_dir: (str) path to bids directory
        output_dir: (str) path to EMPTY output directory
        modality: (list of str) modalities to convert
        center_all_files: (bool) center only files that may cause problem for SPM if false. If true, center all NIfTI

    Returns:
        List of the centered files
    """
    from glob import glob
    from os import listdir
    from os.path import basename, isdir, isfile, join
    from shutil import copy2, copytree

    from colorama import Fore

    from clinica.utils.exceptions import ClinicaBIDSError
    from clinica.utils.inputs import check_bids_folder

    # output and input must be different, so that we do not mess with user's data
    if bids_dir == output_dir:
        raise ClinicaBIDSError(
            Fore.RED +
            '[Error] Input BIDS and output directories must be different' +
            Fore.RESET)

    assert isinstance(modality, list), 'modality arg must be a list of str'

    # check that input is a BIDS dir
    check_bids_folder(bids_dir)

    for f in listdir(bids_dir):
        if isdir(join(bids_dir, f)) and not isdir(join(output_dir, f)):
            copytree(join(bids_dir, f), join(output_dir, f))
        elif isfile(join(bids_dir, f)) and not isfile(join(output_dir, f)):
            copy2(join(bids_dir, f), output_dir)

    pattern = join(output_dir, '**/*.nii*')
    nifti_files = glob(pattern, recursive=True)

    # Now filter this list by elements in modality list
    #   For each file:
    #       if any modality name (lowercase) is found in the basename of the file:
    #           keep the file
    nifti_files_filtered = [
        f for f in nifti_files
        if any(elem.lower() in basename(f).lower() for elem in modality)
    ]

    # Remove files that are already centered
    if not center_all_files:
        nifti_files_filtered = [
            file for file in nifti_files_filtered if not is_centered(file)
        ]

    all_errors = []
    for f in nifti_files_filtered:
        print('Handling ' + f)
        _, current_error = center_nifti_origin(f, f)
        if current_error:
            all_errors.append(current_error)
    if len(all_errors) > 0:
        final_error_msg = Fore.RED + '[Error] Clinica encountered ' + str(len(all_errors)) \
                          + ' error(s) while trying to center all NIfTI images.\n'
        for error in all_errors:
            final_error_msg += '\n' + error
        raise RuntimeError(final_error_msg)
    return nifti_files_filtered
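A hedged usage sketch, assuming a BIDS dataset on disk and an empty output directory distinct from the input (all paths and the modality filter are placeholders):

# Usage sketch: paths and modality filter are hypothetical.
centered_files = center_all_nifti(
    bids_dir="/data/my_bids",
    output_dir="/data/my_bids_centered",   # must differ from bids_dir
    modality=["t1w"],                      # keep files whose basename contains "t1w"
    center_all_files=False,                # only recenter images off-origin by more than 50 mm
)
print(f"{len(centered_files)} NIfTI file(s) were centered.")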
Example #10
def preprocessing_t1w(bids_directory,
                      caps_directory,
                      tsv,
                      working_directory=None):
    """
    This preprocessing pipeline includes globally three steps:
    1) N4 bias correction (performed with ANTS).
    2) Linear registration to MNI (MNI icbm152 nlinear sym template)
       (performed with ANTS) - RegistrationSynQuick.
    3) Cropping the background (in order to save computational power).
    4) Histogram-based intensity normalization. This is a custom function
       performed by the binary ImageMath included with ANTS. 

    Parameters
    ----------
    bids_directory: str
       Folder with BIDS structure.
    caps_directory: str
       Folder where CAPS structure will be stored.
    working_directory: str
       Folder containing a temporary space to save intermediate results.
   """

    from clinica.utils.inputs import check_bids_folder
    from clinica.utils.participant import get_subject_session_list
    from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.input_files import T1W_NII
    from clinicadl.tools.inputs.input import fetch_file
    from os.path import abspath, dirname, exists, join
    from os import pardir

    check_bids_folder(bids_directory)
    input_dir = bids_directory
    is_bids_dir = True
    base_dir = working_directory

    root = dirname(abspath(join(abspath(__file__), pardir)))
    path_to_mask = join(root, 'resources', 'masks')
    ref_template = join(path_to_mask, 'mni_icbm152_t1_tal_nlin_sym_09c.nii')
    ref_crop = join(path_to_mask, 'ref_cropped_template.nii.gz')
    url1 = "https://aramislab.paris.inria.fr/files/data/img_t1_linear/ref_cropped_template.nii.gz"
    url2 = "https://aramislab.paris.inria.fr/files/data/img_t1_linear/mni_icbm152_t1_tal_nlin_sym_09c.nii"
    if not (exists(ref_template)):
        try:
            fetch_file(url2, ref_template)
        except IOError as err:
            print(
                'Unable to download required template (mni_icbm152) for processing:',
                err)

    if not (exists(ref_crop)):
        try:
            fetch_file(url1, ref_crop)
        except IOError as err:
            print(
                'Unable to download required template (ref_crop) for processing:',
                err)

    sessions, subjects = get_subject_session_list(input_dir, tsv, is_bids_dir,
                                                  False, base_dir)

    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    from nipype.interfaces import ants
    from clinica.utils.filemanip import get_subject_id

    # Inputs from anat/ folder
    # ========================
    # T1w file:
    try:
        t1w_files = clinica_file_reader(subjects, sessions, bids_directory,
                                        T1W_NII)
    except ClinicaException as e:
        err = ('Clinica faced error(s) while trying to read files in your BIDS directory.\n'
               + str(e))
        raise ClinicaBIDSError(err)

    def get_input_fields():
        """Specify the list of possible inputs of this pipeline.

        Returns:
            A list of (string) input field names.
        """
        return ['t1w']

    read_node = npe.Node(
        name="ReadingFiles",
        iterables=[
            ('t1w', t1w_files),
        ],
        synchronize=True,
        interface=nutil.IdentityInterface(fields=get_input_fields()))

    image_id_node = npe.Node(interface=nutil.Function(
        input_names=['bids_or_caps_file'],
        output_names=['image_id'],
        function=get_subject_id),
                             name='ImageID')

    # The core (processing) nodes

    # 1. N4biascorrection by ANTS. It uses nipype interface.
    n4biascorrection = npe.Node(name='n4biascorrection',
                                interface=ants.N4BiasFieldCorrection(
                                    dimension=3,
                                    save_bias=True,
                                    bspline_fitting_distance=600))

    # 2. `RegistrationSynQuick` by *ANTS*. It uses nipype interface.
    ants_registration_node = npe.Node(name='antsRegistrationSynQuick',
                                      interface=ants.RegistrationSynQuick())
    ants_registration_node.inputs.fixed_image = ref_template
    ants_registration_node.inputs.transform_type = 'a'
    ants_registration_node.inputs.dimension = 3

    # 3. Crop image (using nifti). It uses custom interface, from utils file
    from .T1_linear_utils import crop_nifti

    cropnifti = npe.Node(name='cropnifti',
                         interface=nutil.Function(
                             function=crop_nifti,
                             input_names=['input_img', 'ref_crop'],
                             output_names=['output_img', 'crop_template']))
    cropnifti.inputs.ref_crop = ref_crop

    #### Deprecated ####
    #### This step was not used in the final version ####
    # 4. Histogram-based intensity normalization. This is a custom function
    #    performed by the binary `ImageMath` included with *ANTS*.

    #   from .T1_linear_utils import ants_histogram_intensity_normalization
    #
    #   ## histogram-based intensity normalization
    #   intensitynorm = npe.Node(
    #           name='intensitynormalization',
    #           interface=nutil.Function(
    #               input_names=['image_dimension', 'crop_template', 'input_img'],
    #               output_names=['output_img'],
    #               function=ants_histogram_intensity_normalization
    #               )
    #           )
    #   intensitynorm.inputs.image_dimension = 3

    # DataSink and the output node

    from .T1_linear_utils import (container_from_filename, get_data_datasink)
    # Create node to write selected files into the CAPS
    from nipype.interfaces.io import DataSink

    get_ids = npe.Node(interface=nutil.Function(
        input_names=['image_id'],
        output_names=['image_id_out', 'subst_ls'],
        function=get_data_datasink),
                       name="GetIDs")

    # Find container path from t1w filename
    # =====================================
    container_path = npe.Node(nutil.Function(
        input_names=['bids_or_caps_filename'],
        output_names=['container'],
        function=container_from_filename),
                              name='ContainerPath')

    write_node = npe.Node(name="WriteCaps", interface=DataSink())
    write_node.inputs.base_directory = caps_directory
    write_node.inputs.parameterization = False

    # Connecting the workflow
    from clinica.utils.nipype import fix_join

    wf = npe.Workflow(name='t1_linear_dl', base_dir=working_directory)

    wf.connect([
        (read_node, image_id_node, [('t1w', 'bids_or_caps_file')]),
        (read_node, container_path, [('t1w', 'bids_or_caps_filename')]),
        (image_id_node, ants_registration_node, [('image_id', 'output_prefix')
                                                 ]),
        (read_node, n4biascorrection, [("t1w", "input_image")]),
        (n4biascorrection, ants_registration_node, [('output_image',
                                                     'moving_image')]),
        (ants_registration_node, cropnifti, [('warped_image', 'input_img')]),
        (ants_registration_node, write_node, [('out_matrix', '@affine_mat')]),
        # Connect to DataSink
        (container_path, write_node, [(('container', fix_join, 't1_linear'),
                                       'container')]),
        (image_id_node, get_ids, [('image_id', 'image_id')]),
        (get_ids, write_node, [('image_id_out', '@image_id')]),
        (get_ids, write_node, [('subst_ls', 'substitutions')]),
        #(get_ids, write_node, [('regexp_subst_ls', 'regexp_substitutions')]),
        (n4biascorrection, write_node, [('output_image', '@outfile_corr')]),
        (ants_registration_node, write_node, [('warped_image', '@outfile_reg')
                                              ]),
        (cropnifti, write_node, [('output_img', '@outfile_crop')]),
    ])

    return wf
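Since the function returns a Nipype workflow, it could be built and executed roughly as follows (a sketch; all paths are hypothetical):

# Usage sketch: paths are placeholders.
wf = preprocessing_t1w(
    bids_directory="/data/my_bids",
    caps_directory="/data/my_caps",
    tsv="/data/subjects_sessions.tsv",
    working_directory="/tmp/t1_linear_wd",
)
wf.run()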
Example #11
    def __init__(
        self,
        bids_directory=None,
        caps_directory=None,
        tsv_file=None,
        overwrite_caps=False,
        base_dir=None,
        parameters={},
        name=None,
    ):
        """Init a Pipeline object.

        Args:
            bids_directory (str, optional): Path to a BIDS directory. Defaults to None.
            caps_directory (str, optional): Path to a CAPS directory. Defaults to None.
            tsv_file (str, optional): Path to a subjects-sessions `.tsv` file. Defaults to None.
            overwrite_caps (bool, optional): Whether to overwrite the output directory. Defaults to False.
            base_dir (str, optional): Working directory (attribute of Nipype::Workflow class). Defaults to None.
            parameters (dict, optional): Pipeline parameters. Defaults to {}.
            name (str, optional): Pipeline name. Defaults to None.

        Raises:
            RuntimeError: If neither a BIDS nor a CAPS directory is provided at initialization.
        """
        import inspect
        import os
        from tempfile import mkdtemp

        from colorama import Fore

        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.inputs import check_bids_folder, check_caps_folder
        from clinica.utils.participant import get_subject_session_list

        self._is_built = False
        self._overwrite_caps = overwrite_caps
        self._bids_directory = bids_directory
        self._caps_directory = caps_directory
        self._verbosity = "debug"
        self._tsv_file = tsv_file
        self._info_file = os.path.join(
            os.path.dirname(os.path.abspath(inspect.getfile(self.__class__))),
            "info.json",
        )
        self._info = {}

        if base_dir is None:
            self.base_dir = mkdtemp()
            self._base_dir_was_specified = False
        else:
            self.base_dir = base_dir
            self._base_dir_was_specified = True

        if name:
            self._name = name
        else:
            self._name = self.__class__.__name__
        self._parameters = parameters

        if self._bids_directory is None:
            if self._caps_directory is None:
                raise RuntimeError(
                    f"{Fore.RED}[Error] The {self._name} pipeline does not contain "
                    f"BIDS nor CAPS directory at the initialization.{Fore.RESET}"
                )

            check_caps_folder(self._caps_directory)
            input_dir = self._caps_directory
            is_bids_dir = False
        else:
            check_bids_folder(self._bids_directory)
            input_dir = self._bids_directory
            is_bids_dir = True
        self._sessions, self._subjects = get_subject_session_list(
            input_dir, tsv_file, is_bids_dir, False, base_dir
        )

        self.init_nodes()
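This constructor is meant to be called from a concrete pipeline subclass; the sketch below shows how such a hypothetical subclass might be instantiated. `MyPipeline` and all paths are assumptions for illustration, not part of the examples above.

# MyPipeline is a hypothetical concrete subclass implementing the required
# pipeline methods; all paths below are placeholders.
pipeline = MyPipeline(
    bids_directory="/data/my_bids",
    tsv_file="/data/subjects_sessions.tsv",
    parameters={},
    name="my-pipeline",
)
# base_dir was not given, so a temporary working directory is created.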
Example #12
    def __init__(self,
                 bids_directory=None,
                 caps_directory=None,
                 tsv_file=None,
                 overwrite_caps=False,
                 base_dir=None,
                 parameters={},
                 name=None):
        """Init a Pipeline object.

        Args:
            bids_directory (optional): Path to a BIDS directory.
            caps_directory (optional): Path to a CAPS directory.
            tsv_file (optional): Path to a subjects-sessions `.tsv` file.
            overwrite_caps (optional): Boolean specifying whether to overwrite the output directory.
            base_dir (optional): Working directory (attribute of Nipype::Workflow class).
            parameters (optional): Pipeline parameters.
            name (optional): Pipeline name.
        """
        import inspect
        import os
        from tempfile import mkdtemp
        from colorama import Fore
        from clinica.utils.inputs import check_bids_folder, check_caps_folder
        from clinica.utils.exceptions import ClinicaException
        from clinica.utils.participant import get_subject_session_list

        self._is_built = False
        self._overwrite_caps = overwrite_caps
        self._bids_directory = bids_directory
        self._caps_directory = caps_directory
        self._verbosity = 'debug'
        self._tsv_file = tsv_file
        self._info_file = os.path.join(
            os.path.dirname(os.path.abspath(inspect.getfile(self.__class__))),
            'info.json')
        self._info = {}

        if base_dir is None:
            self.base_dir = mkdtemp()
            self._base_dir_was_specified = False
        else:
            self.base_dir = base_dir
            self._base_dir_was_specified = True

        if name:
            self._name = name
        else:
            self._name = self.__class__.__name__
        self._parameters = parameters

        if self._bids_directory is None:
            if self._caps_directory is None:
                raise RuntimeError(
                    '%s[Error] The %s pipeline does not contain BIDS nor CAPS directory at the initialization.%s'
                    % (Fore.RED, self._name, Fore.RESET))

            check_caps_folder(self._caps_directory)
            input_dir = self._caps_directory
            is_bids_dir = False
        else:
            check_bids_folder(self._bids_directory)
            input_dir = self._bids_directory
            is_bids_dir = True
        self._sessions, self._subjects = get_subject_session_list(
            input_dir, tsv_file, is_bids_dir, False, base_dir)

        self.init_nodes()
Example #13
def preprocessing_t1w(bids_directory,
                      caps_directory,
                      tsv,
                      working_directory=None):
    """
     This preprocessing pipeline includes globally three steps:
     1) N4 bias correction (performed with ANTS).
     2) Linear registration to MNI (MNI icbm152 nlinear sym template)
        (performed with ANTS) - RegistrationSynQuick.
     3) Cropping the background (in order to save computational power).
     4) Histogram-based intensity normalization. This is a custom function
        performed by the binary ImageMath included with ANTS.

     Parameters
     ----------
     bids_directory: str
        Folder with BIDS structure.
     caps_directory: str
        Folder where CAPS structure will be stored.
     working_directory: str
        Folder containing a temporary space to save intermediate results.
    """

    from os.path import abspath, exists, join
    from os import makedirs
    from pathlib import Path
    from clinica.utils.inputs import check_bids_folder
    from clinica.utils.participant import get_subject_session_list
    from clinica.utils.filemanip import get_subject_id
    from clinica.utils.exceptions import ClinicaBIDSError, ClinicaException
    from clinica.utils.inputs import clinica_file_reader
    from clinica.utils.input_files import T1W_NII
    from clinica.utils.check_dependency import check_ants
    from clinicadl.tools.inputs.input import fetch_file
    from clinicadl.tools.inputs.input import RemoteFileStructure
    import nipype.pipeline.engine as npe
    import nipype.interfaces.utility as nutil
    from nipype.interfaces import ants

    check_ants()
    check_bids_folder(bids_directory)
    input_dir = abspath(bids_directory)
    caps_directory = abspath(caps_directory)
    is_bids_dir = True
    base_dir = abspath(working_directory) if working_directory else None

    home = str(Path.home())
    cache_clinicadl = join(home, '.cache', 'clinicadl', 'ressources', 'masks')
    url_aramis = 'https://aramislab.paris.inria.fr/files/data/img_t1_linear/'
    FILE1 = RemoteFileStructure(
        filename='ref_cropped_template.nii.gz',
        url=url_aramis,
        checksum='67e1e7861805a8fd35f7fcf2bdf9d2a39d7bcb2fd5a201016c4d2acdd715f5b3')
    FILE2 = RemoteFileStructure(
        filename='mni_icbm152_t1_tal_nlin_sym_09c.nii',
        url=url_aramis,
        checksum='93359ab97c1c027376397612a9b6c30e95406c15bf8695bd4a8efcb2064eaa34')

    if not (exists(cache_clinicadl)):
        makedirs(cache_clinicadl)

    ref_template = join(cache_clinicadl, FILE2.filename)
    ref_crop = join(cache_clinicadl, FILE1.filename)

    if not (exists(ref_template)):
        try:
            ref_template = fetch_file(FILE2, cache_clinicadl)
        except IOError as err:
            print(
                'Unable to download required template (mni_icbm152) for processing:',
                err)

    if not (exists(ref_crop)):
        try:
            ref_crop = fetch_file(FILE1, cache_clinicadl)
        except IOError as err:
            print(
                'Unable to download required template (ref_crop) for processing:',
                err)

    sessions, subjects = get_subject_session_list(input_dir, tsv, is_bids_dir,
                                                  False, base_dir)

    # Use hash instead of parameters for iterables folder names
    # Otherwise path will be too long and generate OSError
    from nipype import config
    cfg = dict(execution={'parameterize_dirs': False})
    config.update_config(cfg)

    # Inputs from anat/ folder
    # ========================
    # T1w file:
    try:
        t1w_files = clinica_file_reader(subjects, sessions, bids_directory,
                                        T1W_NII)
    except ClinicaException as e:
        err = ('Clinica faced error(s) while trying to read files in your BIDS directory.\n'
               + str(e))
        raise ClinicaBIDSError(err)

    def get_input_fields():
        """Specify the list of possible inputs of this pipeline.

        Returns:
            A list of (string) input field names.
        """
        return ['t1w']

    read_node = npe.Node(
        name="ReadingFiles",
        iterables=[
            ('t1w', t1w_files),
        ],
        synchronize=True,
        interface=nutil.IdentityInterface(fields=get_input_fields()))

    image_id_node = npe.Node(interface=nutil.Function(
        input_names=['bids_or_caps_file'],
        output_names=['image_id'],
        function=get_subject_id),
                             name='ImageID')

    # The core (processing) nodes

    # 1. N4biascorrection by ANTS. It uses nipype interface.
    n4biascorrection = npe.Node(name='n4biascorrection',
                                interface=ants.N4BiasFieldCorrection(
                                    dimension=3,
                                    save_bias=True,
                                    bspline_fitting_distance=600))

    # 2. `RegistrationSynQuick` by *ANTS*. It uses nipype interface.
    ants_registration_node = npe.Node(name='antsRegistrationSynQuick',
                                      interface=ants.RegistrationSynQuick())
    ants_registration_node.inputs.fixed_image = ref_template
    ants_registration_node.inputs.transform_type = 'a'
    ants_registration_node.inputs.dimension = 3

    # 3. Crop image (using nifti). It uses custom interface, from utils file
    from .T1_linear_utils import crop_nifti

    cropnifti = npe.Node(name='cropnifti',
                         interface=nutil.Function(
                             function=crop_nifti,
                             input_names=['input_img', 'ref_crop'],
                             output_names=['output_img', 'crop_template']))
    cropnifti.inputs.ref_crop = ref_crop

    # ********* Deprecated ********** #
    # ** This step was not used in the final version ** #
    # 4. Histogram-based intensity normalization. This is a custom function
    #    performed by the binary `ImageMath` included with *ANTS*.

    #   from .T1_linear_utils import ants_histogram_intensity_normalization
    #
    #   # histogram-based intensity normalization
    #   intensitynorm = npe.Node(
    #           name='intensitynormalization',
    #           interface=nutil.Function(
    #               input_names=['image_dimension', 'crop_template', 'input_img'],
    #               output_names=['output_img'],
    #               function=ants_histogram_intensity_normalization
    #               )
    #           )
    #   intensitynorm.inputs.image_dimension = 3

    # DataSink and the output node

    from .T1_linear_utils import (container_from_filename, get_data_datasink)
    # Create node to write selected files into the CAPS
    from nipype.interfaces.io import DataSink

    get_ids = npe.Node(interface=nutil.Function(
        input_names=['image_id'],
        output_names=['image_id_out', 'subst_ls'],
        function=get_data_datasink),
                       name="GetIDs")

    # Find container path from t1w filename
    # =====================================
    container_path = npe.Node(nutil.Function(
        input_names=['bids_or_caps_filename'],
        output_names=['container'],
        function=container_from_filename),
                              name='ContainerPath')

    write_node = npe.Node(name="WriteCaps", interface=DataSink())
    write_node.inputs.base_directory = caps_directory
    write_node.inputs.parameterization = False

    # Connecting the workflow
    from clinica.utils.nipype import fix_join

    wf = npe.Workflow(name='t1_linear_dl', base_dir=working_directory)

    wf.connect([
        (read_node, image_id_node, [('t1w', 'bids_or_caps_file')]),
        (read_node, container_path, [('t1w', 'bids_or_caps_filename')]),
        (image_id_node, ants_registration_node, [('image_id', 'output_prefix')
                                                 ]),
        (read_node, n4biascorrection, [("t1w", "input_image")]),
        (n4biascorrection, ants_registration_node, [('output_image',
                                                     'moving_image')]),
        (ants_registration_node, cropnifti, [('warped_image', 'input_img')]),
        (ants_registration_node, write_node, [('out_matrix', '@affine_mat')]),
        # Connect to DataSink
        (container_path, write_node, [(('container', fix_join, 't1_linear'),
                                       'container')]),
        (image_id_node, get_ids, [('image_id', 'image_id')]),
        (get_ids, write_node, [('image_id_out', '@image_id')]),
        (get_ids, write_node, [('subst_ls', 'substitutions')]),
        # (get_ids, write_node, [('regexp_subst_ls', 'regexp_substitutions')]),
        (n4biascorrection, write_node, [('output_image', '@outfile_corr')]),
        (ants_registration_node, write_node, [('warped_image', '@outfile_reg')
                                              ]),
        (cropnifti, write_node, [('output_img', '@outfile_crop')]),
    ])

    return wf
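As with Example #10, the returned workflow can then be executed; one might run it in parallel with Nipype's MultiProc plugin, as sketched below (paths and the worker count are arbitrary placeholders):

# Usage sketch: paths and n_procs are placeholders.
wf = preprocessing_t1w(
    bids_directory="/data/my_bids",
    caps_directory="/data/my_caps",
    tsv="/data/subjects_sessions.tsv",
    working_directory="/tmp/t1_linear_wd",
)
wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})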