Example 1
def resolve_resolution(resolution, template, template_name, tag=None):

    import nipype.interfaces.afni as afni
    import nipype.pipeline.engine as pe
    from CPAC.utils.datasource import check_for_s3

    tagname = None
    local_path = None

    if "{" in template and tag is not None:
        tagname = "${" + tag + "}"
    try:
        if tagname is not None:
            local_path = check_for_s3(
                template.replace(tagname, str(resolution)))
    except (IOError, OSError):
        local_path = None

    # TODO: debug - this works in IPython but not inside a Nipype workflow
    # try:
    #     local_path = check_for_s3('/usr/local/fsl/data/standard/MNI152_T1_3.438mmx3.438mmx3.4mm_brain_mask_dil.nii.gz')
    # except (IOError, OSError):
    #     local_path = None

    if local_path is None:
        if tagname is not None:
            ref_template = template.replace(tagname, '1mm')
            local_path = check_for_s3(ref_template)
        elif tagname is None and "s3" in template:
            local_path = check_for_s3(template)
        else:
            local_path = template

        if "x" in str(resolution):
            resolution = tuple(
                float(i.replace('mm', '')) for i in resolution.split("x"))
        else:
            resolution = (float(resolution.replace('mm', '')), ) * 3

        resample = pe.Node(interface=afni.Resample(), name=template_name)
        resample.inputs.voxel_size = resolution
        resample.inputs.outputtype = 'NIFTI_GZ'
        resample.inputs.resample_mode = 'Cu'
        resample.inputs.in_file = local_path
        resample.base_dir = '.'

        resampled_template = resample.run()
        local_path = resampled_template.outputs.out_file

    return local_path
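
A minimal usage sketch for resolve_resolution. The S3 template path, the template name, and the ${resolution} tag below are assumptions chosen to illustrate the tag substitution performed above, not values taken from C-PAC.

# Hypothetical inputs: the template URL and template_name are illustrative only.
template = ('s3://fcp-indi/resources/cpac/resources/'
            'MNI152_T1_${resolution}_brain.nii.gz')
local_template = resolve_resolution(resolution='2mm',
                                    template=template,
                                    template_name='template_brain_2mm',
                                    tag='resolution')
print(local_template)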
Example 2
def resolve_resolution(resolution, template, template_name, tag=None):

    import nipype.interfaces.afni as afni
    import nipype.pipeline.engine as pe
    from CPAC.utils.datasource import check_for_s3

    tagname = None
    local_path = None
    # TODO (XL): think of a better way to validate the template
    if "{" in template and tag is not None:
        tagname = "${" + tag + "}"
    try:
        if tagname is not None:
            local_path = check_for_s3(
                template.replace(tagname, str(resolution)))
    except IOError:
        local_path = None

    if local_path is None:
        if tagname is not None:
            ref_template = template.replace(tagname, '1mm')
            local_path = check_for_s3(ref_template)
        elif tagname is None and "s3" in template:
            local_path = check_for_s3(template)
        else:
            local_path = template

        if "x" in str(resolution):
            resolution = tuple(
                float(i.replace('mm', '')) for i in resolution.split("x"))
        else:
            resolution = (float(resolution.replace('mm', '')), ) * 3

        resample = pe.Node(interface=afni.Resample(), name=template_name)
        resample.inputs.voxel_size = resolution
        resample.inputs.outputtype = 'NIFTI_GZ'
        resample.inputs.resample_mode = 'Cu'
        resample.inputs.in_file = local_path
        resample.base_dir = '.'

        resampled_template = resample.run()
        local_path = resampled_template.outputs.out_file

    return local_path
Example 3
def test_check_s3():

    import os
    from CPAC.utils.datasource import check_for_s3

    data = check_for_s3(
        file_path='s3://fcp-indi/resources/cpac/resources/rois_2mm.nii.gz',
        creds_path=None,
        dl_dir='/tmp',
        img_type='anat')

    assert os.path.isfile(data)
Example 4
def test_check_s3():

    import os
    from CPAC.utils.datasource import check_for_s3

    data = check_for_s3(
        file_path='s3://fcp-indi/resources/cpac/resources/rois_2mm.nii.gz',
        creds_path=None,
        dl_dir='/tmp',
        img_type='anat'
    )

    assert os.path.isfile(data)
Example 5
def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None):
    """Set up a basic template Nipype workflow for testing single nodes or
    small sub-workflows.
    """

    import os
    import shutil
    from CPAC.pipeline import nipype_pipeline_engine as pe
    from CPAC.utils.datasource import check_for_s3
    from CPAC.utils.interfaces.datasink import DataSink

    test_dir = os.path.join(os.getcwd(), test_name)
    work_dir = os.path.join(test_dir, "workdir")
    out_dir = os.path.join(test_dir, "output")

    if os.path.exists(out_dir):
        try:
            shutil.rmtree(out_dir)
        except OSError:
            pass

    if os.path.exists(work_dir):
        for dirname in os.listdir(work_dir):
            # skip any working directories the caller asked to keep
            if workdirs_to_keep and any(keepdir in dirname
                                        for keepdir in workdirs_to_keep):
                continue
            try:
                shutil.rmtree(os.path.join(work_dir, dirname))
            except OSError:
                pass

    local_paths = {}
    for subpath in paths_list:
        s3_path = os.path.join(s3_prefix, subpath)
        local_path = check_for_s3(s3_path, dl_dir=test_dir)
        local_paths[subpath] = local_path

    wf = pe.Workflow(name=test_name)
    wf.base_dir = os.path.join(work_dir)
    wf.config['execution'] = {
        'hash_method': 'timestamp',
        'crashdump_dir': os.path.abspath(test_dir)
    }

    ds = pe.Node(DataSink(), name='sinker_{0}'.format(test_name))
    ds.inputs.base_directory = out_dir
    ds.inputs.parameterization = True

    return (wf, ds, local_paths)
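
A brief usage sketch for setup_test_wf, reusing the public fcp-indi resource path from the test_check_s3 examples above; the test name is arbitrary, and any nodes under test would still need to be added and connected to the returned DataSink before running.

# Sketch only: the S3 prefix and file path come from test_check_s3 above;
# 'roi_resample_test' is an arbitrary test name.
s3_prefix = 's3://fcp-indi/resources/cpac/resources'
wf, ds, local_paths = setup_test_wf(s3_prefix, ['rois_2mm.nii.gz'],
                                    'roi_resample_test')
print(local_paths['rois_2mm.nii.gz'])  # local copy downloaded by check_for_s3
# wf.run()  # after connecting the nodes under test to wf and ds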
Example 6
    def test_sublist(self, sublist):
        '''
        Instance method to test a subject list for errors

        Parameters
        ----------
        self : MainFrame (wx.Frame object)
            the method is aware of the instance as self
        sublist : list (dict)
            a C-PAC-formatted subject list (yaml list of dictionaries)

        Returns
        -------
        pass_flg : boolean
            flag which indicates whether the subject list passed testing
        '''

        # Import packages
        import os
        import tempfile
        import nibabel as nb
        import wx  # used for the error dialogs below

        from CPAC.utils.datasource import check_for_s3

        # Init variables
        err_str = ''
        err_msg = ''
        not_found_flg = False
        bad_dim_flg = False
        pass_flg = False
        checked_s3 = False
        s3_str = 's3://'

        # Check to ensure the user is providing an actual subject
        # list and not some other kind of file
        try:
            subInfo = sublist[0]
        except:
            msg = 'ERROR: Subject list file not in proper format - ' \
                  'check if you loaded the correct file? \n\n'\
                  'Error name: config_window_0001'
            errDlg4 = wx.MessageDialog(self, msg, 'Subject List Error',
                                       wx.OK | wx.ICON_ERROR)
            errDlg4.ShowModal()
            errDlg4.Destroy()
            # Raise Exception
            raise Exception

        # Another check to ensure the actual subject list was generated
        # properly and that it will work
        if 'subject_id' not in subInfo:
            msg = 'ERROR: Subject list file not in proper format - '\
                  'check if you loaded the correct file? \n\n'\
                  'Error name: config_window_0002'
            errDlg3 = wx.MessageDialog(self, msg, 'Subject List Error',
                                       wx.OK | wx.ICON_ERROR)
            errDlg3.ShowModal()
            errDlg3.Destroy()
            # Raise Exception
            raise Exception

        # Iterate and test each subject's files
        for sub in sublist:
            anat_file = sub['anat']
            try:
                func_files = sub['func']
            except KeyError:
                func_files = sub['rest']
            checked_anat_s3 = False

            if not anat_file:
                err = "\n\n[!] Could not read in at least one of your anatom"\
                      "ical input files. Please double-check the formatting "\
                      "of your participant list YAML file.\n\n"
                raise Exception(err)

            if not func_files:
                err = "\n\n[!] Could not read in at least one of your functi"\
                      "onal input files. Please double-check the formatting "\
                      "of your participant list YAML file.\n\n"
                raise Exception(err)

            if anat_file.lower().startswith(s3_str):
                dl_dir = tempfile.mkdtemp()
                try:
                    creds_path = sub['creds_path']
                except KeyError:
                    # if no creds path is provided, it could be that the user
                    # is downloading public data - leave it to downstream to
                    # handle creds issues
                    creds_path = None
                anat_file = check_for_s3(anat_file, creds_path, dl_dir=dl_dir)
                checked_anat_s3 = True
            # Check if anatomical file exists
            if os.path.exists(anat_file):
                img = nb.load(anat_file)
                dims = img.shape
                # Check to make sure it has the proper dimensions
                if len(dims) != 3:
                    bad_dim_flg = True
                    err_str_suffix = 'Anat file not 3-dimensional: %s\n' \
                                     % anat_file
                    err_str = err_str + err_str_suffix
            # Anat file doesn't exist
            else:
                not_found_flg = True
                err_str_suffix = 'File not found: %s\n' % anat_file
                err_str = err_str + err_str_suffix
            # If we're just checking s3 files, remove the temporarily
            # downloaded
            if checked_anat_s3:
                try:
                    os.remove(anat_file)
                except OSError:
                    pass
            # For each functional file
            for func_file in func_files.values():
                checked_s3 = False
                if func_file.lower().startswith(s3_str):
                    dl_dir = tempfile.mkdtemp()
                    try:
                        creds_path = sub['creds_path']
                    except KeyError:
                        # if no creds path is provided, it could be that the
                        # user is downloading public data - leave it to down-
                        # stream to handle creds issues
                        creds_path = None
                    func_file = check_for_s3(func_file,
                                             creds_path,
                                             dl_dir=dl_dir,
                                             img_type='func')
                    checked_s3 = True
                # Check if functional file exists
                if os.path.exists(func_file):
                    img = nb.load(func_file)
                    dims = img.shape
                    # Check to make sure it has the proper dimensions
                    if len(dims) != 4:
                        bad_dim_flg = True
                        err_str_suffix = 'Func file not 4-dimensional: %s\n' \
                                         % func_file
                        err_str = err_str + err_str_suffix
                # Functional file doesn't exist
                else:
                    not_found_flg = True
                    err_str_suffix = 'File not found: %s\n' % func_file
                    err_str = err_str + err_str_suffix
                # If we're just checking s3 files, remove the temporarily
                # downloaded
                if checked_s3:
                    try:
                        os.remove(func_file)
                    except OSError:
                        pass
            # Check flags for error message
            if not_found_flg:
                err_msg = 'One or more of your input files are missing.\n'
            if bad_dim_flg:
                err_msg = err_msg + 'One or more of your input images have '\
                          'improper dimensionality\n'
            # If err_msg was populated, display in window
            if err_msg:
                err_msg = 'ERROR: ' + err_msg + \
                          'See terminal output for more details'
                errDlgFileTest = wx.MessageDialog(self, err_msg,
                                                  'Pipeline Not Ready',
                                                  wx.OK | wx.ICON_ERROR)
                errDlgFileTest.ShowModal()
                errDlgFileTest.Destroy()
                raise Exception(err_str)
            else:
                pass_flg = True

        # Return the flag
        return pass_flg
Example 7
    def test_sublist(self, sublist):
        '''
        Instance method to test a subject list for errors

        Parameters
        ----------
        self : MainFrame (wx.Frame object)
            the method is aware of the instance as self
        sublist : list (dict)
            a C-PAC-formatted subject list (yaml list of dictionaries)

        Returns
        -------
        pass_flg : boolean
            flag which indicates whether the subject list passed testing
        '''

        # Import packages
        import os
        import tempfile
        import nibabel as nb
        import wx  # used for the error dialogs below

        from CPAC.utils.datasource import check_for_s3

        # Init variables
        err_str = ''
        err_msg = ''
        not_found_flg = False
        bad_dim_flg = False
        pass_flg = False
        checked_s3 = False
        s3_str = 's3://'

        # Check to ensure the user is providing an actual subject
        # list and not some other kind of file
        try:
            subInfo = sublist[0]
        except:
            msg = 'ERROR: Subject list file not in proper format - ' \
                  'check if you loaded the correct file? \n\n'\
                  'Error name: config_window_0001'
            errDlg4 = wx.MessageDialog(self, msg, 'Subject List Error',
                                       wx.OK | wx.ICON_ERROR)
            errDlg4.ShowModal()
            errDlg4.Destroy()
            # Raise Exception
            raise Exception

        # Another check to ensure the actual subject list was generated
        # properly and that it will work
        if 'subject_id' not in subInfo:
            msg = 'ERROR: Subject list file not in proper format - '\
                  'check if you loaded the correct file? \n\n'\
                  'Error name: config_window_0002'
            errDlg3 = wx.MessageDialog(self, msg, 'Subject List Error',
                                       wx.OK | wx.ICON_ERROR)
            errDlg3.ShowModal()
            errDlg3.Destroy()
            # Raise Exception
            raise Exception

        # Iterate and test each subject's files
        for sub in sublist:
            anat_file = sub['anat']
            try:
                func_files = sub['func']
            except KeyError:
                func_files = sub['rest']
            checked_anat_s3 = False

            if not anat_file:
                err = "\n\n[!] Could not read in at least one of your anatom"\
                      "ical input files. Please double-check the formatting "\
                      "of your participant list YAML file.\n\n"
                raise Exception(err)

            if not func_files:
                err = "\n\n[!] Could not read in at least one of your functi"\
                      "onal input files. Please double-check the formatting "\
                      "of your participant list YAML file.\n\n"
                raise Exception(err)

            if not isinstance(func_files, dict):
                err = "\n\n[!] The functional files in the participant " \
                      "list YAML should be listed with a scan name key and " \
                      "a file path value.\n\nFor example:\nfunc_1: " \
                      "/path/to/func_1.nii.gz\n\n"
                raise Exception(err)

            if anat_file.lower().startswith(s3_str):
                dl_dir = tempfile.mkdtemp()
                try:
                    creds_path = sub['creds_path']
                except KeyError:
                    # if no creds path is provided, it could be that the user
                    # is downloading public data - leave it to downstream to
                    # handle creds issues
                    creds_path = None
                anat_file = check_for_s3(anat_file, creds_path, dl_dir=dl_dir)
                checked_anat_s3 = True
            # Check if anatomical file exists
            if os.path.exists(anat_file):
                try:
                    img = nb.load(anat_file)
                except Exception as e:
                    print(e)
                    continue
                dims = img.shape
                # Check to make sure it has the proper dimensions
                if len(dims) != 3:
                    bad_dim_flg = True
                    err_str_suffix = 'Anat file not 3-dimensional: %s\n' \
                                     % anat_file
                    err_str = err_str + err_str_suffix
            # Anat file doesn't exist
            else:
                not_found_flg = True
                err_str_suffix = 'File not found: %s\n' % anat_file
                err_str = err_str + err_str_suffix
            # If we're just checking s3 files, remove the temporarily 
            # downloaded
            if checked_anat_s3:
                try:
                    os.remove(anat_file)
                except OSError:
                    pass
            # For each functional file
            for func_file in func_files.values():
                checked_s3 = False
                if '.nii' not in func_file:
                    # probably a JSON file
                    continue
                if func_file.lower().startswith(s3_str):
                    dl_dir = tempfile.mkdtemp()
                    try:
                        creds_path = sub['creds_path']
                    except KeyError:
                        # if no creds path is provided, it could be that the 
                        # user is downloading public data - leave it to down-
                        # stream to handle creds issues
                        creds_path = None
                    func_file = check_for_s3(func_file, creds_path,
                                             dl_dir=dl_dir, img_type='func')
                    checked_s3 = True
                # Check if functional file exists
                if os.path.exists(func_file):
                    try:
                        img = nb.load(func_file)
                    except Exception as e:
                        print(e)
                        continue
                    dims = img.shape
                    # Check to make sure it has the proper dimensions
                    if len(dims) != 4:
                        bad_dim_flg = True
                        err_str_suffix = 'Func file not 4-dimensional: %s\n' \
                                         % func_file
                        err_str = err_str + err_str_suffix
                # Functional file doesn't exist
                else:
                    not_found_flg = True
                    err_str_suffix = 'File not found: %s\n' % func_file
                    err_str = err_str + err_str_suffix
                # If we're just checking s3 files, remove the temporarily 
                # downloaded
                if checked_s3:
                    try:
                        os.remove(func_file)
                    except OSError:
                        pass
            # Check flags for error message
            if not_found_flg:
                err_msg = 'One or more of your input files are missing.\n'
            if bad_dim_flg:
                err_msg = ''.join([err_msg, 'One or more of your input '
                                            'images have improper '
                                            'dimensionality\n'])
            # If err_msg was populated, display in window
            if err_msg:
                err_msg = 'ERROR: ' + err_msg + \
                          'See terminal output for more details'
                errDlgFileTest = wx.MessageDialog(self,
                                                  err_msg,
                                                  'Pipeline Not Ready',
                                                  wx.OK | wx.ICON_ERROR)
                errDlgFileTest.ShowModal()
                errDlgFileTest.Destroy()
                raise Exception(err_str)
            else:
                pass_flg = True

        # Return the flag
        return pass_flg
Example 8
def create_anat_preproc(method='afni',
                        already_skullstripped=False,
                        c=None,
                        wf_name='anat_preproc'):
    """The main purpose of this workflow is to process T1 scans. Raw mprage file is deobliqued, reoriented
    into RPI and skullstripped. Also, a whole brain only mask is generated from the skull stripped image
    for later use in registration.

    Returns
    -------
    anat_preproc : workflow
        Anatomical Preprocessing Workflow

    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/anat_preproc/anat_preproc.py>`_

    Workflow Inputs::

        inputspec.anat : string
            User input anatomical (T1) Image, in any of the 8 orientations

    Workflow Outputs::

        outputspec.refit : string
            Path to deobliqued anatomical image

        outputspec.reorient : string
            Path to RPI oriented anatomical image

        outputspec.skullstrip : string
            Path to skull stripped RPI oriented mprage file with normalized intensities.

        outputspec.brain : string
            Path to skull stripped RPI brain image with original intensity values and not normalized or scaled.

    Order of commands:

    - Deobliquing the scans. ::
        3drefit -deoblique mprage.nii.gz

    - Re-orienting the image into Right-to-Left Posterior-to-Anterior Inferior-to-Superior (RPI) orientation ::
        3dresample -orient RPI
                   -prefix mprage_RPI.nii.gz
                   -inset mprage.nii.gz

    - Skull-Stripping the image ::
        Using AFNI ::
            3dSkullStrip -input mprage_RPI.nii.gz
                         -o_ply mprage_RPI_3dT.nii.gz
        or using BET ::
            bet mprage_RPI.nii.gz

    - The skull-stripping step modifies the intensity values. To get back the original intensity values, we take an element-wise product of the RPI data with the step function of the skull-stripped data ::
        3dcalc -a mprage_RPI.nii.gz
               -b mprage_RPI_3dT.nii.gz
               -expr 'a*step(b)'
               -prefix mprage_RPI_3dc.nii.gz

    High Level Workflow Graph:

    .. image:: ../images/anatpreproc_graph.dot.png
       :width: 500

    Detailed Workflow Graph:

    .. image:: ../images/anatpreproc_graph_detailed.dot.png
       :width: 500

    Examples
    --------
    >>> from CPAC.anat_preproc import create_anat_preproc
    >>> preproc = create_anat_preproc()
    >>> preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
    >>> preproc.run() #doctest: +SKIP
    """

    preproc = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=['anat', 'brain_mask']),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(
        fields=['refit', 'reorient', 'skullstrip', 'brain', 'brain_mask']),
                         name='outputspec')

    anat_deoblique = pe.Node(interface=afni.Refit(), name='anat_deoblique')
    anat_deoblique.inputs.deoblique = True
    preproc.connect(inputnode, 'anat', anat_deoblique, 'in_file')

    preproc.connect(anat_deoblique, 'out_file', outputnode, 'refit')
    # Disable non_local_means_filtering and n4_bias_field_correction when running niworkflows-ants
    if method == 'niworkflows-ants':
        c.non_local_means_filtering = False
        c.n4_bias_field_correction = False

    if c.non_local_means_filtering and c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(denoise, 'output_image', n4, 'input_image')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        denoise = pe.Node(interface=ants.DenoiseImage(), name='anat_denoise')
        preproc.connect(anat_deoblique, 'out_file', denoise, 'input_image')
    elif not c.non_local_means_filtering and c.n4_bias_field_correction:
        n4 = pe.Node(interface=ants.N4BiasFieldCorrection(dimension=3,
                                                          shrink_factor=2,
                                                          copy_header=True),
                     name='anat_n4')
        preproc.connect(anat_deoblique, 'out_file', n4, 'input_image')

    # Anatomical reorientation
    anat_reorient = pe.Node(interface=afni.Resample(), name='anat_reorient')
    anat_reorient.inputs.orientation = 'RPI'
    anat_reorient.inputs.outputtype = 'NIFTI_GZ'

    if c.n4_bias_field_correction:
        preproc.connect(n4, 'output_image', anat_reorient, 'in_file')
    elif c.non_local_means_filtering and not c.n4_bias_field_correction:
        preproc.connect(denoise, 'output_image', anat_reorient, 'in_file')
    else:
        preproc.connect(anat_deoblique, 'out_file', anat_reorient, 'in_file')

    preproc.connect(anat_reorient, 'out_file', outputnode, 'reorient')

    if already_skullstripped:

        anat_skullstrip = pe.Node(
            interface=util.IdentityInterface(fields=['out_file']),
            name='anat_skullstrip')

        preproc.connect(anat_reorient, 'out_file', anat_skullstrip, 'out_file')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'skullstrip')

        preproc.connect(anat_skullstrip, 'out_file', outputnode, 'brain')

    else:

        if method == 'afni':
            # Skull-stripping using AFNI 3dSkullStrip
            inputnode_afni = pe.Node(util.IdentityInterface(fields=[
                'mask_vol', 'shrink_factor', 'var_shrink_fac',
                'shrink_fac_bot_lim', 'avoid_vent', 'niter', 'pushout',
                'touchup', 'fill_hole', 'avoid_eyes', 'use_edge', 'exp_frac',
                'smooth_final', 'push_to_edge', 'use_skull', 'perc_int',
                'max_inter_iter', 'blur_fwhm', 'fac', 'monkey'
            ]),
                                     name='AFNI_options')

            skullstrip_args = pe.Node(util.Function(
                input_names=[
                    'spat_norm', 'spat_norm_dxyz', 'mask_vol', 'shrink_fac',
                    'var_shrink_fac', 'shrink_fac_bot_lim', 'avoid_vent',
                    'niter', 'pushout', 'touchup', 'fill_hole', 'avoid_eyes',
                    'use_edge', 'exp_frac', 'smooth_final', 'push_to_edge',
                    'use_skull', 'perc_int', 'max_inter_iter', 'blur_fwhm',
                    'fac', 'monkey'
                ],
                output_names=['expr'],
                function=create_3dskullstrip_arg_string),
                                      name='anat_skullstrip_args')

            preproc.connect([(inputnode_afni, skullstrip_args,
                              [('mask_vol', 'mask_vol'),
                               ('shrink_factor', 'shrink_fac'),
                               ('var_shrink_fac', 'var_shrink_fac'),
                               ('shrink_fac_bot_lim', 'shrink_fac_bot_lim'),
                               ('avoid_vent', 'avoid_vent'),
                               ('niter', 'niter'), ('pushout', 'pushout'),
                               ('touchup', 'touchup'),
                               ('fill_hole', 'fill_hole'),
                               ('avoid_eyes', 'avoid_eyes'),
                               ('use_edge', 'use_edge'),
                               ('exp_frac', 'exp_frac'),
                               ('smooth_final', 'smooth_final'),
                               ('push_to_edge', 'push_to_edge'),
                               ('use_skull', 'use_skull'),
                               ('perc_int', 'perc_int'),
                               ('max_inter_iter', 'max_inter_iter'),
                               ('blur_fwhm', 'blur_fwhm'), ('fac', 'fac'),
                               ('monkey', 'monkey')])])

            anat_skullstrip = pe.Node(interface=afni.SkullStrip(),
                                      name='anat_skullstrip')

            anat_skullstrip.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')
            preproc.connect(skullstrip_args, 'expr', anat_skullstrip, 'args')

            # Generate anatomical brain mask

            anat_brain_mask = pe.Node(interface=afni.Calc(),
                                      name='anat_brain_mask')

            anat_brain_mask.inputs.expr = 'step(a)'
            anat_brain_mask.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_skullstrip, 'out_file', anat_brain_mask,
                            'in_file_a')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_brain_mask, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_brain_mask, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'fsl':
            # Skull-stripping using FSL BET
            inputnode_bet = pe.Node(util.IdentityInterface(fields=[
                'frac', 'mask_boolean', 'mesh_boolean', 'outline', 'padding',
                'radius', 'reduce_bias', 'remove_eyes', 'robust', 'skull',
                'surfaces', 'threshold', 'vertical_gradient'
            ]),
                                    name='BET_options')

            anat_skullstrip = pe.Node(interface=fsl.BET(),
                                      name='anat_skullstrip')
            anat_skullstrip.inputs.output_type = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip,
                            'in_file')

            preproc.connect([(inputnode_bet, anat_skullstrip, [
                ('frac', 'frac'),
                ('mask_boolean', 'mask'),
                ('mesh_boolean', 'mesh'),
                ('outline', 'outline'),
                ('padding', 'padding'),
                ('radius', 'radius'),
                ('reduce_bias', 'reduce_bias'),
                ('remove_eyes', 'remove_eyes'),
                ('robust', 'robust'),
                ('skull', 'skull'),
                ('surfaces', 'surfaces'),
                ('threshold', 'threshold'),
                ('vertical_gradient', 'vertical_gradient'),
            ])])

            preproc.connect(anat_skullstrip, 'out_file', outputnode,
                            'skullstrip')

            # Apply skull-stripping step mask to original volume
            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')

            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(anat_skullstrip, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(anat_skullstrip, 'mask_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'niworkflows-ants':
            # Skull-stripping using niworkflows-ants
            anat_skullstrip_ants = init_brain_extraction_wf(
                tpl_target_path=c.niworkflows_ants_template_path,
                tpl_mask_path=c.niworkflows_ants_mask_path,
                tpl_regmask_path=c.niworkflows_ants_regmask_path,
                name='anat_skullstrip_ants')

            preproc.connect(anat_reorient, 'out_file', anat_skullstrip_ants,
                            'inputnode.in_files')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'skullstrip')

            preproc.connect(anat_skullstrip_ants, 'copy_xform.out_file',
                            outputnode, 'brain')

            preproc.connect(anat_skullstrip_ants,
                            'atropos_wf.copy_xform.out_mask', outputnode,
                            'brain_mask')

        elif method == 'mask':

            brain_mask_deoblique = pe.Node(interface=afni.Refit(),
                                           name='brain_mask_deoblique')
            brain_mask_deoblique.inputs.deoblique = True
            preproc.connect(inputnode, 'brain_mask', brain_mask_deoblique,
                            'in_file')

            brain_mask_reorient = pe.Node(interface=afni.Resample(),
                                          name='brain_mask_reorient')
            brain_mask_reorient.inputs.orientation = 'RPI'
            brain_mask_reorient.inputs.outputtype = 'NIFTI_GZ'
            preproc.connect(brain_mask_deoblique, 'out_file',
                            brain_mask_reorient, 'in_file')

            anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
                                               name='anat_skullstrip_orig_vol')
            anat_skullstrip_orig_vol.inputs.expr = 'a*step(b)'
            anat_skullstrip_orig_vol.inputs.outputtype = 'NIFTI_GZ'

            preproc.connect(anat_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_a')

            preproc.connect(brain_mask_reorient, 'out_file',
                            anat_skullstrip_orig_vol, 'in_file_b')

            preproc.connect(brain_mask_reorient, 'out_file', outputnode,
                            'brain_mask')

            preproc.connect(anat_skullstrip_orig_vol, 'out_file', outputnode,
                            'brain')

        elif method == 'unet':
            """
            UNet
            options (following numbers are default):
            input_slice: 3
            conv_block: 5
            kernel_root: 16
            rescale_dim: 256
            """
            # TODO: add options to pipeline_config
            train_model = UNet2d(dim_in=3, num_conv_block=5, kernel_root=16)
            unet_path = check_for_s3(c.unet_model)
            checkpoint = torch.load(unet_path, map_location={'cuda:0': 'cpu'})
            train_model.load_state_dict(checkpoint['state_dict'])
            model = nn.Sequential(train_model, nn.Softmax2d())

            # create a node called unet_mask
            unet_mask = pe.Node(util.Function(input_names=['model', 'cimg_in'],
                                              output_names=['out_path'],
                                              function=predict_volumes),
                                name='unet_mask')

            unet_mask.inputs.model = model
            preproc.connect(anat_reorient, 'out_file', unet_mask, 'cimg_in')
            """
            Revised mask with ANTs
            """
            # fslmaths <whole head> -mul <mask> brain.nii.gz
            unet_masked_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                        name='unet_masked_brain')
            unet_masked_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', unet_masked_brain,
                            'in_file')
            preproc.connect(unet_mask, 'out_path', unet_masked_brain,
                            'operand_files')

            # flirt -v -dof 6 -in brain.nii.gz -ref NMT_SS_0.5mm.nii.gz -o brain_rot2atl -omat brain_rot2atl.mat -interp sinc
            # TODO change it to ANTs linear transform
            native_brain_to_template_brain = pe.Node(
                interface=fsl.FLIRT(), name='native_brain_to_template_brain')
            native_brain_to_template_brain.inputs.reference = c.template_brain_only_for_anat
            native_brain_to_template_brain.inputs.dof = 6
            native_brain_to_template_brain.inputs.interp = 'sinc'
            preproc.connect(unet_masked_brain, 'out_file',
                            native_brain_to_template_brain, 'in_file')

            # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat
            # TODO change it to ANTs linear transform
            native_head_to_template_head = pe.Node(
                interface=fsl.FLIRT(), name='native_head_to_template_head')
            native_head_to_template_head.inputs.reference = c.template_skull_for_anat
            native_head_to_template_head.inputs.apply_xfm = True
            preproc.connect(anat_reorient, 'out_file',
                            native_head_to_template_head, 'in_file')
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            native_head_to_template_head, 'in_matrix_file')

            # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz
            template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(),
                                          name='template_brain_mask')
            template_brain_mask.inputs.in_file = c.template_brain_only_for_anat
            template_brain_mask.inputs.args = '-bin'

            # ANTS 3 -m  CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching  --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000
            ants_template_head_to_template = pe.Node(
                interface=ants.Registration(),
                name='template_head_to_template')
            ants_template_head_to_template.inputs.metric = ['CC']
            # CC[fixed, moving, 1, 5]: metric weight 1, radius 5
            ants_template_head_to_template.inputs.metric_weight = [1]
            ants_template_head_to_template.inputs.radius_or_number_of_bins = [5]
            ants_template_head_to_template.inputs.moving_image = c.template_skull_for_anat
            ants_template_head_to_template.inputs.transforms = ['SyN']
            ants_template_head_to_template.inputs.transform_parameters = [
                (0.25, )
            ]
            ants_template_head_to_template.inputs.interpolation = 'NearestNeighbor'
            ants_template_head_to_template.inputs.number_of_iterations = [[
                60, 50, 20
            ]]
            ants_template_head_to_template.inputs.smoothing_sigmas = [[
                0.6, 0.2, 0.0
            ]]
            ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]]
            ants_template_head_to_template.inputs.convergence_threshold = [
                1.e-8
            ]
            preproc.connect(native_head_to_template_head, 'out_file',
                            ants_template_head_to_template, 'fixed_image')

            # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz
            template_head_transform_to_template = pe.Node(
                interface=ants.ApplyTransforms(),
                name='template_head_transform_to_template')
            template_head_transform_to_template.inputs.dimension = 3
            preproc.connect(template_brain_mask, 'out_file',
                            template_head_transform_to_template, 'input_image')
            preproc.connect(native_brain_to_template_brain, 'out_file',
                            template_head_transform_to_template,
                            'reference_image')
            preproc.connect(ants_template_head_to_template,
                            'forward_transforms',
                            template_head_transform_to_template, 'transforms')

            # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat
            invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm')
            invt.inputs.invert_xfm = True
            preproc.connect(native_brain_to_template_brain, 'out_matrix_file',
                            invt, 'in_file')

            # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat
            template_brain_to_native_brain = pe.Node(
                interface=fsl.FLIRT(), name='template_brain_to_native_brain')
            template_brain_to_native_brain.inputs.apply_xfm = True
            preproc.connect(template_head_transform_to_template,
                            'output_image', template_brain_to_native_brain,
                            'in_file')
            preproc.connect(unet_masked_brain, 'out_file',
                            template_brain_to_native_brain, 'reference')
            preproc.connect(invt, 'out_file', template_brain_to_native_brain,
                            'in_matrix_file')

            # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz
            refined_mask = pe.Node(interface=fsl.Threshold(),
                                   name='refined_mask')
            refined_mask.inputs.thresh = 0.5
            preproc.connect(template_brain_to_native_brain, 'out_file',
                            refined_mask, 'in_file')

            # get a new brain with mask
            refined_brain = pe.Node(interface=fsl.MultiImageMaths(),
                                    name='refined_brain')
            refined_brain.inputs.op_string = "-mul %s"
            preproc.connect(anat_reorient, 'out_file', refined_brain,
                            'in_file')
            preproc.connect(refined_mask, 'out_file', refined_brain,
                            'operand_files')

            preproc.connect(refined_mask, 'out_file', outputnode, 'brain_mask')
            preproc.connect(refined_brain, 'out_file', outputnode, 'brain')

    return preproc
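
The docstring example above calls create_anat_preproc() without a pipeline configuration, but the workflow body reads c.non_local_means_filtering and c.n4_bias_field_correction before choosing a skull-stripping path. A minimal sketch, assuming a plain namespace can stand in for the real C-PAC configuration object and setting only the attributes the default 'afni' path touches:

# Minimal sketch: SimpleNamespace stands in for the C-PAC pipeline configuration.
from types import SimpleNamespace

c = SimpleNamespace(non_local_means_filtering=False,
                    n4_bias_field_correction=False)

preproc = create_anat_preproc(method='afni', c=c, wf_name='anat_preproc')
preproc.inputs.inputspec.anat = 'sub1/anat/mprage.nii.gz'
# preproc.run()  # requires AFNI and the anatomical input file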