Code example #1
import os
import shutil

from nipype.interfaces import spm
import nipype.interfaces.matlab as mlab
from nipype.interfaces.fsl import Split, Merge


def coregistration_4D(source_file, ref, out_file=None, spm_path=None):
    '''
    Coregistration with spm + fsl for 4D files.
    Why? Neither SPM nor FSL can do this by default.
    :param source_file: path to input 4D file
    :param ref: reference file to co-register the source-file to
    :param out_file: output file
    :param spm_path: path to spm
    :return: path to coregistered file
    '''
    if spm_path is not None:
        mlab.MatlabCommand.set_default_paths(spm_path)
    if spm.SPMCommand().version is None:
        raise Exception('SPM path not set correctly:', spm_path,
                        spm.SPMCommand().version)
    main_dir, source_file_name = os.path.split(source_file)
    if out_file is None:
        out_file = os.path.join(main_dir, 'r' + source_file_name)

    split_folder = os.path.join(main_dir, '4D_split')
    if not os.path.exists(split_folder):
        os.mkdir(split_folder)
    split = Split(in_file=source_file, dimension='t')
    split.inputs.out_base_name = os.path.join(split_folder, '4D_vol_')
    split.inputs.output_type = 'NIFTI'
    split = split.run()

    split_files = split.outputs.out_files
    index_file = split_files.pop(0)

    coreg = spm.Coregister()
    coreg.inputs.target = ref
    coreg.inputs.source = index_file
    coreg.inputs.apply_to_files = split_files
    coreg = coreg.run()

    merger = Merge()
    merger.inputs.in_files = coreg.outputs.coregistered_files
    merger.inputs.dimension = 't'
    merger.inputs.output_type = 'NIFTI_GZ'
    merger.inputs.merged_file = out_file
    merger = merger.run()

    shutil.rmtree(split_folder)

    return merger.outputs.merged_file
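
A minimal usage sketch for the helper above (all paths are hypothetical; assumes FSL and SPM are installed and visible to nipype):

registered = coregistration_4D(
    source_file='/data/sub-01/func/bold.nii',  # hypothetical input 4D file
    ref='/data/sub-01/anat/t1.nii',            # hypothetical reference image
    spm_path='/opt/spm12')                     # hypothetical SPM installation
print(registered)  # path to the coregistered, merged 4D file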
Code example #2
def use_spm_standalone():
    """
    Use SPM Standalone with MATLAB Common Runtime
    """
    import os
    from colorama import Fore
    import platform
    from clinica.utils.stream import cprint
    from nipype.interfaces import spm
    # This section of code determines whether to use SPM standalone or not
    if all(elem in os.environ.keys() for elem in ['SPMSTANDALONE_HOME', 'MCR_HOME']):
        spm_standalone_home = os.getenv('SPMSTANDALONE_HOME')
        mcr_home = os.getenv('MCR_HOME')
        if os.path.exists(spm_standalone_home) and os.path.exists(mcr_home):
            cprint(Fore.GREEN + 'SPM standalone has been found and will be used in this pipeline' + Fore.RESET)
            if platform.system().lower().startswith('darwin'):
                matlab_cmd = ('cd %s && ./run_spm12.sh %s script' %
                              (spm_standalone_home, mcr_home))
            elif platform.system().lower().startswith('linux'):
                matlab_cmd = ('%s %s script' %
                              (os.path.join(spm_standalone_home, 'run_spm12.sh'), mcr_home))
            else:
                raise SystemError('Clinica only supports macOS and Linux')
            spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
            cprint("Using SPM standalone version %s" % spm.SPMCommand().version)
        else:
            raise FileNotFoundError('$SPMSTANDALONE_HOME and $MCR_HOME are defined but point to non-existent folders')
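
A sketch of how this helper might be invoked before building any SPM-based pipeline (the two paths are hypothetical; the function itself reads them from the environment):

import os
os.environ['SPMSTANDALONE_HOME'] = '/opt/spm12-standalone'  # hypothetical path
os.environ['MCR_HOME'] = '/opt/mcr/v95'                     # hypothetical path
use_spm_standalone()  # configures nipype's SPMCommand to run through the MCR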
Code example #3
singularity_cmd = 'singularity run -B /home/mpib/wittkuhn -B /mnt/beegfs/home/wittkuhn /home/mpib/wittkuhn/highspeed/highspeed-glm/tools/spm/spm12.simg'
singularity_spm = 'eval \$SPMMCRCMD'
path_matlab = ' '.join([singularity_cmd, singularity_spm])
spm.SPMCommand.set_mlab_paths(matlab_cmd=path_matlab, use_mcr=True)
# grab the list of subjects from the bids data set:
layout = BIDSLayout(opj(path_root, 'bids'))
# get all subject ids:
sub_list = sorted(layout.get_subjects())
# create a template to add the "sub-" prefix to the ids
sub_template = ['sub-'] * len(sub_list)
# add the prefix to all ids:
sub_list = ["%s%s" % t for t in zip(sub_template, sub_list)]
# restrict to the subject range given on the command line:
sub_list = sub_list[int(sys.argv[1]):int(sys.argv[2])]
# print the SPM version:
print('using SPM version %s' % spm.SPMCommand().version)
# ======================================================================
# DEFINE PBS CLUSTER JOB TEMPLATE (NEEDED WHEN RUNNING ON THE CLUSTER):
# ======================================================================
job_template = """
#PBS -l walltime=10:00:00
#PBS -j oe
#PBS -o /home/mpib/wittkuhn/highspeed/highspeed-glm/logs/glm
#PBS -m n
#PBS -v FSLOUTPUTTYPE=NIFTI_GZ
source /etc/bash_completion.d/virtualenvwrapper
workon highspeed-glm
module load fsl/5.0
module load matlab/R2017b
module load freesurfer/6.0.0
"""
Code example #4
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import os
        import platform
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import clinica.pipelines.t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_utils as seg_utils
        import clinica.pipelines.t1_volume_create_dartel.t1_volume_create_dartel_utils as dartel_utils
        import clinica.pipelines.t1_volume_dartel2mni.t1_volume_dartel2mni_utils as dartel2mni_utils
        from clinica.utils.io import unzip_nii

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                          'run_spm12.sh') \
                             + ' ' + os.environ['MCR_HOME'] \
                             + ' script'
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
                version = spm.SPMCommand().version
            else:
                raise EnvironmentError('MCR_HOME variable not in environment, although '
                                       + 'SPMSTANDALONE_HOME has been found')
        else:
            version = spm.Info.getinfo()

        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print('You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                          + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError('SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.')
            if isinstance(version, str):
                if float(version) >= 12.7169:
                    if platform.system() == 'Darwin':
                        tissue_map = os.path.join(str(spm_home), 'spm12.app/Contents/MacOS/spm12_mcr/spm12/spm12/tpm/TPM.nii')
                    else:
                        tissue_map = os.path.join(str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError('SPM standalone version not supported. Please upgrade SPM standalone.')
        else:
            raise RuntimeError('SPM could not be found. Please verify your SPM_HOME environment variable.')

        # Unzipping
        # ===============================
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node', iterfield=['in_file'])

        # Unified Segmentation
        # ===============================
        new_segment = npe.MapNode(spm.NewSegment(),
                                  name='new_segment',
                                  iterfield=['channel_files'])

        if self.parameters['affine_regularization'] is not None:
            new_segment.inputs.affine_regularization = self.parameters['affine_regularization']
        if self.parameters['channel_info'] is not None:
            new_segment.inputs.channel_info = self.parameters['channel_info']
        if self.parameters['sampling_distance'] is not None:
            new_segment.inputs.sampling_distance = self.parameters['sampling_distance']
        if self.parameters['warping_regularization'] is not None:
            new_segment.inputs.warping_regularization = self.parameters['warping_regularization']

        # Check if we need to save the forward transformation for registering the T1 to the MNI space
        if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:
            if self.parameters['write_deformation_fields'] is not None:
                self.parameters['write_deformation_fields'][1] = True
            else:
                self.parameters['write_deformation_fields'] = [False, True]

        if self.parameters['write_deformation_fields'] is not None:
            new_segment.inputs.write_deformation_fields = self.parameters['write_deformation_fields']

        if self.parameters['tpm'] is not None:
            tissue_map = self.parameters['tpm']

        new_segment.inputs.tissues = seg_utils.get_tissue_tuples(tissue_map,
                                                                 self.parameters['tissue_classes'],
                                                                 self.parameters['dartel_tissues'],
                                                                 self.parameters['save_warped_unmodulated'],
                                                                 self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # ========================================================
        if self.parameters['save_t1_mni'] is not None and self.parameters['save_t1_mni']:

            t1_to_mni = npe.MapNode(seg_utils.ApplySegmentationDeformation(),
                                    name='t1_to_mni',
                                    iterfield=['deformation_field', 'in_files'])
            self.connect([
                (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
                (new_segment, t1_to_mni, [('forward_deformation_field', 'deformation_field')]),
                (t1_to_mni, self.output_node, [('out_files', 't1_mni')])
            ])

        # DARTEL template
        # ===============================
        dartel_template = npe.Node(spm.DARTEL(),
                                   name='dartel_template')

        if self.parameters['iteration_parameters'] is not None:
            dartel_template.inputs.iteration_parameters = self.parameters['iteration_parameters']
        if self.parameters['optimization_parameters'] is not None:
            dartel_template.inputs.optimization_parameters = self.parameters['optimization_parameters']
        if self.parameters['regularization_form'] is not None:
            dartel_template.inputs.regularization_form = self.parameters['regularization_form']

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(spm.DARTELNorm2MNI(),
                                      name='dartel2MNI',
                                      iterfield=['apply_to_files', 'flowfield_files'])

        if self.parameters['bounding_box'] is not None:
            dartel2mni_node.inputs.bounding_box = self.parameters['bounding_box']
        if self.parameters['voxel_size'] is not None:
            dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
        dartel2mni_node.inputs.modulate = self.parameters['modulation']
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters['fwhm'] is not None and len(self.parameters['fwhm']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['in_files'])

            smoothing_node.iterables = [('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
                                        ('out_prefix', ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])]
            smoothing_node.synchronize = True

            join_smoothing_node = npe.JoinNode(interface=nutil.Function(input_names=['smoothed_normalized_files'],
                                                                        output_names=['smoothed_normalized_files'],
                                                                        function=dartel2mni_utils.join_smoothed_files),
                                               joinsource='smoothing_node',
                                               joinfield='smoothed_normalized_files',
                                               name='join_smoothing_node')
            self.connect([
                (dartel2mni_node, smoothing_node, [('normalized_files', 'in_files')]),
                (smoothing_node, join_smoothing_node, [('smoothed_files', 'smoothed_normalized_files')]),
                (join_smoothing_node, self.output_node, [('smoothed_normalized_files', 'smoothed_normalized_files')])
            ])
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(nutil.Function(input_names=['in_image',
                                                                   'in_atlas_list'],
                                                      output_names=['atlas_statistics'],
                                                      function=dartel2mni_utils.atlas_statistics),
                                       name='atlas_stats_node',
                                       iterfield=['in_image'])
        atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_node, [('input_images', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (new_segment, self.output_node, [('bias_corrected_images', 'bias_corrected_images'),
                                             ('bias_field_images', 'bias_field_images'),
                                             ('dartel_input_images', 'dartel_input_images'),
                                             ('forward_deformation_field', 'forward_deformation_field'),
                                             ('inverse_deformation_field', 'inverse_deformation_field'),
                                             ('modulated_class_images', 'modulated_class_images'),
                                             ('native_class_images', 'native_class_images'),
                                             ('normalized_class_images', 'normalized_class_images'),
                                             ('transformation_mat', 'transformation_mat')]),
            (new_segment, dartel_template, [(('dartel_input_images', dartel_utils.get_class_images,
                                              self.parameters['dartel_tissues']), 'image_files')]),
            (dartel_template, self.output_node, [('dartel_flow_fields', 'dartel_flow_fields'),
                                                 ('final_template_file', 'final_template_file'),
                                                 ('template_files', 'template_files')]),
            (new_segment, dartel2mni_node, [(('native_class_images', seg_utils.group_nested_images_by_subject),
                                             'apply_to_files')]),
            (dartel_template, dartel2mni_node, [(('dartel_flow_fields', dartel2mni_utils.prepare_flowfields,
                                                  self.parameters['tissue_classes']), 'flowfield_files')]),
            (dartel_template, dartel2mni_node, [('final_template_file', 'template_file')]),
            (dartel2mni_node, self.output_node, [('normalized_files', 'normalized_files')]),
            (dartel2mni_node, atlas_stats_node, [(('normalized_files', dartel2mni_utils.select_gm_images),
                                                  'in_image')]),
            (atlas_stats_node, self.output_node, [('atlas_statistics', 'atlas_statistics')])
        ])
Code example #5
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import os
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.io import unzip_nii
        from clinica.pipelines.t1_volume_dartel2mni.t1_volume_dartel2mni_utils import prepare_flowfields, join_smoothed_files, atlas_statistics, select_gm_images

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                          'run_spm12.sh') \
                             + ' ' + os.environ['MCR_HOME'] \
                             + ' script'
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd,
                                              use_mcr=True)
                version = spm.SPMCommand().version
            else:
                raise EnvironmentError(
                    'MCR_HOME variable not in environment, although ' +
                    'SPMSTANDALONE_HOME has been found')
        else:
            version = spm.Info.getinfo()

        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print(
                        'You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                        + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.'
                    )
            if isinstance(version, str):
                if float(version) >= 12.7169:
                    tissue_map = os.path.join(
                        str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM standalone version not supported. Please upgrade SPM standalone.'
                    )
        else:
            raise RuntimeError(
                'SPM could not be found. Please verify your SPM_HOME environment variable.'
            )

        # Unzipping
        # =========
        unzip_tissues_node = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                         name='unzip_tissues_node',
                                         iterfield=['in_file'])
        unzip_flowfields_node = npe.MapNode(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                            name='unzip_flowfields_node',
                                            iterfield=['in_file'])
        unzip_template_node = npe.Node(nutil.Function(
            input_names=['in_file'],
            output_names=['out_file'],
            function=unzip_nii),
                                       name='unzip_template_node')

        # DARTEL2MNI Registration
        # =======================
        dartel2mni_node = npe.MapNode(
            spm.DARTELNorm2MNI(),
            name='dartel2MNI',
            iterfield=['apply_to_files', 'flowfield_files'])

        if self.parameters['bounding_box'] is not None:
            dartel2mni_node.inputs.bounding_box = self.parameters[
                'bounding_box']
        if self.parameters['voxel_size'] is not None:
            dartel2mni_node.inputs.voxel_size = self.parameters['voxel_size']
        dartel2mni_node.inputs.modulate = self.parameters['modulation']
        dartel2mni_node.inputs.fwhm = 0

        # Smoothing
        # =========
        if self.parameters['fwhm'] is not None and len(
                self.parameters['fwhm']) > 0:
            smoothing_node = npe.MapNode(spm.Smooth(),
                                         name='smoothing_node',
                                         iterfield=['in_files'])

            smoothing_node.iterables = [
                ('fwhm', [[x, x, x] for x in self.parameters['fwhm']]),
                ('out_prefix',
                 ['fwhm-' + str(x) + 'mm_' for x in self.parameters['fwhm']])
            ]
            smoothing_node.synchronize = True

            join_smoothing_node = npe.JoinNode(
                interface=nutil.Function(
                    input_names=['smoothed_normalized_files'],
                    output_names=['smoothed_normalized_files'],
                    function=join_smoothed_files),
                joinsource='smoothing_node',
                joinfield='smoothed_normalized_files',
                name='join_smoothing_node')
            self.connect([(dartel2mni_node, smoothing_node,
                           [('normalized_files', 'in_files')]),
                          (smoothing_node, join_smoothing_node,
                           [('smoothed_files', 'smoothed_normalized_files')]),
                          (join_smoothing_node, self.output_node,
                           [('smoothed_normalized_files',
                             'smoothed_normalized_files')])])
        else:
            self.output_node.inputs.smoothed_normalized_files = []

        # Atlas Statistics
        # ================
        atlas_stats_node = npe.MapNode(nutil.Function(
            input_names=['in_image', 'in_atlas_list'],
            output_names=['atlas_statistics'],
            function=atlas_statistics),
                                       name='atlas_stats_node',
                                       iterfield=['in_image'])
        atlas_stats_node.inputs.in_atlas_list = self.parameters['atlas_list']

        # Connection
        # ==========
        self.connect([(self.input_node, unzip_tissues_node, [('apply_to_files',
                                                              'in_file')]),
                      (self.input_node, unzip_flowfields_node,
                       [('flowfield_files', 'in_file')]),
                      (self.input_node, unzip_template_node, [('template_file',
                                                               'in_file')]),
                      (unzip_tissues_node, dartel2mni_node,
                       [('out_file', 'apply_to_files')]),
                      (unzip_flowfields_node, dartel2mni_node,
                       [(('out_file', prepare_flowfields,
                          self.parameters['tissues']), 'flowfield_files')]),
                      (unzip_template_node, dartel2mni_node,
                       [('out_file', 'template_file')]),
                      (dartel2mni_node, self.output_node,
                       [('normalized_files', 'normalized_files')]),
                      (dartel2mni_node, atlas_stats_node,
                       [(('normalized_files', select_gm_images), 'in_image')]),
                      (atlas_stats_node, self.output_node,
                       [('atlas_statistics', 'atlas_statistics')])])
Code example #6
def software_check():
    """Return the SPM standalone version installed inside the Docker image."""
    # `spm` (nipype.interfaces.spm) and `template_dict` are module-level globals
    spm.SPMCommand.set_mlab_paths(matlab_cmd=template_dict['matlab_cmd'],
                                  use_mcr=True)
    return spm.SPMCommand().version
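
A sketch of the `template_dict` this helper expects (the command string is hypothetical and must match the container's SPM/MCR layout):

template_dict = {
    'matlab_cmd': '/opt/spm12/run_spm12.sh /opt/mcr/v95 script',  # hypothetical paths
}
print(software_check())  # prints the standalone SPM version string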
Code example #7
from fmri_preprocessing.interfaces.ArtifacRemotion import ArtifacRemotion
from nipype.interfaces.utility import IdentityInterface
from nipype.interfaces.io import SelectFiles, DataSink
from nipype.algorithms.rapidart import ArtifactDetect
from nipype import Workflow, Node
from nipype.interfaces.spm import Normalize12
from nipype.algorithms.misc import Gunzip
from miscellaneous.utils import *
import nipype.interfaces.spm as spm
import os
from os.path import join as opj

template = '/home/runlab/data/Atlas/TPM.nii'
matlab_cmd = '/home/colciencias/programas/spm12/run_spm12.sh /usr/local/MATLAB/MATLAB_Runtime/v95/ script'
spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)

print('SPM version: ' + str(spm.SPMCommand().version))

base_dir = '/home/colciencias/test/base/'
structural_dir = '/home/colciencias/base/struct/'
experiment_dir = opj(base_dir, 'output/')
output_dir = 'datasink'
working_dir = 'workingdir'

# list of subject identifiers
subject_list = ['1']

fwhm = 8  # Smoothing width to apply (Gaussian kernel size, in mm)
TR = 2  # Repetition time (s)
init_volume = 0  # Index of the first volume to keep (earlier dummy scans are dropped)
iso_size = 2  # Isotropic voxel size (in mm) to resample functional images to
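
The snippet stops after the parameter block; a hedged sketch of the normalization node it presumably builds next, consistent with the imports and variables above:

# Hypothetical continuation: a Normalize12 node using the TPM template defined above
normalize_fmri = Node(Normalize12(jobtype='estwrite',
                                  tpm=template,
                                  write_voxel_sizes=[iso_size, iso_size, iso_size]),
                      name='normalize_fmri')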
Code example #8
def run(base_dir):
    # NOTE: this snippet assumes the usual imports at module level, e.g.
    #   from nipype import Node, Workflow
    #   from nipype.interfaces.fsl import (ExtractROI, MCFLIRT, SliceTimer,
    #                                      BET, FAST, FLIRT, Threshold)
    #   import nipype.interfaces.spm as spm
    #   from os.path import join as opj
    # plus the project-specific interfaces (N4Bias, Descomposition,
    # ExtractConfounds, SignalExtraction, ArtifacRemotion, get_latest,
    # get_wm_csf) from the surrounding package.
    template = '/home/brainlab/Desktop/Rudas/Data/Parcellation/TPM.nii'
    matlab_cmd = '/home/brainlab/Desktop/Rudas/Tools/spm12_r7487/spm12/run_spm12.sh /home/brainlab/Desktop/Rudas/Tools/MCR/v713/ script'
    spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)

    print('SPM version: ' + str(spm.SPMCommand().version))

    structural_dir = '/home/brainlab/Desktop/Rudas/Data/Propofol/Structurals/'
    experiment_dir = opj(base_dir, 'output/')
    output_dir = 'datasink'
    working_dir = 'workingdir'
    # subject_list = ['2014_05_02_02CB',
    #                 '2014_05_16_16RA',
    #                 '2014_05_30_30AQ',
    #                 '2014_07_04_04HD']
    # list of subject identifiers
    subject_list = [
        '2014_05_02_02CB', '2014_05_16_16RA', '2014_05_30_30AQ',
        '2014_07_04_04HD', '2014_07_04_04SG', '2014_08_13_13CA',
        '2014_10_08_08BC', '2014_10_08_08VR', '2014_10_22_22CY',
        '2014_10_22_22TK', '2014_11_17_17EK', '2014_11_17_17NA',
        '2014_11_19_19SA', '2014_11_19_AK', '2014_11_25.25JK',
        '2014_11_27_27HF', '2014_12_10_10JR'
    ]

    fwhm = 8  # Smoothing width to apply (Gaussian kernel size, in mm)
    TR = 2  # Repetition time (s)
    init_volume = 0  # Index of the first volume to keep (earlier dummy scans are dropped)
    iso_size = 2  # Isotropic voxel size (in mm) to resample functional images to

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=init_volume,
                              t_size=-1,
                              output_type='NIFTI'),
                   name="extract")

    # MCFLIRT - motion correction
    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="motion_correction")

    # SliceTimer - correct for slice wise acquisition
    slicetimer = Node(SliceTimer(index_dir=False,
                                 interleaved=True,
                                 output_type='NIFTI',
                                 time_repetition=TR),
                      name="slice_timing_correction")

    # Smooth - image smoothing
    smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")

    n4bias = Node(N4Bias(out_file='t1_n4bias.nii.gz'), name='n4bias')

    descomposition = Node(Descomposition(n_components=20,
                                         low_pass=0.1,
                                         high_pass=0.01,
                                         tr=TR),
                          name='descomposition')

    # Artifact Detection - determines outliers in functional images
    art = Node(ArtifactDetect(norm_threshold=2,
                              zintensity_threshold=3,
                              mask_type='spm_global',
                              parameter_source='FSL',
                              use_differences=[True, False],
                              plot_type='svg'),
               name="artifact_detection")

    extract_confounds_ws_csf = Node(
        ExtractConfounds(out_file='ev_without_gs.csv'),
        name='extract_confounds_ws_csf')

    extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                                 delimiter=','),
                                name='extract_confounds_global_signal')

    signal_extraction = Node(SignalExtraction(
        time_series_out_file='time_series.csv',
        correlation_matrix_out_file='correlation_matrix.png',
        atlas_identifier='cort-maxprob-thr25-2mm',
        tr=TR,
        plot=True),
                             name='signal_extraction')

    art_remotion = Node(ArtifacRemotion(out_file='fmri_art_removed.nii'),
                        name='artifact_remotion')

    # BET - Skull-strip anatomical and functional images
    bet_t1 = Node(BET(frac=0.5, robust=True, mask=True,
                      output_type='NIFTI_GZ'),
                  name="bet_t1")

    # FAST - Image Segmentation
    segmentation = Node(FAST(output_type='NIFTI'), name="segmentation")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize_fmri = Node(Normalize12(jobtype='estwrite',
                                      tpm=template,
                                      write_voxel_sizes=[2, 2, 2],
                                      write_bounding_box=[[-90, -126, -72],
                                                          [90, 90, 108]]),
                          name="normalize_fmri")

    gunzip = Node(Gunzip(), name="gunzip")

    normalize_t1 = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                        name="normalize_t1")

    normalize_masks = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                           name="normalize_masks")

    # Threshold - Threshold WM probability image
    threshold = Node(Threshold(thresh=0.5, args='-bin',
                               output_type='NIFTI_GZ'),
                     name="wm_mask_threshold")

    # FLIRT - pre-alignment of functional images to anatomical images
    coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'),
                     name="linear_warp_estimation")

    # FLIRT - coregistration of functional images to anatomical images with BBR
    coreg_bbr = Node(FLIRT(dof=6,
                           cost='bbr',
                           schedule=opj(os.getenv('FSLDIR'),
                                        'etc/flirtsch/bbr.sch'),
                           output_type='NIFTI_GZ'),
                     name="nonlinear_warp_estimation")

    # Apply coregistration warp to functional images
    applywarp = Node(FLIRT(interp='spline',
                           apply_isoxfm=iso_size,
                           output_type='NIFTI'),
                     name="registration_fmri")

    # Apply coregistration warp to mean file
    applywarp_mean = Node(FLIRT(interp='spline',
                                apply_isoxfm=iso_size,
                                output_type='NIFTI_GZ'),
                          name="registration_mean_fmri")

    # Infosource - a function free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    anat_file = opj(structural_dir, '{subject_id}', 't1.nii')
    func_file = opj('{subject_id}', 'fmri.nii')

    templates = {'anat': anat_file, 'func': func_file}

    selectfiles = Node(SelectFiles(templates, base_directory=base_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    # Create a coregistration workflow
    coregwf = Workflow(name='coreg_fmri_to_t1')
    coregwf.base_dir = opj(experiment_dir, working_dir)

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the coregistration workflow

    coregwf.connect([
        (bet_t1, n4bias, [('out_file', 'in_file')]),
        (n4bias, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', get_latest),
                                    'in_file')]),
        (n4bias, coreg_pre, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp, [('out_file', 'reference')]),
        (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp_mean, [('out_file', 'reference')]),
    ])

    ## Use the following DataSink output substitutions
    substitutions = [('_subject_id_', 'sub-')]
    #                 ('_fwhm_', 'fwhm-'),
    #                 ('_roi', ''),
    #                 ('_mcf', ''),
    #                 ('_st', ''),
    #                 ('_flirt', ''),
    #                 ('.nii_mean_reg', '_mean'),
    #                 ('.nii.par', '.par'),
    #                 ]
    #subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]

    #substitutions.extend(subjFolders)
    datasink.inputs.substitutions = substitutions

    # Connect all components of the preprocessing workflow
    preproc.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id')]),
        (selectfiles, extract, [('func', 'in_file')]),
        (extract, mcflirt, [('roi_file', 'in_file')]),
        (mcflirt, slicetimer, [('out_file', 'in_file')]),
        (selectfiles, coregwf, [('anat', 'bet_t1.in_file'),
                                ('anat', 'nonlinear_warp_estimation.reference')
                                ]),
        (mcflirt, coregwf, [('mean_img', 'linear_warp_estimation.in_file'),
                            ('mean_img', 'nonlinear_warp_estimation.in_file'),
                            ('mean_img', 'registration_mean_fmri.in_file')]),
        (slicetimer, coregwf, [('slice_time_corrected_file',
                                'registration_fmri.in_file')]),
        (coregwf, art, [('registration_fmri.out_file', 'realigned_files')]),
        (mcflirt, art, [('par_file', 'realignment_parameters')]),
        (art, art_remotion, [('outlier_files', 'outlier_files')]),
        (coregwf, art_remotion, [('registration_fmri.out_file', 'in_file')]),
        (coregwf, gunzip, [('n4bias.out_file', 'in_file')]),
        (selectfiles, normalize_fmri, [('anat', 'image_to_align')]),
        (art_remotion, normalize_fmri, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_t1, [('anat', 'image_to_align')]),
        (gunzip, normalize_t1, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_masks, [('anat', 'image_to_align')]),
        (coregwf, normalize_masks, [(('segmentation.partial_volume_files',
                                      get_wm_csf), 'apply_to_files')]),
        (normalize_fmri, smooth, [('normalized_files', 'in_files')]),
        (smooth, extract_confounds_ws_csf, [('smoothed_files', 'in_file')]),
        (normalize_masks, extract_confounds_ws_csf, [('normalized_files',
                                                      'list_mask')]),
        (mcflirt, extract_confounds_ws_csf, [('par_file', 'file_concat')]),

        #(smooth, extract_confounds_gs, [('smoothed_files', 'in_file')]),
        #(normalize_t1, extract_confounds_gs, [(('normalized_files',change_to_list), 'list_mask')]),
        #(extract_confounds_ws_csf, extract_confounds_gs, [('out_file', 'file_concat')]),
        (smooth, signal_extraction, [('smoothed_files', 'in_file')]),
        #(extract_confounds_gs, signal_extraction, [('out_file', 'confounds_file')]),
        (extract_confounds_ws_csf, signal_extraction, [('out_file',
                                                        'confounds_file')]),

        #(smooth, descomposition, [('smoothed_files', 'in_file')]),
        #(extract_confounds_ws_csf, descomposition, [('out_file', 'confounds_file')]),

        #(extract_confounds_gs, datasink, [('out_file', 'preprocessing.@confounds_with_gs')]),
        (extract_confounds_ws_csf, datasink,
         [('out_file', 'preprocessing.@confounds_without_gs')]),
        (smooth, datasink, [('smoothed_files', 'preprocessing.@smoothed')]),
        (normalize_fmri, datasink, [('normalized_files',
                                     'preprocessing.@fmri_normalized')]),
        (normalize_t1, datasink, [('normalized_files',
                                   'preprocessing.@t1_normalized')]),
        (normalize_masks, datasink, [('normalized_files',
                                      'preprocessing.@masks_normalized')]),
        (signal_extraction, datasink, [('time_series_out_file',
                                        'preprocessing.@time_serie')]),
        (signal_extraction, datasink, [('correlation_matrix_out_file',
                                        'preprocessing.@correlation_matrix')]),
        (signal_extraction, datasink,
         [('fmri_cleaned_out_file', 'preprocessing.@fmri_cleaned_out_file')]),
        #(descomposition, datasink, [('out_file', 'preprocessing.@descomposition')]),
        #(descomposition, datasink, [('plot_files', 'preprocessing.@descomposition_plot_files')])
    ])

    preproc.write_graph(graph2use='colored', format='png', simple_form=True)
    preproc.run()
Code example #9
    from nipype.interfaces.spm import NewSegment, Smooth
    from nipype.interfaces.spm.utils import ApplyTransform  # assumed source of ApplyTransform used below
    from nipype.interfaces.io import DataSink
    from nipype.interfaces.utility import Function
    import nipype.pipeline.engine as pe
    import glob, json, os, sys, argparse
    import corr

    # Connect spm12 standalone to nipype
    from nipype.interfaces import spm

    matlab_cmd = '/opt/spm12/run_spm12.sh /opt/mcr/v92 script'
    spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)

    # Check that SPM is reachable (accessing .version runs the standalone check)
    spm.SPMCommand().version

    # Setting the parameters from collected arguments
    # (vbm_out and nifti_file are assumed to come from the argparse arguments)

    transf_mat_path = '/data/mat_file/transform.mat'
    tpm_path = '/data/tpm_file/TPM.nii'

    # Create vbm_spm12 dir
    if not os.path.exists(vbm_out + "/vbm_spm12"):
        os.makedirs(vbm_out + "/vbm_spm12")

    # Reorientation node and settings
    reorient = pe.Node(interface=ApplyTransform(), name='reorient')
    reorient.inputs.mat = transf_mat_path
    reorient.inputs.in_file = nifti_file
    reorient.inputs.out_file = vbm_out + "/vbm_spm12/Re.nii"
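
For debugging, the node can be executed on its own before wiring it into a workflow; a minimal sketch, assuming the SPM MCR configured above is working:

    # Run the reorientation step in isolation (outside a Workflow)
    res = reorient.run()
    print(res.outputs.out_file)  # path to the reoriented image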
Code example #10
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import os
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        from clinica.utils.io import unzip_nii

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = (os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                           'run_spm12.sh') + ' ' +
                              os.environ['MCR_HOME'] + ' script')
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd,
                                              use_mcr=True)
                version = spm.SPMCommand().version
            else:
                raise EnvironmentError(
                    'MCR_HOME variable not in environment, although ' +
                    'SPMSTANDALONE_HOME has been found')
        else:
            version = spm.Info.getinfo()

        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print(
                        'You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                        + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.'
                    )
            if isinstance(version, str):
                if float(version) >= 12.7169:
                    tissue_map = os.path.join(
                        str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM standalone version not supported. Please upgrade SPM standalone.'
                    )
        else:
            raise RuntimeError(
                'SPM could not be found. Please verify your SPM_HOME environment variable.'
            )

        # Unzipping
        # =========
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node',
                                 iterfield=['in_file'])

        # DARTEL template
        # ===============
        dartel_template = npe.Node(spm.DARTEL(), name='dartel_template')

        if self.parameters['iteration_parameters'] is not None:
            dartel_template.inputs.iteration_parameters = self.parameters[
                'iteration_parameters']
        if self.parameters['optimization_parameters'] is not None:
            dartel_template.inputs.optimization_parameters = self.parameters[
                'optimization_parameters']
        if self.parameters['regularization_form'] is not None:
            dartel_template.inputs.regularization_form = self.parameters[
                'regularization_form']
        if self.parameters['template_prefix'] is not None:
            dartel_template.inputs.template_prefix = self.parameters[
                'template_prefix']

        # Connection
        # ==========
        self.connect([(self.input_node, unzip_node, [
            ('dartel_input_images', 'in_file')
        ]), (unzip_node, dartel_template, [('out_file', 'image_files')]),
                      (dartel_template, self.output_node,
                       [('dartel_flow_fields', 'dartel_flow_fields'),
                        ('final_template_file', 'final_template_file'),
                        ('template_files', 'template_files')])])
Code example #11
    def build_core_nodes(self):
        """Build and connect the core nodes of the pipelines.
        """

        import os
        import platform
        import nipype.interfaces.spm as spm
        import nipype.interfaces.matlab as mlab
        import nipype.pipeline.engine as npe
        import nipype.interfaces.utility as nutil
        import clinica.pipelines.t1_volume_tissue_segmentation.t1_volume_tissue_segmentation_utils as utils
        from clinica.utils.io import unzip_nii

        spm_home = os.getenv("SPM_HOME")
        mlab_home = os.getenv("MATLABCMD")
        mlab.MatlabCommand.set_default_matlab_cmd(mlab_home)
        mlab.MatlabCommand.set_default_paths(spm_home)

        if 'SPMSTANDALONE_HOME' in os.environ:
            if 'MCR_HOME' in os.environ:
                matlab_cmd = os.path.join(os.environ['SPMSTANDALONE_HOME'],
                                          'run_spm12.sh') \
                             + ' ' + os.environ['MCR_HOME'] \
                             + ' script'
                spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd,
                                              use_mcr=True)
                version = spm.SPMCommand().version
            else:
                raise EnvironmentError(
                    'MCR_HOME variable not in environment, although ' +
                    'SPMSTANDALONE_HOME has been found')
        else:
            version = spm.Info.getinfo()

        if version:
            if isinstance(version, dict):
                spm_path = version['path']
                if version['name'] == 'SPM8':
                    print(
                        'You are using SPM version 8. The recommended version to use with Clinica is SPM 12. '
                        + 'Please upgrade your SPM toolbox.')
                    tissue_map = os.path.join(spm_path, 'toolbox/Seg/TPM.nii')
                elif version['name'] == 'SPM12':
                    tissue_map = os.path.join(spm_path, 'tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM version 8 or 12 could not be found. Please upgrade your SPM toolbox.'
                    )
            if isinstance(version, str):
                if float(version) >= 12.7169:
                    if platform.system() == 'Darwin':
                        tissue_map = os.path.join(
                            str(spm_home),
                            'spm12.app/Contents/MacOS/spm12_mcr/spm12/spm12/tpm/TPM.nii'
                        )
                    else:
                        tissue_map = os.path.join(
                            str(spm_home), 'spm12_mcr/spm/spm12/tpm/TPM.nii')
                else:
                    raise RuntimeError(
                        'SPM standalone version not supported. Please upgrade SPM standalone.'
                    )
        else:
            raise RuntimeError(
                'SPM could not be found. Please verify your SPM_HOME environment variable.'
            )

        # Unzipping
        # ===============================
        unzip_node = npe.MapNode(nutil.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=unzip_nii),
                                 name='unzip_node',
                                 iterfield=['in_file'])

        # Unified Segmentation
        # ===============================
        new_segment = npe.MapNode(spm.NewSegment(),
                                  name='new_segment',
                                  iterfield=['channel_files'])

        if self.parameters['affine_regularization'] is not None:
            new_segment.inputs.affine_regularization = self.parameters[
                'affine_regularization']
        if self.parameters['channel_info'] is not None:
            new_segment.inputs.channel_info = self.parameters['channel_info']
        if self.parameters['sampling_distance'] is not None:
            new_segment.inputs.sampling_distance = self.parameters[
                'sampling_distance']
        if self.parameters['warping_regularization'] is not None:
            new_segment.inputs.warping_regularization = self.parameters[
                'warping_regularization']

        # Check if we need to save the forward transformation for registering the T1 to the MNI space
        if self.parameters['save_t1_mni'] is not None and self.parameters[
                'save_t1_mni']:
            if self.parameters['write_deformation_fields'] is not None:
                self.parameters['write_deformation_fields'][1] = True
            else:
                self.parameters['write_deformation_fields'] = [False, True]

        if self.parameters['write_deformation_fields'] is not None:
            new_segment.inputs.write_deformation_fields = self.parameters[
                'write_deformation_fields']

        if self.parameters['tpm'] is not None:
            tissue_map = self.parameters['tpm']

        new_segment.inputs.tissues = utils.get_tissue_tuples(
            tissue_map, self.parameters['tissue_classes'],
            self.parameters['dartel_tissues'],
            self.parameters['save_warped_unmodulated'],
            self.parameters['save_warped_modulated'])

        # Apply segmentation deformation to T1 (into MNI space)
        # ========================================================
        if self.parameters['save_t1_mni'] is not None and self.parameters[
                'save_t1_mni']:

            t1_to_mni = npe.MapNode(
                utils.ApplySegmentationDeformation(),
                name='t1_to_mni',
                iterfield=['deformation_field', 'in_files'])
            self.connect([
                (unzip_node, t1_to_mni, [('out_file', 'in_files')]),
                (new_segment, t1_to_mni, [('forward_deformation_field',
                                           'deformation_field')]),
                (t1_to_mni, self.output_node, [('out_files', 't1_mni')])
            ])

        # Connection
        # ==========
        self.connect([
            (self.input_node, unzip_node, [('input_images', 'in_file')]),
            (unzip_node, new_segment, [('out_file', 'channel_files')]),
            (new_segment, self.output_node,
             [('bias_corrected_images', 'bias_corrected_images'),
              ('bias_field_images', 'bias_field_images'),
              ('dartel_input_images', 'dartel_input_images'),
              ('forward_deformation_field', 'forward_deformation_field'),
              ('inverse_deformation_field', 'inverse_deformation_field'),
              ('modulated_class_images', 'modulated_class_images'),
              ('native_class_images', 'native_class_images'),
              ('normalized_class_images', 'normalized_class_images'),
              ('transformation_mat', 'transformation_mat')])
        ])
Code example #12
    def run(self):
        matlab_cmd = self.paths['spm_path'] + ' ' + self.paths[
            'mcr_path'] + '/ script'
        spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
        print(matlab_cmd)
        print('SPM version: ' + str(spm.SPMCommand().version))

        experiment_dir = opj(self.paths['input_path'], 'output/')
        output_dir = 'datasink'
        working_dir = 'workingdir'

        # list of subject identifiers
        subject_list = self.subject_list

        fwhm = self.parameters[
            'fwhm']  # Smoothing width to apply (Gaussian kernel size, in mm)
        tr = self.parameters['tr']  # Repetition time (s)
        init_volume = self.parameters[
            'init_volume']  # Index of the first volume to keep (earlier dummy scans are dropped)
        iso_size = self.parameters[
            'iso_size']  # Isotropic voxel size (in mm) to resample functional images to
        low_pass = self.parameters['low_pass']
        high_pass = self.parameters['high_pass']
        t1_relative_path = self.paths['t1_relative_path']
        fmri_relative_path = self.paths['fmri_relative_path']

        # ExtractROI - skip dummy scans
        extract = Node(ExtractROI(t_min=init_volume,
                                  t_size=-1,
                                  output_type='NIFTI'),
                       name="extract")  #FSL

        # MCFLIRT - motion correction
        mcflirt = Node(MCFLIRT(mean_vol=True,
                               save_plots=True,
                               output_type='NIFTI'),
                       name="motion_correction")  #FSL

        # SliceTimer - correct for slice wise acquisition
        slicetimer = Node(SliceTimer(index_dir=False,
                                     interleaved=True,
                                     output_type='NIFTI',
                                     time_repetition=tr),
                          name="slice_timing_correction")  #FSL

        # Smooth - image smoothing

        denoise = Node(Denoise(), name="denoising")  #Interfaces with dipy

        smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")  #SPM

        n4bias = Node(N4Bias(out_file='t1_n4bias.nii.gz'),
                      name='n4bias')  #Interface with SimpleITK

        descomposition = Node(Descomposition(n_components=20,
                                             low_pass=0.1,
                                             high_pass=0.01,
                                             tr=tr),
                              name='descomposition')  #Interface with nilearn

        # Artifact Detection - determines outliers in functional images
        art = Node(ArtifactDetect(norm_threshold=2,
                                  zintensity_threshold=3,
                                  mask_type='spm_global',
                                  parameter_source='FSL',
                                  use_differences=[True, False],
                                  plot_type='svg'),
                   name="artifact_detection")  #Rapidart

        extract_confounds_ws_csf = Node(
            ExtractConfounds(out_file='ev_without_gs.csv'),
            name='extract_confounds_ws_csf')  # custom interface

        extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                                     delimiter=','),
                                    name='extract_confounds_global_signal')

        signal_extraction = Node(SignalExtraction(
            time_series_out_file='time_series.csv',
            correlation_matrix_out_file='correlation_matrix.png',
            labels_parcellation_path=self.paths['labels_parcellation_path'],
            mask_mni_path=self.paths['mask_mni_path'],
            tr=tr,
            low_pass=low_pass,
            high_pass=high_pass,
            plot=False),
                                 name='signal_extraction')
        signal_extraction.iterables = [('image_parcellation_path',
                                        self.paths['image_parcellation_path'])]

        art_remotion = Node(
            ArtifacRemotion(out_file='fmri_art_removed.nii'),
            name='artifact_remotion')  #This interface requires implementation

        # BET - Skull-strip anatomical and functional images
        bet_t1 = Node(BET(frac=0.5,
                          robust=True,
                          mask=True,
                          output_type='NIFTI_GZ'),
                      name="bet_t1")  #FSL

        # FAST - Image Segmentation
        segmentation = Node(FAST(output_type='NIFTI'),
                            name="segmentation")  #FSL

        # Normalize - normalizes functional and structural images to the MNI template
        normalize_fmri = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                              name="normalize_fmri")  #SPM

        gunzip = Node(Gunzip(), name="gunzip")

        normalize_t1 = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                            name="normalize_t1")

        normalize_masks = Node(Normalize12(
            jobtype='estwrite',
            tpm=self.paths['template_spm_path'],
            write_voxel_sizes=[iso_size, iso_size, iso_size],
            write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                               name="normalize_masks")

        # Threshold - Threshold WM probability image
        threshold = Node(Threshold(thresh=0.5,
                                   args='-bin',
                                   output_type='NIFTI_GZ'),
                         name="wm_mask_threshold")

        # FLIRT - pre-alignment of functional images to anatomical images
        coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'),
                         name="linear_warp_estimation")

        # FLIRT - coregistration of functional images to anatomical images with BBR
        coreg_bbr = Node(FLIRT(dof=6,
                               cost='bbr',
                               schedule=opj(os.getenv('FSLDIR'),
                                            'etc/flirtsch/bbr.sch'),
                               output_type='NIFTI_GZ'),
                         name="nonlinear_warp_estimation")

        # Apply coregistration warp to functional images
        applywarp = Node(FLIRT(interp='spline',
                               apply_isoxfm=iso_size,
                               output_type='NIFTI'),
                         name="registration_fmri")

        # Apply coregistration warp to mean file
        applywarp_mean = Node(FLIRT(interp='spline',
                                    apply_isoxfm=iso_size,
                                    output_type='NIFTI_GZ'),
                              name="registration_mean_fmri")

        # Infosource - a function-free node to iterate over the list of subject names
        infosource = Node(IdentityInterface(fields=['subject_id']),
                          name="infosource")
        infosource.iterables = [('subject_id', subject_list)]
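        # Iterating over subject_id replicates the downstream graph once
        # per subject in subject_list.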

        # SelectFiles - to grab the data (alternative to DataGrabber)
        anat_file = opj('{subject_id}', t1_relative_path)
        func_file = opj('{subject_id}', fmri_relative_path)

        #anat_file = opj('{subject_id}/anat/', 'data.nii')
        #func_file = opj('{subject_id}/func/', 'data.nii')

        templates = {'anat': anat_file, 'func': func_file}

        selectfiles = Node(SelectFiles(
            templates, base_directory=self.paths['input_path']),
                           name="selectfiles")

        # DataSink - creates an output folder for important results
        datasink = Node(DataSink(base_directory=experiment_dir,
                                 container=output_dir),
                        name="datasink")

        # Create a coregistration workflow
        coregwf = Workflow(name='coreg_fmri_to_t1')
        coregwf.base_dir = opj(experiment_dir, working_dir)

        # Create a preprocessing workflow
        preproc = Workflow(name='preproc')
        preproc.base_dir = opj(experiment_dir, working_dir)

        # Connect all components of the coregistration workflow

        coregwf.connect([
            (bet_t1, n4bias, [('out_file', 'in_file')]),
            (n4bias, segmentation, [('out_file', 'in_files')]),
            (segmentation, threshold, [(('partial_volume_files', get_latest),
                                        'in_file')]),
            (n4bias, coreg_pre, [('out_file', 'reference')]),
            (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
            (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
            (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
            (n4bias, applywarp, [('out_file', 'reference')]),
            (coreg_bbr, applywarp_mean,
             [('out_matrix_file', 'in_matrix_file')]),
            (n4bias, applywarp_mean, [('out_file', 'reference')]),
        ])
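        # Anatomical chain: BET -> N4 bias correction -> FAST; the
        # thresholded WM map steers BBR, whose matrix then resamples the
        # functional series (and its mean) into T1 space.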

        # Use the following DataSink output substitutions
        substitutions = [('_subject_id_', 'sub-')]
        #                 ('_fwhm_', 'fwhm-'),
        #                 ('_roi', ''),
        #                 ('_mcf', ''),
        #                 ('_st', ''),
        #                 ('_flirt', ''),
        #                 ('.nii_mean_reg', '_mean'),
        #                 ('.nii.par', '.par'),
        #                 ]
        # subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]

        # substitutions.extend(subjFolders)
        datasink.inputs.substitutions = substitutions
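        # DataSink rewrites matching substrings in every output path, so a
        # folder such as '_subject_id_01' becomes 'sub-01'.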

        # Connect all components of the preprocessing workflow
        preproc.connect([
            (infosource, selectfiles, [('subject_id', 'subject_id')]),
            (selectfiles, extract, [('func', 'in_file')]),
            (extract, mcflirt, [('roi_file', 'in_file')]),
            (mcflirt, slicetimer, [('out_file', 'in_file')]),
            (selectfiles, denoise, [('anat', 'in_file')]),
            (denoise, coregwf, [('out_file', 'bet_t1.in_file'),
                                ('out_file',
                                 'nonlinear_warp_estimation.reference')]),
            (mcflirt, coregwf,
             [('mean_img', 'linear_warp_estimation.in_file'),
              ('mean_img', 'nonlinear_warp_estimation.in_file'),
              ('mean_img', 'registration_mean_fmri.in_file')]),
            (slicetimer, coregwf, [('slice_time_corrected_file',
                                    'registration_fmri.in_file')]),
            (coregwf, art,
             [('registration_fmri.out_file', 'realigned_files')]),
            (mcflirt, art, [('par_file', 'realignment_parameters')]),
            (art, art_remotion, [('outlier_files', 'outlier_files')]),
            (coregwf, art_remotion,
             [('registration_fmri.out_file', 'in_file')]),
            (coregwf, gunzip, [('n4bias.out_file', 'in_file')]),
            (selectfiles, normalize_fmri, [('anat', 'image_to_align')]),
            (art_remotion, normalize_fmri, [('out_file', 'apply_to_files')]),
            (selectfiles, normalize_t1, [('anat', 'image_to_align')]),
            (gunzip, normalize_t1, [('out_file', 'apply_to_files')]),
            (selectfiles, normalize_masks, [('anat', 'image_to_align')]),
            (coregwf, normalize_masks, [(('segmentation.partial_volume_files',
                                          get_wm_csf), 'apply_to_files')]),
            (normalize_fmri, smooth, [('normalized_files', 'in_files')]),
            (smooth, extract_confounds_ws_csf,
             [('smoothed_files', 'in_file')]),
            (normalize_masks, extract_confounds_ws_csf, [('normalized_files',
                                                          'list_mask')]),
            (mcflirt, extract_confounds_ws_csf, [('par_file', 'file_concat')]),
            (art, extract_confounds_ws_csf,
             [('outlier_files', 'outlier_files')]),

            # (smooth, extract_confounds_gs, [('smoothed_files', 'in_file')]),
            # (normalize_t1, extract_confounds_gs, [(('normalized_files',change_to_list), 'list_mask')]),
            # (extract_confounds_ws_csf, extract_confounds_gs, [('out_file', 'file_concat')]),
            (smooth, signal_extraction, [('smoothed_files', 'in_file')]),
            # (extract_confounds_gs, signal_extraction, [('out_file', 'confounds_file')]),
            (extract_confounds_ws_csf, signal_extraction,
             [('out_file', 'confounds_file')]),

            #(smooth, descomposition, [('smoothed_files', 'in_file')]),
            #(extract_confounds_ws_csf, descomposition, [('out_file', 'confounds_file')]),

            # (extract_confounds_gs, datasink, [('out_file', 'preprocessing.@confounds_with_gs')]),
            (denoise, datasink, [('out_file', 'preprocessing.@t1_denoised')]),
            (extract_confounds_ws_csf, datasink,
             [('out_file', 'preprocessing.@confounds_without_gs')]),
            (smooth, datasink,
             [('smoothed_files', 'preprocessing.@smoothed')]),
            (normalize_fmri, datasink, [('normalized_files',
                                         'preprocessing.@fmri_normalized')]),
            (normalize_t1, datasink, [('normalized_files',
                                       'preprocessing.@t1_normalized')]),
            (normalize_masks, datasink, [('normalized_files',
                                          'preprocessing.@masks_normalized')]),
            (signal_extraction, datasink, [('time_series_out_file',
                                            'preprocessing.@time_series')]),
            (signal_extraction, datasink,
             [('correlation_matrix_out_file',
               'preprocessing.@correlation_matrix')])
        ])
        #(signal_extraction, datasink,
        # [('fmri_cleaned_out_file', 'preprocessing.@fmri_cleaned_out_file')])])
        #,
        #(descomposition, datasink, [('out_file', 'preprocessing.@descomposition')]),
        #(descomposition, datasink, [('plot_files', 'preprocessing.@descomposition_plot_files')])
        #])

        preproc.write_graph(graph2use='colored',
                            format='png',
                            simple_form=True)
        preproc.run()
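        # write_graph above renders the expanded workflow as a PNG in
        # base_dir, and run() executes it with the default serial (Linear)
        # plugin. To parallelize across subjects one could instead call,
        # e.g.:
        #   preproc.run(plugin='MultiProc', plugin_args={'n_procs': 4})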