Example #1
# Assumed imports for this snippet: standard nipype modules, plus a `dsi`
# module that is expected to provide the FiberTrack interface used below.
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.diffusion_toolkit as dtk
from nipype.algorithms.misc import Gunzip


def create_pipeline(name="dsi_track", opt="", ensemble=""):
    parameters = {'nos': 5000}

    ensemble_dict = {'angle': 'angle_thres', 'min_length': 'min_length'}

    inputnode = pe.Node(interface=util.IdentityInterface(
        fields=["odf", "seed", "angle", "algorithm", "min_length"]),
                        name="inputnode")

    if opt:  # opt carries comma-separated key:value overrides; skip when empty or None
        opt_list = opt.split(',')
        for o in opt_list:
            try:
                key, value = o.split(':')
                parameters[key] = value
            except ValueError:
                print(o + ': irregular format, skipping')

    if ensemble:
        tckgen = pe.MapNode(dsi.FiberTrack(),
                            name='track',
                            iterfield=ensemble_dict[ensemble])
        gunzip = pe.MapNode(interface=Gunzip(),
                            name="gunzip",
                            iterfield='in_file')
    else:
        tckgen = pe.Node(dsi.FiberTrack(), name='track')
        gunzip = pe.Node(interface=Gunzip(), name="gunzip")
    tckgen.inputs.nos = int(parameters['nos'])

    tckmerge = pe.Node(interface=dtk.TrackMerge(), name="merge")

    output_fields = ["tck"]
    outputnode = pe.Node(
        interface=util.IdentityInterface(fields=output_fields),
        name="outputnode")

    workflow = pe.Workflow(name=name)
    workflow.base_output_dir = name

    workflow.connect([(inputnode, tckgen, [("odf", "in_file"),
                                           ("angle", "angle_thres"),
                                           ("min_length", "min_length")]),
                      (tckgen, gunzip, [("out_file", "in_file")])])

    # NOTE: evaluated when the pipeline is built, not at run time, so seed
    # is only connected if it was already set on the inputnode by then.
    if inputnode.inputs.seed:
        workflow.connect([(inputnode, tckgen, [("seed", "seed_image")])])

    if ensemble:
        workflow.connect([(gunzip, tckmerge, [("out_file", "track_files")]),
                          (tckmerge, outputnode, [("track_file", "tck")])])
    else:
        workflow.connect([(gunzip, outputnode, [("out_file", "tck")])])

    return workflow
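
A hedged usage sketch for the pipeline above (paths hypothetical; in ensemble mode the iterated field takes a list of values, and opt accepts comma-separated key:value overrides as parsed above):

wf = create_pipeline(name='track', opt='nos:10000', ensemble='angle')
wf.inputs.inputnode.odf = '/data/sub-01/odf.nii.gz'  # hypothetical path
wf.inputs.inputnode.angle = [35, 45, 55]             # one tractogram per angle threshold
wf.inputs.inputnode.min_length = 20
wf.run()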
Example #2
def gunzipper(in_files):
    from nipype.algorithms.misc import Gunzip
    if isinstance(in_files, list):
        outputs = []
        for in_file in in_files:
            if in_file.endswith('.gz'):
                res = Gunzip(in_file=in_file).run()
                outputs.append(res.outputs.out_file)
            else:
                outputs.append(in_file)
    else:
        if in_files.endswith('.gz'):
            res = Gunzip(in_file=in_files).run()
            outputs = res.outputs.out_file
        else:
            outputs = in_files
    return outputs
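
Hedged usage of the helper above (paths hypothetical): .gz entries are decompressed, everything else passes through unchanged:

unzipped = gunzipper(['/data/a.nii.gz', '/data/b.nii'])  # hypothetical paths
single = gunzipper('/data/c.nii.gz')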
Example #3
def spm_warp_to_mni(wf_name="spm_warp_to_mni"):
    """ Run Gunzip and SPM Normalize12 to the list of files input and outputs the list of warped files.

    It does:
    - Warp each individual input image to the standard SPM template

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    warp_input.in_files: list of traits.File
        The raw NIFTI_GZ image files

    Nipype outputs
    --------------
    warp_output.warped_files: list of existing file
        The warped files.

    Returns
    -------
    wf: nipype Workflow
    """
    # input
    in_fields = ["in_files"]
    out_fields = ["warped_files"]

    input = setup_node(
        IdentityInterface(fields=in_fields, mandatory_inputs=True),
        name="warp_input",
    )

    gunzip = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    warp = setup_node(spm.Normalize12(jobtype='estwrite',
                                      affine_regularization_type='mni'),
                      name="normalize12",
                      type="map",
                      iterfield=['image_to_align'])

    # output
    output = setup_node(IdentityInterface(fields=out_fields),
                        name="warp_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    wf.connect([
        # inputs
        (input, gunzip, [("in_files", "in_file")]),
        (gunzip, warp, [("out_file", "image_to_align")]),

        # output
        (warp, output, [("normalized_image", "warped_files")]),
    ])

    return wf
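
A hedged usage sketch for spm_warp_to_mni (the path is hypothetical; setup_node and the SPM configuration are assumed to come from the surrounding package):

wf = spm_warp_to_mni()
wf.inputs.warp_input.in_files = ['/data/sub-01/pet.nii.gz']  # hypothetical path
wf.run()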
Example #4
    def make_segment(self):
        # Ref: http://nipype.readthedocs.io/en/0.12.1/interfaces/generated/nipype.interfaces.fsl.utils.html#reorient2std
        ro = Node(interface=fsl.Reorient2Std(), name='ro')

        # Ref: http://nipype.readthedocs.io/en/latest/interfaces/generated/interfaces.spm/preprocess.html#segment
        seg = Node(interface=spm.NewSegment(channel_info=(0.0001, 60, (True,
                                                                       True))),
                   name="seg")

        spm_tissues_split = Node(Function(['in_list'], ['gm', 'wm', 'csf'],
                                          self.spm_tissues),
                                 name='spm_tissues_split')

        gzip = Node(Function(['in_list'], ['out_list'], self.gzip_spm),
                    name='gzip')

        segment = Workflow(name='Segment', base_dir=self.temp_dir)

        gunzip = Node(interface=Gunzip(), name='gunzip')
        # for new segment
        segment.connect(ro, 'out_file', gunzip, 'in_file')
        segment.connect(gunzip, 'out_file', seg, 'channel_files')
        segment.connect(seg, 'native_class_images', spm_tissues_split,
                        'in_list')
        return segment
Example #5
def test_Gunzip_outputs():
    output_map = dict(out_file=dict())
    outputs = Gunzip.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
Example #6
def test_Gunzip_outputs():
    output_map = dict(out_file=dict())
    outputs = Gunzip.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
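
Both versions above are auto-generated, nose-style generator tests and rely on an assert_equal import (for example from nipype.testing). A rough pytest-style sketch of the same check, offered as a porting suggestion rather than official test code:

from nipype.algorithms.misc import Gunzip

def test_gunzip_has_out_file():
    # Plain assert instead of a yielded assert_equal.
    outputs = Gunzip.output_spec()
    assert 'out_file' in outputs.traits()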
Example #7
def unzip_nii(in_file):
    from nipype.algorithms.misc import Gunzip
    from nipype.utils.filemanip import split_filename
    from traits.trait_base import _Undefined

    if (in_file is None) or isinstance(in_file, _Undefined):
        return None

    if not isinstance(in_file, str):  # type(in_file) is list:
        return [unzip_nii(f) for f in in_file]

    _, base, ext = split_filename(in_file)

    # Not compressed
    if ext[-3:].lower() != ".gz":
        return in_file
    # Compressed
    gunzip = Gunzip(in_file=in_file)
    gunzip.run()
    return gunzip.aggregate_outputs().out_file
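
A short usage sketch of unzip_nii (paths hypothetical), showing the passthrough and the recursive list handling:

unzip_nii('/data/anat.nii')                   # not compressed: returned as-is
unzip_nii('/data/anat.nii.gz')                # gunzipped, the .nii path is returned
unzip_nii(['/data/a.nii.gz', '/data/b.nii'])  # lists are handled element-wise
unzip_nii(None)                               # returns None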
Example #8
def test_Gunzip_inputs():
    input_map = dict(
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(mandatory=True),
    )
    inputs = Gunzip.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #9
def test_Gunzip_inputs():
    input_map = dict(
        ignore_exception=dict(
            nohash=True,
            usedefault=True,
        ),
        in_file=dict(mandatory=True, ),
    )
    inputs = Gunzip.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
Example #10
# Assumed imports for this snippet: standard nipype modules; _get_first is
# expected to be a small helper that returns the first element of a list.
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.io as nio
import nipype.interfaces.spm as spm
import nipype.interfaces.fsl as fsl
from nipype.algorithms.misc import Gunzip


def test_spm(name='test_spm_3d'):
    """
    A simple workflow to test SPM's installation. By default (name='test_spm_3d'), the 4D
    volume is split into individual 3D time-steps before slice-time correction.
    """
    workflow = pe.Workflow(name=name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_data']),
                        name='inputnode')
    dgr = pe.Node(nio.DataGrabber(template="feeds/data/fmri.nii.gz",
                                  outfields=['out_file'],
                                  sort_filelist=False),
                  name='datasource')

    stc = pe.Node(spm.SliceTiming(num_slices=21,
                                  time_repetition=1.0,
                                  time_acquisition=2. - 2. / 32,
                                  slice_order=list(range(21, 0, -1)),
                                  ref_slice=10),
                  name='stc')
    realign_estimate = pe.Node(spm.Realign(jobtype='estimate'),
                               name='realign_estimate')
    realign_write = pe.Node(spm.Realign(jobtype='write'), name='realign_write')
    realign_estwrite = pe.Node(spm.Realign(jobtype='estwrite'),
                               name='realign_estwrite')
    smooth = pe.Node(spm.Smooth(fwhm=[6, 6, 6]), name='smooth')

    if name == 'test_spm_3d':
        split = pe.Node(fsl.Split(dimension="t", output_type="NIFTI"),
                        name="split")
        workflow.connect([(dgr, split, [(('out_file', _get_first), 'in_file')
                                        ]),
                          (split, stc, [("out_files", "in_files")])])
    elif name == 'test_spm_4d':
        gunzip = pe.Node(Gunzip(), name="gunzip")
        workflow.connect([(dgr, gunzip, [(('out_file', _get_first), 'in_file')
                                         ]),
                          (gunzip, stc, [("out_file", "in_files")])])
    else:
        raise NotImplementedError(
            'No implementation of the test workflow \'{}\' was found'.format(
                name))

    workflow.connect([
        (inputnode, dgr, [('in_data', 'base_directory')]),
        (stc, realign_estimate, [('timecorrected_files', 'in_files')]),
        (realign_estimate, realign_write, [('modified_in_files', 'in_files')]),
        (stc, realign_estwrite, [('timecorrected_files', 'in_files')]),
        (realign_write, smooth, [('realigned_files', 'in_files')])
    ])
    return workflow
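
A hedged sketch of driving this test workflow (the path is hypothetical and should point at the directory that contains the FSL feeds data):

wf = test_spm(name='test_spm_3d')
wf.inputs.inputnode.in_data = '/data/nipype-testdata'  # hypothetical path
wf.run()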
Example #11
def Couple_Preproc_Pipeline(base_dir=None,
                            output_dir=None,
                            subject_id=None,
                            spm_path=None):
    """ Create a preprocessing workflow for the Couples Conflict Study using nipype

    Args:
        base_dir: path to data folder where raw subject folder is located
        output_dir: path to where key output files should be saved
        subject_id: subject_id (str)
        spm_path: path to spm folder

    Returns:
        workflow: a nipype workflow that can be run
        
    """

    from nipype.interfaces.dcm2nii import Dcm2nii
    from nipype.interfaces.fsl import Merge, TOPUP, ApplyTOPUP
    import nipype.interfaces.io as nio
    import nipype.interfaces.utility as util
    from nipype.interfaces.utility import Merge as Merge_List
    from nipype.pipeline.engine import Node, Workflow
    from nipype.interfaces.fsl.maths import UnaryMaths
    from nipype.interfaces.nipy.preprocess import Trim
    from nipype.algorithms.rapidart import ArtifactDetect
    from nipype.interfaces import spm
    from nipype.interfaces.spm import Normalize12
    from nipype.algorithms.misc import Gunzip
    from nipype.interfaces.nipy.preprocess import ComputeMask
    import nipype.interfaces.matlab as mlab
    from nltools.utils import get_resource_path, get_vox_dims, get_n_volumes
    from nltools.interfaces import Plot_Coregistration_Montage, PlotRealignmentParameters, Create_Covariates
    import os
    import glob

    ########################################
    ## Setup Paths and Nodes
    ########################################

    # Specify Paths
    canonical_file = os.path.join(spm_path, 'canonical', 'single_subj_T1.nii')
    template_file = os.path.join(spm_path, 'tpm', 'TPM.nii')

    # Set the way matlab should be called
    mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
    mlab.MatlabCommand.set_default_paths(spm_path)

    # Get File Names for different types of scans.  Parse into separate processing streams
    datasource = Node(interface=nio.DataGrabber(
        infields=['subject_id'], outfields=['struct', 'ap', 'pa']),
                      name='datasource')
    datasource.inputs.base_directory = base_dir
    datasource.inputs.template = '*'
    datasource.inputs.field_template = {
        'struct': '%s/Study*/t1w_32ch_mpr_08mm*',
        'ap': '%s/Study*/distortion_corr_32ch_ap*',
        'pa': '%s/Study*/distortion_corr_32ch_pa*'
    }
    datasource.inputs.template_args = {
        'struct': [['subject_id']],
        'ap': [['subject_id']],
        'pa': [['subject_id']]
    }
    datasource.inputs.subject_id = subject_id
    datasource.inputs.sort_filelist = True

    # iterate over functional scans to define paths
    scan_file_list = glob.glob(
        os.path.join(base_dir, subject_id, 'Study*', '*'))
    func_list = [s for s in scan_file_list if "romcon_ap_32ch_mb8" in s]
    func_list = [s for s in func_list
                 if "SBRef" not in s]  # Exclude sbref for now.
    func_source = Node(interface=util.IdentityInterface(fields=['scan']),
                       name="func_source")
    func_source.iterables = ('scan', func_list)

    # Create Separate Converter Nodes for each different type of file. (dist corr scans need to be done before functional)
    ap_dcm2nii = Node(interface=Dcm2nii(), name='ap_dcm2nii')
    ap_dcm2nii.inputs.gzip_output = True
    ap_dcm2nii.inputs.output_dir = '.'
    ap_dcm2nii.inputs.date_in_filename = False

    pa_dcm2nii = Node(interface=Dcm2nii(), name='pa_dcm2nii')
    pa_dcm2nii.inputs.gzip_output = True
    pa_dcm2nii.inputs.output_dir = '.'
    pa_dcm2nii.inputs.date_in_filename = False

    f_dcm2nii = Node(interface=Dcm2nii(), name='f_dcm2nii')
    f_dcm2nii.inputs.gzip_output = True
    f_dcm2nii.inputs.output_dir = '.'
    f_dcm2nii.inputs.date_in_filename = False

    s_dcm2nii = Node(interface=Dcm2nii(), name='s_dcm2nii')
    s_dcm2nii.inputs.gzip_output = True
    s_dcm2nii.inputs.output_dir = '.'
    s_dcm2nii.inputs.date_in_filename = False

    ########################################
    ## Setup Nodes for distortion correction
    ########################################

    # merge output files into list
    merge_to_file_list = Node(interface=Merge_List(2),
                              infields=['in1', 'in2'],
                              name='merge_to_file_list')

    # fsl merge AP + PA files (depends on direction)
    merger = Node(interface=Merge(dimension='t'), name='merger')
    merger.inputs.output_type = 'NIFTI_GZ'

    # use topup to create distortion correction map
    topup = Node(interface=TOPUP(), name='topup')
    topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                              'epi_params_APPA_MB8.txt')
    topup.inputs.output_type = "NIFTI_GZ"
    topup.inputs.config = 'b02b0.cnf'

    # apply topup to all functional images
    apply_topup = Node(interface=ApplyTOPUP(), name='apply_topup')
    apply_topup.inputs.in_index = [1]
    apply_topup.inputs.encoding_file = os.path.join(get_resource_path(),
                                                    'epi_params_APPA_MB8.txt')
    apply_topup.inputs.output_type = "NIFTI_GZ"
    apply_topup.inputs.method = 'jac'
    apply_topup.inputs.interp = 'spline'

    # Clear out Zeros from spline interpolation using absolute value.
    abs_maths = Node(interface=UnaryMaths(), name='abs_maths')
    abs_maths.inputs.operation = 'abs'

    ########################################
    ## Preprocessing
    ########################################

    # Trim - remove first 10 TRs
    n_vols = 10
    trim = Node(interface=Trim(), name='trim')
    trim.inputs.begin_index = n_vols

    #Realignment - 6 parameters - realign to first image of very first series.
    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.register_to_mean = True

    #Coregister - 12 parameters
    coregister = Node(interface=spm.Coregister(), name="coregister")
    coregister.inputs.jobtype = 'estwrite'

    #Plot Realignment
    plot_realign = Node(interface=PlotRealignmentParameters(),
                        name="plot_realign")

    #Artifact Detection
    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, False]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = 1
    art.inputs.zintensity_threshold = 3
    art.inputs.mask_type = 'file'
    art.inputs.parameter_source = 'SPM'

    # Gunzip - unzip the functional and structural images
    gunzip_struc = Node(Gunzip(), name="gunzip_struc")
    gunzip_func = Node(Gunzip(), name="gunzip_func")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize = Node(interface=Normalize12(jobtype='estwrite',
                                           tpm=template_file),
                     name="normalize")

    #Plot normalization Check
    plot_normalization_check = Node(interface=Plot_Coregistration_Montage(),
                                    name="plot_normalization_check")
    plot_normalization_check.inputs.canonical_img = canonical_file

    #Create Mask
    compute_mask = Node(interface=ComputeMask(), name="compute_mask")
    #remove lower 5% of histogram of mean image
    compute_mask.inputs.m = .05

    #Smooth
    #implicit masking (.im) = 0, dtype = 0
    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = 6

    #Create Covariate matrix
    make_cov = Node(interface=Create_Covariates(), name="make_cov")

    # Create a datasink to clean up output files
    datasink = Node(interface=nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_id

    ########################################
    # Create Workflow
    ########################################

    workflow = Workflow(name='Preprocessed')
    workflow.base_dir = os.path.join(base_dir, subject_id)
    workflow.connect([
        (datasource, ap_dcm2nii, [('ap', 'source_dir')]),
        (datasource, pa_dcm2nii, [('pa', 'source_dir')]),
        (datasource, s_dcm2nii, [('struct', 'source_dir')]),
        (func_source, f_dcm2nii, [('scan', 'source_dir')]),
        (ap_dcm2nii, merge_to_file_list, [('converted_files', 'in1')]),
        (pa_dcm2nii, merge_to_file_list, [('converted_files', 'in2')]),
        (merge_to_file_list, merger, [('out', 'in_files')]),
        (merger, topup, [('merged_file', 'in_file')]),
        (topup, apply_topup, [('out_fieldcoef', 'in_topup_fieldcoef'),
                              ('out_movpar', 'in_topup_movpar')]),
        (f_dcm2nii, trim, [('converted_files', 'in_file')]),
        (trim, apply_topup, [('out_file', 'in_files')]),
        (apply_topup, abs_maths, [('out_corrected', 'in_file')]),
        (abs_maths, gunzip_func, [('out_file', 'in_file')]),
        (gunzip_func, realign, [('out_file', 'in_files')]),
        (s_dcm2nii, gunzip_struc, [('converted_files', 'in_file')]),
        (gunzip_struc, coregister, [('out_file', 'source')]),
        (coregister, normalize, [('coregistered_source', 'image_to_align')]),
        (realign, coregister, [('mean_image', 'target'),
                               ('realigned_files', 'apply_to_files')]),
        (realign, normalize, [(('mean_image', get_vox_dims),
                               'write_voxel_sizes')]),
        (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
        (normalize, smooth, [('normalized_files', 'in_files')]),
        (realign, compute_mask, [('mean_image', 'mean_volume')]),
        (compute_mask, art, [('brain_mask', 'mask_file')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters'),
                        ('realigned_files', 'realigned_files')]),
        (realign, plot_realign, [('realignment_parameters',
                                  'realignment_parameters')]),
        (normalize, plot_normalization_check, [('normalized_files', 'wra_img')
                                               ]),
        (realign, make_cov, [('realignment_parameters',
                              'realignment_parameters')]),
        (art, make_cov, [('outlier_files', 'spike_id')]),
        (normalize, datasink, [('normalized_files', 'structural.@normalize')]),
        (coregister, datasink, [('coregistered_source', 'structural.@struct')
                                ]),
        (topup, datasink, [('out_fieldcoef', 'distortion.@fieldcoef')]),
        (topup, datasink, [('out_movpar', 'distortion.@movpar')]),
        (smooth, datasink, [('smoothed_files', 'functional.@smooth')]),
        (plot_realign, datasink, [('plot', 'functional.@plot_realign')]),
        (plot_normalization_check, datasink,
         [('plot', 'functional.@plot_normalization')]),
        (make_cov, datasink, [('covariates', 'functional.@covariates')])
    ])
    return workflow
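
A hedged usage sketch (all paths hypothetical):

wf = Couple_Preproc_Pipeline(base_dir='/data/raw',
                             output_dir='/data/derivatives',
                             subject_id='sub001',
                             spm_path='/opt/spm12')
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})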
Example #12
bet_fmri = Node(BET(frac=0.6, functional=True, output_type='NIFTI_GZ'),
                name="bet_fmri")

# FAST - Image Segmentation
segmentation = Node(FAST(output_type='NIFTI'), name="segmentation")

# Normalize - normalizes functional and structural images to the MNI template
normalize_fmri = Node(Normalize12(jobtype='estwrite',
                                  tpm=template,
                                  write_voxel_sizes=[2, 2, 2],
                                  write_bounding_box=[[-90, -126, -72],
                                                      [90, 90, 108]]),
                      name="normalize_fmri")

gunzip = Node(Gunzip(), name="gunzip")

normalize_t1 = Node(Normalize12(
    jobtype='estwrite',
    tpm=template,
    write_voxel_sizes=[iso_size, iso_size, iso_size],
    write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                    name="normalize_t1")

normalize_masks = Node(Normalize12(
    jobtype='estwrite',
    tpm=template,
    write_voxel_sizes=[iso_size, iso_size, iso_size],
    write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                       name="normalize_masks")
Example #13
def fmri_cleanup_wf(wf_name="fmri_cleanup"):
    """ Run the resting-state fMRI pre-processing workflow against the rest files in `data_dir`.

    Tasks:
    - Trim first 6 volumes of the rs-fMRI file.
    - Slice Timing correction.
    - Motion and nuisance correction.
    - Calculate brain mask in fMRI space.
    - Bandpass frequency filtering for resting-state fMRI.
    - Smoothing.
    - Tissue maps co-registration to fMRI space.

    Parameters
    ----------
    wf_name: str

    Nipype Inputs
    -------------
    rest_input.in_file: traits.File
        The resting-state fMRI file.

    rest_input.anat: traits.File
        Path to the high-contrast anatomical image.

    rest_input.tissues: list of traits.File
        Paths to the tissue segmentations in anatomical space.
        Expected to have this order: GM, WM and CSF.

    rest_input.highpass_freq: traits.Float
        Band-pass timeseries filter lower bound in Hz.

    rest_input.lowpass_freq: traits.Float
        Band-pass timeseries filter upper bound in Hz.

    Nipype Outputs
    --------------
    rest_output.smooth: traits.File
        The isotropically smoothed time filtered nuisance corrected image.

    rest_output.nuis_corrected: traits.File
        The nuisance corrected fMRI file.

    rest_output.motion_params: traits.File
        The affine transformation file.

    rest_output.time_filtered: traits.File
        The bandpass time filtered fMRI file.

    rest_output.epi_brain_mask: traits.File
        An estimated brain mask from mean EPI volume.

    rest_output.tissues_brain_mask: traits.File
        A brain mask calculated from the addition of coregistered
        GM, WM and CSF segmentation volumes from the anatomical
        segmentation.

    rest_output.tissues: list of traits.File
        The tissues segmentation volume in fMRI space.
        Expected to have this order: GM, WM and CSF.

    rest_output.anat: traits.File
        The T1w image in fMRI space.

    rest_output.avg_epi: traits.File
        The average EPI image in fMRI space after slice-time and motion correction.

    rest_output.motion_regressors: traits.File

    rest_output.compcor_regressors: traits.File

    rest_output.art_displacement_files
        One image file containing the voxel-displacement timeseries.

    rest_output.art_intensity_files
        One file containing the global intensity values determined from the brainmask.

    rest_output.art_norm_files
        One file containing the composite norm.

    rest_output.art_outlier_files
         One file containing a list of 0-based indices corresponding to outlier volumes.

    rest_output.art_plot_files
        One image file containing the detected outliers.

    rest_output.art_statistic_files
        One file containing information about the different types of artifacts; if design info is
        provided, it also details stimulus-correlated motion and lists artifacts by event type.

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = [
        "in_file",
        "anat",
        "atlas_anat",
        "coreg_target",
        "tissues",
        "lowpass_freq",
        "highpass_freq",
    ]

    out_fields = [
        "motion_corrected",
        "motion_params",
        "tissues",
        "anat",
        "avg_epi",
        "time_filtered",
        "smooth",
        "tsnr_file",
        "epi_brain_mask",
        "tissues_brain_mask",
        "motion_regressors",
        "compcor_regressors",
        "gsr_regressors",
        "nuis_corrected",
        "art_displacement_files",
        "art_intensity_files",
        "art_norm_files",
        "art_outlier_files",
        "art_plot_files",
        "art_statistic_files",
    ]

    # input identities
    rest_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
                            name="rest_input")

    # rs-fMRI preprocessing nodes
    trim = setup_node(Trim(), name="trim")

    stc_wf = auto_spm_slicetime()
    realign = setup_node(nipy_motion_correction(), name='realign')

    # average
    average = setup_node(
        Function(
            function=mean_img,
            input_names=["in_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name='average_epi'
    )

    mean_gunzip = setup_node(Gunzip(), name="mean_gunzip")

    # co-registration nodes
    coreg = setup_node(spm_coregister(cost_function="mi"), name="coreg_fmri")
    brain_sel = setup_node(Select(index=[0, 1, 2]), name="brain_sel")

    # brain mask made with EPI
    epi_mask = setup_node(ComputeMask(), name='epi_mask')

    # brain mask made with the merge of the tissue segmentations
    tissue_mask = setup_node(fsl.MultiImageMaths(), name='tissue_mask')
    tissue_mask.inputs.op_string = "-add %s -add %s -abs -kernel gauss 4 -dilM -ero -kernel gauss 1 -dilM -bin"
    tissue_mask.inputs.out_file = "tissue_brain_mask.nii.gz"

    # select tissues
    gm_select = setup_node(Select(index=[0]), name="gm_sel")
    wmcsf_select = setup_node(Select(index=[1, 2]), name="wmcsf_sel")

    # noise filter
    noise_wf = rest_noise_filter_wf()
    wm_select = setup_node(Select(index=[1]), name="wm_sel")
    csf_select = setup_node(Select(index=[2]), name="csf_sel")

    # bandpass filtering
    bandpass = setup_node(
        Function(
            input_names=['files', 'lowpass_freq', 'highpass_freq', 'tr'],
            output_names=['out_files'],
            function=bandpass_filter
        ),
        name='bandpass'
    )

    # smooth
    smooth = setup_node(
        Function(
            function=smooth_img,
            input_names=["in_file", "fwhm"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']
        ),
        name="smooth"
    )
    smooth.inputs.fwhm = get_config_setting('fmri_smooth.fwhm', default=8)
    smooth.inputs.out_file = "smooth_std_{}.nii.gz".format(wf_name)

    # output identities
    rest_output = setup_node(IdentityInterface(fields=out_fields), name="rest_output")

    # Connect the nodes
    wf.connect([
        # trim
        (rest_input, trim, [("in_file", "in_file")]),

        # slice time correction
        (trim, stc_wf, [("out_file", "stc_input.in_file")]),

        # motion correction
        (stc_wf, realign, [("stc_output.timecorrected_files", "in_file")]),

        # coregistration target
        (realign, average, [("out_file", "in_file")]),
        (average, mean_gunzip, [("out_file", "in_file")]),
        (mean_gunzip, coreg, [("out_file", "target")]),

        # epi brain mask
        (average, epi_mask, [("out_file", "mean_volume")]),

        # coregistration
        (rest_input, coreg, [("anat", "source")]),
        (rest_input, brain_sel, [("tissues", "inlist")]),
        (brain_sel, coreg, [(("out", flatten_list), "apply_to_files")]),

        # tissue brain mask
        (coreg, gm_select, [("coregistered_files", "inlist")]),
        (coreg, wmcsf_select, [("coregistered_files", "inlist")]),
        (gm_select, tissue_mask, [(("out", flatten_list), "in_file")]),
        (wmcsf_select, tissue_mask, [(("out", flatten_list), "operand_files")]),

        # nuisance correction
        (coreg, wm_select, [("coregistered_files", "inlist",)]),
        (coreg, csf_select, [("coregistered_files", "inlist",)]),
        (realign, noise_wf, [("out_file", "rest_noise_input.in_file",)]),
        (tissue_mask, noise_wf, [("out_file", "rest_noise_input.brain_mask")]),
        (wm_select, noise_wf, [(("out", flatten_list), "rest_noise_input.wm_mask")]),
        (csf_select, noise_wf, [(("out", flatten_list), "rest_noise_input.csf_mask")]),

        (realign, noise_wf, [("par_file", "rest_noise_input.motion_params",)]),

        # temporal filtering
        (noise_wf, bandpass, [("rest_noise_output.nuis_corrected", "files")]),
        # (realign,     bandpass,    [("out_file", "files")]),
        (stc_wf, bandpass, [("stc_output.time_repetition", "tr")]),
        (rest_input, bandpass, [
            ("lowpass_freq", "lowpass_freq"),
            ("highpass_freq", "highpass_freq"),
        ]),
        (bandpass, smooth, [("out_files", "in_file")]),

        # output
        (epi_mask, rest_output, [("brain_mask", "epi_brain_mask")]),
        (tissue_mask, rest_output, [("out_file", "tissues_brain_mask")]),
        (realign, rest_output, [
            ("out_file", "motion_corrected"),
            ("par_file", "motion_params"),
        ]),
        (coreg, rest_output, [
            ("coregistered_files", "tissues"),
            ("coregistered_source", "anat"),
        ]),
        (noise_wf, rest_output, [
            ("rest_noise_output.motion_regressors", "motion_regressors"),
            ("rest_noise_output.compcor_regressors", "compcor_regressors"),
            ("rest_noise_output.gsr_regressors", "gsr_regressors"),
            ("rest_noise_output.nuis_corrected", "nuis_corrected"),
            ("rest_noise_output.tsnr_file", "tsnr_file"),
            ("rest_noise_output.art_displacement_files", "art_displacement_files"),
            ("rest_noise_output.art_intensity_files", "art_intensity_files"),
            ("rest_noise_output.art_norm_files", "art_norm_files"),
            ("rest_noise_output.art_outlier_files", "art_outlier_files"),
            ("rest_noise_output.art_plot_files", "art_plot_files"),
            ("rest_noise_output.art_statistic_files", "art_statistic_files"),
        ]),
        (average, rest_output, [("out_file", "avg_epi")]),
        (bandpass, rest_output, [("out_files", "time_filtered")]),
        (smooth, rest_output, [("out_file", "smooth")]),
    ])

    return wf
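
A hedged sketch of feeding the workflow (field names follow in_fields above; paths hypothetical, and the remaining mandatory inputs such as anat and tissues must be set as well):

wf = fmri_cleanup_wf()
wf.inputs.rest_input.in_file = '/data/sub-01/rest.nii.gz'  # hypothetical path
wf.inputs.rest_input.lowpass_freq = 0.1    # upper edge of the pass band (Hz)
wf.inputs.rest_input.highpass_freq = 0.01  # lower edge of the pass band (Hz)
wf.run()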
Example #14
subject_list = [
    d for d in os.listdir(DATA_DIR) if os.path.isdir(os.path.join(DATA_DIR, d))
]

# Infosource - function free node to iterate over the list of subject names (and/or sessions)
infosource = Node(IdentityInterface(fields=['subject_id']), name="infosource")
infosource.iterables = [('subject_id', subject_list)]

# SelectFiles - uses glob and regex to find your files
templates = dict(struct='{subject_id}/structural/structural.nii.gz',
                 func='{subject_id}/functional/*.nii.gz')
selectfiles = Node(SelectFiles(templates), "selectfiles")
selectfiles.inputs.base_directory = DATA_DIR

# Gunzip - unzip images before piping them to interfaces that cannot read the gzipped NIfTI format (e.g. SPM)
gunzip_struct = Node(Gunzip(), name="gunzip_struct")

# Reorient images to match approximate orientation of the standard template images (MNI152)
reorient_func = Node(fsl.Reorient2Std(output_type='NIFTI_GZ'),
                     name='reorient_func')
reorient_struct = Node(fsl.Reorient2Std(output_type='NIFTI_GZ'),
                       name='reorient_struct')

# Convert functional images to float representation (FLOAT32)
img2float = Node(fsl.ImageMaths(out_data_type='float',
                                op_string='',
                                suffix='_dtype'),
                 name='img2float')


# Return the volume index of a file
Example #15
def spm_anat_preprocessing(wf_name="spm_anat_preproc"):
    """ Run the T1 pre-processing workflow against the anat_hc files in `data_dir`.

    It does:
    - N4BiasFieldCorrection
    - SPM12 New Segment
    - SPM12 Warp of MPRAGE to MNI

    [Optional: from config]


    Nipype Inputs
    -------------
    anat_input.in_file: traits.File
        path to the anatomical image

    Nipype Outputs
    --------------
    anat_output.anat_mni: traits.File
        The bias-field normalized to MNI anatomical image.

    anat_output.tissues_warped: traits.File
        The tissue segmentation in MNI space from SPM.

    anat_output.tissues_native: traits.File
        The tissue segmentation in native space from SPM

    anat_output.affine_transform: traits.File
        The affine transformation file.

    anat_output.warp_forward: traits.File
        The forward (anat to MNI) warp field from SPM.

    anat_output.warp_inverse: traits.File
        The inverse (MNI to anat) warp field from SPM.

    anat_output.anat_biascorr: traits.File
        The bias-field corrected anatomical image

    anat_output.atlas_anat: traits.File
        The atlas file warped to anatomical space,
        if do_atlas and the atlas file is set in configuration.

    anat_output.brain_mask: traits.File
        A brain mask file in anatomical space.
        This is calculated by summing up the maps of segmented tissues
        (CSF, WM, GM) and then binarised.

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = ["in_file"]
    out_fields = [
        "anat_mni",
        "tissues_warped",
        "tissues_native",
        "affine_transform",
        "warp_forward",
        "warp_inverse",
        "anat_biascorr",
        "brain_mask",
    ]

    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_file"]
        out_fields += ["atlas_anat"]

    # input node
    anat_input = pe.Node(IdentityInterface(fields=in_fields,
                                           mandatory_inputs=True),
                         name="anat_input")

    # atlas registration
    if do_atlas:
        anat_input.inputs.set(atlas_file=atlas_file)

    # T1 preprocessing nodes
    biascor = setup_node(biasfield_correct(), name="bias_correction")
    gunzip_anat = setup_node(Gunzip(), name="gunzip_anat")
    segment = setup_node(spm_segment(), name="new_segment")
    warp_anat = setup_node(spm_apply_deformations(), name="warp_anat")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # calculate brain mask from tissue maps
    tissues = setup_node(IdentityInterface(fields=["gm", "wm", "csf"],
                                           mandatory_inputs=True),
                         name="tissues")

    brain_mask = setup_node(Function(
        function=math_img,
        input_names=["formula", "out_file", "gm", "wm", "csf"],
        output_names=["out_file"],
        imports=['from pypes.interfaces.nilearn import ni2file']),
                            name='brain_mask')
    brain_mask.inputs.out_file = "tissues_brain_mask.nii.gz"
    brain_mask.inputs.formula = "np.abs(gm + wm + csf) > 0"

    # output node
    anat_output = pe.Node(IdentityInterface(fields=out_fields),
                          name="anat_output")

    # Connect the nodes
    wf.connect([
        # input
        (anat_input, biascor, [("in_file", "input_image")]),
        # new segment
        (biascor, gunzip_anat, [("output_image", "in_file")]),
        (gunzip_anat, segment, [("out_file", "channel_files")]),

        # Normalize12
        (segment, warp_anat, [("forward_deformation_field", "deformation_file")
                              ]),
        (segment, warp_anat, [("bias_corrected_images", "apply_to_files")]),
        (tpm_bbox, warp_anat, [("bbox", "write_bounding_box")]),

        # brain mask from tissues
        (segment, tissues, [
            (("native_class_images", selectindex, [0]), "gm"),
            (("native_class_images", selectindex, [1]), "wm"),
            (("native_class_images", selectindex, [2]), "csf"),
        ]),
        (tissues, brain_mask, [
            ("gm", "gm"),
            ("wm", "wm"),
            ("csf", "csf"),
        ]),

        # output
        (warp_anat, anat_output, [("normalized_files", "anat_mni")]),
        (segment, anat_output, [("modulated_class_images", "tissues_warped"),
                                ("native_class_images", "tissues_native"),
                                ("transformation_mat", "affine_transform"),
                                ("forward_deformation_field", "warp_forward"),
                                ("inverse_deformation_field", "warp_inverse"),
                                ("bias_corrected_images", "anat_biascorr")]),
        (brain_mask, anat_output, [("out_file", "brain_mask")]),
    ])

    # atlas warping nodes
    if do_atlas:
        gunzip_atlas = pe.Node(Gunzip(), name="gunzip_atlas")
        warp_atlas = setup_node(spm_apply_deformations(), name="warp_atlas")
        anat_bbox = setup_node(Function(function=get_bounding_box,
                                        input_names=["in_file"],
                                        output_names=["bbox"]),
                               name="anat_bbox")

        # set the warping interpolation to nearest neighbour.
        warp_atlas.inputs.write_interp = 0

        # connect the atlas registration nodes
        wf.connect([
            (anat_input, gunzip_atlas, [("atlas_file", "in_file")]),
            (gunzip_anat, anat_bbox, [("out_file", "in_file")]),
            (gunzip_atlas, warp_atlas, [("out_file", "apply_to_files")]),
            (segment, warp_atlas, [("inverse_deformation_field",
                                    "deformation_file")]),
            (anat_bbox, warp_atlas, [("bbox", "write_bounding_box")]),
            (warp_atlas, anat_output, [("normalized_files", "atlas_anat")]),
        ])
    return wf
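
A hedged usage sketch (the path is hypothetical; setup_node, check_atlas_file and the SPM helpers are assumed to come from the surrounding package):

wf = spm_anat_preprocessing()
wf.inputs.anat_input.in_file = '/data/sub-01/anat.nii.gz'  # hypothetical path
wf.run()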
Example #16
def petpvc_workflow(wf_name="petpvc"):
    """ Run the PET pre-processing workflow against the gunzip_pet.in_file files.
    It coregisters the reference_file and tissues to PET space, then applies PVC and grey matter normalization.

    It does:
    - SPM12 Coregister T1 and tissues to PET
    - PVC the PET image in PET space

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pvc_input.in_file: traits.File
        The raw NIFTI_GZ PET image file

    pvc_input.reference_file: traits.File
        The anatomical image in its native space. For registration reference.

    pvc_input.tissues: list of traits.File
        List of tissues files from the New Segment process. At least the first
        3 tissues must be present.

    Nipype outputs
    --------------
    pvc_output.coreg_ref: existing file
        The coregistered reference_file image in PET space.

    pvc_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files

    pvc_output.pvc_out: existing file
        The output of the PETPVC process.

    pvc_output.petpvc_mask: existing file
        The mask built for the PETPVC.

    pvc_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pvc_output.gm_norm: existing file
        The output of the grey matter intensity normalization process.
        This is the last step in the PET signal correction.

    Returns
    -------
    wf: nipype Workflow
    """
    # fixed parameters of the NUK mMR
    psf_fwhm = (4.3, 4.3, 4.3)

    # specify input and output fields
    in_fields = [
        "in_file",
        "reference_file",
        "tissues",
    ]

    out_fields = [
        "coreg_ref",
        "coreg_others",
        "pvc_out",
        "petpvc_mask",
        "brain_mask",
        "gm_norm",
    ]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="pvc_input")

    flat_list = pe.Node(Function(input_names=['list_of_lists'],
                                 output_names=['out'],
                                 function=flatten_list),
                        name='flatten_tissue_list')

    # coreg pet
    gunzip_pet = setup_node(Gunzip(), name="gunzip_pet")
    coreg_pet = setup_node(spm_coregister(cost_function="mi"),
                           name="coreg_pet")

    tissues_sel = setup_node(Select(index=[0, 1, 2]), name="tissues")
    select_gm = setup_node(Select(index=[0]), name="select_gm")
    pvc = setup_node(petpvc_cmd(fwhm_mm=psf_fwhm, pvc_method='RBV'),
                     name="pvc")

    # output
    pvc_output = setup_node(IdentityInterface(fields=out_fields),
                            name="pvc_output")

    # workflow to create the mask
    mask_wf = petpvc_mask(wf_name="petpvc_mask")

    # workflow for intensity normalization
    norm_wf = intensity_norm(wf_name="intensity_norm_gm")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    wf.connect([
        # inputs
        (pet_input, gunzip_pet, [("in_file", "in_file")]),
        (pet_input, tissues_sel, [("tissues", "inlist")]),
    ])

    # check how to perform the registration, to decide how to build the pipeline
    anat2pet = get_config_setting('registration.anat2pet', False)
    if anat2pet:
        wf.connect([
            # inputs
            (pet_input, coreg_pet, [("reference_file", "source")]),

            # unzip to coregister the reference file (anatomical image) to PET space.
            (gunzip_pet, coreg_pet, [("out_file", "target")]),
            (tissues_sel, flat_list, [("out", "list_of_lists")]),
            (flat_list, coreg_pet, [("out", "apply_to_files")]),

            # the list of tissues to the mask wf and the GM for PET intensity normalization
            (coreg_pet, select_gm, [("coregistered_files", "inlist")]),
            (coreg_pet, mask_wf, [("coregistered_files",
                                   "pvcmask_input.tissues")]),

            # the PET in native space to PVC correction
            (gunzip_pet, pvc, [("out_file", "in_file")]),

            # the merged file with 4 tissues to PVC correction
            (mask_wf, pvc, [("pvcmask_output.petpvc_mask", "mask_file")]),

            # normalize voxel values of PET PVCed by demeaning it entirely by GM PET voxel values
            (pvc, norm_wf, [("out_file", "intnorm_input.source")]),
            (select_gm, norm_wf, [("out", "intnorm_input.mask")]),

            # output
            (coreg_pet, pvc_output, [("coregistered_source", "coreg_ref")]),
            (coreg_pet, pvc_output, [("coregistered_files", "coreg_others")]),
            (pvc, pvc_output, [("out_file", "pvc_out")]),
            (mask_wf, pvc_output, [("pvcmask_output.brain_mask", "brain_mask")
                                   ]),
            (mask_wf, pvc_output, [("pvcmask_output.petpvc_mask",
                                    "petpvc_mask")]),
            (norm_wf, pvc_output, [("intnorm_output.out_file", "gm_norm")]),
        ])
    else:  # PET to ANAT
        wf.connect([
            # inputs
            (pet_input, coreg_pet, [("reference_file", "target")]),

            # unzip PET image and set as a source to register it to anatomical space.
            (gunzip_pet, coreg_pet, [("out_file", "source")]),
            (tissues_sel, flat_list, [("out", "list_of_lists")]),
            (flat_list, coreg_pet, [("out", "apply_to_files")]),

            # the list of tissues to the mask wf and the GM for PET intensity normalization
            (tissues_sel, select_gm, [("out", "inlist")]),
            (flat_list, mask_wf, [("out", "pvcmask_input.tissues")]),

            # the PET in ANAT space to PVC correction
            (coreg_pet, pvc, [("coregistered_source", "in_file")]),

            # the merged file with 4 tissues to PVC correction
            (mask_wf, pvc, [("pvcmask_output.petpvc_mask", "mask_file")]),

            # normalize voxel values of PET PVCed by demeaning it entirely by GM PET voxel values
            (pvc, norm_wf, [("out_file", "intnorm_input.source")]),
            (select_gm, norm_wf, [("out", "intnorm_input.mask")]),

            # output
            # TODO: coreg_ref should have a different name in this case
            (coreg_pet, pvc_output, [("coregistered_source", "coreg_ref")]),
            (coreg_pet, pvc_output, [("coregistered_files", "coreg_others")]),
            (pvc, pvc_output, [("out_file", "pvc_out")]),
            (mask_wf, pvc_output, [("pvcmask_output.brain_mask", "brain_mask")
                                   ]),
            (mask_wf, pvc_output, [("pvcmask_output.petpvc_mask",
                                    "petpvc_mask")]),
            (norm_wf, pvc_output, [("intnorm_output.out_file", "gm_norm")]),
        ])

    return wf
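
A hedged usage sketch (paths hypothetical; the registration.anat2pet config setting picks which wiring branch above is built):

wf = petpvc_workflow()
wf.inputs.pvc_input.in_file = '/data/sub-01/pet.nii.gz'      # hypothetical path
wf.inputs.pvc_input.reference_file = '/data/sub-01/T1w.nii'  # hypothetical path
wf.inputs.pvc_input.tissues = ['/data/sub-01/c1.nii',        # hypothetical tissue maps
                               '/data/sub-01/c2.nii',
                               '/data/sub-01/c3.nii']
wf.run()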
Example #17
from nilearn.plotting import plot_glass_brain

experiment_dir = '/Volumes/INTENSO/DPX_EEG_fMRI/fMRI/output'
output_dir = '/Volumes/INTENSO/DPX_EEG_fMRI/fMRI/data'
working_dir = '/Volumes/INTENSO/DPX_EEG_fMRI/fMRI/workingdir'

# Smoothing width (FWHM, in mm) used during preprocessing
fwhm = 5

# Which contrasts to use for the 2nd-level analysis
contrast_list = ['con_0001', 'con_0002', 'con_0003', 'con_0004', 'con_0005']

mask = "/usr/local/fsl/data/standard/MNI152_T1_1mm_brain_mask_dil.nii.gz"

# Gunzip - unzip the mask image
gunzip = Node(Gunzip(in_file=mask), name="gunzip")

# OneSampleTTestDesign - with only two cues being tested for differences, a one sample T-Test Design
# is sufficient
onesamplettestdes = Node(OneSampleTTestDesign(),
                         name="onesampttestdes")

# EstimateModel - estimates the model
level2estimate = Node(EstimateModel(estimation_method={'Classical': 1}),
                      name="level2estimate")

# EstimateContrast - estimates group contrast
level2conestimate = Node(EstimateContrast(group_contrast=True),
                         name="level2conestimate")
cont1 = ['Group', 'T', ['mean'], [1]]
level2conestimate.inputs.contrasts = [cont1]
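
A hedged sketch of how these second-level nodes are usually wired together (field names follow nipype's SPM interfaces; the Workflow import and the SelectFiles/DataSink plumbing are assumed):

l2analysis = Workflow(name='l2analysis')
l2analysis.connect([
    (gunzip, onesamplettestdes, [('out_file', 'explicit_mask_file')]),
    (onesamplettestdes, level2estimate, [('spm_mat_file', 'spm_mat_file')]),
    (level2estimate, level2conestimate, [('spm_mat_file', 'spm_mat_file'),
                                         ('beta_images', 'beta_images'),
                                         ('residual_image', 'residual_image')]),
])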
Example #18
def spm_anat_preprocessing(wf_name="spm_anat_preproc"):
    """ Run the T1 pre-processing workflow against the anat_hc
    files in `data_dir`.

    It does:
    - N4BiasFieldCorrection
    - SPM12 New Segment
    - SPM12 Warp of MPRAGE to MNI

    [Optional: from config]
    - Atlas file warping to MPRAGE
    - Cortical thickness (SPM+DiReCT)

    Nipype Inputs
    -------------
    anat_input.in_file: traits.File
        Path to the anatomical image.

    anat_input.atlas_file: traits.File
        Path to an atlas file in MNI space to be
        warped to the anatomical space.
        Can also be set through the configuration
        setting `atlas_file`.

    Nipype Outputs
    --------------
    anat_output.anat_mni: traits.File
        The bias-field normalized to MNI anatomical image.

    anat_output.tissues_warped: traits.File
        The tissue segmentation in MNI space from SPM.

    anat_output.tissues_native: traits.File
        The tissue segmentation in native space from SPM.

    anat_output.affine_transform: traits.File
        The affine transformation file.

    anat_output.warp_forward: traits.File
        The forward (anat to MNI) warp field from SPM.

    anat_output.warp_inverse: traits.File
        The inverse (MNI to anat) warp field from SPM.

    anat_output.anat_biascorr: traits.File
        The bias-field corrected anatomical image.

    anat_output.brain_mask: traits.File
        A brain mask file in anatomical space.
        This is calculated by summing up the maps of
        segmented tissues (CSF, WM, GM) and then binarised.

    anat_output.atlas_anat: traits.File
        If `atlas_file` is an existing file in MNI space.
        The atlas file warped to anatomical space,
        if do_atlas and the atlas file is set in configuration.

    anat_output.cortical_thickness: traits.File
        If `anat_preproc.do_cortical_thickness` is True.
        The cortical thickness estimations calculated with the
        SPM+DiReCT method (KellyKapowski).

    anat_output.warped_white_matter: traits.File
        If `anat_preproc.do_cortical_thickness` is True.
        The warped white matter image calculated with the
        SPM+DiReCT method (KellyKapowski).

    Returns
    -------
    wf: nipype Workflow
    """
    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # specify input and output fields
    in_fields = ["in_file"]
    out_fields = [
        "anat_mni",
        "tissues_warped",
        "tissues_native",
        "affine_transform",
        "warp_forward",
        "warp_inverse",
        "anat_biascorr",
        "brain_mask",
    ]

    # check if we have to warp an atlas files too.
    do_atlas, atlas_file = check_atlas_file()
    if do_atlas:
        in_fields += ["atlas_file"]
        out_fields += ["atlas_anat"]

    # check if we have to do cortical thickness (SPM+DiReCT) method.
    do_cortical_thickness = get_config_setting(
        'anat_preproc.do_cortical_thickness', False)
    if do_cortical_thickness:
        out_fields += [
            "cortical_thickness",
            "warped_white_matter",
        ]

    # input node
    anat_input = pe.Node(IdentityInterface(fields=in_fields,
                                           mandatory_inputs=True),
                         name="anat_input")

    # atlas registration
    if do_atlas and not isdefined(anat_input.inputs.atlas_file):
        anat_input.inputs.set(atlas_file=atlas_file)

    # T1 preprocessing nodes
    biascor = setup_node(biasfield_correct(), name="bias_correction")
    gunzip_anat = setup_node(Gunzip(), name="gunzip_anat")
    segment = setup_node(spm_segment(), name="new_segment")
    warp_anat = setup_node(spm_apply_deformations(), name="warp_anat")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # calculate brain mask from tissue maps
    tissues = setup_node(IdentityInterface(fields=["gm", "wm", "csf"],
                                           mandatory_inputs=True),
                         name="tissues")

    brain_mask = setup_node(Function(
        function=math_img,
        input_names=["formula", "out_file", "gm", "wm", "csf"],
        output_names=["out_file"],
        imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                            name='brain_mask')
    brain_mask.inputs.out_file = "tissues_brain_mask.nii.gz"
    brain_mask.inputs.formula = "np.abs(gm + wm + csf) > 0"

    # output node
    anat_output = pe.Node(IdentityInterface(fields=out_fields),
                          name="anat_output")

    # Connect the nodes
    wf.connect([
        # input to biasfieldcorrection
        (anat_input, biascor, [("in_file", "input_image")]),

        # new segment
        (biascor, gunzip_anat, [("output_image", "in_file")]),
        (gunzip_anat, segment, [("out_file", "channel_files")]),

        # Normalize12
        (segment, warp_anat, [("forward_deformation_field", "deformation_file")
                              ]),
        (segment, warp_anat, [("bias_corrected_images", "apply_to_files")]),
        (tpm_bbox, warp_anat, [("bbox", "write_bounding_box")]),

        # brain mask from tissues
        (segment, tissues, [
            (("native_class_images", selectindex, 0), "gm"),
            (("native_class_images", selectindex, 1), "wm"),
            (("native_class_images", selectindex, 2), "csf"),
        ]),
        (tissues, brain_mask, [
            ("gm", "gm"),
            ("wm", "wm"),
            ("csf", "csf"),
        ]),

        # output
        (warp_anat, anat_output, [("normalized_files", "anat_mni")]),
        (segment, anat_output, [("modulated_class_images", "tissues_warped"),
                                ("native_class_images", "tissues_native"),
                                ("transformation_mat", "affine_transform"),
                                ("forward_deformation_field", "warp_forward"),
                                ("inverse_deformation_field", "warp_inverse"),
                                ("bias_corrected_images", "anat_biascorr")]),
        (brain_mask, anat_output, [("out_file", "brain_mask")]),
    ])

    # atlas warping nodes
    if do_atlas:
        gunzip_atlas = pe.Node(Gunzip(), name="gunzip_atlas")
        warp_atlas = setup_node(spm_apply_deformations(), name="warp_atlas")
        anat_bbox = setup_node(Function(function=get_bounding_box,
                                        input_names=["in_file"],
                                        output_names=["bbox"]),
                               name="anat_bbox")

        # set the warping interpolation to nearest neighbour.
        warp_atlas.inputs.write_interp = 0

        # connect the atlas registration nodes
        wf.connect([
            (anat_input, gunzip_atlas, [("atlas_file", "in_file")]),
            (gunzip_anat, anat_bbox, [("out_file", "in_file")]),
            (gunzip_atlas, warp_atlas, [("out_file", "apply_to_files")]),
            (segment, warp_atlas, [("inverse_deformation_field",
                                    "deformation_file")]),
            (anat_bbox, warp_atlas, [("bbox", "write_bounding_box")]),
            (warp_atlas, anat_output, [("normalized_files", "atlas_anat")]),
        ])

    # cortical thickness (SPM+DiReCT) method
    if do_cortical_thickness:
        from ..interfaces.ants import KellyKapowski

        segm_img = setup_node(Function(
            function=math_img,
            input_names=["formula", "out_file", "gm", "wm"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                              name='gm-wm_image')
        segm_img.inputs.out_file = "gm_wm.nii.gz"
        segm_img.inputs.formula = '((gm >= 0.5)*2 + (wm > 0.5)*3).astype(np.uint8)'

        # copy the header from the GM tissue image to the result from `gm-wm_image`.
        # this is necessary because the `gm-wm_image` operation sometimes modifies the
        # offset of the image, which will provoke an ANTs exception due to
        # ITK tolerance in ImageToImageFilter
        # https://github.com/stnava/ANTs/issues/74
        cp_hdr = setup_node(Function(
            function=copy_header,
            input_names=["in_file", "data_file"],
            output_names=["out_file"],
            imports=['from neuro_pypes.interfaces.nilearn import ni2file']),
                            name='copy_header')

        kk = setup_node(KellyKapowski(), name='direct')
        kk.inputs.cortical_thickness = 'direct_cortical_thickness.nii.gz'
        kk.inputs.warped_white_matter = 'direct_warped_white_matter.nii.gz'

        # connect the cortical thickness (SPM+DiReCT) method
        wf.connect([
            # create segmentation GM+WM file
            (tissues, segm_img, [("gm", "gm"), ("wm", "wm")]),
            (segm_img, cp_hdr, [("out_file", "data_file")]),
            (tissues, cp_hdr, [("gm", "in_file")]),

            # kellykapowski
            (cp_hdr, kk, [("out_file", "segmentation_image")]),
            (tissues, kk, [("gm", "gray_matter_prob_image"),
                           ("wm", "white_matter_prob_image")]),
            (kk, anat_output, [("cortical_thickness", "cortical_thickness"),
                               ("warped_white_matter", "warped_white_matter")
                               ]),
        ])
    return wf
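The `math_img` and `copy_header` helpers above are imported from `neuro_pypes.interfaces.nilearn` and are not shown in this example. A rough, hypothetical sketch of what they might look like, assuming `nibabel` and `nilearn` are available (names and signatures here are illustrative, not the package's actual API):

import nibabel as nib
from nilearn.image import math_img as nl_math_img


def math_img_sketch(formula, out_file, gm, wm):
    # Evaluate a numpy formula over the gm/wm images and save the result.
    result = nl_math_img(formula, gm=gm, wm=wm)
    result.to_filename(out_file)
    return out_file


def copy_header_sketch(in_file, data_file, out_file='gm_wm_fixed_hdr.nii.gz'):
    # Re-save the data of `data_file` with the affine/header of `in_file`,
    # so downstream ANTs tools see consistent geometry.
    ref = nib.load(in_file)
    data = nib.load(data_file).get_fdata()
    nib.save(nib.Nifti1Image(data, ref.affine, ref.header), out_file)
    return out_file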
Example No. 19
def spm_register_to_template_wf(wf_name="spm_registration_to_template"):
    """Return a workflow that registers each reg_input.in_file to the file in reg_input.template.
    For now this does not do atlas registration.

    It does:
    - SPM12 Warp input image to the given template

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    reg_input.in_file: traits.File
        The raw NIFTI_GZ subject image file.

    reg_input.template: traits.File
        The template file for inter-subject registration reference.

    Nipype outputs
    --------------
    reg_output.warped: existing file
        Image normalized to the given template.

    reg_output.warp_field: existing file
        Spatial normalization parameters .mat file.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields = [
        "in_file",
        "template",
    ]

    out_fields = [
        "warped",
        "warp_field",
    ]

    # input
    reg_input = setup_node(IdentityInterface(fields=in_fields,
                                             mandatory_inputs=True),
                           name="reg_input")

    # warp each subject to the group template
    gunzip_template = setup_node(
        Gunzip(),
        name="gunzip_template",
    )
    gunzip_input = setup_node(
        Gunzip(),
        name="gunzip_input",
    )

    warp2template = setup_node(spm.Normalize(jobtype="estwrite",
                                             out_prefix="wgrptemplate_"),
                               name="warp2template")

    get_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="get_bbox")

    # output
    reg_output = setup_node(IdentityInterface(fields=out_fields),
                            name="reg_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    wf.connect([
        # get template bounding box to apply to results
        (reg_input, get_bbox, [("template", "in_file")]),

        # gunzip some inputs
        (reg_input, gunzip_input, [("in_file", "in_file")]),
        (reg_input, gunzip_template, [("template", "in_file")]),

        # prepare the target parameters of the warp to template
        (gunzip_template, warp2template, [("out_file", "template")]),
        (get_bbox, warp2template, [("bbox", "write_bounding_box")]),

        # directly warp the input image to the template
        (gunzip_input, warp2template, [("out_file", "source")]),

        # output
        (warp2template, reg_output, [
            ("normalization_parameters", "warp_field"),
            ("normalized_source", "warped"),
        ]),
    ])

    return wf
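A minimal usage sketch for this workflow, assuming `setup_node` behaves like a regular `pe.Node` factory and the two inputs exist on disk (both paths below are placeholders):

reg_wf = spm_register_to_template_wf()
reg_wf.inputs.reg_input.in_file = '/data/sub-01_pet.nii.gz'       # hypothetical path
reg_wf.inputs.reg_input.template = '/data/group_template.nii.gz'  # hypothetical path
reg_wf.run()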
Example No. 20
outDirSubj = os.path.join(outDirSite, iSubj)
# if the directory doesn't exist, create it
if not os.path.exists(outDirSubj):
    os.makedirs(outDirSubj)

# finally, output directory for the results in MNI space
outDir = outDirSubj
# if the directory doesn't exist, create it
if not os.path.exists(outDir):
    os.makedirs(outDir)

#
#    T1 Normalization nodes
#
# gunzip node
gunzip_T1w = Node(Gunzip(in_file=imageT1), name="gunzip_T1w")

# Segmentation, native space
segNative = Node(spm.NewSegment(), name='segNative')

# Normalize - normalizes structural images to the MNI template
normalizeT1 = Node(spm.Normalize12(jobtype='estwrite',
                                   tpm=fTPM,
                                   write_bounding_box=[[-90, -120, -70],
                                                       [90, 90, 105]]),
                   name="normalizeT1")

# Segmentation, template space
segMNI = Node(spm.NewSegment(), name='segMNI')

Example No. 21
def genNormalizeDwiWF(
        name='NormalizeDwi',
        base_dir=op.abspath('.'),
        input_dir=None,
        input_temp='%s/%s/*%s',
        input_temp_args={
            'ref_T1': [['subject_id', 'bias_corrected_images', '_mT1.nii.gz']],
            'forward_deformation_field':
            [['subject_id', 'forward_deformation_field', '_y_T1.nii.gz']],
            'denoised_dwi':
            [['subject_id', 'denoised_dwi_series', '_dwi_denoised.nii.gz']],
            'bval': [['subject_id', 'raw_bvals', '_bval.gz']],
            'bvec': [['subject_id', 'processed_bvecs', '_bvecs.gz']],
            'apply_to': [[
                'subject_id', 'apply_to_files',
                ['_ICVF.nii.gz', '_ISOVF.nii.gz', '_OD.nii.gz']
            ]]
        },
        subjects=None,
        spm_standalone=None,
        mcr=None):

    # Generate WF
    wf = Workflow(name=name)
    wf.base_dir = base_dir

    # Node: subject list
    subjectList = Node(IdentityInterface(fields=['subject_id'],
                                         mandatory_inputs=True),
                       name="subjectList")
    if subjects:
        subjectList.iterables = ('subject_id', subjects)
    else:
        subjectList.iterables = ('subject_id', [
            pth for pth in os.listdir(input_dir)
            if os.path.isdir(op.join(input_dir, pth))
        ])
        print(subjectList.iterables)

    scanList = Node(DataGrabber(infields=['subject_id'],
                                outfields=[
                                    'ref_T1', 'forward_deformation_field',
                                    'denoised_dwi', 'bval', 'bvec', 'apply_to'
                                ]),
                    name="scanList")
    scanList.inputs.base_directory = input_dir
    scanList.inputs.ignore_exception = False
    scanList.inputs.raise_on_empty = True
    scanList.inputs.sort_filelist = False
    scanList.inputs.template = input_temp
    scanList.inputs.template_args = input_temp_args
    wf.connect(subjectList, "subject_id", scanList, "subject_id")

    # Unzip everything for SPM
    gunzipT1 = Node(Gunzip(), name='gunzipT1')
    wf.connect(scanList, "ref_T1", gunzipT1, "in_file")

    gunzipDF = Node(Gunzip(), name='gunzipDF')
    wf.connect(scanList, "forward_deformation_field", gunzipDF, "in_file")

    gunzipbval = Node(Gunzip(), name='gunzipbval')
    wf.connect(scanList, "bval", gunzipbval, "in_file")

    gunzipbvec = Node(Gunzip(), name='gunzipbvec')
    wf.connect(scanList, "bvec", gunzipbvec, "in_file")

    gunzipApplyTo = MapNode(Gunzip(),
                            iterfield=["in_file"],
                            name='gunzipApplyTo')
    wf.connect(scanList, "apply_to", gunzipApplyTo, "in_file")

    # Extract b=0 frames from denoised DWI and average them to make a ref_dwi
    dwib0 = Node(DWIExtract(), name="dwib0")
    dwib0.inputs.bzero = True
    dwib0.inputs.out_file = "dwib0.nii.gz"
    wf.connect(scanList, "denoised_dwi", dwib0, "in_file")
    wf.connect(gunzipbval, "out_file", dwib0, "in_bval")
    wf.connect(gunzipbvec, "out_file", dwib0, "in_bvec")

    # Make an average image
    avgb0 = Node(MeanImage(), name="avgb0")
    avgb0.inputs.nan2zeros = True
    avgb0.inputs.output_type = "NIFTI"
    avgb0.inputs.out_file = "avg_dwib0.nii"
    avgb0.inputs.dimension = "T"
    wf.connect(dwib0, "out_file", avgb0, "in_file")

    # spm Normalize WF
    spmNormProc = genSpmNormalizeDwiWF(name="spmNormProc",
                                       spm_standalone=spm_standalone,
                                       mcr=mcr)
    wf.connect(gunzipT1, "out_file", spmNormProc, "inputNode.ref_T1")
    wf.connect(gunzipDF, "out_file", spmNormProc,
               "inputNode.forward_deformation_field")
    wf.connect(avgb0, "out_file", spmNormProc, "inputNode.ref_dwi")
    wf.connect(gunzipApplyTo, "out_file", spmNormProc, "inputNode.apply_to")

    # Datasink
    datasink = Node(DataSink(base_directory=base_dir,
                             container='%sSink' % name),
                    name='Datasink')
    wf.connect(spmNormProc, "outputNode.normalized_files", datasink,
               "normalized_files")

    return wf
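A minimal usage sketch, with placeholder paths; it assumes `input_dir` contains one folder per subject matching the `DataGrabber` template above, and that the SPM standalone/MCR arguments are set up as `genSpmNormalizeDwiWF` expects:

wf = genNormalizeDwiWF(
    name='NormalizeDwi',
    base_dir='/scratch/work',                  # hypothetical working directory
    input_dir='/data/derivatives',             # hypothetical input directory
    subjects=['sub-01', 'sub-02'],             # or None to scan input_dir
    spm_standalone='/opt/spm12/run_spm12.sh',  # hypothetical SPM standalone
    mcr='/opt/mcr/v713',                       # hypothetical MATLAB runtime
)
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})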
Example No. 22
def matlab_noddi_processing(caps_directory,
                            num_cores,
                            bStep,
                            name='NoddiMatlab'):
    """
        This is a function to fit NODDI onto the multiple shells diffusion data based on this paper: NODDI: Practical in vivo neurite orientation
        dispersion and density imaging of the human brain, Neuroimage, 2012, Gary Zhang.

        TODO: hard code to find the path to matlab script.
        TODO: for the matlab script, using multiple cores to run one subject has a bug for python wrapper, but ok with one core fitting, improve this in the future
        TODO: deal with how to delete the original nii file in the output folder of NODDI
    Args:
        caps_directory: CAPS directory
        tsv: the tsv file containing the participant_id and session_id
        bStep: the bvalue to round
        working_directory: the path to working_directory
        num_cores: number of cores that fit Noddi model
        name: the name of the pipeline

    Returns:

    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    from nipype.algorithms.misc import Gunzip
    import os
    import clinica.pipelines as clp

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'subject_id_list', 'noddi_preprocessed_dwi', 'noddi_preprocessed_bvec',
        'noddi_preprocessed_bval', 'noddi_preprocessed_mask',
        'noddi_toolbox_dir', 'nifti_matlib_dir'
    ]),
                        name='inputnode')

    capsnode = pe.MapNode(
        name='capsnode',
        interface=niu.Function(
            input_names=['subject_id_list', 'caps_directory'],
            output_names=['temp_folder'],
            function=make_processing_caps),
        iterfield=['subject_id_list'])
    capsnode.inputs.caps_directory = caps_directory

    path_to_matscript = os.path.join(os.path.dirname(clp.__path__[0]),
                                     'lib/noddi')

    # output node
    outputnode = pe.Node(
        niu.IdentityInterface(fields=['fit_icvf', 'fit_isovf', 'fit_od']),
        name='outputnode')

    # gunzip the .nii.gz images to .nii
    # Gunzip - unzip the DWI and brain mask for the matlab toolbox
    gunzip_dwi = pe.MapNode(Gunzip(), name="gunzipdwi", iterfield=['in_file'])
    gunzip_mask = pe.MapNode(Gunzip(),
                             name="gunzipmask",
                             iterfield=['in_file'])

    # Node to wrap noddi matlab toolbox script.
    nodditoolbox = pe.MapNode(name='nodditoolbox',
                              interface=niu.Function(input_names=[
                                  'output_dir', 'noddi_img', 'brain_mask',
                                  'roi_mask', 'bval', 'bvec', 'prefix',
                                  'bStep', 'num_cores', 'path_to_matscript',
                                  'noddi_toolbox_dir', 'nifti_matlib_dir'
                              ],
                                                     output_names=[
                                                         'fit_icvf',
                                                         'fit_isovf', 'fit_od'
                                                     ],
                                                     function=runmatlab),
                              iterfield=[
                                  'output_dir', 'noddi_img', 'brain_mask',
                                  'roi_mask', 'bval', 'bvec', 'prefix'
                              ])
    nodditoolbox.inputs.path_to_matscript = path_to_matscript
    nodditoolbox.inputs.num_cores = num_cores
    nodditoolbox.inputs.bStep = bStep

    # zip the result imgs
    zip_icvf = pe.MapNode(name='zip_icvf',
                          interface=niu.Function(input_names=['in_file'],
                                                 output_names=['out_file'],
                                                 function=compress_nii),
                          iterfield=['in_file'])

    zip_isovf = pe.MapNode(name='zip_isovf',
                           interface=niu.Function(input_names=['in_file'],
                                                  output_names=['out_file'],
                                                  function=compress_nii),
                           iterfield=['in_file'])

    zip_odi = pe.MapNode(name='zip_odi',
                         interface=niu.Function(input_names=['in_file'],
                                                output_names=['out_file'],
                                                function=compress_nii),
                         iterfield=['in_file'])

    # workflow
    nodditoolbox_wf = pe.Workflow(name=name)
    # unzip
    nodditoolbox_wf.connect(inputnode, 'noddi_preprocessed_dwi', gunzip_dwi,
                            'in_file')
    nodditoolbox_wf.connect(inputnode, 'noddi_preprocessed_mask', gunzip_mask,
                            'in_file')
    # fit matlab toolbox
    nodditoolbox_wf.connect(gunzip_dwi, 'out_file', nodditoolbox, 'noddi_img')
    nodditoolbox_wf.connect(gunzip_mask, 'out_file', nodditoolbox,
                            'brain_mask')
    nodditoolbox_wf.connect(gunzip_mask, 'out_file', nodditoolbox, 'roi_mask')
    nodditoolbox_wf.connect(inputnode, 'noddi_preprocessed_bvec', nodditoolbox,
                            'bvec')
    nodditoolbox_wf.connect(inputnode, 'noddi_preprocessed_bval', nodditoolbox,
                            'bval')
    nodditoolbox_wf.connect(inputnode, 'subject_id_list', nodditoolbox,
                            'prefix')
    # nodditoolbox_wf.connect(inputnode, 'bStep', nodditoolbox, 'bStep')
    nodditoolbox_wf.connect(inputnode, 'noddi_toolbox_dir', nodditoolbox,
                            'noddi_toolbox_dir')
    nodditoolbox_wf.connect(inputnode, 'nifti_matlib_dir', nodditoolbox,
                            'nifti_matlib_dir')
    nodditoolbox_wf.connect(inputnode, 'subject_id_list', capsnode,
                            'subject_id_list')
    nodditoolbox_wf.connect(capsnode, 'temp_folder', nodditoolbox,
                            'output_dir')

    nodditoolbox_wf.connect(nodditoolbox, 'fit_icvf', zip_icvf, 'in_file')
    nodditoolbox_wf.connect(nodditoolbox, 'fit_isovf', zip_isovf, 'in_file')
    nodditoolbox_wf.connect(nodditoolbox, 'fit_od', zip_odi, 'in_file')
    # output node
    nodditoolbox_wf.connect(zip_icvf, 'out_file', outputnode, 'fit_icvf')
    nodditoolbox_wf.connect(zip_isovf, 'out_file', outputnode, 'fit_isovf')
    nodditoolbox_wf.connect(zip_odi, 'out_file', outputnode, 'fit_od')

    return nodditoolbox_wf
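The `compress_nii` function wired into the three zip nodes above is not shown. A hypothetical sketch of such a helper (imports inside the function body, as nipype `Function` nodes require; the real Clinica utility may differ):

def compress_nii(in_file):
    # Gzip a .nii file into the node's working directory and
    # return the .nii.gz path.
    import gzip
    import os
    import shutil
    out_file = os.path.abspath(os.path.basename(in_file) + '.gz')
    with open(in_file, 'rb') as f_in, gzip.open(out_file, 'wb') as f_out:
        shutil.copyfileobj(f_in, f_out)
    return out_file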
Example No. 23
    input_names=['in_file', 'events_file', 'regressors_file', 'regressors_names'],
    function=_bids2nipypeinfo, output_names=['info', 'realign_file']),
    name='runinfo')

# Set the column names to be used from the confounds file
runinfo.inputs.regressors_names = ['dvars', 'framewise_displacement'] + \
    ['a_comp_cor_%02d' % i for i in range(6)] + ['cosine%02d' % i for i in range(4)]
#%%
cont1 = ['Trauma>Sad', 'T', ['trauma', 'sad'], [1, -1]]
cont2 = ['Trauma>Relax', 'T', ['trauma', 'relax'], [1, -1]]
cont3 = ['Sad>Relax', 'T', ['sad', 'relax'], [1, -1]]
cont4 = ['Trauma', 'T', ['trauma'], [1]]
cont5 = ['Sad', 'T', ['sad'], [1]]
contrasts = [cont1, cont2, cont3, cont4, cont5]
#%%
gunzip = MapNode(Gunzip(), name='gunzip',
                 iterfield=['in_file'])

#%% Adding simple denoising procedures (remove dummy scans, smoothing, artifact detection)
#extract = Node(fsl.ExtractROI(t_min=4, t_size=-1, output_type='NIFTI'),
#               name="extract")

smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")
# Artifact Detection - determines outliers in functional images
#art = Node(ArtifactDetect(norm_threshold=2,
#                          zintensity_threshold=3,
#                          mask_type='spm_global',
#                          parameter_source='FSL',
#                          use_differences=[True, False],
#                          plot_type='svg'),
#           name="art")
Example No. 24
def spm_mrpet_grouptemplate_preprocessing(wf_name="spm_mrpet_grouptemplate_preproc"):
    """ Run the PET pre-processing workflow against the gunzip_pet.in_file files.
    It depends on the anat_preproc_workflow, so if this has not been run, this function
    will run it too.

    This is identical to the workflow defined in `spm_mrpet_preprocessing`,
    with the only difference that we now normalize all subjects against a custom
    template using the SPM Old Normalize interface.

    It does:
    - SPM12 Coregister T1 and tissues to PET
    - PVC the PET image in PET space
    - SPM12 Warp PET to the given template

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pet_input.in_file: traits.File
        The raw NIFTI_GZ PET image file.

    pet_input.atlas_anat: traits.File
        The atlas file in anatomical space.

    pet_input.anat: traits.File
        Path to the high-contrast anatomical image.
        Reference file of the warp_field, i.e., the anatomical image in its native space.

    pet_input.tissues: list of traits.File
        List of tissues files from the New Segment process. At least the first
        3 tissues must be present.

    pet_input.pet_template: traits.File
        The template file for inter-subject registration reference.

    Nipype outputs
    --------------
    pet_output.pvc_out: existing file
        The results of the PVC process.

    pet_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pet_output.coreg_ref: existing file
        The coregistered reference image to PET space.

    pet_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files.

    pet_output.pet_warped: existing file
        PET image normalized to the group template.

    pet_output.pvc_warped: existing file
        The outputs of the PETPVC workflow normalized to the group template.
        The result of every internal pre-processing step is normalized to the
        group template here.

    pet_output.warp_field: existing files
        Spatial normalization parameters .mat files.

    pet_output.gm_norm: existing file
        The output of the grey matter intensity normalization process.
        This is the last step in the PET signal correction, before registration.

    pet_output.atlas_pet: existing file
        Atlas image warped to PET space.
        If the `atlas_file` option is an existing file and `normalize_atlas` is True.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields  = ["in_file",
                  "anat",
                  "tissues",
                  "pet_template"]

    out_fields = ["brain_mask",
                  "coreg_others",
                  "coreg_ref",
                  "pvc_warped",
                  "pet_warped",
                  "warp_field",
                  "pvc_out",
                  "pvc_mask",
                  "gm_norm",]

    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields  += ["atlas_anat"]
        out_fields += ["atlas_pet" ]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
                           name="pet_input")

    # workflow to perform partial volume correction
    petpvc = petpvc_workflow(wf_name="petpvc")

    unzip_mrg = setup_node(Merge(4), name='merge_for_unzip')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    # warp each subject to the group template
    gunzip_template = setup_node(Gunzip(), name="gunzip_template",)
    gunzip_pet      = setup_node(Gunzip(), name="gunzip_pet",)

    warp_mrg = setup_node(Merge(2), name='merge_for_warp')
    warp2template = setup_node(spm.Normalize(jobtype="estwrite", out_prefix="wgrptemplate_"),
                               name="warp2template",)

    get_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="get_bbox")

    # output
    pet_output = setup_node(IdentityInterface(fields=out_fields), name="pet_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    wf.connect([
                # inputs
                (pet_input,   petpvc,  [("in_file", "pvc_input.in_file"),
                                        ("anat",    "pvc_input.reference_file"),
                                        ("tissues", "pvc_input.tissues")]),

                # get template bounding box to apply to results
                (pet_input, get_bbox,  [("pet_template", "in_file")]),

                # gunzip some inputs
                (pet_input, gunzip_pet,      [("in_file",      "in_file")]),
                (pet_input, gunzip_template, [("pet_template", "in_file")]),

                # gunzip some files for SPM Normalize
                (petpvc,    unzip_mrg, [("pvc_output.pvc_out",    "in1"),
                                        ("pvc_output.brain_mask", "in2"),
                                        ("pvc_output.gm_norm",    "in3")]),
                (pet_input, unzip_mrg, [("in_file",               "in4")]),

                (unzip_mrg, gunzipper, [("out", "in_file")]),

                (gunzipper, warp_mrg,  [("out_file", "in1")]),

                (warp_mrg, warp2template, [(("out", flatten_list), "apply_to_files")]),

                # prepare the target parameters of the warp to template
                (gunzip_pet,      warp2template, [("out_file", "source")]),
                (gunzip_template, warp2template, [("out_file", "template")]),
                (get_bbox,        warp2template, [("bbox",     "write_bounding_box")]),

                # output
                (warp2template, pet_output, [("normalization_parameters", "warp_field"),
                                             ("normalized_files" ,        "pvc_warped"),
                                             ("normalized_source",        "pet_warped"),
                                            ]),

                # output
                (petpvc,   pet_output, [("pvc_output.pvc_out",      "pvc_out"),
                                        ("pvc_output.brain_mask",   "brain_mask"),
                                        ("pvc_output.coreg_ref",    "coreg_ref"),
                                        ("pvc_output.coreg_others", "coreg_others"),
                                        ("pvc_output.gm_norm",      "gm_norm")]),
                ])

    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"), name="coreg_atlas")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
                    (pet_input,   coreg_atlas, [("anat",                 "source")]),
                    (petpvc,      coreg_atlas, [("pvc_output.coreg_ref", "target")]),
                    (pet_input,   coreg_atlas, [("atlas_anat",           "apply_to_files")]),
                    (coreg_atlas, pet_output,  [("coregistered_files",   "atlas_pet")]),

                    # warp the atlas to the template space as well
                    (coreg_atlas, warp_mrg,    [("coregistered_files",   "in2")]),
        ])

    return wf
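The `flatten_list` helper used in the `warp_mrg` → `warp2template` connection above is imported elsewhere in the package. A hypothetical sketch of the behaviour that connection relies on, since SPM Normalize expects a flat list of file paths:

def flatten_list_sketch(in_list):
    # Flatten one level of nesting, leaving plain items untouched.
    flat = []
    for item in in_list:
        if isinstance(item, (list, tuple)):
            flat.extend(item)
        else:
            flat.append(item)
    return flat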
Example No. 25
    '/media/Data/FromHPC/output/fmriprep/sub-%s/ses-1/func/sub-*_ses-1_task-%s_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz'
}
#selectfiles = Node(SelectFiles(templates,
#                               base_directory=data_dir),
#                   name="selectfiles")

datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                               outfields=['func']),
                     name='datasource')
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '/media/Data/FromHPC/output/fmriprep/sub-%s/ses-1/func/sub-*_ses-1_task-%s_space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz'
datasource.inputs.template_args = info
datasource.inputs.sort_filelist = True
# add unzip node
# Gunzip - unzip functional
gunzip = MapNode(Gunzip(), name='gunzip', iterfield=['in_file'])

#os.chdir('/media/Data/work')


###########################
def subjectinfo(subject_id):

    from readConditionFiles_r_aPTSD import loadmat, _check_keys, _todict, readConditions
    import scipy.io as spio
    import os, json, glob, sys
    ###############################################################################################
    # Define experiment settings (data_dir = folder where the data files are present)
    data_dir = '/media/Data/FromHPC/output/fmriprep'
    from bids.grabbids import BIDSLayout
    layout = BIDSLayout(data_dir)
Example No. 26
def init_brain_extraction_wf(name='brain_extraction_wf',
                             in_template='OASIS30ANTs',
                             template_spec=None,
                             use_float=True,
                             normalization_quality='precise',
                             omp_nthreads=None,
                             mem_gb=3.0,
                             bids_suffix='T1w',
                             atropos_refine=True,
                             atropos_use_random_seed=True,
                             atropos_model=None,
                             use_laplacian=True,
                             bspline_fitting_distance=200):
    """
    Build a workflow for atlas-based brain extraction on anatomical MRI data.

    A Nipype implementation of the official ANTs' ``antsBrainExtraction.sh``
    workflow (only for 3D images).

    The official workflow is built as follows (and this implementation
    follows the same organization):

      1. Step 1 performs several clerical tasks (adding padding, calculating
         the Laplacian of inputs, affine initialization) and the core
         spatial normalization.
      2. Maps the brain mask into target space using the normalization
         calculated in 1.
      3. Superstep 1b: smart binarization of the brain mask
      4. Superstep 6: apply ATROPOS and massage its outputs
      5. Superstep 7: use results from 4 to refine the brain mask

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.anat.ants import init_brain_extraction_wf
            wf = init_brain_extraction_wf()

    Parameters
    ----------
    in_template : str
        Name of the skull-stripping template ('OASIS30ANTs', 'NKI', or
        path).
        The brain template from which regions will be projected
        Anatomical template created using e.g. LPBA40 data set with
        ``buildtemplateparallel.sh`` in ANTs.
        The workflow will automatically search for a brain probability
        mask created using e.g. LPBA40 data set which have brain masks
        defined, and warped to anatomical template and
        averaged resulting in a probability image.
    use_float : bool
        Whether single precision should be used
    normalization_quality : str
        Use more precise or faster registration parameters
        (default: ``precise``, other possible values: ``testing``)
    omp_nthreads : int
        Maximum number of threads an individual process may use
    mem_gb : float
        Estimated peak memory consumption of the most hungry nodes
        in the workflow
    bids_suffix : str
        Sequence type of the first input image. For a list of acceptable values
        see https://bids-specification.readthedocs.io/en/latest/\
04-modality-specific-files/01-magnetic-resonance-imaging-data.html#anatomy-imaging-data
    atropos_refine : bool
        Enables or disables the whole ATROPOS sub-workflow
    atropos_use_random_seed : bool
        Whether ATROPOS should generate a random seed based on the
        system's clock
    atropos_model : tuple or None
        Allows specifying a particular segmentation model, overriding
        the defaults based on ``bids_suffix``
    use_laplacian : bool
        Enables or disables alignment of the Laplacian as an additional
        criterion for image registration quality (default: True)
    bspline_fitting_distance : float
        The size of the b-spline mesh grid elements, in mm (default: 200)
    name : str, optional
        Workflow name (default: antsBrainExtraction)

    Inputs
    ------
    in_files : list
        List of input anatomical images to be brain-extracted,
        typically T1-weighted.
        If a list of anatomical images is provided, subsequently
        specified images are used during the segmentation process.
        However, only the first image is used in the registration
        of priors.
        Our suggestion would be to specify the T1w as the first image.
    in_mask : list, optional
        Mask used for registration to limit the metric
        computation to a specific region.

    Outputs
    -------
    out_file : str
        Skull-stripped and :abbr:`INU (intensity non-uniformity)`-corrected ``in_files``
    out_mask : str
        Calculated brain mask
    bias_corrected : str
        The ``in_files`` input images, after :abbr:`INU (intensity non-uniformity)`
        correction, before skull-stripping.
    bias_image : str
        The :abbr:`INU (intensity non-uniformity)` field estimated for each
        input in ``in_files``
    out_segm : str
        Output segmentation by ATROPOS
    out_tpms : str
        Output :abbr:`TPMs (tissue probability maps)` by ATROPOS

    """
    from templateflow.api import get as get_template
    wf = pe.Workflow(name)

    template_spec = template_spec or {}

    # suffix passed via spec takes precedence
    template_spec['suffix'] = template_spec.get('suffix', bids_suffix)

    tpl_target_path, common_spec = get_template_specs(
        in_template, template_spec=template_spec)

    # Get probabilistic brain mask if available
    tpl_mask_path = get_template(
        in_template, label='brain', suffix='probseg', **common_spec) or \
        get_template(in_template, desc='brain', suffix='mask', **common_spec)

    if omp_nthreads is None or omp_nthreads < 1:
        omp_nthreads = cpu_count()

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_files', 'in_mask']),
                        name='inputnode')

    # Try to find a registration mask, set if available
    tpl_regmask_path = get_template(in_template,
                                    desc='BrainCerebellumExtraction',
                                    suffix='mask',
                                    **common_spec)
    if tpl_regmask_path:
        inputnode.inputs.in_mask = str(tpl_regmask_path)

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'out_file', 'out_mask', 'bias_corrected', 'bias_image', 'out_segm',
        'out_tpms'
    ]),
                         name='outputnode')

    copy_xform = pe.Node(CopyXForm(
        fields=['out_file', 'out_mask', 'bias_corrected', 'bias_image']),
                         name='copy_xform',
                         run_without_submitting=True)

    trunc = pe.MapNode(ImageMath(operation='TruncateImageIntensity',
                                 op2='0.01 0.999 256'),
                       name='truncate_images',
                       iterfield=['op1'])
    # inu_n4 = pe.MapNode(
    #     N4BiasFieldCorrection(
    #         dimension=3, save_bias=False, copy_header=True,
    #         n_iterations=[50] * 4, convergence_threshold=1e-7, shrink_factor=4,
    #         bspline_fitting_distance=bspline_fitting_distance),
    #     n_procs=omp_nthreads, name='inu_n4', iterfield=['input_image'])

    gunzip_4n4 = pe.MapNode(Gunzip(), name="gunzip_con", iterfield=['in_file'])

    inu_n4 = pe.MapNode(
        Segment(gm_output_type=[False, False, False],
                wm_output_type=[False, False, False],
                csf_output_type=[False, False, False],
                clean_masks="thorough",
                save_bias_corrected=True,
                bias_regularization=0.001,
                bias_fwhm=30,
                sampling_distance=3,
                use_mcr=True,
                affine_regularization="mni"),
        iterfield=['data'],
        name='inu_n4',
        mem_gb=32,
        serial=True,
        run_without_submitting=True)  # n_procs=1 for reproducibility

    res_tmpl = pe.Node(ResampleImageBySpacing(out_spacing=(4, 4, 4),
                                              apply_smoothing=True),
                       name='res_tmpl')
    res_tmpl.inputs.input_image = tpl_target_path
    res_target = pe.Node(ResampleImageBySpacing(out_spacing=(4, 4, 4),
                                                apply_smoothing=True),
                         name='res_target')


    # Initialize transforms with antsAI
    init_aff = pe.Node(AI(metric=('Mattes', 32, 'Regular', 0.25),
                          transform=('Affine', 0.1),
                          search_factor=(15, 0.1),
                          principal_axes=False,
                          convergence=(10, 1e-6, 10),
                          verbose=True),
                       name='init_aff',
                       n_procs=omp_nthreads)

    # Tolerate missing ANTs at construction time
    _ants_version = Registration().version
    if _ants_version and parseversion(_ants_version) >= Version('2.3.0'):
        init_aff.inputs.search_grid = (40, (0, 40, 40))

    # Set up spatial normalization
    settings_file = 'antsBrainExtraction_%s.json' if use_laplacian \
        else 'antsBrainExtractionNoLaplacian_%s.json'
    norm = pe.Node(
        Registration(from_file=pkgr_fn('niworkflows.data', settings_file %
                                       normalization_quality)),
        name='norm',
        n_procs=omp_nthreads,
        mem_gb=mem_gb)
    norm.inputs.float = use_float
    fixed_mask_trait = 'fixed_image_mask'
    if _ants_version and parseversion(_ants_version) >= Version('2.2.0'):
        fixed_mask_trait += 's'

    map_brainmask = pe.Node(ApplyTransforms(interpolation='Gaussian',
                                            float=True),
                            name='map_brainmask',
                            mem_gb=1)
    map_brainmask.inputs.input_image = str(tpl_mask_path)

    thr_brainmask = pe.Node(ThresholdImage(dimension=3,
                                           th_low=0.5,
                                           th_high=1.0,
                                           inside_value=1,
                                           outside_value=0),
                            name='thr_brainmask')

    # Morphological dilation, radius=2
    dil_brainmask = pe.Node(ImageMath(operation='MD', op2='2'),
                            name='dil_brainmask')
    # Get largest connected component
    get_brainmask = pe.Node(ImageMath(operation='GetLargestComponent'),
                            name='get_brainmask')

    # Refine INU correction
    inu_n4_final = pe.MapNode(N4BiasFieldCorrection(
        dimension=3,
        save_bias=True,
        copy_header=True,
        n_iterations=[50] * 5,
        convergence_threshold=1e-7,
        shrink_factor=4,
        bspline_fitting_distance=bspline_fitting_distance),
                              n_procs=omp_nthreads,
                              name='inu_n4_final',
                              iterfield=['input_image'])
    if _ants_version and parseversion(_ants_version) >= Version('2.1.0'):
        inu_n4_final.inputs.rescale_intensities = True
    else:
        warn(
            """\
Found ANTs version %s, which is too old. Please consider upgrading to 2.1.0 or \
greater so that the --rescale-intensities option is available with \
N4BiasFieldCorrection.""" % _ants_version, DeprecationWarning)

    # Apply mask
    apply_mask = pe.MapNode(ApplyMask(),
                            iterfield=['in_file'],
                            name='apply_mask')

    wf.connect([
        (inputnode, trunc, [('in_files', 'op1')]),
        (inputnode, copy_xform, [(('in_files', _pop), 'hdr_file')]),
        (inputnode, inu_n4_final, [('in_files', 'input_image')]),
        (inputnode, init_aff, [('in_mask', 'fixed_image_mask')]),
        (inputnode, norm, [('in_mask', fixed_mask_trait)]),
        (inputnode, map_brainmask, [(('in_files', _pop), 'reference_image')]),
        (trunc, gunzip_4n4, [('output_image', 'in_file')]),
        (gunzip_4n4, inu_n4, [('out_file', 'data')]),
        (inu_n4, res_target, [(('bias_corrected_image', _pop), 'input_image')
                              ]),
        (res_tmpl, init_aff, [('output_image', 'fixed_image')]),
        (res_target, init_aff, [('output_image', 'moving_image')]),
        (init_aff, norm, [('output_transform', 'initial_moving_transform')]),
        (norm, map_brainmask, [('reverse_transforms', 'transforms'),
                               ('reverse_invert_flags',
                                'invert_transform_flags')]),
        (map_brainmask, thr_brainmask, [('output_image', 'input_image')]),
        (thr_brainmask, dil_brainmask, [('output_image', 'op1')]),
        (dil_brainmask, get_brainmask, [('output_image', 'op1')]),
        (inu_n4_final, apply_mask, [('output_image', 'in_file')]),
        (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
        (get_brainmask, copy_xform, [('output_image', 'out_mask')]),
        (apply_mask, copy_xform, [('out_file', 'out_file')]),
        (inu_n4_final, copy_xform, [('output_image', 'bias_corrected'),
                                    ('bias_image', 'bias_image')]),
        (copy_xform, outputnode, [('out_file', 'out_file'),
                                  ('out_mask', 'out_mask'),
                                  ('bias_corrected', 'bias_corrected'),
                                  ('bias_image', 'bias_image')]),
    ])

    if use_laplacian:
        lap_tmpl = pe.Node(ImageMath(operation='Laplacian', op2='1.5 1'),
                           name='lap_tmpl')
        lap_tmpl.inputs.op1 = tpl_target_path
        lap_target = pe.Node(ImageMath(operation='Laplacian', op2='1.5 1'),
                             name='lap_target')
        mrg_tmpl = pe.Node(niu.Merge(2), name='mrg_tmpl')
        mrg_tmpl.inputs.in1 = tpl_target_path
        mrg_target = pe.Node(niu.Merge(2), name='mrg_target')
        wf.connect([
            (inu_n4, lap_target, [(('bias_corrected_image', _pop), 'op1')]),
            (lap_tmpl, mrg_tmpl, [('output_image', 'in2')]),
            (inu_n4, mrg_target, [('bias_corrected_image', 'in1')]),
            (lap_target, mrg_target, [('output_image', 'in2')]),
            (mrg_tmpl, norm, [('out', 'fixed_image')]),
            (mrg_target, norm, [('out', 'moving_image')]),
        ])
    else:
        norm.inputs.fixed_image = tpl_target_path
        wf.connect([
            (inu_n4, norm, [(('bias_corrected_image', _pop), 'moving_image')]),
        ])

    if atropos_refine:
        atropos_model = atropos_model or list(
            ATROPOS_MODELS[bids_suffix].values())
        atropos_wf = init_atropos_wf(
            use_random_seed=atropos_use_random_seed,
            omp_nthreads=omp_nthreads,
            mem_gb=mem_gb,
            in_segmentation_model=atropos_model,
        )
        sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1),
                         name='sel_wm',
                         run_without_submitting=True)

        wf.disconnect([
            (get_brainmask, apply_mask, [('output_image', 'mask_file')]),
            (copy_xform, outputnode, [('out_mask', 'out_mask')]),
        ])
        wf.connect([
            (inu_n4, atropos_wf, [('bias_corrected_image',
                                   'inputnode.in_files')]),
            (thr_brainmask, atropos_wf, [('output_image', 'inputnode.in_mask')
                                         ]),
            (get_brainmask, atropos_wf, [('output_image',
                                          'inputnode.in_mask_dilated')]),
            (atropos_wf, sel_wm, [('outputnode.out_tpms', 'inlist')]),
            (sel_wm, inu_n4_final, [('out', 'weight_image')]),
            (atropos_wf, apply_mask, [('outputnode.out_mask', 'mask_file')]),
            (atropos_wf, outputnode, [('outputnode.out_mask', 'out_mask'),
                                      ('outputnode.out_segm', 'out_segm'),
                                      ('outputnode.out_tpms', 'out_tpms')]),
        ])
    return wf
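`_pop`, used in several connections above, is defined elsewhere in niworkflows. A sketch of the behaviour those connections rely on (assumption: it simply picks the first element when given a list):

def _pop_sketch(in_files):
    # Return the first file when a list/tuple is passed, else pass through.
    if isinstance(in_files, (list, tuple)):
        return in_files[0]
    return in_files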
Example No. 27
def setup_DARTEL_warp_wf(subj_list, data_template, warp_template, work_dir,
                         out_dir):
    '''
    subj_list: list of strings for each subject
        e.g. ['sub-001', 'sub-002', 'sub-003']
    data_template: string to identify all data files (using glob).
            e.g. template = '/home/neuro/data/rest1_AROMA/nosmooth/sub-*/model/sub-*/_modelestimate0/res4d.nii.gz'
                The template can identify a larger set of files, and the subject_list will grab a subset.
                    e.g. The template may grab sub-001, sub-002, sub-003 ...
                    But if the subject_list only includes sub-001, then only sub-001 will be used.
                    This means the template can overgeneralize, but specific subjects can be easily excluded (e.g. for movement)
    warp_template: string to identify all dartel flowfield files (using glob).
        same as above.
        Dartel flowfield files are made by create_DARTEL_wf,
            also see jtnipyutil.fsmap.make_PAG_masks, and jtnipyutil.fsmap.create_aqueduct_template
    work_dir: string naming directory to store work.
    out_dir: string naming directory for output.
    '''
    import os
    import nibabel as nib
    import numpy as np
    import nipype.pipeline.engine as pe
    from nipype import IdentityInterface
    from nipype.interfaces.io import DataSink
    from nipype.interfaces.utility.wrappers import Function
    from nipype.interfaces.spm.preprocess import CreateWarped
    from jtnipyutil.util import files_from_template

    # create working directory if necessary.
    if not os.path.isdir(work_dir):
        os.makedirs(work_dir)
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)

    # set up data warp workflow
    apply_warp_wf = pe.Workflow(name='apply_warp_wf')
    apply_warp_wf.base_dir = work_dir

    # set up file lists
    inputspec = pe.Node(IdentityInterface(fields=['file_list', 'warp_list']),
                        name='inputspec')
    inputspec.inputs.file_list = files_from_template(subj_list, data_template)
    inputspec.inputs.warp_list = files_from_template(subj_list, warp_template)

    # rename files, as names are often indistinguishable (e.g. res4d.nii.gz)
    def rename_list(in_list):
        import nibabel as nib
        import os
        out_list = []
        for file in in_list:
            file_in = nib.load(file)
            nib.save(file_in,
                     os.path.join(os.getcwd(), '_'.join(file.split('/')[-3:])))
            out_list.append(
                os.path.join(os.getcwd(), '_'.join(file.split('/')[-3:])))
        return out_list

    rename = pe.Node(Function(input_names=['in_list'],
                              output_names=['out_list'],
                              function=rename_list),
                     name='rename')

    # dartel warping node.
    warp_data = pe.Node(interface=CreateWarped(), name='warp_data')
    #     warp_data.inputs.image_files = # from inputspec OR gunzip
    #     warp_data.inputs.flowfield_files = # from inputspec

    sinker = pe.Node(DataSink(), name='sinker')
    sinker.inputs.base_directory = out_dir

    # connect the shared nodes; unzipping (if needed) is handled just below.
    apply_warp_wf.connect([
        (inputspec, rename, [('file_list', 'in_list')]),
        (inputspec, warp_data, [('warp_list', 'flowfield_files')]),
        (warp_data, sinker, [('warped_files', 'warped_files')])
    ])
    if any('nii.gz' in file
           for file in files_from_template(subj_list, data_template)):
        from nipype.algorithms.misc import Gunzip
        gunzip = pe.MapNode(interface=Gunzip(),
                            name='gunzip',
                            iterfield=['in_file'])
        apply_warp_wf.connect([(rename, gunzip, [('out_list', 'in_file')]),
                               (gunzip, warp_data, [('out_file', 'image_files')
                                                    ])])
    else:
        apply_warp_wf.connect([(rename, warp_data, [('out_list', 'image_files')
                                                    ])])
    return apply_warp_wf
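A minimal usage sketch built from the docstring above; the data template is taken from the docstring, while the flowfield template and directories are placeholders:

wf = setup_DARTEL_warp_wf(
    subj_list=['sub-001', 'sub-002'],
    data_template='/home/neuro/data/rest1_AROMA/nosmooth/sub-*/model/sub-*/_modelestimate0/res4d.nii.gz',
    warp_template='/home/neuro/data/dartel/u_rc1sub-*_flowfield.nii',  # hypothetical pattern
    work_dir='/scratch/work',
    out_dir='/scratch/out',
)
wf.run()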
Example No. 28
def init_n4_only_wf(atropos_model=None,
                    atropos_refine=True,
                    atropos_use_random_seed=True,
                    bids_suffix='T1w',
                    mem_gb=3.0,
                    name='n4_only_wf',
                    omp_nthreads=None):
    """
    Build a workflow to sidetrack brain extraction on skull-stripped datasets.

    An alternative workflow to "init_brain_extraction_wf", for anatomical
    images which have already been brain extracted.

      1. Creates brain mask assuming all zero voxels are outside the brain
      2. Applies N4 bias field correction
      3. (Optional) apply ATROPOS and massage its outputs
      4. Use results from 3 to refine N4 bias field correction

    Workflow Graph
        .. workflow::
            :graph2use: orig
            :simple_form: yes

            from niworkflows.anat.ants import init_n4_only_wf
            wf = init_n4_only_wf()

    Parameters
    ----------
    omp_nthreads : int
        Maximum number of threads an individual process may use
    mem_gb : float
        Estimated peak memory consumption of the most hungry nodes
    bids_suffix : str
        Sequence type of the first input image. For a list of acceptable values see
        https://bids-specification.readthedocs.io/en/latest/04-modality-specific-files/01-magnetic-resonance-imaging-data.html#anatomy-imaging-data
    atropos_refine : bool
        Enables or disables the whole ATROPOS sub-workflow
    atropos_use_random_seed : bool
        Whether ATROPOS should generate a random seed based on the
        system's clock
    atropos_model : tuple or None
        Allows specifying a particular segmentation model, overriding
        the defaults based on ``bids_suffix``
    name : str, optional
        Workflow name (default: ``'n4_only_wf'``).

    Inputs
    ------
    in_files
        List of input anatomical images to be bias corrected,
        typically T1-weighted.
        If a list of anatomical images is provided, subsequently
        specified images are used during the segmentation process.
        However, only the first image is used in the registration
        of priors.
        Our suggestion would be to specify the T1w as the first image.

    Outputs
    -------
    out_file
        :abbr:`INU (intensity non-uniformity)`-corrected ``in_files``
    out_mask
        Calculated brain mask
    bias_corrected
        Same as "out_file", provided for consistency with brain extraction
    bias_image
        The :abbr:`INU (intensity non-uniformity)` field estimated for each
        input in ``in_files``
    out_segm
        Output segmentation by ATROPOS
    out_tpms
        Output :abbr:`TPMs (tissue probability maps)` by ATROPOS

    """
    wf = pe.Workflow(name)

    inputnode = pe.Node(niu.IdentityInterface(fields=['in_files', 'in_mask']),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=[
        'out_file', 'out_mask', 'bias_corrected', 'bias_image', 'out_segm',
        'out_tpms'
    ]),
                         name='outputnode')

    # Create brain mask
    thr_brainmask = pe.Node(Binarize(thresh_low=2), name='binarize')

    # INU correction
    inu_n4_final = pe.MapNode(N4BiasFieldCorrection(
        dimension=3,
        save_bias=True,
        copy_header=True,
        n_iterations=[50] * 5,
        convergence_threshold=1e-7,
        shrink_factor=4,
        bspline_fitting_distance=200),
                              n_procs=omp_nthreads,
                              name='inu_n4_final',
                              iterfield=['input_image'])

    # Check ANTs version
    try:
        inu_n4_final.inputs.rescale_intensities = True
    except ValueError:
        warn(
            "The installed ANTs version too old. Please consider upgrading to "
            "2.1.0 or greater.", DeprecationWarning)

    wf.connect([(inputnode, inu_n4_final, [('in_files', 'input_image')]),
                (inputnode, thr_brainmask, [(('in_files', _pop), 'in_file')]),
                (thr_brainmask, outputnode, [('out_mask', 'out_mask')]),
                (inu_n4_final, outputnode, [('output_image', 'out_file')]),
                (inu_n4_final, outputnode, [('output_image', 'bias_corrected')
                                            ]),
                (inu_n4_final, outputnode, [('bias_image', 'bias_image')])])

    # If atropos_refine is enabled, run the N4 correction twice
    if atropos_refine:
        # Morphological dilation, radius=2
        dil_brainmask = pe.Node(ImageMath(operation='MD', op2='2'),
                                name='dil_brainmask')
        # Get largest connected component
        get_brainmask = pe.Node(ImageMath(operation='GetLargestComponent'),
                                name='get_brainmask')
        atropos_model = atropos_model or list(
            ATROPOS_MODELS[bids_suffix].values())
        atropos_wf = init_atropos_wf(
            use_random_seed=atropos_use_random_seed,
            omp_nthreads=omp_nthreads,
            mem_gb=mem_gb,
            in_segmentation_model=atropos_model,
        )
        sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1),
                         name='sel_wm',
                         run_without_submitting=True)

        gunzip_4n4 = pe.MapNode(Gunzip(),
                                name="gunzip_con",
                                iterfield=['in_file'])

        inu_n4 = pe.MapNode(
            Segment(gm_output_type=[False, False, False],
                    wm_output_type=[False, False, False],
                    csf_output_type=[False, False, False],
                    clean_masks="thorough",
                    save_bias_corrected=True,
                    bias_regularization=0.001,
                    bias_fwhm=30,
                    sampling_distance=3,
                    use_mcr=True,
                    affine_regularization="mni"),
            iterfield=['data'],
            name='inu_n4',
            mem_gb=32,
            serial=True,
            run_without_submitting=True)  # n_procs=1 for reproducibility

        wf.connect([
            (inputnode, gunzip_4n4, [('in_files', 'in_file')]),
            (gunzip_4n4, inu_n4, [('out_file', 'data')]),
            (inu_n4, atropos_wf, [('bias_corrected_image',
                                   'inputnode.in_files')]),
            (thr_brainmask, atropos_wf, [('out_mask', 'inputnode.in_mask')]),
            (thr_brainmask, dil_brainmask, [('out_mask', 'op1')]),
            (dil_brainmask, get_brainmask, [('output_image', 'op1')]),
            (get_brainmask, atropos_wf, [('output_image',
                                          'inputnode.in_mask_dilated')]),
            (atropos_wf, sel_wm, [('outputnode.out_tpms', 'inlist')]),
            (sel_wm, inu_n4_final, [('out', 'weight_image')]),
            (atropos_wf, outputnode, [('outputnode.out_segm', 'out_segm'),
                                      ('outputnode.out_tpms', 'out_tpms')]),
        ])

    return wf
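The brain mask here comes from the `Binarize` interface. As a standalone illustration of the "all zero voxels are outside the brain" idea from the docstring, a sketch using nibabel/numpy (not the niworkflows implementation):

import nibabel as nib
import numpy as np


def zero_voxel_mask_sketch(in_file, out_file='brain_mask.nii.gz'):
    # Mark every non-zero voxel as brain; zeros are assumed background.
    img = nib.load(in_file)
    mask = (np.asanyarray(img.dataobj) > 0).astype(np.uint8)
    nib.save(nib.Nifti1Image(mask, img.affine), out_file)
    return out_file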
Example No. 29
def auto_spm_slicetime(in_file=traits.Undefined,
                       out_prefix='stc',
                       num_slices=traits.Undefined,
                       time_repetition=traits.Undefined,
                       time_acquisition=traits.Undefined,
                       ref_slice=traits.Undefined,
                       slice_order=traits.Undefined,
                       wf_name='auto_spm_slicetime'):
    """ A workflow that tries to automatically read the slice timing correction parameters
    from the input file and passes them to a spm.SliceTiming node.

    Parameters
    ----------
    in_file: str
        Path to the input file.

    out_prefix: str
        Prefix for the output file name.
        Default: 'stc'

    num_slices: int
        Number of slices of `in_file`.

    time_repetition: int or str
        The repetition time (TR) of the input dataset, in seconds.
        If left undefined, the TR is read from the NIfTI image header.

    time_acquisition: int
        Time of volume acquisition. Usually calculated as TR - (TR / num_slices).

    ref_slice: int
        Index of the reference slice

    slice_order: list of int
        List of integers with the order in which slices are acquired

    wf_name: str
        Name of the workflow

    Nipype Inputs
    -------------
    ## Mandatory:
    stc_input.in_file:

    ## Optional:
    stc_input.num_slices

    stc_input.slice_order

    stc_input.time_repetition

    stc_input.time_acquisition

    stc_input.ref_slice

    stc_input.slice_mode

    Nipype Outputs
    --------------
    stc_output.timecorrected_files

    stc_output.time_repetition

    Returns
    -------
    auto_spm_stc: nipype Workflow
        SPM slice timing correction workflow with automatic
        parameters detection.
    """

    # helper functions
    def _sum_one_to_each(slice_order):  # SPM starts count from 1
        return [i + 1 for i in slice_order]

    def _sum_one(num):
        return num + 1

    def _pick_first(sequence):
        return sequence[0]

    # the input and output nodes
    stc_input = setup_node(IdentityInterface(fields=[
        "in_file",
        "num_slices",
        "slice_order",
        "time_repetition",
        "time_acquisition",
        "ref_slice",
        "slice_mode",
    ]),
                           name="stc_input")

    stc_output = setup_node(IdentityInterface(fields=[
        "timecorrected_files",
        "time_repetition",
    ]),
                            name="stc_output")

    # Declare the processing nodes
    params = setup_node(STCParametersInterface(in_files=in_file),
                        name='stc_params')
    gunzip = setup_node(Gunzip(), name="gunzip")
    stc = setup_node(spm_slicetime(out_prefix=out_prefix,
                                   num_slices=num_slices,
                                   time_repetition=time_repetition,
                                   time_acquisition=time_acquisition,
                                   ref_slice=ref_slice,
                                   slice_order=slice_order),
                     name='slice_timer')

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # Connect the nodes
    wf.connect([
        # input node
        (stc_input, params, [
            ("in_file", "in_files"),
            ("num_slices", "num_slices"),
            ("slice_order", "slice_order"),
            ("time_repetition", "time_repetition"),
            ("time_acquisition", "time_acquisition"),
            ("ref_slice", "ref_slice"),
            ("slice_mode", "slice_mode"),
        ]),

        # processing nodes
        (params, gunzip, [(("in_files", _pick_first), "in_file")]),
        (params, stc, [
            (("slice_order", _sum_one_to_each), "slice_order"),
            (("ref_slice", _sum_one), "ref_slice"),
            ("num_slices", "num_slices"),
            ("time_acquisition", "time_acquisition"),
            ("time_repetition", "time_repetition"),
        ]),
        (gunzip, stc, [("out_file", "in_files")]),

        # output node
        (params, stc_output, [("time_repetition", "time_repetition")]),
        (stc, stc_output, [("timecorrected_files", "timecorrected_files")]),
    ])

    return wf
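A rough sketch of the kind of header-based parameter detection that `STCParametersInterface` automates (assumptions: a 4D NIfTI with slices along the third axis and the TR stored in the header zooms; the real interface may do considerably more):

import nibabel as nib


def guess_stc_params_sketch(in_file):
    # Read slice count and TR from the NIfTI header and derive TA.
    img = nib.load(in_file)
    num_slices = img.shape[2]
    time_repetition = float(img.header.get_zooms()[3])  # TR in seconds
    time_acquisition = time_repetition - time_repetition / num_slices
    return num_slices, time_repetition, time_acquisition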
Example No. 30
def run(base_dir):
    template = '/home/brainlab/Desktop/Rudas/Data/Parcellation/TPM.nii'
    matlab_cmd = '/home/brainlab/Desktop/Rudas/Tools/spm12_r7487/spm12/run_spm12.sh /home/brainlab/Desktop/Rudas/Tools/MCR/v713/ script'
    spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)

    print('SPM version: ' + str(spm.SPMCommand().version))

    structural_dir = '/home/brainlab/Desktop/Rudas/Data/Propofol/Structurals/'
    experiment_dir = opj(base_dir, 'output/')
    output_dir = 'datasink'
    working_dir = 'workingdir'
    # Shorter subject list, kept commented out for quick testing:
    # subject_list = ['2014_05_02_02CB',
    #                 '2014_05_16_16RA',
    #                 '2014_05_30_30AQ',
    #                 '2014_07_04_04HD']

    # List of subject identifiers:
    subject_list = [
        '2014_05_02_02CB', '2014_05_16_16RA', '2014_05_30_30AQ',
        '2014_07_04_04HD', '2014_07_04_04SG', '2014_08_13_13CA',
        '2014_10_08_08BC', '2014_10_08_08VR', '2014_10_22_22CY',
        '2014_10_22_22TK', '2014_11_17_17EK', '2014_11_17_17NA',
        '2014_11_19_19SA', '2014_11_19_AK', '2014_11_25.25JK',
        '2014_11_27_27HF', '2014_12_10_10JR'
    ]

    fwhm = 8  # Smoothing width to apply (Gaussian kernel FWHM, in mm)
    TR = 2  # Repetition time (s)
    init_volume = 0  # Index of the first volume to keep; earlier dummy scans are skipped
    iso_size = 2  # Isotropic voxel size (in mm) to resample the functional images to

    # ExtractROI - skip dummy scans
    extract = Node(ExtractROI(t_min=init_volume,
                              t_size=-1,
                              output_type='NIFTI'),
                   name="extract")

    # MCFLIRT - motion correction
    mcflirt = Node(MCFLIRT(mean_vol=True, save_plots=True,
                           output_type='NIFTI'),
                   name="motion_correction")

    # SliceTimer - correct for slice wise acquisition
    slicetimer = Node(SliceTimer(index_dir=False,
                                 interleaved=True,
                                 output_type='NIFTI',
                                 time_repetition=TR),
                      name="slice_timing_correction")

    # Smooth - image smoothing
    smooth = Node(spm.Smooth(fwhm=fwhm), name="smooth")

    n4bias = Node(N4Bias(out_file='t1_n4bias.nii.gz'), name='n4bias')

    descomposition = Node(Descomposition(n_components=20,
                                         low_pass=0.1,
                                         high_pass=0.01,
                                         tr=TR),
                          name='descomposition')

    # Artifact Detection - determines outliers in functional images
    art = Node(ArtifactDetect(norm_threshold=2,
                              zintensity_threshold=3,
                              mask_type='spm_global',
                              parameter_source='FSL',
                              use_differences=[True, False],
                              plot_type='svg'),
               name="artifact_detection")

    extract_confounds_ws_csf = Node(
        ExtractConfounds(out_file='ev_without_gs.csv'),
        name='extract_confounds_ws_csf')

    extract_confounds_gs = Node(ExtractConfounds(out_file='ev_with_gs.csv',
                                                 delimiter=','),
                                name='extract_confounds_global_signal')

    signal_extraction = Node(SignalExtraction(
        time_series_out_file='time_series.csv',
        correlation_matrix_out_file='correlation_matrix.png',
        atlas_identifier='cort-maxprob-thr25-2mm',
        tr=TR,
        plot=True),
                             name='signal_extraction')

    art_remotion = Node(ArtifacRemotion(out_file='fmri_art_removed.nii'),
                        name='artifact_remotion')

    # BET - Skullstrip anatomical and functional images
    bet_t1 = Node(BET(frac=0.5, robust=True, mask=True,
                      output_type='NIFTI_GZ'),
                  name="bet_t1")

    # FAST - Image Segmentation
    segmentation = Node(FAST(output_type='NIFTI'), name="segmentation")

    # Normalize - normalizes functional and structural images to the MNI template
    normalize_fmri = Node(Normalize12(jobtype='estwrite',
                                      tpm=template,
                                      write_voxel_sizes=[2, 2, 2],
                                      write_bounding_box=[[-90, -126, -72],
                                                          [90, 90, 108]]),
                          name="normalize_fmri")

    gunzip = Node(Gunzip(), name="gunzip")
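    # (needed because SPM interfaces cannot read gzipped NIfTI: the N4-corrected
    # T1 from n4bias is decompressed here before Normalize12 consumes it)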

    normalize_t1 = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                        name="normalize_t1")

    normalize_masks = Node(Normalize12(
        jobtype='estwrite',
        tpm=template,
        write_voxel_sizes=[iso_size, iso_size, iso_size],
        write_bounding_box=[[-90, -126, -72], [90, 90, 108]]),
                           name="normalize_masks")

    # Threshold - Threshold WM probability image
    threshold = Node(Threshold(thresh=0.5, args='-bin',
                               output_type='NIFTI_GZ'),
                     name="wm_mask_threshold")

    # FLIRT - pre-alignment of functional images to anatomical images
    coreg_pre = Node(FLIRT(dof=6, output_type='NIFTI_GZ'),
                     name="linear_warp_estimation")

    # FLIRT - coregistration of functional images to anatomical images with BBR
    coreg_bbr = Node(FLIRT(dof=6,
                           cost='bbr',
                           schedule=opj(os.getenv('FSLDIR'),
                                        'etc/flirtsch/bbr.sch'),
                           output_type='NIFTI_GZ'),
                     name="nonlinear_warp_estimation")

    # Apply coregistration warp to functional images
    applywarp = Node(FLIRT(interp='spline',
                           apply_isoxfm=iso_size,
                           output_type='NIFTI'),
                     name="registration_fmri")

    # Apply coregistration warp to mean file
    applywarp_mean = Node(FLIRT(interp='spline',
                                apply_isoxfm=iso_size,
                                output_type='NIFTI_GZ'),
                          name="registration_mean_fmri")

    # Infosource - a function-free node to iterate over the list of subject names
    infosource = Node(IdentityInterface(fields=['subject_id']),
                      name="infosource")
    infosource.iterables = [('subject_id', subject_list)]

    # SelectFiles - to grab the data (alternative to DataGrabber)
    anat_file = opj(structural_dir, '{subject_id}', 't1.nii')
    func_file = opj('{subject_id}', 'fmri.nii')

    templates = {'anat': anat_file, 'func': func_file}

    selectfiles = Node(SelectFiles(templates, base_directory=base_dir),
                       name="selectfiles")

    # Datasink - creates output folder for important outputs
    datasink = Node(DataSink(base_directory=experiment_dir,
                             container=output_dir),
                    name="datasink")

    # Create a coregistration workflow
    coregwf = Workflow(name='coreg_fmri_to_t1')
    coregwf.base_dir = opj(experiment_dir, working_dir)

    # Create a preprocessing workflow
    preproc = Workflow(name='preproc')
    preproc.base_dir = opj(experiment_dir, working_dir)

    # Connect all components of the coregistration workflow

    coregwf.connect([
        (bet_t1, n4bias, [('out_file', 'in_file')]),
        (n4bias, segmentation, [('out_file', 'in_files')]),
        (segmentation, threshold, [(('partial_volume_files', get_latest),
                                    'in_file')]),
        (n4bias, coreg_pre, [('out_file', 'reference')]),
        (threshold, coreg_bbr, [('out_file', 'wm_seg')]),
        (coreg_pre, coreg_bbr, [('out_matrix_file', 'in_matrix_file')]),
        (coreg_bbr, applywarp, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp, [('out_file', 'reference')]),
        (coreg_bbr, applywarp_mean, [('out_matrix_file', 'in_matrix_file')]),
        (n4bias, applywarp_mean, [('out_file', 'reference')]),
    ])

    ## Use the following DataSink output substitutions
    substitutions = [('_subject_id_', 'sub-')]
    #                 ('_fwhm_', 'fwhm-'),
    #                 ('_roi', ''),
    #                 ('_mcf', ''),
    #                 ('_st', ''),
    #                 ('_flirt', ''),
    #                 ('.nii_mean_reg', '_mean'),
    #                 ('.nii.par', '.par'),
    #                 ]
    #subjFolders = [('fwhm-%s/' % f, 'fwhm-%s_' % f) for f in fwhm]

    #substitutions.extend(subjFolders)
    datasink.inputs.substitutions = substitutions

    # Connect all components of the preprocessing workflow
    preproc.connect([
        (infosource, selectfiles, [('subject_id', 'subject_id')]),
        (selectfiles, extract, [('func', 'in_file')]),
        (extract, mcflirt, [('roi_file', 'in_file')]),
        (mcflirt, slicetimer, [('out_file', 'in_file')]),
        (selectfiles, coregwf, [('anat', 'bet_t1.in_file'),
                                ('anat', 'nonlinear_warp_estimation.reference')
                                ]),
        (mcflirt, coregwf, [('mean_img', 'linear_warp_estimation.in_file'),
                            ('mean_img', 'nonlinear_warp_estimation.in_file'),
                            ('mean_img', 'registration_mean_fmri.in_file')]),
        (slicetimer, coregwf, [('slice_time_corrected_file',
                                'registration_fmri.in_file')]),
        (coregwf, art, [('registration_fmri.out_file', 'realigned_files')]),
        (mcflirt, art, [('par_file', 'realignment_parameters')]),
        (art, art_remotion, [('outlier_files', 'outlier_files')]),
        (coregwf, art_remotion, [('registration_fmri.out_file', 'in_file')]),
        (coregwf, gunzip, [('n4bias.out_file', 'in_file')]),
        (selectfiles, normalize_fmri, [('anat', 'image_to_align')]),
        (art_remotion, normalize_fmri, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_t1, [('anat', 'image_to_align')]),
        (gunzip, normalize_t1, [('out_file', 'apply_to_files')]),
        (selectfiles, normalize_masks, [('anat', 'image_to_align')]),
        (coregwf, normalize_masks, [(('segmentation.partial_volume_files',
                                      get_wm_csf), 'apply_to_files')]),
        (normalize_fmri, smooth, [('normalized_files', 'in_files')]),
        (smooth, extract_confounds_ws_csf, [('smoothed_files', 'in_file')]),
        (normalize_masks, extract_confounds_ws_csf, [('normalized_files',
                                                      'list_mask')]),
        (mcflirt, extract_confounds_ws_csf, [('par_file', 'file_concat')]),

        #(smooth, extract_confounds_gs, [('smoothed_files', 'in_file')]),
        #(normalize_t1, extract_confounds_gs, [(('normalized_files',change_to_list), 'list_mask')]),
        #(extract_confounds_ws_csf, extract_confounds_gs, [('out_file', 'file_concat')]),
        (smooth, signal_extraction, [('smoothed_files', 'in_file')]),
        #(extract_confounds_gs, signal_extraction, [('out_file', 'confounds_file')]),
        (extract_confounds_ws_csf, signal_extraction, [('out_file',
                                                        'confounds_file')]),

        #(smooth, descomposition, [('smoothed_files', 'in_file')]),
        #(extract_confounds_ws_csf, descomposition, [('out_file', 'confounds_file')]),

        #(extract_confounds_gs, datasink, [('out_file', 'preprocessing.@confounds_with_gs')]),
        (extract_confounds_ws_csf, datasink,
         [('out_file', 'preprocessing.@confounds_without_gs')]),
        (smooth, datasink, [('smoothed_files', 'preprocessing.@smoothed')]),
        (normalize_fmri, datasink, [('normalized_files',
                                     'preprocessing.@fmri_normalized')]),
        (normalize_t1, datasink, [('normalized_files',
                                   'preprocessing.@t1_normalized')]),
        (normalize_masks, datasink, [('normalized_files',
                                      'preprocessing.@masks_normalized')]),
        (signal_extraction, datasink, [('time_series_out_file',
                                        'preprocessing.@time_serie')]),
        (signal_extraction, datasink, [('correlation_matrix_out_file',
                                        'preprocessing.@correlation_matrix')]),
        (signal_extraction, datasink,
         [('fmri_cleaned_out_file', 'preprocessing.@fmri_cleaned_out_file')]),
        #(descomposition, datasink, [('out_file', 'preprocessing.@descomposition')]),
        #(descomposition, datasink, [('plot_files', 'preprocessing.@descomposition_plot_files')])
    ])

    preproc.write_graph(graph2use='colored', format='png', simple_form=True)
    preproc.run()
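
# Hedged sketches (not in the original excerpt) of two helper functions that
# the connections above reference but never define here. They assume FSL FAST
# returns its partial-volume maps in the order [CSF, GM, WM] (pve_0..pve_2).
def get_latest(files):
    """Pick the last partial-volume map, i.e. the WM probability image."""
    return files[-1]


def get_wm_csf(files):
    """Keep only the WM and CSF maps for nuisance-signal extraction."""
    return [files[2], files[0]]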
Ejemplo n.º 31
0
def spm_mrpet_preprocessing(wf_name="spm_mrpet_preproc"):
    """ Run the PET pre-processing workflow against the
    gunzip_pet.in_file files.
    It depends on the anat_preproc_workflow, so if that
    workflow has not been run yet, this function will run it too.

    # TODO: organize the anat2pet hack/condition somehow:
    If anat2pet:
    - SPM12 Coregister T1 and tissues to PET
    - PVC the PET image in PET space
    - SPM12 Warp PET to MNI
    else:
    - SPM12 Coregister PET to T1
    - PVC the PET image in anatomical space
    - SPM12 Warp PET in anatomical space to MNI through the
    `anat_to_mni_warp`.

    Parameters
    ----------
    wf_name: str
        Name of the workflow.

    Nipype Inputs
    -------------
    pet_input.in_file: traits.File
        The raw NIFTI_GZ PET image file

    pet_input.anat: traits.File
        Path to the high-contrast anatomical image.
        Reference file of the warp_field, i.e., the
        anatomical image in its native space.

    pet_input.anat_to_mni_warp: traits.File
        The warp field from the transformation of the
        anatomical image to the standard MNI space.

    pet_input.atlas_anat: traits.File
        The atlas file in anatomical space.

    pet_input.tissues: list of traits.File
        List of tissues files from the New Segment process.
        At least the first 3 tissues must be present.

    Nipype outputs
    --------------
    pet_output.pvc_out: existing file
        The results of the PVC process

    pet_output.brain_mask: existing file
        A brain mask calculated with the tissues file.

    pet_output.coreg_ref: existing file
        The coregistered reference image to PET space.

    pet_output.coreg_others: list of existing files
        List of coregistered files from coreg_pet.apply_to_files

    pet_output.pvc_warped: existing file
        Results from PETPVC normalized to MNI.
        The result of every internal pre-processing step
        is normalized to MNI here.

    pet_output.warp_field: existing files
        Spatial normalization parameters .mat files

    pet_output.gm_norm: existing file
        The output of the grey matter intensity
        normalization process.
        This is the last step in the PET signal correction,
        before registration.

    pet_output.atlas_pet: existing file
        Atlas image warped to PET space.
        If the `atlas_file` option is an existing file and
        `normalize_atlas` is True.

    Returns
    -------
    wf: nipype Workflow
    """
    # specify input and output fields
    in_fields  = ["in_file",
                  "anat",
                  "anat_to_mni_warp",
                  "tissues",]

    out_fields = ["brain_mask",
                  "coreg_others",
                  "coreg_ref",
                  "pvc_warped",
                  "pet_warped", # 'pet_warped' is a dummy entry to keep the fields pattern.
                  "warp_field",
                  "pvc_out",
                  "pvc_mask",
                  "gm_norm",]

    do_atlas, _ = check_atlas_file()
    if do_atlas:
        in_fields  += ["atlas_anat"]
        out_fields += ["atlas_pet" ]

    # input
    pet_input = setup_node(IdentityInterface(fields=in_fields, mandatory_inputs=True),
                           name="pet_input")

    # workflow to perform partial volume correction
    petpvc    = petpvc_workflow(wf_name="petpvc")

    merge_list = setup_node(Merge(4), name='merge_for_unzip')
    gunzipper = pe.MapNode(Gunzip(), name="gunzip", iterfield=['in_file'])

    warp_pet = setup_node(spm_normalize(), name="warp_pet")

    tpm_bbox = setup_node(Function(function=get_bounding_box,
                                   input_names=["in_file"],
                                   output_names=["bbox"]),
                          name="tpm_bbox")
    tpm_bbox.inputs.in_file = spm_tpm_priors_path()

    # output
    pet_output = setup_node(IdentityInterface(fields=out_fields), name="pet_output")

    # Create the workflow object
    wf = pe.Workflow(name=wf_name)

    # check how to perform the registration, to decide how to build the pipeline
    anat2pet = get_config_setting('registration.anat2pet', False)
    if anat2pet:
        wf.connect([
                    # inputs
                    (pet_input, petpvc,     [("in_file", "pvc_input.in_file"),
                                             ("anat",    "pvc_input.reference_file"),
                                             ("tissues", "pvc_input.tissues")]),

                    # gunzip some files for SPM Normalize
                    (petpvc,    merge_list, [("pvc_output.pvc_out",    "in1"),
                                             ("pvc_output.brain_mask", "in2"),
                                             ("pvc_output.gm_norm",    "in3")]),
                    (pet_input, merge_list, [("in_file",               "in4")]),

                    (merge_list, gunzipper, [("out", "in_file")]),

                    # warp the PET PVCed to MNI
                    (petpvc,    warp_pet,   [("pvc_output.coreg_ref", "image_to_align")]),
                    (gunzipper, warp_pet,   [("out_file",             "apply_to_files")]),
                    (tpm_bbox,  warp_pet,   [("bbox",                 "write_bounding_box")]),

                    # output
                    (petpvc,    pet_output, [("pvc_output.pvc_out",      "pvc_out"),
                                             ("pvc_output.brain_mask",   "brain_mask"),
                                             ("pvc_output.coreg_ref",    "coreg_ref"),
                                             ("pvc_output.coreg_others", "coreg_others"),
                                             ("pvc_output.gm_norm",      "gm_norm")]),

                    # output
                    (warp_pet,  pet_output, [("normalized_files",  "pvc_warped"),
                                             ("deformation_field", "warp_field")]),
                   ])
    else: # PET 2 ANAT
        collector  = setup_node(Merge(2), name='merge_for_warp')
        apply_warp = setup_node(spm_apply_deformations(), name="warp_pet")

        wf.connect([
                    # inputs
                    (pet_input, petpvc,     [("in_file", "pvc_input.in_file"),
                                             ("anat",    "pvc_input.reference_file"),
                                             ("tissues", "pvc_input.tissues")]),

                    # gunzip some files for SPM Normalize
                    (petpvc,    merge_list, [("pvc_output.pvc_out",    "in1"),
                                             ("pvc_output.brain_mask", "in2"),
                                             ("pvc_output.gm_norm",    "in3")]),
                    (pet_input, merge_list, [("in_file",               "in4")]),

                    (merge_list, gunzipper, [("out",                   "in_file")]),

                    # warp the PET PVCed to MNI
                    (gunzipper,   collector,   [("out_file",             "in1")]),
                    (petpvc,      collector,   [("pvc_output.coreg_ref", "in2")]),

                    (pet_input,   apply_warp,  [("anat_to_mni_warp", "deformation_file")]),
                    (collector,   apply_warp,  [("out",              "apply_to_files")]),
                    (tpm_bbox,    apply_warp,  [("bbox",             "write_bounding_box")]),

                    # output
                    (petpvc,    pet_output, [("pvc_output.pvc_out",      "pvc_out"),
                                             ("pvc_output.brain_mask",   "brain_mask"),
                                             ("pvc_output.petpvc_mask",  "petpvc_mask"),
                                             ("pvc_output.coreg_ref",    "coreg_ref"),
                                             ("pvc_output.coreg_others", "coreg_others"),
                                             ("pvc_output.gm_norm",      "gm_norm")]),

                    # output
                    (apply_warp,  pet_output, [("normalized_files",  "pvc_warped"),
                                               ("deformation_field", "warp_field")]),
                   ])


    if do_atlas:
        coreg_atlas = setup_node(spm_coregister(cost_function="mi"), name="coreg_atlas")

        # set the registration interpolation to nearest neighbour.
        coreg_atlas.inputs.write_interp = 0
        wf.connect([
                    (pet_input,   coreg_atlas, [("anat",                 "source")]),
                    (petpvc,      coreg_atlas, [("pvc_output.coreg_ref", "target")]),
                    (pet_input,   coreg_atlas, [("atlas_anat",           "apply_to_files")]),
                    (coreg_atlas, pet_output,  [("coregistered_files",   "atlas_pet")]),
        ])

    return wf
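
# A minimal usage sketch (not from the source); all paths are hypothetical and
# the field names follow the pet_input node defined above. The registration
# direction is read from the 'registration.anat2pet' configuration key.
wf = spm_mrpet_preprocessing(wf_name="spm_mrpet_preproc")
wf.inputs.pet_input.in_file = "/data/sub-01/pet/pet.nii.gz"
wf.inputs.pet_input.anat = "/data/sub-01/anat/t1.nii.gz"
wf.inputs.pet_input.anat_to_mni_warp = "/data/sub-01/anat/y_t1.nii.gz"
wf.inputs.pet_input.tissues = ["/data/sub-01/anat/c1t1.nii.gz",  # GM
                               "/data/sub-01/anat/c2t1.nii.gz",  # WM
                               "/data/sub-01/anat/c3t1.nii.gz"]  # CSF
wf.base_dir = "/tmp/pet_work"  # hypothetical
wf.run()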
Ejemplo n.º 32
0
# SliceTiming - correct for slice-wise acquisition. (The opening of this node
# definition is missing from the excerpt; it is reconstructed here on the
# assumption that it mirrors the second sliceTiming node below, except for
# the reference slice.)
sliceTiming = Node(SliceTiming(num_slices=number_of_slices,
                               time_repetition=TR,
                               time_acquisition=TR - TR / number_of_slices,
                               slice_order=interleaved_order,
                               ref_slice=number_of_slices // 2),
                   name="sliceTiming")

# Realign - correct for motion
realign = Node(Realign(register_to_mean=True), name="realign")
# Artifact Detection - determine which of the images in the functional series
#   are outliers. This is based on deviation in intensity or movement.
art = Node(ArtifactDetect(norm_threshold=1,
                          zintensity_threshold=3,
                          mask_type='file',
                          parameter_source='SPM',
                          use_differences=[True, False]),
           name="art")

# Gunzip - unzip anatomical
gunzip2 = Node(Gunzip(), name="gunzip2")

gunzip = Node(Gunzip(), name="gunzip")

sliceTiming = Node(SliceTiming(num_slices=number_of_slices,
                               time_repetition=TR,
                               time_acquisition=TR - TR / number_of_slices,
                               slice_order=interleaved_order,
                               ref_slice=19),
                   name="sliceTiming")

# Realign - correct for motion
realign = Node(Realign(register_to_mean=True), name="realign")

# Artifact Detection - determine which of the images in the functional series
#   are outliers. This is based on deviation in intensity or movement.