Example #1
def test_rename():
    tempdir = os.path.realpath(mkdtemp())
    origdir = os.getcwd()
    os.chdir(tempdir)

    # Test very simple rename
    _ = open("file.txt", "w").close()
    rn = utility.Rename(in_file="file.txt", format_string="test_file1.txt")
    res = rn.run()
    outfile = os.path.join(tempdir, "test_file1.txt")
    yield assert_equal, res.outputs.out_file, outfile
    yield assert_true, os.path.exists(outfile)

    # Now a string-formatting version
    rn = utility.Rename(in_file="file.txt",
                        format_string="%(field1)s_file%(field2)d",
                        keep_ext=True)
    # Test .input field creation
    yield assert_true, hasattr(rn.inputs, "field1")
    yield assert_true, hasattr(rn.inputs, "field2")
    # Set the inputs
    rn.inputs.field1 = "test"
    rn.inputs.field2 = 2
    res = rn.run()
    outfile = os.path.join(tempdir, "test_file2.txt")
    yield assert_equal, res.outputs.out_file, outfile
    yield assert_true, os.path.exists(outfile)

    # Clean up
    os.chdir(origdir)
    shutil.rmtree(tempdir)
Example #2
def test_rename(tmpdir):
    tmpdir.chdir()

    # Test very simple rename
    _ = open("file.txt", "w").close()
    rn = utility.Rename(in_file="file.txt", format_string="test_file1.txt")
    res = rn.run()
    outfile = tmpdir.join("test_file1.txt").strpath
    assert res.outputs.out_file == outfile
    assert os.path.exists(outfile)

    # Now a string-formatting version
    rn = utility.Rename(in_file="file.txt",
                        format_string="%(field1)s_file%(field2)d",
                        keep_ext=True)
    # Test .input field creation
    assert hasattr(rn.inputs, "field1")
    assert hasattr(rn.inputs, "field2")

    # Set the inputs
    rn.inputs.field1 = "test"
    rn.inputs.field2 = 2
    res = rn.run()
    outfile = tmpdir.join("test_file2.txt").strpath
    assert res.outputs.out_file == outfile
    assert os.path.exists(outfile)
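
The two tests above exercise nipype's Rename interface directly. As a minimal standalone sketch (the field name "subj" is a hypothetical placeholder; Rename turns every format_string field into an input trait, as the hasattr checks above confirm):

from nipype.interfaces.utility import Rename

rn = Rename(format_string="sub-%(subj)s_T1w", keep_ext=True)
rn.inputs.in_file = "anat.nii.gz"  # assumed to exist in the working directory
rn.inputs.subj = "01"
res = rn.run()  # copies anat.nii.gz to sub-01_T1w.nii.gz in the cwd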
Example #3
def create_converter_diffusion_pipeline(working_dir,
                                        ds_dir,
                                        name='converter_diffusion'):
    # initiate workflow
    converter_wf = Workflow(name=name)
    converter_wf.base_dir = os.path.join(working_dir, 'LeiCA_resting')

    # set fsl output
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # inputnode
    inputnode = Node(util.IdentityInterface(fields=['dMRI_dicom']),
                     name='inputnode')

    outputnode = Node(util.IdentityInterface(fields=['dMRI']),
                      name='outputnode')

    niftisink = Node(nio.DataSink(), name='niftisink')
    niftisink.inputs.base_directory = os.path.join(ds_dir, 'raw_niftis')

    #######

    converter_dMRI = Node(Dcm2nii(), name="converter_dMRI")
    converter_dMRI.inputs.gzip_output = True
    converter_dMRI.inputs.nii_output = True
    converter_dMRI.inputs.anonymize = False
    converter_dMRI.plugin_args = {'submit_specs': 'request_memory = 2000'}
    converter_wf.connect(inputnode, 'dMRI_dicom', converter_dMRI,
                         'source_names')

    dMRI_rename = Node(util.Rename(format_string='DTI_mx_137.nii.gz'),
                       name='dMRI_rename')
    converter_wf.connect(converter_dMRI, 'converted_files', dMRI_rename,
                         'in_file')

    bvecs_rename = Node(util.Rename(format_string='DTI_mx_137.bvecs'),
                        name='bvecs_rename')
    converter_wf.connect(converter_dMRI, 'bvecs', bvecs_rename, 'in_file')

    bvals_rename = Node(util.Rename(format_string='DTI_mx_137.bvals'),
                        name='bvals_rename')
    converter_wf.connect(converter_dMRI, "bvals", bvals_rename, 'in_file')

    # reorient to standard orientation
    reor_2_std = Node(fsl.Reorient2Std(), name='reor_2_std')
    converter_wf.connect(dMRI_rename, 'out_file', reor_2_std, 'in_file')
    converter_wf.connect(reor_2_std, 'out_file', outputnode, 'dMRI')

    # save original niftis
    converter_wf.connect(reor_2_std, 'out_file', niftisink, 'dMRI.@dwi')
    converter_wf.connect(bvals_rename, 'out_file', niftisink, 'dMRI.@bvals')
    converter_wf.connect(bvecs_rename, 'out_file', niftisink, 'dMRI.@bvecs')

    converter_wf.write_graph(dotfilename='converter_struct',
                             graph2use='flat',
                             format='pdf')
    return converter_wf
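
Note that Workflow.connect accepts either the source/field/destination/field quadruple used throughout this example or a list of (source, destination, [(out, in), ...]) tuples, as in Example 4 below. A minimal sketch with placeholder nodes:

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu

demo_wf = pe.Workflow(name='connect_demo')
node_a = pe.Node(niu.Rename(format_string='a'), name='node_a')
node_b = pe.Node(niu.Rename(format_string='b'), name='node_b')
demo_wf.connect(node_a, 'out_file', node_b, 'in_file')  # quadruple form
# Equivalent list form (a given field pair may only be wired once):
# demo_wf.connect([(node_a, node_b, [('out_file', 'in_file')])])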
Example #4
def mask2surf(name='MaskToSurface', use_ras_coord=True):
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['in_file', 'norm', 'in_filled', 'out_name']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['out_surf']),
                         name='outputnode')
    binarize = pe.Node(fs.Binarize(min=0.1), name='binarize')
    fill = pe.Node(FillMask(), name='FillMask')
    pretess = pe.Node(fs.MRIPretess(label=1), name='PreTess')
    tess = pe.Node(fs.MRITessellate(label_value=1,
                                    use_real_RAS_coordinates=use_ras_coord),
                   name='tess')
    smooth = pe.Node(fs.SmoothTessellation(disable_estimates=True),
                     name='mris_smooth')
    rename = pe.Node(niu.Rename(keep_ext=False), name='rename')
    togii = pe.Node(fs.MRIsConvert(out_datatype='gii'), name='toGIFTI')

    wf = pe.Workflow(name=name)
    wf.connect([
        (inputnode, binarize, [('in_file', 'in_file')]),
        (inputnode, pretess, [('norm', 'in_norm')]),
        (inputnode, fill, [('in_filled', 'in_filled')]),
        (inputnode, rename, [('out_name', 'format_string')]),
        (binarize, fill, [('binary_file', 'in_file')]),
        (fill, pretess, [('out_file', 'in_filled')]),
        (pretess, tess, [('out_file', 'in_file')]),
        (tess, smooth, [('surface', 'in_file')]),
        (smooth, rename, [('surface', 'in_file')]),
        (rename, togii, [('out_file', 'in_file')]),
        (togii, outputnode, [('converted', 'out_surf')]),
    ])
    return wf
Example #5
def create_align_to_anatomy_workflow(name='align_to_anatomy',
                                     format_string='inplane_to_anatomy'):

    align_to_anatomy = pe.Workflow(name=name)

    inputs = pe.Node(interface=util.IdentityInterface(
        fields=['inplane_file', 'anatomy_file']),
                     name='inputs')
    strip = pe.Node(interface=fs.ReconAll(),
                    name='strip')  #FIXME: reconall interface barfs if rerun
    strip.inputs.directive = 'autorecon1'
    strip.inputs.flags = '-nowsgcaatlas'

    register = pe.Node(interface=fs.RobustRegister(), name='register')
    register.inputs.auto_sens = True
    #register.inputs.init_orient = True #FIXME: disabled due to bug in binary
    convert_xfm = pe.Node(interface=nmutil.LtaToXfm(), name='convert_xfm')
    rename_xfm = pe.Node(interface=util.Rename(format_string),
                         name='rename_xfm')
    rename_xfm.inputs.keep_ext = True
    outputs = pe.Node(
        interface=util.IdentityInterface(fields=['xfm_file', 'strip_file']),
        name='outputs')

    align_to_anatomy.connect(inputs, 'inplane_file', strip, 'T1_files')
    align_to_anatomy.connect(strip, 'brainmask', register, 'source_file')
    align_to_anatomy.connect(inputs, 'anatomy_file', register, 'target_file')
    align_to_anatomy.connect(register, 'out_reg_file', convert_xfm, 'in_file')
    align_to_anatomy.connect(convert_xfm, 'out_file', rename_xfm, 'in_file')
    align_to_anatomy.connect(rename_xfm, 'out_file', outputs, 'xfm_file')
    align_to_anatomy.connect(strip, 'brainmask', outputs, 'strip_file')

    return align_to_anatomy
Example #6
def create_extract_functional_workflow(
        name='extract_functional',
        templates={'functional': 'Raw/Functional/Scan_{scan}/'},
        format_string='f%(scan)d'):

    extract_functional = pe.Workflow(name=name)

    inputs = pe.Node(
        interface=util.IdentityInterface(fields=['session_dir', 'scan']),
        name='inputs')
    get_functional = pe.Node(interface=nio.SelectFiles(templates),
                             name='get_functional')
    functional_to_nii = pe.Node(interface=ds.DcmStack(),
                                name='functional_to_nii')
    functional_to_nii.inputs.embed_meta = True
    rename_functional = pe.Node(interface=util.Rename(format_string),
                                name='rename_functional')
    rename_functional.inputs.keep_ext = True
    outputs = pe.Node(interface=util.IdentityInterface(fields=['out_file']),
                      name='outputs')

    extract_functional.connect(inputs, 'session_dir', get_functional,
                               'base_directory')
    extract_functional.connect(inputs, 'scan', get_functional, 'scan')
    extract_functional.connect(get_functional, 'functional', functional_to_nii,
                               'dicom_files')
    extract_functional.connect(functional_to_nii, 'out_file',
                               rename_functional, 'in_file')
    extract_functional.connect(inputs, 'scan', rename_functional, 'scan')
    extract_functional.connect(rename_functional, 'out_file', outputs,
                               'out_file')

    return extract_functional
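
Because format_string is 'f%(scan)d', the rename node gains a connectable 'scan' input, which is what lets the workflow wire inputs.scan straight into rename_functional above. The same mechanism outside a workflow (file name hypothetical):

rn = util.Rename(format_string='f%(scan)d', keep_ext=True)
rn.inputs.in_file = 'stack.nii.gz'  # e.g. a DcmStack output
rn.inputs.scan = 3
# rn.run() would produce f3.nii.gz in the working directory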
Example #7
def create_extract_inplane_workflow(
        name='extract_inplane',
        templates={'inplane': 'Raw/Anatomy/Inplane{id}/'},
        format_string='inplane'):

    extract_inplane = pe.Workflow(name=name)

    inputs = pe.Node(
        interface=util.IdentityInterface(fields=['session_dir', 'ref_vol']),
        name='inputs')
    get_inplane = pe.Node(interface=nio.SelectFiles(templates),
                          name='get_inplane')
    inplane_to_nii = pe.Node(interface=ds.DcmStack(), name='inplane_to_nii')
    inplane_to_nii.inputs.embed_meta = True
    rename_inplane = pe.Node(interface=util.Rename(format_string),
                             name='rename_inplane')
    rename_inplane.inputs.keep_ext = True
    outputs = pe.Node(interface=util.IdentityInterface(fields=['out_file']),
                      name='outputs')

    extract_inplane.connect(inputs, 'session_dir', get_inplane,
                            'base_directory')
    extract_inplane.connect(inputs, ('ref_vol', ref_vol_to_inplane_id),
                            get_inplane, 'id')
    extract_inplane.connect(get_inplane, 'inplane', inplane_to_nii,
                            'dicom_files')
    extract_inplane.connect(inplane_to_nii, 'out_file', rename_inplane,
                            'in_file')
    extract_inplane.connect(rename_inplane, 'out_file', outputs, 'out_file')

    return extract_inplane
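
The ('ref_vol', ref_vol_to_inplane_id) tuple above is nipype's modifier-function connection form: the callable is applied to the source value before it reaches the destination input. A sketch with a hypothetical stand-in for the helper, which is defined elsewhere in this module:

def ref_vol_to_inplane_id(ref_vol):
    # hypothetical mapping from a reference-volume choice to an Inplane id
    return '' if ref_vol == 'first' else 'A'

# extract_inplane.connect(inputs, ('ref_vol', ref_vol_to_inplane_id),
#                         get_inplane, 'id')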
Example #8
def create_normalize_workflow(name="normalize"):

    # Define the workflow inputs
    inputnode = pe.Node(util.IdentityInterface(
        fields=["timeseries", "flirt_affine", "warpfield"]),
                        name="inputs")

    # Define the target space and warp to it
    mni152 = fsl.Info.standard_image("avg152T1_brain.nii.gz")

    applywarp = pe.MapNode(fsl.ApplyWarp(ref_file=mni152, interp="spline"),
                           iterfield=["in_file", "premat"],
                           name="applywarp")

    # Rename the timeseries
    rename = pe.MapNode(util.Rename(format_string="timeseries_warped",
                                    keep_ext=True),
                        iterfield=["in_file"],
                        name="rename")

    # Define the outputs
    outputnode = pe.Node(util.IdentityInterface(fields=["timeseries"]),
                         name="outputs")

    normalize = pe.Workflow(name=name)
    normalize.connect([
        (inputnode, applywarp, [("timeseries", "in_file"),
                                ("warpfield", "field_file"),
                                ("flirt_affine", "premat")]),
        (applywarp, rename, [("out_file", "in_file")]),
        (rename, outputnode, [("out_file", "timeseries")]),
    ])

    return normalize
Example #9
def renameFile(file_name, node_name, wdir=None, nthreads=1):
    renameFile = pe.Node(niu.Rename(format_string="%(subjid)s_%(file_name)s"),
                         name=node_name)
    renameFile.base_dir = wdir
    renameFile.inputs.keep_ext = True
    renameFile.inputs.file_name = file_name
    renameFile.interface.num_threads = nthreads

    return renameFile
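
Hypothetical usage of this factory: the %(subjid)s placeholder becomes a 'subjid' input, and both it and 'in_file' still have to be set or connected before the node runs:

rn = renameFile('dwi', 'rename_dwi', wdir='/tmp/work')
rn.inputs.subjid = 'sub-01'
rn.inputs.in_file = '/data/raw_dwi.nii.gz'  # hypothetical input image
res = rn.run()  # would produce sub-01_dwi.nii.gz (keep_ext appends .nii.gz)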
Example #10
    def _run_interface(self, runtime):
        outputs = self._list_outputs()
        aff_file = outputs['aff_file']
        aff = np.diag(4 * [1])
        np.savetxt(aff_file, aff, "%g")
        rename = niu.Rename()
        rename.inputs.in_file = self.inputs.flo_file
        rename.inputs.format_string = outputs['res_file']
        result = rename.run()
        return result.runtime
Example #11
    def create_workflow(self, flow, inputnode, outputnode):
        if self.config.seg_tool == "Freesurfer":
            # Converting to .mgz format
            fs_mriconvert = pe.Node(interface=fs.MRIConvert(out_type="mgz",
                                                            out_file="T1.mgz"),
                                    name="mgz_convert")

            if self.config.make_isotropic:
                fs_mriconvert.inputs.vox_size = (
                    self.config.isotropic_vox_size,
                    self.config.isotropic_vox_size,
                    self.config.isotropic_vox_size)
                fs_mriconvert.inputs.resample_type = self.config.isotropic_interpolation

            rename = pe.Node(util.Rename(), name="copy_orig")
            orig_dir = os.path.join(self.config.freesurfer_subject_id, "mri",
                                    "orig")
            if not os.path.exists(orig_dir):
                os.makedirs(orig_dir)
                print "Folder not existing; %s created!" % orig_dir
            rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

            # ReconAll => named outputnode as we don't want to select a specific output....
            fs_reconall = pe.Node(interface=fs.ReconAll(
                flags='-no-isrunning -parallel -openmp {}'.format(
                    self.config.fs_number_of_cores)),
                                  name="reconall")
            fs_reconall.inputs.directive = 'all'
            #fs_reconall.inputs.args = self.config.freesurfer_args

            #fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
            fs_reconall.inputs.subjects_dir = self.config.freesurfer_subjects_dir

            # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
            # fs_reconall.inputs.brainstem = self.config.segment_brainstem

            def isavailable(file):
                print "T1 is available"
                return file

            flow.connect([
                (inputnode, fs_mriconvert, [(('T1', isavailable), 'in_file')]),
                (fs_mriconvert, rename, [('out_file', 'in_file')]),
                (rename, fs_reconall, [(("out_file", extract_base_directory),
                                        "subject_id")]),
                (fs_reconall, outputnode, [('subjects_dir', 'subjects_dir'),
                                           ('subject_id', 'subject_id')]),
            ])
Example #12
def anat_qc_workflow_dhcp(name='MRIQC_Anat', settings=None):
    """
    One-subject-one-session-one-run pipeline to extract the NR-IQMs from
    anatomical images
    """
    if settings is None:
        settings = {}

    workflow = pe.Workflow(name=name)
    deriv_dir = op.abspath('./derivatives')
    if 'work_dir' in settings.keys():
        deriv_dir = op.abspath(op.join(settings['work_dir'], 'derivatives'))

    if not op.exists(deriv_dir):
        os.makedirs(deriv_dir)
    # Define workflow, inputs and outputs
    inputnode = pe.Node(niu.IdentityInterface(
        fields=['bids_root', 'subject_id', 'session_id', 'run_id', 'reorient'
                ]),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=['']), name='outputnode')

    # Plot mosaic
    plot = pe.Node(PlotMosaic(), name='plot_mosaic')
    merg = pe.Node(niu.Merge(3), name='plot_metadata')

    # Connect all nodes
    workflow.connect([(inputnode, plot, [('reorient', 'in_file')]),
                      (inputnode, plot, [('subject_id', 'subject')]),
                      (inputnode, merg, [('session_id', 'in1'),
                                         ('run_id', 'in2')]),
                      (merg, plot, [('out', 'metadata')])])

    # Save mosaic to well-formed path
    mvplot = pe.Node(niu.Rename(
        format_string='anatomical_%(subject_id)s_%(session_id)s_%(run_id)s',
        keep_ext=True),
                     name='rename_plot')
    dsplot = pe.Node(nio.DataSink(base_directory=settings['work_dir'],
                                  parameterization=False),
                     name='ds_plot')
    workflow.connect([(inputnode, mvplot, [('subject_id', 'subject_id'),
                                           ('session_id', 'session_id'),
                                           ('run_id', 'run_id')]),
                      (plot, mvplot, [('out_file', 'in_file')]),
                      (mvplot, dsplot, [('out_file', '@mosaic')])])

    return workflow
Example #13
    def create_workflow(self, flow, inputnode, outputnode):
        if self.config.seg_tool == "Freesurfer":
            if not self.config.use_existing_freesurfer_data:
                # Converting to .mgz format
                fs_mriconvert = pe.Node(interface=fs.MRIConvert(
                    out_type="mgz", out_file="T1.mgz"),
                                        name="mgz_convert")

                rename = pe.Node(util.Rename(), name="copy_orig")
                orig_dir = os.path.join(self.config.freesurfer_subject_id,
                                        "mri", "orig")
                if not os.path.exists(orig_dir):
                    os.makedirs(orig_dir)
                    print "Folder not existing; %s created!" % orig_dir
                rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

                # ReconAll => named outputnode as we don't want to select a specific output....
                fs_reconall = pe.Node(
                    interface=fs.ReconAll(flags='-no-isrunning'),
                    name="reconall")
                fs_reconall.inputs.args = self.config.freesurfer_args

                #fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                fs_reconall.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                def isavailable(file):
                    print "T1 is available"
                    return file

                flow.connect([
                    (inputnode, fs_mriconvert, [(('T1', isavailable),
                                                 'in_file')]),
                    (fs_mriconvert, rename, [('out_file', 'in_file')]),
                    (rename, fs_reconall,
                     [(("out_file", extract_base_directory), "subject_id")]),
                    (fs_reconall, outputnode, [('subjects_dir',
                                                'subjects_dir'),
                                               ('subject_id', 'subject_id')]),
                ])

            else:
                outputnode.inputs.subjects_dir = self.config.freesurfer_subjects_dir
                outputnode.inputs.subject_id = self.config.freesurfer_subject_id

        elif self.config.seg_tool == "Custom segmentation":

            outputnode.inputs.custom_wm_mask = self.config.white_matter_mask
Example #14
def create_gradient_unwarp_workflow(in_file,
                                    in_coeff,
                                    output_dir,
                                    offsets=[0, 0, 0],
                                    scanner='siemens',
                                    radius=0.225,
                                    interp='CUB',
                                    throughplaneonly=False,
                                    inplaneonly=False):

    _, subject_id, _ = split_filename(os.path.basename(in_file))
    # Create a workflow to process the images
    workflow = pe.Workflow(name='gradwarp_correction')
    workflow.base_output_dir = 'gradwarp_correction'
    # The gradwarp field is computed.
    gradwarp = pe.Node(interface=GradwarpCorrection(), name='gradwarp')
    gradwarp.inputs.offset_x = -1 * offsets[0]
    gradwarp.inputs.offset_y = -1 * offsets[1]
    gradwarp.inputs.offset_z = -1 * offsets[2]
    gradwarp.inputs.radius = radius
    gradwarp.inputs.scanner_type = scanner
    gradwarp.inputs.in_file = in_file
    gradwarp.inputs.coeff_file = in_coeff
    if throughplaneonly:
        gradwarp.inputs.throughplaneonly = True
    if inplaneonly:
        gradwarp.inputs.inplaneonly = True

    # The obtained deformation field is used the resample the input image
    resampling = pe.Node(interface=niftyreg.RegResample(inter_val=interp,
                                                        ref_file=in_file,
                                                        flo_file=in_file),
                         name='resampling')
    workflow.connect(gradwarp, 'out_file', resampling, 'trans_file')
    renamer = pe.Node(interface=niu.Rename(format_string=subject_id +
                                           "_gradwarp",
                                           keep_ext=True),
                      name='renamer')
    workflow.connect(resampling, 'out_file', renamer, 'in_file')
    # Create a data sink
    ds = pe.Node(nio.DataSink(parameterization=False), name='ds')
    ds.inputs.base_directory = output_dir
    workflow.connect(renamer, 'out_file', ds, '@img')

    return workflow
Example #15
def rename_connections(workflow, datasink, rename_list, sink_node):

    ncount = 0
    rcount = 0
    for rename in rename_list:
        din_file = sink_node + '.@' + str(ncount)
        if len(rename) == 4:
            rename_node = pe.MapNode(interface=util.Rename(),
                                     name=rename[2] + str(rcount),
                                     iterfield=['in_file', 'format_string'])
            rename_node.inputs.format_string = rename[3]

            workflow.connect(rename[0], rename[1], rename_node, 'in_file')
            workflow.connect(rename_node, 'out_file', datasink, din_file)
            rcount += 1
        else:
            workflow.connect(rename[0], rename[1], datasink, din_file)
        ncount += 1
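
A hypothetical rename_list showing the convention this helper expects: 4-tuples get a Rename MapNode, with the fourth element supplying the format strings (a list, since format_string is an iterfield here), while shorter tuples connect straight to the datasink:

rename_list = [
    # (source_node, source_field, node_name_prefix, format_strings)
    (smooth_node, 'out_file', 'rename_smooth',
     ['sub-01_desc-smooth', 'sub-02_desc-smooth']),
    (mask_node, 'mask_file'),  # sunk directly, no renaming
]
rename_connections(workflow, datasink, rename_list, 'derivatives')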
Example #16
def convert_rsfmri_dicom(name='convert_rsfmri_dicom'):
    inputnode = pe.Node(
        utility.IdentityInterface(
            fields=['rsfmri_dicom_pattern']),
        name='inputspec')
    outputnode = pe.Node(
        utility.IdentityInterface(
            fields=['rsfmri_file','dicom_pars']),
        name='outputspec')

    n_dcm2nii = pe.Node(
        dcm2nii.Dcm2nii(
            gzip_output=True,
            id_in_filename=True,
            date_in_filename=True,
            convert_all_pars=False,
            config_file='/home_local/bpinsard/.dcm2nii/dcm2nii.ini',),
        name='dcm2nii')

    n_rename_rsfmri = pe.Node(
        utility.Rename(
           format_string='%(site)s_S_%(subj)s_%(data)s_%(time)s_rsfMRI.nii.gz',
           parse_string=r'(?P<data>\d{8})_(?P<time>\d{6})(?P<site>\d{3})S(?P<subj>\d{4})'),
        name='rename_rsfmri')

    n_get_dicom_par=pe.Node(
        utility.Function(
            input_names=['dicom_pattern'],
            output_names=['dicom_pars'],
            function=get_dicom_par),
        name='get_dicom_par')

    w=pe.Workflow(name=name)
    w.connect([
        (inputnode, n_dcm2nii, [('rsfmri_dicom_pattern','source_names')]),
        (n_dcm2nii, n_rename_rsfmri,[('converted_files','in_file')]),
        (inputnode,n_get_dicom_par,[('rsfmri_dicom_pattern','dicom_pattern')]),
        (n_rename_rsfmri, outputnode, [('out_file','rsfmri_file')]),
        (n_get_dicom_par, outputnode, [('dicom_pars','dicom_pars')]),
        ])
    return w
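
A sketch of the parse_string mechanics used above: the regex's named groups are extracted from the input file name and substituted into format_string (the file name below is made up):

rn = utility.Rename(
    format_string='%(site)s_S_%(subj)s_%(data)s_%(time)s_rsfMRI.nii.gz',
    parse_string=r'(?P<data>\d{8})_(?P<time>\d{6})(?P<site>\d{3})S(?P<subj>\d{4})')
rn.inputs.in_file = '20120101_123456003S1234.nii.gz'
# rn.run() would yield 003_S_1234_20120101_123456_rsfMRI.nii.gz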
Example #17
def legacy(
    bids_base,
    template,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	exclude : dict
		A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
		If this is specified, matching entries will be excluded from the analysis.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if the parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, str
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified, a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overridden by `n_jobs`).
	out_base : str, optional
		Output base directory - inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is the default, since it works safely. Use others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty, there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty, there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    try:
        import nipype.interfaces.ants.legacy as antslegacy
    except ModuleNotFoundError:
        print('''
			The `nipype.interfaces.ants.legacy` was not found on this system.
			You may want to downgrade nipype to e.g. 1.1.1, as this module has been removed in more recent versions:
			https://github.com/nipy/nipype/issues/3197
		''')

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
    )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice', 'ind_type'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    copy_bids_files(bids_base, os.path.join(out_base, workflow_name))
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
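
One detail worth noting in the non-blurred branch above: format_string need not contain placeholders; here it is connected wholesale from nii_name, turning the Rename node into a simple copy-to-final-name step ahead of the datasink. A minimal sketch (file names hypothetical):

f_rename_demo = pe.Node(util.Rename(), name='f_rename_demo')
f_rename_demo.inputs.format_string = 'sub-01_task-rest_bold.nii.gz'
f_rename_demo.inputs.in_file = 'warped.nii.gz'
# f_rename_demo.run() would copy warped.nii.gz to sub-01_task-rest_bold.nii.gz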
Example #18
def init_intramodal_template_wf(inputs_list, t1w_source_file, reportlets_dir, transform="Rigid",
                                num_iterations=2, mem_gb=3, omp_nthreads=1,
                                name="intramodal_template_wf"):
    """Create an unbiased intramodal template for a subject. This aligns the b=0 references
    from all the scans of a subject. Can be rigid, affine or nonlinear (BSplineSyN).

    **Parameters**
        inputs_list: list of inputs
            List of identifiers for the input b=0 images.
        transform: 'Rigid', 'Affine', 'BSplineSyN'
            Which transform to ultimately use. If 'BSplineSyN', first 2 iterations of Affine will
            be run.
        num_iterations: int
            Default: 2.

    **Inputs**

        [workflow_name]_image...
            One input for each input image. There is no input called inputs_list
        t1w_image

    **Outputs**
        [workflow_name]_transform
            transform files to the intramodal template

        intramodal_template_to_t1w_transform
            Transform from the intramodal b=0 template to the T1w image.

    """
    workflow = Workflow(name=name)
    input_names = [name.replace('-', '_') + '_b0_template' for name in inputs_list]
    output_names = [name.replace('-', '_') + '_transform' for name in inputs_list]

    inputnode = pe.Node(
        niu.IdentityInterface(
            fields=input_names + [
                't1_brain', 't1_preproc', 't1_mask', 't1_seg', 'subjects_dir', 'subject_id',
                't1_aseg', 't1_aparc', 't1_tpms', 't1_2_mni_forward_transform',
                'dwi_sampling_grid', 't1_2_fsnative_forward_transform',
                't1_2_fsnative_reverse_transform', 't1_2_mni_reverse_transform']),
        name='inputnode')
    merge_inputs = pe.Node(niu.Merge(len(input_names)), name='merge_inputs')
    rename_inputs = pe.MapNode(
        niu.Rename(keep_ext=True),
        iterfield=['in_file', 'format_string'],
        name='rename_inputs')
    rename_inputs.inputs.format_string = input_names
    rename_inputs.synchronize = True
    for input_num, input_name in enumerate(input_names):
        workflow.connect(inputnode, input_name, merge_inputs, 'in%d' % (input_num + 1))

    outputnode = pe.Node(
        niu.IdentityInterface(
            fields=output_names + ["intramodal_template",
                                   "intramodal_template_mask",
                                   "intramodal_template_to_t1_affine",
                                   "intramodal_template_to_t1_warp"]),
        name='outputnode')
    split_outputs = pe.Node(niu.Split(splits=[1] * len(input_names), squeeze=True),
                            name='split_outputs')
    for output_num, output_name in enumerate(output_names):
        workflow.connect(split_outputs, 'out%d' % (output_num + 1), outputnode, output_name)

    runtime_opts = {'num_cores': 1, 'parallel_control': 0}
    if omp_nthreads > 1:
        runtime_opts = {'num_cores': omp_nthreads, 'parallel_control': 2}
    ants_mvtc2 = pe.Node(MultivariateTemplateConstruction2(dimension=3, **runtime_opts),
                         name='ants_mvtc2')
    intramodal_template_mask = init_skullstrip_b0_wf(name="intramodal_template_mask")

    workflow.connect([
        (merge_inputs, rename_inputs, [('out', 'in_file')]),
        (rename_inputs, ants_mvtc2, [('out_file', 'input_images')]),
        (intramodal_template_mask, outputnode, [
            ('outputnode.mask_file', 'intramodal_template_mask')]),
        (ants_mvtc2, intramodal_template_mask, [
            ('templates', 'inputnode.in_file')]),
        (ants_mvtc2, split_outputs, [
            ('forward_transforms', 'inlist')]),
        (ants_mvtc2, outputnode, [
            ('templates', 'intramodal_template')])
    ])

    # calculate dwi registration to T1w
    b0_coreg_wf = init_b0_to_anat_registration_wf(omp_nthreads=omp_nthreads,
                                                  mem_gb=mem_gb,
                                                  write_report=True)

    workflow.connect([
        (inputnode, b0_coreg_wf, [
            ('t1_brain', 'inputnode.t1_brain'),
            ('t1_seg', 'inputnode.t1_seg'),
            ('subjects_dir', 'inputnode.subjects_dir'),
            ('subject_id', 'inputnode.subject_id'),
            ('t1_2_fsnative_reverse_transform',
             'inputnode.t1_2_fsnative_reverse_transform')]),
        (ants_mvtc2, b0_coreg_wf, [
            ('templates', 'inputnode.ref_b0_brain')]),
        (b0_coreg_wf, outputnode, [
            ('outputnode.itk_b0_to_t1', 'intramodal_template_to_t1_affine')])
    ])

    # Fill-in datasinks of reportlets seen so far
    for node in workflow.list_node_names():
        if node.split('.')[-1].startswith('ds_report'):
            workflow.get_node(node).inputs.base_directory = reportlets_dir
            workflow.get_node(node).inputs.source_file = t1w_source_file

    return workflow
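
The Merge -> MapNode(Rename) -> template construction -> Split chain above is the usual nipype pattern for funneling a fixed set of named inputs through a list-valued interface and fanning the results back out to named outputs. A minimal sketch of the two utility ends (field counts illustrative):

merge = pe.Node(niu.Merge(2), name='merge')  # inputs in1, in2 -> output 'out' (a list)
split = pe.Node(niu.Split(splits=[1, 1], squeeze=True), name='split')  # -> out1, out2
# workflow.connect(merge, 'out', list_consuming_node, 'in_files')
# workflow.connect(list_producing_node, 'out_files', split, 'inlist')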
Example #19
def create_network_masks_workflow(name="network_masks", smm_threshold=0.5):

    network_masks = Workflow(name=name)

    # Input node
    inputspec = Node(utility.IdentityInterface(fields=['actmaps', 'networks']),
                     name='inputspec')

    # Binarise results
    actmaps2binmasks = MapNode(
        fsl.ImageMaths(op_string='-thr {0} -bin'.format(smm_threshold)),
        iterfield=['in_file'],
        name='actmaps2binmasks')

    # Split main masks from exclusive masks
    mainmasks = Node(SplitMaps(), name='mainmasks')

    # Combine exclusive masks
    exclusivemasks = MapNode(fsl.ImageMaths(),
                             iterfield=['in_file', 'op_string'],
                             name='exclusivemasks')

    # Rename main masks
    mainmasks_rename = MapNode(utility.Rename(),
                               iterfield=['in_file', 'format_string'],
                               name='mainmasks_rename')
    mainmasks_rename.inputs.keep_ext = True

    # Rename exclusive masks
    exclusivemasks_rename = MapNode(utility.Rename(),
                                    iterfield=['in_file', 'format_string'],
                                    name='exclusivemasks_rename')
    exclusivemasks_rename.inputs.keep_ext = True

    # Output Node
    outputspec = Node(
        utility.IdentityInterface(fields=['main_masks', 'exclusive_masks']),
        name='outputspec')

    # Helper functions

    def get_names(x):
        return [y['name'] for y in x]

    network_masks.connect(inputspec, 'actmaps', actmaps2binmasks, 'in_file')
    network_masks.connect(actmaps2binmasks, 'out_file', mainmasks, 'in_files')
    network_masks.connect(inputspec, 'networks', mainmasks, 'in_networks')
    network_masks.connect(mainmasks, 'out_mains', mainmasks_rename, 'in_file')
    network_masks.connect(inputspec, ('networks', get_names), mainmasks_rename,
                          'format_string')
    network_masks.connect(mainmasks, 'out_firsts', exclusivemasks, 'in_file')
    network_masks.connect(mainmasks, 'out_opstrings', exclusivemasks,
                          'op_string')
    network_masks.connect(exclusivemasks, 'out_file', exclusivemasks_rename,
                          'in_file')
    network_masks.connect(inputspec, ('networks', get_names),
                          exclusivemasks_rename, 'format_string')
    network_masks.connect(mainmasks_rename, 'out_file', outputspec,
                          'main_masks')
    network_masks.connect(exclusivemasks_rename, 'out_file', outputspec,
                          'exclusive_masks')

    return network_masks
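
Both rename MapNodes above iterate jointly over in_file and format_string; with several iterfields a MapNode pairs the lists elementwise, so the i-th mask receives the i-th network name. A short sketch (names hypothetical):

rename_demo = MapNode(utility.Rename(keep_ext=True),
                      iterfield=['in_file', 'format_string'],
                      name='rename_demo')
rename_demo.inputs.in_file = ['mask_a.nii.gz', 'mask_b.nii.gz']
rename_demo.inputs.format_string = ['dmn_main', 'salience_main']
# rename_demo.run() would yield dmn_main.nii.gz and salience_main.nii.gz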
Example #20
                      'inlist')


def chooseindex(roi):
    return {
        'isotropic_voxel': list(range(0, 4)),
        'anisotropic_voxel': list(range(4, 8)),
        'isotropic_surface': list(range(8, 12))
    }[roi]
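# A quick worked check of the dict-dispatch above (hypothetical call):
#   chooseindex('anisotropic_voxel') -> [4, 5, 6, 7]
# i.e. the indices select_smoothed_files uses for that smoothing method.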


preprocessing.connect(iter_smoothing_method, ("smoothing_method", chooseindex),
                      select_smoothed_files, 'index')

rename = pe.MapNode(
    util.Rename(format_string="%(orig)s"),
    name="rename",
    iterfield=['in_file'])
rename.inputs.parse_string = "(?P<orig>.*)"

preprocessing.connect(select_smoothed_files, 'out', rename, 'in_file')

specify_model = pe.Node(interface=model.SpecifyModel(), name="specify_model")
specify_model.inputs.input_units = 'secs'
specify_model.inputs.time_repetition = 3.
specify_model.inputs.high_pass_filter_cutoff = 120
specify_model.inputs.subject_info = [
    Bunch(
        conditions=['Task-Odd', 'Task-Even'],
        onsets=[list(range(15, 240, 60)),
                list(range(45, 240, 60))],
Example #21
def create_reg_and_label_wf(name="reg_wf", manual_seg_rois=False):
    inputfields = [
        "subject_id", "aparc_aseg", "fa", "wm_mask", "termination_mask"
    ]

    if manual_seg_rois:
        inputfields.append("manual_seg_rois")

    inputnode = pe.Node(interface=util.IdentityInterface(fields=inputfields),
                        name="inputnode")

    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "dwi_to_t1_matrix", "t1_to_dwi_matrix", "rois_to_dwi", "rois",
        "wmmask_to_dwi", "termmask_to_dwi", "highres_t1_to_dwi_matrix"
    ]),
                         name="outputnode")

    dmn_labels_if = util.Function(input_names=["in_file", "out_filename"],
                                  output_names=["out_file"],
                                  function=dmn_labels_combined)
    dmn_labelling = pe.Node(interface=dmn_labels_if, name='dmn_labelling')

    align_wmmask_to_dwi = coreg_without_resample("align_wmmask_to_fa")
    align_wmmask_to_dwi.inputs.inputnode.interp = "nearestneighbour"

    rois_to_dwi = pe.Node(interface=fsl.ApplyXfm(), name='rois_to_dwi')
    rois_to_dwi.inputs.interp = "nearestneighbour"

    threshold_fa = pe.Node(interface=fsl.ImageMaths(), name='threshold_fa')
    threshold_fa.inputs.op_string = "-thr 0.2 -bin"

    multiply_rois_by_termmask = pe.Node(interface=fsl.MultiImageMaths(),
                                        name='multiply_rois_by_termmask')
    multiply_rois_by_termmask.inputs.op_string = "-mul %s"

    termmask_to_dwi = rois_to_dwi.clone("termmask_to_dwi")

    invertxfm = pe.Node(interface=fsl.ConvertXFM(), name='invertxfm')
    invertxfm.inputs.invert_xfm = True
    '''
    Define renaming nodes
    '''
    rename_t1_to_dwi_mat = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_t1_to_dwi_matrix"),
        name='rename_t1_to_dwi_mat')
    rename_t1_to_dwi_mat.inputs.keep_ext = True

    rename_dwi_to_t1_mat = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_dwi_to_t1_matrix"),
        name='rename_dwi_to_t1_mat')
    rename_dwi_to_t1_mat.inputs.keep_ext = True

    rename_rois_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_rois_dwi"),
        name='rename_rois_dwi')
    rename_rois_dwi.inputs.keep_ext = True

    rename_rois = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_rois"),
        name='rename_rois')
    rename_rois.inputs.keep_ext = True

    rename_termmask_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_term_mask_dwi"),
        name='rename_termmask_dwi')
    rename_termmask_dwi.inputs.keep_ext = True

    rename_wmmask_dwi = pe.Node(
        interface=util.Rename(format_string="%(subject_id)s_wm_mask_dwi"),
        name='rename_wmmask_dwi')
    rename_wmmask_dwi.inputs.keep_ext = True

    rename_highres_matrix_file = pe.Node(interface=util.Rename(
        format_string="%(subject_id)s_t1_to_dwi_NoResample"),
                                         name='rename_highres_matrix_file')
    rename_highres_matrix_file.inputs.keep_ext = True

    workflow = pe.Workflow(name=name)

    workflow.connect([(inputnode, align_wmmask_to_dwi,
                       [("wm_mask", "inputnode.moving_image")])])
    workflow.connect([(inputnode, threshold_fa, [("fa", "in_file")])])
    workflow.connect([(threshold_fa, align_wmmask_to_dwi,
                       [("out_file", "inputnode.fixed_image")])])

    if manual_seg_rois:
        workflow.connect([(inputnode, rois_to_dwi, [("manual_seg_rois",
                                                     "in_file")])])
        workflow.connect([(inputnode, rois_to_dwi, [("manual_seg_rois",
                                                     "reference")])])
        workflow.connect([(inputnode, outputnode, [("manual_seg_rois", "rois")
                                                   ])])

    else:
        workflow.connect([(inputnode, dmn_labelling, [
            (('subject_id', add_subj_name_to_rois), 'out_filename')
        ])])
        workflow.connect([(inputnode, dmn_labelling, [("aparc_aseg", "in_file")
                                                      ])])

        workflow.connect([(dmn_labelling, multiply_rois_by_termmask,
                           [("out_file", "in_file")])])
        workflow.connect([(inputnode, multiply_rois_by_termmask,
                           [("termination_mask", "operand_files")])])
        workflow.connect([(multiply_rois_by_termmask, rename_rois,
                           [("out_file", "in_file")])])
        workflow.connect([(inputnode, rename_rois, [("subject_id",
                                                     "subject_id")])])
        workflow.connect([(rename_rois, rois_to_dwi, [("out_file", "in_file")])
                          ])
        workflow.connect([(rename_rois, rois_to_dwi, [("out_file", "reference")
                                                      ])])
        workflow.connect([(rename_rois, outputnode, [("out_file", "rois")])])

    workflow.connect([(align_wmmask_to_dwi, rois_to_dwi, [
        ("outputnode.highres_matrix_file", "in_matrix_file")
    ])])

    workflow.connect([(inputnode, termmask_to_dwi, [("termination_mask",
                                                     "in_file")])])
    workflow.connect([(inputnode, termmask_to_dwi, [("termination_mask",
                                                     "reference")])])
    workflow.connect([(align_wmmask_to_dwi, termmask_to_dwi, [
        ("outputnode.highres_matrix_file", "in_matrix_file")
    ])])

    workflow.connect([(align_wmmask_to_dwi, invertxfm,
                       [("outputnode.lowres_matrix_file", "in_file")])])

    workflow.connect([(inputnode, rename_t1_to_dwi_mat, [("subject_id",
                                                          "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_t1_to_dwi_mat,
                       [("outputnode.lowres_matrix_file", "in_file")])])
    workflow.connect([(rename_t1_to_dwi_mat, outputnode,
                       [("out_file", "t1_to_dwi_matrix")])])

    workflow.connect([(inputnode, rename_dwi_to_t1_mat, [("subject_id",
                                                          "subject_id")])])
    workflow.connect([(invertxfm, rename_dwi_to_t1_mat, [("out_file",
                                                          "in_file")])])
    workflow.connect([(rename_dwi_to_t1_mat, outputnode,
                       [("out_file", "dwi_to_t1_matrix")])])

    workflow.connect([(inputnode, rename_rois_dwi, [("subject_id",
                                                     "subject_id")])])
    workflow.connect([(rois_to_dwi, rename_rois_dwi, [("out_file", "in_file")])
                      ])
    workflow.connect([(rename_rois_dwi, outputnode, [("out_file",
                                                      "rois_to_dwi")])])

    workflow.connect([(inputnode, rename_termmask_dwi, [("subject_id",
                                                         "subject_id")])])
    workflow.connect([(termmask_to_dwi, rename_termmask_dwi, [("out_file",
                                                               "in_file")])])
    workflow.connect([(rename_termmask_dwi, outputnode,
                       [("out_file", "termmask_to_dwi")])])

    workflow.connect([(inputnode, rename_wmmask_dwi, [("subject_id",
                                                       "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_wmmask_dwi,
                       [("outputnode.out_file", "in_file")])])
    workflow.connect([(rename_wmmask_dwi, outputnode, [("out_file",
                                                        "wmmask_to_dwi")])])

    workflow.connect([(inputnode, rename_highres_matrix_file,
                       [("subject_id", "subject_id")])])
    workflow.connect([(align_wmmask_to_dwi, rename_highres_matrix_file,
                       [("outputnode.highres_matrix_file", "in_file")])])
    workflow.connect([(rename_highres_matrix_file, outputnode,
                       [("out_file", "highres_t1_to_dwi_matrix")])])
    return workflow
Example #22
def init_asl_surf_wf(mem_gb,
                     surface_spaces,
                     medial_surface_nan,
                     name='asl_surf_wf'):
    """
    Sample functional images to FreeSurfer surfaces.

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.
    Outputs are in GIFTI format.

    Workflow Graph
        .. workflow::
            :graph2use: colored
            :simple_form: yes

            from aslprep.workflows.asl import init_asl_surf_wf
            wf = init_asl_surf_wf(mem_gb=0.1,
                                   surface_spaces=['fsnative', 'fsaverage5'],
                                   medial_surface_nan=False)

    Parameters
    ----------
    surface_spaces : :obj:`list`
        List of FreeSurfer surface-spaces (either ``fsaverage{3,4,5,6,}`` or ``fsnative``)
        the functional images are to be resampled to.
        For ``fsnative``, images will be resampled to the individual subject's
        native surface.
    medial_surface_nan : :obj:`bool`
        Replace medial wall values with NaNs on functional GIFTI files

    Inputs
    ------
    source_file
        Motion-corrected ASL series in T1 space
    t1w_preproc
        Bias-corrected structural template image
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    Outputs
    -------
    surfaces
        ASL series, resampled to FreeSurfer surfaces

    """
    from nipype.interfaces.io import FreeSurferSource
    from ...niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from ...niworkflows.interfaces.surf import GiftiSetAnatomicalStructure

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The ASL time-series were resampled onto the following surfaces
(FreeSurfer reconstruction nomenclature):
{out_spaces}.
""".format(out_spaces=', '.join(['*%s*' % s for s in surface_spaces]))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 'subject_id', 'subjects_dir', 't1w2fsnative_xfm'
    ]),
                        name='inputnode')
    itersource = pe.Node(niu.IdentityInterface(fields=['target']),
                         name='itersource')
    itersource.iterables = [('target', surface_spaces)]

    get_fsnative = pe.Node(FreeSurferSource(),
                           name='get_fsnative',
                           run_without_submitting=True)

    def select_target(subject_id, space):
        """Get the target subject ID, given a source subject ID and a target space."""
        return subject_id if space == 'fsnative' else space

    targets = pe.Node(niu.Function(function=select_target),
                      name='targets',
                      run_without_submitting=True,
                      mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.Node(niu.Rename(format_string='%(subject)s',
                                    keep_ext=True),
                         name='rename_src',
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    itk2lta = pe.Node(niu.Function(function=_itk2lta),
                      name="itk2lta",
                      run_without_submitting=True)
    sampler = pe.MapNode(fs.SampleToSurface(
        cortex_mask=True,
        interp_method='trilinear',
        out_type='gii',
        override_reg_subj=True,
        sampling_method='average',
        sampling_range=(0, 1, 0.2),
        sampling_units='frac',
    ),
                         iterfield=['hemi'],
                         name='sampler',
                         mem_gb=mem_gb * 3)
    sampler.inputs.hemi = ['lh', 'rh']
    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield=['in_file'],
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    outputnode = pe.JoinNode(
        niu.IdentityInterface(fields=['surfaces', 'target']),
        joinsource='itersource',
        name='outputnode')

    workflow.connect([
        (inputnode, get_fsnative, [('subject_id', 'subject_id'),
                                   ('subjects_dir', 'subjects_dir')]),
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, itk2lta, [('source_file', 'src_file'),
                              ('t1w2fsnative_xfm', 'in_file')]),
        (get_fsnative, itk2lta, [('T1', 'dst_file')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (itersource, targets, [('target', 'space')]),
        (itersource, rename_src, [('target', 'subject')]),
        (itk2lta, sampler, [('out', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
        (itersource, outputnode, [('target', 'target')]),
    ])

    if not medial_surface_nan:
        workflow.connect(sampler, 'out_file', update_metadata, 'in_file')
        return workflow

    from ...niworkflows.interfaces.freesurfer import MedialNaNs
    # Refine if medial vertices should be NaNs
    medial_nans = pe.MapNode(MedialNaNs(),
                             iterfield=['in_file'],
                             name='medial_nans',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
        (sampler, medial_nans, [('out_file', 'in_file')]),
        (medial_nans, update_metadata, [('out_file', 'in_file')]),
    ])
    return workflow
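
The _itk2lta helper used by the itk2lta node above is defined elsewhere in aslprep and not shown here. A minimal sketch of what it does, assuming the nitransforms package is available (the actual implementation may differ):

def _itk2lta(in_file, src_file, dst_file):
    """Convert an ITK transform to FreeSurfer LTA format (sketch)."""
    from pathlib import Path
    import nitransforms as nt

    out_file = Path("out.lta").absolute()
    # Load the input as an LTA if it already is one, otherwise as ITK,
    # then write it back out in FreeSurfer's LTA format.
    nt.linear.load(
        in_file,
        fmt="fs" if in_file.endswith(".lta") else "itk",
        reference=src_file,
    ).to_filename(out_file, moving=dst_file, fmt="fs")
    return str(out_file)

Because the function runs inside a niu.Function node, its imports are kept inside the function body.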
Example 23
    def create_workflow(self, flow, inputnode, outputnode):
        if self.config.seg_tool == "Freesurfer":
            if self.config.use_existing_freesurfer_data is False:
                # Converting to .mgz format
                fs_mriconvert = pe.Node(interface=fs.MRIConvert(
                    out_type="mgz", out_file="T1.mgz"),
                                        name="mgzConvert")

                if self.config.make_isotropic:
                    fs_mriconvert.inputs.vox_size = (
                        self.config.isotropic_vox_size,
                        self.config.isotropic_vox_size,
                        self.config.isotropic_vox_size)
                    fs_mriconvert.inputs.resample_type = self.config.isotropic_interpolation

                rename = pe.Node(util.Rename(), name='copyOrig')
                orig_dir = os.path.join(self.config.freesurfer_subject_id,
                                        "mri", "orig")
                if not os.path.exists(orig_dir):
                    os.makedirs(orig_dir)
                    print("INFO : Folder not existing; %s created!" % orig_dir)
                rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

                if self.config.brain_mask_extraction_tool == "Freesurfer":
                    # ReconAll => all outputs are forwarded to outputnode, as we don't want to select a specific output
                    fs_reconall = pe.Node(interface=fs.ReconAll(
                        flags='-no-isrunning -parallel -openmp {}'.format(
                            self.config.number_of_threads)),
                                          name='reconall')
                    fs_reconall.inputs.directive = 'all'
                    fs_reconall.inputs.args = self.config.freesurfer_args

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                    fs_reconall.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                    # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
                    # fs_reconall.inputs.brainstem = self.config.segment_brainstem

                    def isavailable(file):
                        # print "T1 is available"
                        return file

                    flow.connect([
                        (inputnode, fs_mriconvert, [(('T1', isavailable),
                                                     'in_file')]),
                        (fs_mriconvert, rename, [('out_file', 'in_file')]),
                        (rename, fs_reconall,
                         [(("out_file", extract_base_directory), "subject_id")
                          ]),
                        (fs_reconall, outputnode,
                         [('subjects_dir', 'subjects_dir'),
                          ('subject_id', 'subject_id')]),
                    ])
                else:
                    # ReconAll => all outputs are forwarded to outputnode, as we don't want to select a specific output
                    fs_autorecon1 = pe.Node(interface=fs.ReconAll(
                        flags='-no-isrunning -parallel -openmp {}'.format(
                            self.config.number_of_threads)),
                                            name='autorecon1')
                    fs_autorecon1.inputs.directive = 'autorecon1'

                    # if self.config.brain_mask_extraction_tool == "Custom" or self.config.brain_mask_extraction_tool == "ANTs":
                    if self.config.brain_mask_extraction_tool == "ANTs":
                        fs_autorecon1.inputs.flags = '-no-isrunning -noskullstrip -parallel -openmp {}'.format(
                            self.config.number_of_threads)
                    fs_autorecon1.inputs.args = self.config.freesurfer_args

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                    fs_autorecon1.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                    def isavailable(file):
                        # print "Is available"
                        return file

                    flow.connect([(inputnode, fs_mriconvert, [
                        (('T1', isavailable), 'in_file')
                    ]), (fs_mriconvert, rename, [('out_file', 'in_file')]),
                                  (rename, fs_autorecon1,
                                   [(("out_file", extract_base_directory),
                                     "subject_id")])])

                    fs_source = pe.Node(interface=FreeSurferSource(),
                                        name='fsSource')

                    fs_mriconvert_nu = pe.Node(interface=fs.MRIConvert(
                        out_type="niigz", out_file="nu.nii.gz"),
                                               name='niigzConvert')

                    flow.connect([(fs_autorecon1, fs_source, [
                        ('subjects_dir', 'subjects_dir'),
                        ('subject_id', 'subject_id')
                    ]), (fs_source, fs_mriconvert_nu, [('nu', 'in_file')])])

                    fs_mriconvert_brainmask = pe.Node(
                        interface=fs.MRIConvert(out_type="mgz",
                                                out_file="brainmask.mgz"),
                        name='fsMriconvertBETbrainmask')

                    if self.config.brain_mask_extraction_tool == "BET":
                        fsl_bet = pe.Node(interface=fsl.BET(
                            out_file='brain.nii.gz',
                            mask=True,
                            skull=True,
                            robust=True),
                                          name='fsl_bet')

                        flow.connect([(fs_mriconvert_nu, fsl_bet,
                                       [('out_file', 'in_file')]),
                                      (fsl_bet, fs_mriconvert_brainmask,
                                       [('out_file', 'in_file')])])

                    elif self.config.brain_mask_extraction_tool == "ANTs":
                        # templatefile =
                        #    pkg_resources.resource_filename('cmtklib', os.path.join('data', 'segmentation',
                        #                                    'ants_template_IXI', 'T_template2_BrainCerebellum.nii.gz'))
                        # probmaskfile = pkg_resources.resource_filename('cmtklib',
                        #     os.path.join('data', 'segmentation', 'ants_template_IXI',
                        #     'T_template_BrainCerebellumProbabilityMask.nii.gz'))

                        ants_bet = pe.Node(interface=ants.BrainExtraction(
                            out_prefix='ants_bet_'),
                                           name='antsBET')
                        ants_bet.inputs.brain_template = self.config.ants_templatefile
                        ants_bet.inputs.brain_probability_mask = self.config.ants_probmaskfile
                        ants_bet.inputs.extraction_registration_mask = self.config.ants_regmaskfile
                        ants_bet.inputs.num_threads = self.config.number_of_threads

                        flow.connect([(fs_mriconvert_nu, ants_bet,
                                       [('out_file', 'anatomical_image')]),
                                      (ants_bet, fs_mriconvert_brainmask,
                                       [('BrainExtractionBrain', 'in_file')])])
                    # elif self.config.brain_mask_extraction_tool == "Custom":
                    #     fs_mriconvert_brainmask.inputs.in_file = os.path.abspath(
                    #         self.config.brain_mask_path)

                    # copy_brainmask_to_fs = pe.Node(interface=copyFileToFreesurfer(),name='copy_brainmask_to_fs')
                    # copy_brainmask_to_fs.inputs.out_file =
                    #    os.path.join(self.config.freesurfer_subject_id,"mri","brainmask.mgz")

                    # copy_brainmaskauto_to_fs = pe.Node(interface=copyFileToFreesurfer(),name='copy_brainmaskauto_to_fs')
                    # copy_brainmaskauto_to_fs.inputs.out_file =
                    #    os.path.join(self.config.freesurfer_subject_id,"mri","brainmask.auto.mgz")

                    # flow.connect([
                    #             (fs_mriconvert_brainmask,copy_brainmask_to_fs,[('out_file','in_file')]),
                    #             (fs_mriconvert_brainmask,copy_brainmaskauto_to_fs,[('out_file','in_file')])
                    #             ])

                    copy_brainmask_to_fs = pe.Node(
                        interface=copyBrainMaskToFreesurfer(),
                        name='copyBrainmaskTofs')

                    flow.connect([(rename, copy_brainmask_to_fs, [
                        (("out_file", extract_base_directory), "subject_dir")
                    ]),
                                  (fs_mriconvert_brainmask,
                                   copy_brainmask_to_fs, [('out_file',
                                                           'in_file')])])

                    # flow.connect([
                    #             (fs_source,fs_mriconvert_nu,[('nu','in_file')])
                    #             ])

                    def get_freesurfer_subject_id(file):
                        # Strip the trailing "/mri/brainmask.mgz" (18 characters)
                        # to recover the FreeSurfer subject directory.
                        return file[:-18]

                    fs_reconall23 = pe.Node(interface=fs.ReconAll(
                        flags='-no-isrunning -parallel -openmp {}'.format(
                            self.config.number_of_threads)),
                                            name='reconall23')
                    fs_reconall23.inputs.directive = 'autorecon2'
                    fs_reconall23.inputs.args = self.config.freesurfer_args
                    fs_reconall23.inputs.flags = '-autorecon3'

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                    fs_reconall23.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                    # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
                    # fs_reconall.inputs.brainstem = self.config.segment_brainstem

                    flow.connect([(copy_brainmask_to_fs, fs_reconall23, [
                        (("out_brainmask_file", get_freesurfer_subject_id),
                         "subject_id")
                    ]),
                                  (fs_reconall23, outputnode,
                                   [('subjects_dir', 'subjects_dir'),
                                    ('subject_id', 'subject_id')])])

            else:
                outputnode.inputs.subjects_dir = self.config.freesurfer_subjects_dir
                outputnode.inputs.subject_id = self.config.freesurfer_subject_id
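
The connections above pass the Rename output through a helper extract_base_directory that is not defined in this snippet. Given that rename writes <subject_dir>/mri/orig/001.mgz and ReconAll is handed the subject directory, a plausible sketch:

def extract_base_directory(file):
    """Return the FreeSurfer subject directory for .../mri/orig/001.mgz (sketch)."""
    import os
    # Three dirname() calls walk up past "001.mgz", "orig" and "mri".
    return os.path.dirname(os.path.dirname(os.path.dirname(file)))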
Example 24
level1design = pe.Node(interface=spm.Level1Design(),
                       name='level1design')
level1design.inputs.timing_units = modelspec.inputs.output_units
level1design.inputs.interscan_interval = modelspec.inputs.time_repetition
level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
level1design.inputs.model_serial_correlations = 'AR(1)'
workflow.connect(modelspec, 'session_info', level1design, 'session_info')
workflow.connect(mean_mask, 'out_file', level1design, 'mask_image')

# plot the design matrix
plot_design_matrix = pe.Node(niu.Function(input_names=['mat_file'],
                                          output_names=['out_file'],
                                          function=_plot_design_matrix),
                             name='plot_design_matrix')
workflow.connect(level1design, 'spm_mat_file', plot_design_matrix, 'mat_file')
plot_design_matrix_rename = pe.Node(
    niu.Rename(format_string='%(subject_id)s_task-olfactoryperception_bold_dmatrix.png'),
    name='plot_design_matrix_rename')
workflow.connect(plot_design_matrix, 'out_file', plot_design_matrix_rename, 'in_file')
workflow.connect(infosource, 'subject_id', plot_design_matrix_rename, 'subject_id')
workflow.connect(plot_design_matrix_rename, 'out_file', datasink, 'report.@dmatrix')

# plot the correlation between regressors
plot_regressors_correlation = pe.Node(niu.Function(input_names=['mat_file'],
                                                   output_names=['out_file'],
                                                   function=_plot_regressors_correlation),
                                      name='plot_regressors_correlation')
workflow.connect(level1design, 'spm_mat_file', plot_regressors_correlation, 'mat_file')
plot_regressors_correlation_rename = pe.Node(
    niu.Rename(format_string='%(subject_id)s_task-olfactoryperception_bold_ccmatrix.png'),
    name='plot_regressors_correlation_rename')
workflow.connect(plot_regressors_correlation, 'out_file', plot_regressors_correlation_rename, 'in_file')
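
The plotting helpers _plot_design_matrix and _plot_regressors_correlation are referenced but not shown. A minimal sketch of the first, assuming the SPM.mat is a pre-v7.3 MAT-file that scipy can load and that matplotlib is available:

def _plot_design_matrix(mat_file):
    """Render the SPM design matrix to a PNG (sketch; field names assume SPM12)."""
    import os
    import numpy as np
    from scipy.io import loadmat
    import matplotlib
    matplotlib.use('Agg')  # headless rendering inside a workflow node
    import matplotlib.pyplot as plt

    spm = loadmat(mat_file, struct_as_record=False, squeeze_me=True)
    design = np.asarray(spm['SPM'].xX.X)  # scans x regressors

    fig, ax = plt.subplots()
    ax.imshow(design, aspect='auto', cmap='gray')
    ax.set_xlabel('regressors')
    ax.set_ylabel('scans')
    out_file = os.path.abspath('design_matrix.png')
    fig.savefig(out_file, bbox_inches='tight')
    plt.close(fig)
    return out_file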
Example 25
def create_fsl_flame_wf(ftest=False, wf_name='groupAnalysis'):
    """
    FSL `FEAT <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FEAT>`_
    BASED Group Analysis

    Parameters
    ----------
    ftest : boolean, optional (default=False)
        The F-test helps investigate several contrasts at the same time,
        for example to see whether any of them (or any combination of them)
        is significantly non-zero. It also allows you to compare the
        contribution of each contrast to the model and decide on significant
        and non-significant ones.
 
    wf_name : string 
        Workflow name
    
    Returns 
    -------
    grp_analysis : workflow object
        Group Analysis workflow object
    
    Notes
    -----
    `Source <https://github.com/openconnectome/C-PAC/blob/master/CPAC/group_analysis/group_analysis_preproc.py>`_
 
    Workflow Inputs::
        
        inputspec.mat_file : string (existing file)
           MAT file containing the design matrix

        inputspec.con_file : string (existing file)
           Contrast file containing the contrast vectors

        inputspec.grp_file : string (existing file)
           File containing the matrix specifying the groups the covariance is
           split into

        inputspec.zmap_files : string (existing nifti file)
           Derivative or z-map file for which the group analysis is to be run

        inputspec.z_threshold : float
            Z statistic threshold value for cluster thresholding. It is used to
            determine what level of activation is statistically significant.
            Increasing this will result in higher estimates of required effect.

        inputspec.p_threshold : float
            Probability threshold for cluster thresholding.

        inputspec.fts_file : string (existing file)
           File containing the matrix specifying the f-contrasts

        inputspec.parameters : tuple of strings
            Tuple containing the FSLDIR path and the MNI template information
                      
    Workflow Outputs::
    
        outputspec.merged : string (nifti file)
            4D volume file after merging all the derivative 
            files from each specified subject.
            
        outputspec.zstats : list (nifti files)
            Z statistic image for each t contrast
            
        outputspec.zfstats : list (nifti files)
            Z statistic image for each f contrast
        
        outputspec.fstats : list (nifti files)
            F statistic for each contrast  
        
        outputspec.cluster_threshold : list (nifti files)
           the thresholded Z statistic image for each t contrast
        
        outputspec.cluster_index : list (nifti files)
            image of clusters for each t contrast; the values 
            in the clusters are the index numbers as used 
            in the cluster list.
        
        outputspec.cluster_localmax_txt : list (text files)
            local maxima text file for each t contrast, 
            defines the coordinates of maximum value in the cluster
        
        outputspec.overlay_threshold : list (nifti files)
            3D color rendered stats overlay image for t contrast
            After reloading this image, use the Statistics Color 
            Rendering GUI to reload the color look-up-table
        
        outputspec.overlay_rendered_image : list (nifti files)
           2D color rendered stats overlay picture for each t contrast
            
        outputspec.cluster_threshold_zf : list (nifti files)
           the thresholded Z statistic image for each f contrast
        
        outputspec.cluster_index_zf : list (nifti files)
            image of clusters for each f contrast; the values 
            in the clusters are the index numbers as used 
            in the cluster list.
            
        outputspec.cluster_localmax_txt_zf : list (text files)
            local maxima text file for each f contrast, 
            defines the coordinates of maximum value in the cluster
        
        outputspec.overlay_threshold_zf : list (nifti files)
            3D color rendered stats overlay image for f contrast
            After reloading this image, use the Statistics Color 
            Rendering GUI to reload the color look-up-table
        
        outputspec.overlay_rendered_image_zf : list (nifti files)
           2D color rendered stats overlay picture for each f contrast
    
    Order of commands:

    - Merge all the Z-map 3D images into 4D image file.  For details see `fslmerge <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Fslutils>`_::
    
        fslmerge -t merged.nii.gz
                    sub01/sca/seed1/sca_Z_FWHM.nii.gz
                    sub02/sca/seed1/sca_Z_FWHM.nii.gz ....
                    
        arguments 
            -t : concatenate images in time
            
    - Create mask specific for analysis. For details see `fslmaths <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Fslutils>`_::
    
        fslmaths merged.nii.gz 
                -abs -Tmin -bin mean_mask.nii.gz
        
        arguments 
             -Tmin  : min across time
             -abs   : absolute value
             -bin   : use (current image>0) to binarise
    
    - FSL FLAMEO to perform higher level analysis.  For details see `flameo <http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FEAT>`_::
        
        flameo --copefile=merged.nii.gz --covsplitfile=anova_with_meanFD.grp --designfile=anova_with_meanFD.mat
               --fcontrastsfile=anova_with_meanFD.fts --ld=stats --maskfile=mean_mask.nii.gz --runmode=ols
               --tcontrastsfile=anova_with_meanFD.con
           
        arguments
            --copefile        : cope regressor data file
            --designfile      : design matrix file
            --maskfile        : mask file
            --tcontrastsfile  : file containing an ASCII matrix specifying the t contrasts
            --fcontrastsfile  : file containing an ASCII matrix specifying the f contrasts
            --runmode         : Inference to perform (mixed effects - OLS)
            
    - Run FSL Easy thresh 
        
      Easy thresh is a simple script for carrying out cluster-based thresholding and colour activation overlaying::
        
        easythresh <raw_zstat> <brain_mask> <z_thresh> <prob_thresh> <background_image> <output_root> [--mm]
      
      A separate workflow called easythresh is used to run the easythresh steps.
      
    .. exec::
        from CPAC.group_analysis import create_fsl_flame_wf
        wf = create_fsl_flame_wf()
        wf.write_graph(
            graph2use='orig',
            dotfilename='./images/generated/group_analysis.dot'
        )

    High Level Workflow Graph:
    
    .. image:: ../../images/generated/group_analysis.png
       :width: 800
    
    
    Detailed Workflow Graph:
    
    .. image:: ../../images/generated/group_analysis_detailed.png
       :width: 800

    Examples
    --------
    
    >>> from CPAC.group_analysis import create_fsl_flame_wf
    >>> preproc = create_fsl_flame_wf()
    >>> preproc.inputs.inputspec.mat_file = '../group_models/anova_with_meanFD/anova_with_meanFD.mat'
    >>> preproc.inputs.inputspec.con_file = '../group_models/anova_with_meanFD/anova_with_meanFD.con'
    >>> preproc.inputs.inputspec.grp_file = '../group_models/anova_with_meanFD/anova_with_meanFD.grp'
    >>> preproc.inputs.inputspec.zmap_files = ['subjects/sub01/seeds_rest_Dickstein_DLPFC/sca_Z_FWHM.nii.gz',
    ...                                        'subjects/sub02/seeds_rest_Dickstein_DLPFC/sca_Z_FWHM.nii.gz']
    >>> preproc.inputs.inputspec.z_threshold = 2.3
    >>> preproc.inputs.inputspec.p_threshold = 0.05
    >>> preproc.inputs.inputspec.parameters = ('/usr/local/fsl/', 'MNI152')
    >>> preproc.run()  # doctest: +SKIP
            
    """
    grp_analysis = pe.Workflow(name=wf_name)

    inputnode = pe.Node(util.IdentityInterface(fields=[
        'merged_file', 'merge_mask', 'mat_file', 'con_file', 'grp_file',
        'fts_file', 'z_threshold', 'p_threshold', 'parameters'
    ]),
                        name='inputspec')

    outputnode = pe.Node(util.IdentityInterface(fields=[
        'merged', 'zstats', 'zfstats', 'fstats', 'cluster_threshold',
        'cluster_index', 'cluster_localmax_txt', 'overlay_threshold',
        'rendered_image', 'cluster_localmax_txt_zf', 'cluster_threshold_zf',
        'cluster_index_zf', 'overlay_threshold_zf', 'rendered_image_zf'
    ]),
                         name='outputspec')
    '''
    merge_to_4d = pe.Node(interface=fsl.Merge(),
                          name='merge_to_4d')
    merge_to_4d.inputs.dimension = 't'

    ### create analysis specific mask
    #-Tmin: min across time
    # -abs: absolute value
    #-bin: use (current image>0) to binarise
    merge_mask = pe.Node(interface=fsl.ImageMaths(),
                         name='merge_mask')
    merge_mask.inputs.op_string = '-abs -Tmin -bin'
    '''

    fsl_flameo = pe.Node(interface=fsl.FLAMEO(), name='fsl_flameo')
    fsl_flameo.inputs.run_mode = 'ols'

    # rename the FLAME zstat outputs after the contrast string labels for
    # easier interpretation
    label_zstat_imports = ["import os"]
    label_zstat = pe.Node(util.Function(input_names=['zstat_list', 'con_file'],
                                        output_names=['new_zstat_list'],
                                        function=label_zstat_files,
                                        imports=label_zstat_imports),
                          name='label_zstat')

    rename_zstats = pe.MapNode(interface=util.Rename(),
                               name='rename_zstats',
                               iterfield=['in_file', 'format_string'])
    rename_zstats.inputs.keep_ext = True

    # create analysis specific mask
    # fslmaths merged.nii.gz -abs -bin -Tmean -mul volume out.nii.gz
    # -Tmean: mean across time
    # create group_reg file
    # this file can provide an idea of how well the subjects
    # in our analysis overlay with each other and the MNI brain.
    # e.g., maybe there is one subject with limited coverage.
    # not attached to sink currently
    merge_mean_mask = pe.Node(interface=fsl.ImageMaths(),
                              name='merge_mean_mask')

    # function node to get the operation string for fslmaths command
    get_opstring = pe.Node(util.Function(input_names=['in_file'],
                                         output_names=['out_file'],
                                         function=get_operation),
                           name='get_opstring')

    # connections
    '''
    grp_analysis.connect(inputnode, 'zmap_files',
                         merge_to_4d, 'in_files')
    grp_analysis.connect(merge_to_4d, 'merged_file',
                         merge_mask, 'in_file')
    '''
    grp_analysis.connect(inputnode, 'merged_file', fsl_flameo, 'cope_file')
    grp_analysis.connect(inputnode, 'merge_mask', fsl_flameo, 'mask_file')
    grp_analysis.connect(inputnode, 'mat_file', fsl_flameo, 'design_file')
    grp_analysis.connect(inputnode, 'con_file', fsl_flameo, 't_con_file')
    grp_analysis.connect(inputnode, 'grp_file', fsl_flameo, 'cov_split_file')

    grp_analysis.connect(fsl_flameo, 'zstats', label_zstat, 'zstat_list')
    grp_analysis.connect(inputnode, 'con_file', label_zstat, 'con_file')

    grp_analysis.connect(fsl_flameo, 'zstats', rename_zstats, 'in_file')

    grp_analysis.connect(label_zstat, 'new_zstat_list', rename_zstats,
                         'format_string')

    if ftest:
        grp_analysis.connect(inputnode, 'fts_file', fsl_flameo, 'f_con_file')

        easy_thresh_zf = easy_thresh('easy_thresh_zf')

        grp_analysis.connect(fsl_flameo, 'zfstats', easy_thresh_zf,
                             'inputspec.z_stats')
        grp_analysis.connect(inputnode, 'merge_mask', easy_thresh_zf,
                             'inputspec.merge_mask')
        grp_analysis.connect(inputnode, 'z_threshold', easy_thresh_zf,
                             'inputspec.z_threshold')
        grp_analysis.connect(inputnode, 'p_threshold', easy_thresh_zf,
                             'inputspec.p_threshold')
        grp_analysis.connect(inputnode, 'parameters', easy_thresh_zf,
                             'inputspec.parameters')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.cluster_threshold',
                             outputnode, 'cluster_threshold_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.cluster_index',
                             outputnode, 'cluster_index_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.cluster_localmax_txt',
                             outputnode, 'cluster_localmax_txt_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.overlay_threshold',
                             outputnode, 'overlay_threshold_zf')
        grp_analysis.connect(easy_thresh_zf, 'outputspec.rendered_image',
                             outputnode, 'rendered_image_zf')

    # calling easythresh for zstats files
    easy_thresh_z = easy_thresh('easy_thresh_z')
    grp_analysis.connect(rename_zstats, 'out_file', easy_thresh_z,
                         'inputspec.z_stats')
    grp_analysis.connect(inputnode, 'merge_mask', easy_thresh_z,
                         'inputspec.merge_mask')
    grp_analysis.connect(inputnode, 'z_threshold', easy_thresh_z,
                         'inputspec.z_threshold')
    grp_analysis.connect(inputnode, 'p_threshold', easy_thresh_z,
                         'inputspec.p_threshold')
    grp_analysis.connect(inputnode, 'parameters', easy_thresh_z,
                         'inputspec.parameters')

    grp_analysis.connect(inputnode, 'merged_file', get_opstring, 'in_file')
    grp_analysis.connect(inputnode, 'merged_file', merge_mean_mask, 'in_file')
    grp_analysis.connect(get_opstring, 'out_file', merge_mean_mask,
                         'op_string')

    grp_analysis.connect(fsl_flameo, 'zfstats', outputnode, 'zfstats')
    grp_analysis.connect(fsl_flameo, 'fstats', outputnode, 'fstats')
    grp_analysis.connect(inputnode, 'merged_file', outputnode, 'merged')

    grp_analysis.connect(rename_zstats, 'out_file', outputnode, 'zstats')

    grp_analysis.connect(easy_thresh_z, 'outputspec.cluster_threshold',
                         outputnode, 'cluster_threshold')
    grp_analysis.connect(easy_thresh_z, 'outputspec.cluster_index', outputnode,
                         'cluster_index')
    grp_analysis.connect(easy_thresh_z, 'outputspec.cluster_localmax_txt',
                         outputnode, 'cluster_localmax_txt')
    grp_analysis.connect(easy_thresh_z, 'outputspec.overlay_threshold',
                         outputnode, 'overlay_threshold')
    grp_analysis.connect(easy_thresh_z, 'outputspec.rendered_image',
                         outputnode, 'rendered_image')

    return grp_analysis
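
Two helpers used above, label_zstat_files and get_operation, are defined elsewhere in C-PAC. Plausible sketches, assuming contrast labels sit on /ContrastName lines of the FSL .con file and the mean-mask op_string follows the fslmaths recipe given in the comments:

def label_zstat_files(zstat_list, con_file):
    """Build one new name per zstat file from the contrast labels (sketch)."""
    labels = []
    with open(con_file) as f:
        for line in f:
            # FSL .con files carry one "/ContrastName<i> <label>" line per contrast
            if line.startswith('/ContrastName'):
                labels.append(line.split(None, 1)[1].strip().replace(' ', '_'))
    return ['zstat%d_%s' % (i + 1, label)
            for i, (label, _) in enumerate(zip(labels, zstat_list))]


def get_operation(in_file):
    """Build the fslmaths op_string '-abs -bin -Tmean -mul <n_volumes>' (sketch)."""
    import nibabel as nib
    n_volumes = nib.load(in_file).shape[3]
    return '-abs -bin -Tmean -mul %d' % n_volumes

Since rename_zstats sets keep_ext=True, the returned names only need the base name; the original .nii.gz extension is preserved.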
Example 26
    datagrabber.inputs.template_args['dwi'] = [[
        'subject_id', ['session_1/DTI_mx_137/*.dcm']
    ]]
    datagrabber.inputs.sort_filelist = True
    datagrabber.inputs.raise_on_empty = False

    wf.connect(subjects_infosource, "subject_id", datagrabber, "subject_id")

    dcm2nii_dwi = pe.Node(Dcm2nii(), name="dcm2nii_dwi")
    dcm2nii_dwi.inputs.gzip_output = True
    dcm2nii_dwi.inputs.nii_output = True
    dcm2nii_dwi.inputs.anonymize = False
    dcm2nii_dwi.plugin_args = {'submit_specs': 'request_memory = 2000'}
    wf.connect(datagrabber, "dwi", dcm2nii_dwi, "source_names")

    dwi_rename = pe.Node(util.Rename(format_string="DTI_mx_137.nii.gz"),
                         name="dwi_rename")
    wf.connect(dcm2nii_dwi, "converted_files", dwi_rename, "in_file")

    bvecs_rename = pe.Node(util.Rename(format_string="DTI_mx_137.bvecs"),
                           name="bvecs_rename")
    wf.connect(dcm2nii_dwi, "bvecs", bvecs_rename, "in_file")

    bvals_rename = pe.Node(util.Rename(format_string="DTI_mx_137.bvals"),
                           name="bvals_rename")
    wf.connect(dcm2nii_dwi, "bvals", bvals_rename, "in_file")

    ds = pe.Node(nio.DataSink(), name="dwi_datasink")
    ds.inputs.base_directory = '/scr/kalifornien1/data/nki_enhanced/'
    ds.inputs.substitutions = [('_subject_id_', '')]
    ds.inputs.regexp_substitutions = [('_others_rename[0-9]*/', '')]
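    # The substitutions strip nipype working-directory prefixes (the
    # "_subject_id_<id>" folder and, via the regexp, "_others_rename<i>"
    # folders from a rename MapNode elsewhere in this workflow) out of the
    # paths DataSink writes.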
Example 27
datasource_dartel = pe.MapNode(nio.DataGrabber(infields=['subject_id'],
                                               outfields=['struct']),
                               name='datasource_dartel',
                               iterfield=['subject_id'])
datasource_dartel.inputs.template = 'nipype-tutorial/data/%s/%s.nii'
datasource_dartel.inputs.template_args = dict(
    struct=[['subject_id', 'struct']])
datasource_dartel.inputs.sort_filelist = True
datasource_dartel.inputs.subject_id = subject_list
"""Here we make sure that struct files have names corresponding to the subject ids.
This way we will be able to pick the right field flows later.
"""

rename_dartel = pe.MapNode(
    niu.Rename(format_string="subject_id_%(subject_id)s_struct"),
    iterfield=['in_file', 'subject_id'],
    name='rename_dartel')
rename_dartel.inputs.subject_id = subject_list
rename_dartel.inputs.keep_ext = True

dartel_workflow = spm_wf.create_DARTEL_template(name='dartel_workflow')
dartel_workflow.inputs.inputspec.template_prefix = "template"
"""This function will allow to pick the right field flow for each subject
"""


def pickFieldFlow(dartel_flow_fields, subject_id):
    from nipype.utils.filemanip import split_filename
    for f in dartel_flow_fields:
        _, name, _ = split_filename(f)
        # rename_dartel embedded "subject_id_<id>" in every flow-field name
        if "subject_id_%s" % subject_id in name:
            return f
    raise Exception("No flow field found for subject %s" % subject_id)
Example 28
                      merge_smoothed_files, 'in3')

select_smoothed_files = pe.Node(interface=util.Select(),
                                name="select_smoothed_files")
preprocessing.connect(merge_smoothed_files, 'out', select_smoothed_files,
                      'inlist')


def chooseindex(roi):
    return {'isotropic_voxel': list(range(0, 4)), 'anisotropic_voxel': list(range(4, 8)),
            'isotropic_surface': list(range(8, 12))}[roi]

preprocessing.connect(iter_smoothing_method, ("smoothing_method", chooseindex),
                      select_smoothed_files, 'index')

rename = pe.MapNode(util.Rename(format_string="%(orig)s"), name="rename",
                    iterfield=['in_file'])
rename.inputs.parse_string = "(?P<orig>.*)"
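# Rename with parse_string "(?P<orig>.*)" captures each file's base name into
# the "orig" field, so every selected file is "renamed" to its own original
# name; the node simply copies the files into one output directory that the
# downstream model nodes can consume regardless of the smoothing branch.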

preprocessing.connect(select_smoothed_files, 'out', rename, 'in_file')

specify_model = pe.Node(interface=model.SpecifyModel(), name="specify_model")
specify_model.inputs.input_units = 'secs'
specify_model.inputs.time_repetition = 3.
specify_model.inputs.high_pass_filter_cutoff = 120
specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd', 'Task-Even'],
                                           onsets=[list(range(15, 240, 60)),
                                                   list(range(45, 240, 60))],
                                           durations=[[15], [15]])] * 4

level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
Example 29
def init_bold_surf_wf(mem_gb,
                      output_spaces,
                      medial_surface_nan,
                      name='bold_surf_wf'):
    """
    This workflow samples functional images to FreeSurfer surfaces

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.

    Outputs are in GIFTI format.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_surf_wf
        wf = init_bold_surf_wf(mem_gb=0.1,
                               output_spaces=['T1w', 'fsnative',
                                             'template', 'fsaverage5'],
                               medial_surface_nan=False)

    **Parameters**

        output_spaces : list
            List of output spaces functional images are to be resampled to
            Target spaces beginning with ``fs`` will be selected for resampling,
            such as ``fsaverage`` or related template spaces
            If the list contains ``fsnative``, images will be resampled to the
            individual subject's native surface
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files

    **Inputs**

        source_file
            Motion-corrected BOLD series in T1 space
        t1_preproc
            Bias-corrected structural template image
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    **Outputs**

        surfaces
            BOLD series, resampled to FreeSurfer surfaces

    """
    # Ensure volumetric spaces do not sneak into this workflow
    spaces = [space for space in output_spaces if space.startswith('fs')]

    workflow = Workflow(name=name)

    if spaces:
        workflow.__desc__ = """\
The BOLD time-series were resampled to surfaces on the following
spaces: {out_spaces}.
""".format(out_spaces=', '.join(['*%s*' % s for s in spaces]))
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1_preproc', 'subject_id', 'subjects_dir',
        't1_2_fsnative_forward_transform'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['surfaces']),
                         name='outputnode')

    def select_target(subject_id, space):
        """ Given a source subject ID and a target space, get the target subject ID """
        return subject_id if space == 'fsnative' else space

    targets = pe.MapNode(niu.Function(function=select_target),
                         iterfield=['space'],
                         name='targets',
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    targets.inputs.space = spaces

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.MapNode(niu.Rename(format_string='%(subject)s',
                                       keep_ext=True),
                            iterfield='subject',
                            name='rename_src',
                            run_without_submitting=True,
                            mem_gb=DEFAULT_MEMORY_MIN_GB)
    rename_src.inputs.subject = spaces

    resampling_xfm = pe.Node(LTAConvert(in_lta='identity.nofile',
                                        out_lta=True),
                             name='resampling_xfm')
    set_xfm_source = pe.Node(ConcatenateLTA(out_type='RAS2RAS'),
                             name='set_xfm_source')

    sampler = pe.MapNode(fs.SampleToSurface(sampling_method='average',
                                            sampling_range=(0, 1, 0.2),
                                            sampling_units='frac',
                                            interp_method='trilinear',
                                            cortex_mask=True,
                                            override_reg_subj=True,
                                            out_type='gii'),
                         iterfield=['source_file', 'target_subject'],
                         iterables=('hemi', ['lh', 'rh']),
                         name='sampler',
                         mem_gb=mem_gb * 3)

    medial_nans = pe.MapNode(MedialNaNs(),
                             iterfield=['in_file', 'target_subject'],
                             name='medial_nans',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True),
                         name='merger',
                         joinsource='sampler',
                         joinfield=['in1'],
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)

    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield='in_file',
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, resampling_xfm, [('source_file', 'source_file'),
                                     ('t1_preproc', 'target_file')]),
        (inputnode, set_xfm_source, [('t1_2_fsnative_forward_transform',
                                      'in_lta2')]),
        (resampling_xfm, set_xfm_source, [('out_lta', 'in_lta1')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (set_xfm_source, sampler, [('out_file', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (merger, update_metadata, [('out', 'in_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
    ])

    if medial_surface_nan:
        workflow.connect([
            (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
            (sampler, medial_nans, [('out_file', 'in_file')]),
            (targets, medial_nans, [('out', 'target_subject')]),
            (medial_nans, merger, [('out_file', 'in1')]),
        ])
    else:
        workflow.connect(sampler, 'out_file', merger, 'in1')

    return workflow
Example 30
    def _run_interface(self, runtime):

        # Loading required packages
        from additional_interfaces import AdditionalDTIMeasures
        from additional_interfaces import DipyDenoise
        import nipype.interfaces.fsl as fsl
        import nipype.interfaces.io as nio
        import nipype.pipeline.engine as pe
        import nipype.interfaces.utility as util
        import os

        # ==============================================================
        # Processing of diffusion-weighted data
        # Extract b0 image
        fslroi = pe.Node(interface=fsl.ExtractROI(), name='extract_b0')
        fslroi.inputs.in_file = self.inputs.dwi
        fslroi.inputs.t_min = 0
        fslroi.inputs.t_size = 1

        # Create a brain mask
        bet = pe.Node(interface=fsl.BET(
            frac=0.3, robust=False, mask=True, no_output=False), name='bet')

        # Eddy-current and motion correction
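        # in_acqp describes the phase-encoding direction and total readout time
        # of each acquisition; in_index maps every volume of in_file to a row
        # of that acqp file.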
        eddy = pe.Node(interface=fsl.epi.Eddy(args='-v'), name='eddy')
        eddy.inputs.in_acqp = self.inputs.acqparams
        eddy.inputs.in_bvec = self.inputs.bvecs
        eddy.inputs.in_bval = self.inputs.bvals
        eddy.inputs.in_file = self.inputs.dwi
        eddy.inputs.in_index = self.inputs.index_file

        # Denoising
        dwi_denoise = pe.Node(interface=DipyDenoise(), name='dwi_denoise')
        dwi_denoise.inputs.in_file = self.inputs.dwi

        # Fitting the diffusion tensor model
        dtifit = pe.Node(interface=fsl.DTIFit(), name='dtifit')
        dtifit.inputs.base_name = self.inputs.subject_id
        dtifit.inputs.dwi = self.inputs.dwi
        dtifit.inputs.bvecs = self.inputs.bvecs
        dtifit.inputs.bvals = self.inputs.bvals

        # Getting AD and RD
        get_rd = pe.Node(interface=AdditionalDTIMeasures(), name='get_rd')

        # DataSink
        datasink = pe.Node(interface=nio.DataSink(), name='datasink')
        datasink.inputs.parameterization = False
        datasink.inputs.base_directory = self.inputs.out_directory + '/_subject_id_' + self.inputs.subject_id + '/dwi_preproc/'

        # Renaming the outputs for consistency
        AD_rename = pe.Node(interface=util.Rename(keep_ext=True), name='AD_rename')
        AD_rename.inputs.format_string = self.inputs.subject_id + '_AD'

        b0_rename = pe.Node(interface=util.Rename(keep_ext=True), name='b0_rename')
        b0_rename.inputs.format_string = self.inputs.subject_id + '_b0'

        dwi_rename = pe.Node(interface=util.Rename(keep_ext=True), name='dwi_rename')
        dwi_rename.inputs.format_string = self.inputs.subject_id + '_dwi'

        mask_rename = pe.Node(interface=util.Rename(keep_ext=True), name='mask_rename')
        mask_rename.inputs.format_string = self.inputs.subject_id + '_mask'

        RD_rename = pe.Node(interface=util.Rename(keep_ext=True), name='RD_rename')
        RD_rename.inputs.format_string = self.inputs.subject_id + '_RD'

        # ==============================================================
        # Setting up the workflow
        dwi_preproc = pe.Workflow(name='dwi_preproc')

        # Diffusion data
        # Preprocessing
        dwi_preproc.connect(fslroi, 'roi_file', bet, 'in_file')
        dwi_preproc.connect(bet, 'mask_file', eddy, 'in_mask')
        dwi_preproc.connect(eddy, 'out_corrected', dwi_denoise, 'in_file')

        # Calculate diffusion measures
        dwi_preproc.connect(dwi_denoise, 'out_file', dtifit, 'dwi')
        dwi_preproc.connect(bet, 'mask_file', dtifit, 'mask')
        dwi_preproc.connect(dtifit, 'L1', get_rd, 'L1')
        dwi_preproc.connect(dtifit, 'L2', get_rd, 'L2')
        dwi_preproc.connect(dtifit, 'L3', get_rd, 'L3')

        # Rename the outputs
        dwi_preproc.connect(dwi_denoise, 'out_file', dwi_rename, 'in_file')
        dwi_preproc.connect(bet, 'out_file', b0_rename, 'in_file')
        dwi_preproc.connect(bet, 'mask_file', mask_rename, 'in_file')
        dwi_preproc.connect(get_rd, 'AD', AD_rename, 'in_file')
        dwi_preproc.connect(get_rd, 'RD', RD_rename, 'in_file')

        # Connecting to the datasink
        dwi_preproc.connect(dwi_rename, 'out_file', datasink, 'preprocessed.@dwi')
        dwi_preproc.connect(b0_rename, 'out_file', datasink, 'preprocessed.@b0')
        dwi_preproc.connect(mask_rename, 'out_file', datasink, 'preprocessed.@mask')
        dwi_preproc.connect(dtifit, 'FA', datasink, 'preprocessed.@FA')
        dwi_preproc.connect(dtifit, 'MD', datasink, 'preprocessed.@MD')
        dwi_preproc.connect(AD_rename, 'out_file', datasink, 'preprocessed.@AD')
        dwi_preproc.connect(RD_rename, 'out_file', datasink, 'preprocessed.@RD')

        # ==============================================================
        # Running the workflow
        dwi_preproc.base_dir = os.path.abspath(
            os.path.join(self.inputs.out_directory,
                         '_subject_id_' + self.inputs.subject_id))
        dwi_preproc.write_graph()
        dwi_preproc.run()

        return runtime