def create_workflow(self, flow, inputnode, outputnode):
        discard_output = pe.Node(
            interface=util.IdentityInterface(fields=["discard_output"]),
            name="discard_output")
        if self.config.discard_n_volumes > 0:
            discard = pe.Node(
                interface=discard_tp(n_discard=self.config.discard_n_volumes),
                name='discard_volumes')
            flow.connect([(inputnode, discard, [("functional", "in_file")]),
                          (discard, discard_output, [("out_file",
                                                      "discard_output")])])
        else:
            flow.connect([(inputnode, discard_output, [("functional",
                                                        "discard_output")])])

        despiking_output = pe.Node(
            interface=util.IdentityInterface(fields=["despiking_output"]),
            name="despkiking_output")
        if self.config.despiking:
            despike = pe.Node(interface=Despike(), name='afni_despike')
            converter = pe.Node(
                interface=afni.AFNItoNIFTI(out_file='fMRI_despike.nii.gz'),
                name='converter')
            flow.connect([(discard_output, despike, [("discard_output",
                                                      "in_file")]),
                          (despike, converter, [("out_file", "in_file")]),
                          (converter, despiking_output,
                           [("out_file", "despiking_output")])])
        else:
            flow.connect([(discard_output, despiking_output,
                           [("discard_output", "despiking_output")])])

        if self.config.slice_timing != "none":
            slc_timing = pe.Node(interface=fsl.SliceTimer(),
                                 name='slice_timing')
            slc_timing.inputs.time_repetition = self.config.repetition_time
            if self.config.slice_timing == "bottom-top interleaved":
                slc_timing.inputs.interleaved = True
                slc_timing.inputs.index_dir = False
            elif self.config.slice_timing == "top-bottom interleaved":
                slc_timing.inputs.interleaved = True
                slc_timing.inputs.index_dir = True
            elif self.config.slice_timing == "bottom-top":
                slc_timing.inputs.interleaved = False
                slc_timing.inputs.index_dir = False
            elif self.config.slice_timing == "top-bottom":
                slc_timing.inputs.interleaved = False
                slc_timing.inputs.index_dir = True

        if self.config.motion_correction:
            mo_corr = pe.Node(interface=fsl.MCFLIRT(stats_imgs=True,
                                                    save_mats=False,
                                                    save_plots=True,
                                                    mean_vol=True),
                              name="motion_correction")

        if self.config.slice_timing != "none":
            flow.connect([(despiking_output, slc_timing, [("despiking_output",
                                                           "in_file")])])
            if self.config.motion_correction:
                flow.connect([
                    (slc_timing, mo_corr, [("slice_time_corrected_file",
                                            "in_file")]),
                    (mo_corr, outputnode, [("out_file", "functional_preproc")
                                           ]),
                    (mo_corr, outputnode, [("par_file", "par_file")]),
                    (mo_corr, outputnode, [("mean_img", "mean_vol")]),
                ])
            else:
                mean = pe.Node(interface=fsl.MeanImage(), name="mean")
                flow.connect([(slc_timing, outputnode, [
                    ("slice_time_corrected_file", "functional_preproc")
                ]),
                              (slc_timing, mean, [("slice_time_corrected_file",
                                                   "in_file")]),
                              (mean, outputnode, [("out_file", "mean_vol")])])
        else:
            if self.config.motion_correction:
                flow.connect([
                    (despiking_output, mo_corr, [("despiking_output",
                                                  "in_file")]),
                    (mo_corr, outputnode, [("out_file", "functional_preproc")
                                           ]),
                    (mo_corr, outputnode, [("par_file", "par_file")]),
                    (mo_corr, outputnode, [("mean_img", "mean_vol")]),
                ])
            else:
                mean = pe.Node(interface=fsl.MeanImage(), name="mean")
                flow.connect([(despiking_output, outputnode,
                               [("despiking_output", "functional_preproc")]),
                              (inputnode, mean, [("functional", "in_file")]),
                              (mean, outputnode, [("out_file", "mean_vol")])])
Example #2
def create_resting_preproc(name='restpreproc'):
    """Create a "resting" time series preprocessing workflow

    The noise removal is based on Behzadi et al. (2007)

    Parameters
    ----------

    name : name of workflow (default: restpreproc)

    Inputs::

        inputspec.func : functional run (filename or list of filenames)

    Outputs::

        outputspec.noise_mask_file : voxels used for PCA to derive noise components
        outputspec.filtered_file : bandpass filtered and noise-reduced time series

    Example
    -------

    >>> TR = 3.0
    >>> wf = create_resting_preproc()
    >>> wf.inputs.inputspec.func = 'f3.nii'
    >>> wf.inputs.inputspec.num_noise_components = 6
    >>> wf.inputs.inputspec.highpass_sigma = 100/(2*TR)
    >>> wf.inputs.inputspec.lowpass_sigma = 12.5/(2*TR)
    >>> wf.run() # doctest: +SKIP

    """

    restpreproc = pe.Workflow(name=name)

    # Define nodes
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'func', 'num_noise_components', 'highpass_sigma', 'lowpass_sigma'
    ]),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'noise_mask_file',
        'filtered_file',
    ]),
                         name='outputspec')
    slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer')
    realigner = create_realign_flow()
    tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr')
    getthresh = pe.Node(interface=fsl.ImageStats(op_string='-p 98'),
                        name='getthreshold')
    threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
    compcor = pe.Node(util.Function(
        input_names=['realigned_file', 'noise_mask_file', 'num_components'],
        output_names=['noise_components'],
        function=extract_noise_components),
                      name='compcorr')
    remove_noise = pe.Node(fsl.FilterRegressor(filter_all=True),
                           name='remove_noise')
    bandpass_filter = pe.Node(fsl.TemporalFilter(), name='bandpass_filter')

    # Define connections
    restpreproc.connect(inputnode, 'func', slicetimer, 'in_file')
    restpreproc.connect(slicetimer, 'slice_time_corrected_file', realigner,
                        'inputspec.func')
    restpreproc.connect(realigner, 'outputspec.realigned_file', tsnr,
                        'in_file')
    restpreproc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file')
    restpreproc.connect(tsnr, 'stddev_file', getthresh, 'in_file')
    restpreproc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh')
    restpreproc.connect(realigner, 'outputspec.realigned_file', compcor,
                        'realigned_file')
    restpreproc.connect(threshold_stddev, 'out_file', compcor,
                        'noise_mask_file')
    restpreproc.connect(inputnode, 'num_noise_components', compcor,
                        'num_components')
    restpreproc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
    restpreproc.connect(compcor, 'noise_components', remove_noise,
                        'design_file')
    restpreproc.connect(inputnode, 'highpass_sigma', bandpass_filter,
                        'highpass_sigma')
    restpreproc.connect(inputnode, 'lowpass_sigma', bandpass_filter,
                        'lowpass_sigma')
    restpreproc.connect(remove_noise, 'out_file', bandpass_filter, 'in_file')
    restpreproc.connect(threshold_stddev, 'out_file', outputnode,
                        'noise_mask_file')
    restpreproc.connect(bandpass_filter, 'out_file', outputnode,
                        'filtered_file')
    return restpreproc
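The compcor node above wraps a helper named extract_noise_components that is not shown in this snippet. A sketch of what such a function can look like, following the Behzadi et al. (2007) approach referenced in the docstring (SVD of the variance-normalized time courses inside the noise mask); the output filename and normalization details are assumptions:

def extract_noise_components(realigned_file, noise_mask_file, num_components):
    """Sketch: derive noise regressors from voxels inside the noise mask."""
    import os
    import numpy as np
    import nibabel as nb
    imgseries = nb.load(realigned_file)
    mask = nb.load(noise_mask_file).get_fdata() > 0
    voxel_timecourses = imgseries.get_fdata()[mask]
    # drop voxels containing NaNs, then center/scale each time course
    voxel_timecourses = voxel_timecourses[~np.isnan(voxel_timecourses.sum(axis=1))]
    X = voxel_timecourses.T
    X = (X - X.mean(axis=0)) / (X.std(axis=0) + 1e-8)
    # left singular vectors = component time series, one column per component
    u, _, _ = np.linalg.svd(X, full_matrices=False)
    components_file = os.path.join(os.getcwd(), 'noise_components.txt')
    np.savetxt(components_file, u[:, :num_components])
    return components_file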
Example #3
realign_flow.connect(splitter, 'out_files', warper, 'in_file')
realign_flow.connect(warper, 'out_file', joiner, 'in_files')
realign_flow.connect(joiner, 'merged_file', realign_outputnode,
                     'realigned_file')
realign_flow.connect(realigner, 'mat_file', realign_outputnode,
                     'transformation_matrices')
realign_flow.connect(realigner, 'par_file', realign_outputnode,
                     'motion_parameters')
realign_flow.connect(realigner, 'rms_files', realign_outputnode,
                     'displacement_parameters')
realign_flow.connect(plot_motion, 'out_file', realign_outputnode,
                     'motion_plots')

# Correct for slice wise acquisition using FSL's SliceTimer (optional)
slicetimer = Node(fsl.SliceTimer(time_repetition=TR,
                                 interleaved=slice_timing_interleaved,
                                 slice_direction=slice_timing_direction,
                                 index_dir=slice_timing_reversed_order),
                  name='slicetimer')

# Create mean image for functional MRI data
mean_func = Node(fsl.ImageMaths(op_string='-Tmean', suffix='_mean'),
                 name='mean_func')

# Brain extraction for structural data
bet_struct = Node(fsl.BET(), name='bet_struct')

# Strip the skull from the mean functional to generate a mask
mean_func_brain = Node(fsl.BET(mask=True, robust=True, frac=0.3),
                       name='mean_func_brain')

# Mask functional data with skull stripped mean functional
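The snippet breaks off at the masking comment. A hypothetical continuation, assuming the enclosing workflow object is named `preproc` (it is not shown above):

# Hypothetical continuation of the announced masking step; `preproc` stands
# in for the enclosing (unshown) workflow object.
mask_func = Node(fsl.ApplyMask(), name='mask_func')
preproc.connect(slicetimer, 'slice_time_corrected_file', mask_func, 'in_file')
preproc.connect(mean_func_brain, 'mask_file', mask_func, 'mask_file')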
Example #4
import nipype.pipeline.engine as pe
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.algorithms.confounds as confounds
import nipype.interfaces.utility as utility

#Generic datagrabber module that wraps around glob in an intelligent way
my_io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles"]),
                              name='my_io_S3DataGrabber')
my_io_S3DataGrabber.inputs.bucket = 'openneuro'
my_io_S3DataGrabber.inputs.sort_filelist = True
my_io_S3DataGrabber.inputs.template = 'sub-01/func/sub-01_task-simon_run-1_bold.nii.gz'
my_io_S3DataGrabber.inputs.anon = True
my_io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
my_io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **slicetimer**
my_fsl_SliceTimer = pe.Node(interface=fsl.SliceTimer(),
                            name='my_fsl_SliceTimer')

#Wraps command **mcflirt**
my_fsl_MCFLIRT = pe.Node(interface=fsl.MCFLIRT(),
                         name='my_fsl_MCFLIRT')

#Computes the time-course SNR for a time series
my_confounds_TSNR = pe.Node(interface=confounds.TSNR(),
                            name='my_confounds_TSNR')
my_confounds_TSNR.inputs.regress_poly = 3

#Wraps command **fslstats**
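The snippet is cut off at the fslstats comment; Example #7 below shows the fuller variant of this generated pipeline, so the continuation presumably looked like this (node names prefixed `my_` to match this snippet):

#Hypothetical continuation, mirroring Example #7 below
my_fsl_ImageStats = pe.Node(interface=fsl.ImageStats(),
                            name='my_fsl_ImageStats')
my_fsl_ImageStats.inputs.op_string = '-p 98'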
Example #5
def legacy(
    bids_base,
    template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	autorotate : bool, optional
		Whether to use a multi-rotation-state transformation start.
		This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, optional
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	negative_contrast_agent : bool, optional
		Whether the scan was acquired with a negative contrast agent given the imaging modality; if true the values will be inverted with respect to zero.
		This is commonly used for iron nano-particle Cerebral Blood Volume (CBV) measurements.
	n_procs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on hardware (but not on current load).
	out_base : str, optional
		Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is default, since it works safely. Use others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
    )

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'file_name', 'events_name',
                                 'subject_session'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]
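    # Note: `divideby_10` (used in the connection above) and
    # `fslmaths_invert_values` (used further below) are SAMRI helpers not
    # shown in this snippet. Judging from the "10 percent noise" comment,
    # divideby_10 presumably reduces to:
    #     def divideby_10(value):
    #         return value / 10.0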

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")

    blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
    blur.inputs.fwhmxy = functional_blur_xy

    if functional_blur_xy and negative_contrast_agent:
        workflow_connections.extend([
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (get_f_scan, invert, [('nii_name', 'output_image')]),
            (invert, datasink, [('out_file', 'func')]),
        ])

    elif functional_blur_xy:
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'output_image')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])

    elif negative_contrast_agent:
        workflow_connections.extend([
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (f_copysform2qform, invert, [(('out_file', fslmaths_invert_values),
                                          'op_string')]),
            (f_copysform2qform, invert, [('out_file', 'in_file')]),
            (invert, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(bids_base, 'preprocessing/crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
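A hypothetical invocation of legacy(); every path below is a placeholder, not real data:

# Hypothetical usage sketch
legacy(
    '/data/bids',
    '/templates/template.nii.gz',
    realign='time',
    tr=1.0,
    out_base='/data/preprocessing',
)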
Example #6
def diagnose(
    bids_base,
    components=None,
    debug=False,
    exclude={},
    include={},
    keep_crashdump=False,
    keep_work=False,
    match_regex=r'.+/sub-(?P<sub>[a-zA-Z0-9]+)/ses-(?P<ses>[a-zA-Z0-9]+)/.*?_task-(?P<task>[a-zA-Z0-9]+)_acq-(?P<acq>[a-zA-Z0-9]+)_run-(?P<run>[a-zA-Z0-9]+)_(?P<mod>[a-zA-Z0-9]+).(?:nii|nii\.gz)',
    n_procs=N_PROCS,
    realign="time",
    tr=None,
    workflow_name="diagnostic",
):
    '''Run a basic independent component analysis diagnostic (using FSL's MELODIC) on functional MRI data stored in a BIDS directory tree.

	Parameters
	----------

	bids_base : string
		Path to the top level of a BIDS directory tree for which to perform the diagnostic.
	components : int, optional
		Number of independent components to produce for each functional measurement; if evaluated as False, the number of components is automatically optimized for the given data by FSL's MELODIC.
	debug : bool, optional
		Enable full nipype debugging support for the workflow construction and execution.
	exclude : dict, optional
		A dictionary with any subset of 'subject', 'session', 'acquisition', 'task', 'modality', and 'path' as keys and corresponding identifiers as values.
		This is a blacklist: if this is specified only non-matching entries will be included in the analysis.
	include : dict, optional
		A dictionary with any subset of 'subject', 'session', 'acquisition', 'task', 'modality', and 'path' as keys and corresponding identifiers as values.
		This is a whitelist: if this is specified only matching entries will be included in the analysis.
	keep_crashdump : bool, optional
		Whether to keep the crashdump directory (containing all the crash reports for intermediary workflow steps, as managed by nipype).
		This is useful for debugging and quality control.
	keep_work : bool, optional
		Whether to keep the work directory (containing all the intermediary workflow steps, as managed by nipype).
		This is useful for debugging and quality control.
	match_regex : str, optional
		Regex matching pattern by which to select input files. Has to contain groups named "sub", "ses", "acq", "task", and "mod".
	n_procs : int, optional
		Maximum number of processes which to simultaneously spawn for the workflow.
		If not explicitly defined, this is automatically calculated from the number of available cores and under the assumption that the workflow will be the main process running for the duration that it is running.
	realign : {"space","time","spacetime",""}
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is default, since it works safely. Use others only with caution!
	tr : int, optional
		Repetition time (in seconds); if evaluated as False, the TR will be read from the NIfTI header of each file individually.
	workflow_name : string, optional
		Name of the workflow execution. The output will be saved one level above the bids_base, under a directory bearing the name given here.
	'''

    bids_base = path.abspath(path.expanduser(bids_base))

    datafind = nio.DataFinder()
    datafind.inputs.root_paths = bids_base
    datafind.inputs.match_regex = match_regex
    datafind_res = datafind.run()

    data_selection = zip(*[
        datafind_res.outputs.sub, datafind_res.outputs.ses,
        datafind_res.outputs.acq, datafind_res.outputs.task,
        datafind_res.outputs.mod, datafind_res.outputs.out_paths
    ])
    data_selection = [list(i) for i in data_selection]
    data_selection = pd.DataFrame(data_selection,
                                  columns=('subject', 'session', 'acquisition',
                                           'task', 'modality', 'path'))

    data_selection = data_selection.sort_values(['session', 'subject'],
                                                ascending=[1, 1])
    if exclude:
        for key in exclude:
            data_selection = data_selection[~data_selection[key].
                                            isin(exclude[key])]
    if include:
        for key in include:
            data_selection = data_selection[data_selection[key].isin(
                include[key])]

    data_selection['out_path'] = ''
    if data_selection['path'].str.contains('.nii.gz').any():
        data_selection['out_path'] = data_selection['path'].apply(
            lambda x: path.basename(
                path.splitext(path.splitext(x)[0])[0] + '_MELODIC'))
    else:
        data_selection['out_path'] = data_selection['path'].apply(
            lambda x: path.basename(path.splitext(x)[0] + '_MELODIC'))

    paths = data_selection['path']

    infosource = pe.Node(interface=util.IdentityInterface(
        fields=['path'], mandatory_inputs=False),
                         name="infosource")
    infosource.iterables = [('path', paths)]

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = 10

    bids_filename = pe.Node(name='bids_filename',
                            interface=util.Function(
                                function=out_path,
                                input_names=inspect.getargspec(out_path)[0],
                                output_names=['filename']))
    bids_filename.inputs.selection_df = data_selection

    bids_container = pe.Node(name='path_container',
                             interface=util.Function(
                                 function=container,
                                 input_names=inspect.getargspec(container)[0],
                                 output_names=['container']))
    bids_container.inputs.selection_df = data_selection

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.abspath(
        path.join(bids_base, '..', workflow_name))
    datasink.inputs.parameterization = False

    melodic = pe.Node(interface=fsl.model.MELODIC(), name="melodic")
    if tr:
        melodic.inputs.tr_sec = tr
    melodic.inputs.report = True
    if components:
        melodic.inputs.dim = int(components)

    workflow_connections = [
        (infosource, dummy_scans, [('path', 'in_file')]),
        (infosource, bids_filename, [('path', 'in_path')]),
        (bids_filename, bids_container, [('filename', 'out_path')]),
        (bids_filename, melodic, [('filename', 'out_dir')]),
        (bids_container, datasink, [('container', 'container')]),
        (melodic, datasink, [('out_dir', 'func')]),
    ]

    if not tr:
        report_tr = pe.Node(name='report_tr',
                            interface=util.Function(
                                function=get_tr,
                                input_names=inspect.getargspec(get_tr)[0],
                                output_names=['tr']))
        report_tr.inputs.ndim = 4

        workflow_connections.extend([
            (infosource, report_tr, [('path', 'in_file')]),
            (report_tr, melodic, [('tr', 'tr_sec')]),
        ])
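        # `get_tr` is a SAMRI helper not shown here; given `ndim = 4` above,
        # it presumably reads the repetition time from the fourth NIfTI
        # header zoom, along the lines of:
        #     def get_tr(in_file, ndim):
        #         import nibabel as nb
        #         return nb.load(in_file).header.get_zooms()[ndim - 1]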

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            (realigner, melodic, [('out_file', 'in_files')]),
        ])
    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        if tr:
            realigner.inputs.tr = tr
        else:
            workflow_connections.extend([
                (report_tr, realigner, [('tr', 'tr')]),
            ])
        #3 for coronal slices (2 for horizontal, 1 for sagittal)
        realigner.inputs.slice_info = 3
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            (realigner, melodic, [('out_file', 'in_files')]),
        ])
    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        if tr:
            realigner.inputs.time_repetition = tr
        else:
            workflow_connections.extend([
                (report_tr, realigner, [('tr', 'time_repetition')]),
            ])
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            (realigner, melodic, [('slice_time_corrected_file', 'in_files')]),
        ])
    else:
        workflow_connections.extend([
            (dummy_scans, melodic, [('out_file', 'in_files')]),
        ])

    crashdump_dir = path.abspath(
        path.join(bids_base, '..', workflow_name + '_crashdump'))
    workflow_config = {'execution': {'crashdump_dir': crashdump_dir}}
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + '_work'
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.abspath(path.join(bids_base, '..'))
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    if not keep_work or not keep_crashdump:
        try:
            workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
        except RuntimeError:
            pass
    else:
        workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
    if not keep_crashdump:
        try:
            shutil.rmtree(crashdump_dir)
        except (FileNotFoundError, OSError):
            pass

    return
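A hypothetical invocation of diagnose(); the BIDS path is a placeholder:

# Hypothetical usage sketch
diagnose(
    '/data/bids',
    components=10,
    realign='time',
)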
Example #7
import nipype.pipeline.engine as pe
import nipype.interfaces.io as io
import nipype.interfaces.fsl as fsl
import nipype.algorithms.confounds as confounds
import nipype.interfaces.utility as utility
import nipype.interfaces.spm as spm
import nipype.algorithms.misc as misc

#Generic datagrabber module that wraps around glob in an intelligent way
io_S3DataGrabber = pe.Node(io.S3DataGrabber(outfields=["outfiles"]),
                           name='io_S3DataGrabber')
io_S3DataGrabber.inputs.bucket = 'openneuro'
io_S3DataGrabber.inputs.sort_filelist = True
io_S3DataGrabber.inputs.template = 'sub-01/func/sub-01_task-simon_run-1_bold.nii.gz'
io_S3DataGrabber.inputs.anon = True
io_S3DataGrabber.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/'
io_S3DataGrabber.inputs.local_directory = '/tmp'

#Wraps command **slicetimer**
fsl_SliceTimer = pe.Node(interface=fsl.SliceTimer(), name='fsl_SliceTimer')

#Computes the time-course SNR for a time series
confounds_TSNR = pe.Node(interface=confounds.TSNR(), name='confounds_TSNR')
confounds_TSNR.inputs.regress_poly = 3

#Wraps command **fslstats**
fsl_ImageStats = pe.Node(interface=fsl.ImageStats(), name='fsl_ImageStats')
fsl_ImageStats.inputs.op_string = '-p 98'

#Wraps command **fslmaths**
fsl_Threshold = pe.Node(interface=fsl.Threshold(), name='fsl_Threshold')
fsl_Threshold.inputs.args = '-bin'

#Anatomical compcor: for inputs and outputs, see CompCor.
confounds_ACompCor = pe.Node(interface=confounds.ACompCor(),
                             name='confounds_ACompCor')
Example #8
    def create_workflow(self, flow, inputnode, outputnode):
        """Create the stage worflow.

        Parameters
        ----------
        flow : nipype.pipeline.engine.Workflow
            The nipype.pipeline.engine.Workflow instance of the fMRI pipeline

        inputnode : nipype.interfaces.utility.IdentityInterface
            Identity interface describing the inputs of the stage

        outputnode : nipype.interfaces.utility.IdentityInterface
            Identity interface describing the outputs of the stage
        """
        discard_output = pe.Node(
            interface=util.IdentityInterface(fields=["discard_output"]),
            name="discard_output",
        )
        if self.config.discard_n_volumes > 0:
            discard = pe.Node(
                interface=DiscardTP(n_discard=self.config.discard_n_volumes),
                name="discard_volumes",
            )
            # fmt:off
            flow.connect(
                [
                    (inputnode, discard, [("functional", "in_file")]),
                    (discard, discard_output, [("out_file", "discard_output")]),
                ]
            )
            # fmt:on
        else:
            # fmt:off
            flow.connect(
                [(inputnode, discard_output, [("functional", "discard_output")])]
            )
            # fmt:on

        despiking_output = pe.Node(
            interface=util.IdentityInterface(fields=["despiking_output"]),
            name="despkiking_output",
        )
        if self.config.despiking:
            despike = pe.Node(interface=Despike(), name="afni_despike")
            converter = pe.Node(
                interface=afni.AFNItoNIFTI(out_file="fMRI_despike.nii.gz"),
                name="converter",
            )
            # fmt:off
            flow.connect(
                [
                    (discard_output, despike, [("discard_output", "in_file")]),
                    (despike, converter, [("out_file", "in_file")]),
                    (converter, despiking_output, [("out_file", "despiking_output")]),
                ]
            )
            # fmt:on
        else:
            # fmt:off
            flow.connect(
                [
                    (discard_output, despiking_output, [("discard_output", "despiking_output")],)
                ]
            )
            # fmt:on

        if self.config.slice_timing != "none":
            slc_timing = pe.Node(interface=fsl.SliceTimer(), name="slice_timing")
            slc_timing.inputs.time_repetition = self.config.repetition_time
            if self.config.slice_timing == "bottom-top interleaved":
                slc_timing.inputs.interleaved = True
                slc_timing.inputs.index_dir = False
            elif self.config.slice_timing == "top-bottom interleaved":
                slc_timing.inputs.interleaved = True
                slc_timing.inputs.index_dir = True
            elif self.config.slice_timing == "bottom-top":
                slc_timing.inputs.interleaved = False
                slc_timing.inputs.index_dir = False
            elif self.config.slice_timing == "top-bottom":
                slc_timing.inputs.interleaved = False
                slc_timing.inputs.index_dir = True

        if self.config.motion_correction:
            mo_corr = pe.Node(
                interface=fsl.MCFLIRT(
                    stats_imgs=True, save_mats=False, save_plots=True, mean_vol=True
                ),
                name="motion_correction",
            )

        if self.config.slice_timing != "none":
            # fmt:off
            flow.connect(
                [(despiking_output, slc_timing, [("despiking_output", "in_file")])]
            )
            # fmt:on
            if self.config.motion_correction:
                # fmt:off
                flow.connect(
                    [
                        (slc_timing, mo_corr, [("slice_time_corrected_file", "in_file")],),
                        (mo_corr, outputnode, [("out_file", "functional_preproc")]),
                        (mo_corr, outputnode, [("par_file", "par_file")]),
                        (mo_corr, outputnode, [("mean_img", "mean_vol")]),
                    ]
                )
                # fmt:on
            else:
                mean = pe.Node(interface=fsl.MeanImage(), name="mean")
                # fmt:off
                flow.connect(
                    [
                        (slc_timing, outputnode, [("slice_time_corrected_file", "functional_preproc")],),
                        (slc_timing, mean, [("slice_time_corrected_file", "in_file")]),
                        (mean, outputnode, [("out_file", "mean_vol")]),
                    ]
                )
                # fmt:on
        else:
            if self.config.motion_correction:
                # fmt:off
                flow.connect(
                    [
                        (despiking_output, mo_corr, [("despiking_output", "in_file")]),
                        (mo_corr, outputnode, [("out_file", "functional_preproc"),
                                               ("par_file", "par_file"),
                                               ("mean_img", "mean_vol")]),
                    ]
                )
                # fmt:on
            else:
                mean = pe.Node(interface=fsl.MeanImage(), name="mean")
                # fmt:off
                flow.connect(
                    [
                        (despiking_output, outputnode, [("despiking_output", "functional_preproc")]),
                        (despiking_output, mean, [("despiking_output", "in_file")]),
                        (mean, outputnode, [("out_file", "mean_vol")]),
                    ]
                )
                # fmt:on
Example #9
    outfields=["func", "struct"]),
                            name='DataFromOpenNeuro')
DataFromOpenNeuro.inputs.bucket = 'openfmri'
DataFromOpenNeuro.inputs.sort_filelist = True
DataFromOpenNeuro.inputs.template = '*'
DataFromOpenNeuro.inputs.anon = True
DataFromOpenNeuro.inputs.bucket_path = 'ds001/'
DataFromOpenNeuro.inputs.local_directory = '/tmp'
DataFromOpenNeuro.inputs.field_template = dict(
    func='%s/BOLD/task001_%s/bold.nii.gz',
    struct='%s/anatomy/highres001_brain.nii.gz')
DataFromOpenNeuro.inputs.template_args = dict(func=[['subj_id', 'run_num']],
                                              struct=[['subj_id']])

#Wraps command **slicetimer**
SliceTimer = pe.MapNode(interface=fsl.SliceTimer(),
                        name='SliceTimer',
                        iterfield=['in_file'])

#Wraps command **mcflirt**
MotionCorrection = pe.MapNode(interface=fsl.MCFLIRT(),
                              name='MotionCorrection',
                              iterfield=['in_file'])

#Computes the time-course SNR for a time series
TSNR = pe.MapNode(interface=confounds.TSNR(),
                  name='TSNR',
                  iterfield=['in_file'])
TSNR.inputs.regress_poly = 3

#Wraps command **fslstats**
Example #10
def min_func_preproc(subject, sessions, data_dir, fs_dir, wd, sink, TR,
                     EPI_resolution):

    #initiate min func preproc workflow
    wf = pe.Workflow(name='MPP')
    wf.base_dir = wd
    wf.config['execution']['crashdump_dir'] = wf.base_dir + "/crash_files"

    ## set fsl output type to nii.gz
    fsl.FSLCommand.set_default_output_type('NIFTI_GZ')

    # I/O nodes
    inputnode = pe.Node(util.IdentityInterface(fields=['subjid', 'fs_dir']),
                        name='inputnode')
    inputnode.inputs.subjid = subject
    inputnode.inputs.fs_dir = fs_dir

    ds = pe.Node(nio.DataSink(base_directory=sink, parameterization=False),
                 name='sink')

    ds.inputs.substitutions = [('moco.nii.gz.par', 'moco.par'),
                               ('moco.nii.gz_', 'moco_')]

    #infosource to iterate over sessions: COND, EXT1, EXT2
    sessions_infosource = pe.Node(util.IdentityInterface(fields=['session']),
                                  name='session')
    sessions_infosource.iterables = [('session', sessions)]

    #select files
    templates = {
        'func_data': '{session}/func_data.nii.gz',
        'T1_brain': 'T1/T1_brain.nii.gz',
        'wmedge': 'T1/MASKS/aparc_aseg.WMedge.nii.gz'
    }

    selectfiles = pe.Node(nio.SelectFiles(templates, base_directory=data_dir),
                          name='selectfiles')

    wf.connect(sessions_infosource, 'session', selectfiles, 'session')
    wf.connect(sessions_infosource, 'session', ds, 'container')

    ##########################################################################
    ########################    START   ######################################
    ##########################################################################

    ###########################################################################
    ########################    No. 3   ######################################

    #change the data type to float
    fsl_float = pe.Node(fsl.maths.MathsCommand(output_datatype='float'),
                        name='fsl_float')

    wf.connect(selectfiles, 'func_data', fsl_float, 'in_file')

    ###########################################################################
    ########################    No. 4   ######################################

    #get FD from fsl_motion_outliers
    FD = pe.Node(fsl.MotionOutliers(out_file='func_data_FD_outliers.txt',
                                    out_metric_values='func_data_FD.txt',
                                    metric='fd'),
                 name='FD')

    wf.connect(fsl_float, 'out_file', FD, 'in_file')
    wf.connect(FD, 'out_metric_values', ds, 'QC.@FD')
    wf.connect(FD, 'out_file', ds, 'QC.@FDoutliers')

    ###########################################################################
    ########################    No. 5   ######################################

    #slice timing correction: sequential ascending
    slicetimer = pe.Node(
        fsl.SliceTimer(
            index_dir=False,
            interleaved=False,
            #slice_direction=3, #z direction
            time_repetition=TR,
            out_file='func_data_stc.nii.gz'),
        name='slicetimer')

    wf.connect(fsl_float, 'out_file', slicetimer, 'in_file')
    wf.connect(slicetimer, 'slice_time_corrected_file', ds, 'TEMP.@slicetimer')

    ###########################################################################
    ########################    No. 6   ######################################
    #do realignment to the middle or first volume
    mcflirt = pe.Node(fsl.MCFLIRT(save_mats=True,
                                  save_plots=True,
                                  save_rms=True,
                                  ref_vol=1,
                                  out_file='func_data_stc_moco.nii.gz'),
                      name='mcflirt')

    wf.connect(slicetimer, 'slice_time_corrected_file', mcflirt, 'in_file')
    wf.connect(mcflirt, 'out_file', ds, 'TEMP.@mcflirt')
    wf.connect(mcflirt, 'par_file', ds, 'MOCO.@par_file')
    wf.connect(mcflirt, 'rms_files', ds, 'MOCO.@rms_files')
    wf.connect(mcflirt, 'mat_file', ds, 'MOCO_MAT.@mcflirt')

    # plot motion parameters
    rotplotter = pe.Node(fsl.PlotMotionParams(in_source='fsl',
                                              plot_type='rotations',
                                              out_file='rotation.png'),
                         name='rotplotter')

    transplotter = pe.Node(fsl.PlotMotionParams(in_source='fsl',
                                                plot_type='translations',
                                                out_file='translation.png'),
                           name='transplotter')

    dispplotter = pe.Node(
        interface=fsl.PlotMotionParams(in_source='fsl',
                                       plot_type='displacement',
                                       out_file='displacement.png'),
        name='dispplotter')

    wf.connect(mcflirt, 'par_file', rotplotter, 'in_file')
    wf.connect(mcflirt, 'par_file', transplotter, 'in_file')
    wf.connect(mcflirt, 'rms_files', dispplotter, 'in_file')
    wf.connect(rotplotter, 'out_file', ds, 'PLOTS.@rotplot')
    wf.connect(transplotter, 'out_file', ds, 'PLOTS.@transplot')
    wf.connect(dispplotter, 'out_file', ds, 'PLOTS.@disppplot')

    #calculate tSNR and the mean

    moco_Tmean = pe.Node(fsl.maths.MathsCommand(args='-Tmean',
                                                out_file='moco_Tmean.nii.gz'),
                         name='moco_Tmean')

    moco_Tstd = pe.Node(fsl.maths.MathsCommand(args='-Tstd',
                                               out_file='moco_Tstd.nii.gz'),
                        name='moco_Tstd')

    tSNR0 = pe.Node(fsl.maths.MultiImageMaths(op_string='-div %s',
                                              out_file='moco_tSNR.nii.gz'),
                    name='moco_tSNR')

    wf.connect(mcflirt, 'out_file', moco_Tmean, 'in_file')
    wf.connect(mcflirt, 'out_file', moco_Tstd, 'in_file')
    wf.connect(moco_Tmean, 'out_file', tSNR0, 'in_file')
    wf.connect(moco_Tstd, 'out_file', tSNR0, 'operand_files')
    wf.connect(moco_Tmean, 'out_file', ds, 'TEMP.@moco_Tmean')
    wf.connect(moco_Tstd, 'out_file', ds, 'TEMP.@moco_Tstd')
    wf.connect(tSNR0, 'out_file', ds, 'TEMP.@moco_Tsnr')

    ###########################################################################
    ########################    No. 7   ######################################

    #bias field correction of mean epi for better coregistration
    bias = pe.Node(
        fsl.FAST(
            img_type=2,
            #restored_image='epi_Tmeanrestored.nii.gz',
            output_biascorrected=True,
            out_basename='moco_Tmean',
            no_pve=True,
            probability_maps=False),
        name='bias')

    wf.connect(moco_Tmean, 'out_file', bias, 'in_files')
    wf.connect(bias, 'restored_image', ds, 'TEMP.@restored_image')

    #co-registration to anat using FS BBregister and mean EPI
    bbregister = pe.Node(fs.BBRegister(
        subject_id=subject,
        subjects_dir=fs_dir,
        contrast_type='t2',
        init='fsl',
        out_fsl_file='func2anat.mat',
        out_reg_file='func2anat.dat',
        registered_file='moco_Tmean_restored2anat.nii.gz',
        epi_mask=True),
                         name='bbregister')

    wf.connect(bias, 'restored_image', bbregister, 'source_file')
    wf.connect(bbregister, 'registered_file', ds, 'TEMP.@registered_file')
    wf.connect(bbregister, 'out_fsl_file', ds, 'COREG.@out_fsl_file')
    wf.connect(bbregister, 'out_reg_file', ds, 'COREG.@out_reg_file')
    wf.connect(bbregister, 'min_cost_file', ds, 'COREG.@min_cost_file')

    #inverse func2anat mat
    inverseXFM = pe.Node(fsl.ConvertXFM(invert_xfm=True,
                                        out_file='anat2func.mat'),
                         name='inverseXFM')

    wf.connect(bbregister, 'out_fsl_file', inverseXFM, 'in_file')
    wf.connect(inverseXFM, 'out_file', ds, 'COREG.@out_fsl_file_inv')

    #plot the coregistration quality
    slicer = pe.Node(fsl.Slicer(middle_slices=True, out_file='func2anat.png'),
                     name='slicer')

    wf.connect(selectfiles, 'wmedge', slicer, 'image_edges')
    wf.connect(bbregister, 'registered_file', slicer, 'in_file')
    wf.connect(slicer, 'out_file', ds, 'PLOTS.@func2anat')

    ###########################################################################
    ########################    No. 8   ######################################
    #MOCO and COREGISTRATION

    #resample T1 to EPI resolution to use it as a reference image
    resample_T1 = pe.Node(
        fsl.FLIRT(datatype='float',
                  apply_isoxfm=EPI_resolution,
                  out_file='T1_brain_EPI.nii.gz'),
        # interp='nearestneighbour' omitted; keep spline so it looks nicer
        name='resample_T1')

    wf.connect(selectfiles, 'T1_brain', resample_T1, 'in_file')
    wf.connect(selectfiles, 'T1_brain', resample_T1, 'reference')
    wf.connect(resample_T1, 'out_file', ds, 'COREG.@resample_T1')

    #concate matrices (moco and func2anat) volume-wise
    concat_xfm = pe.MapNode(fsl.ConvertXFM(concat_xfm=True),
                            iterfield=['in_file'],
                            name='concat_xfm')

    wf.connect(mcflirt, 'mat_file', concat_xfm, 'in_file')
    wf.connect(bbregister, 'out_fsl_file', concat_xfm, 'in_file2')
    wf.connect(concat_xfm, 'out_file', ds, 'MOCO2ANAT_MAT.@concat_out')

    #split func_data
    split = pe.Node(fsl.Split(dimension='t'), name='split')

    wf.connect(slicetimer, 'slice_time_corrected_file', split, 'in_file')

    #motion correction and coregistration in one interpolation step
    flirt = pe.MapNode(fsl.FLIRT(apply_xfm=True,
                                 interp='spline',
                                 datatype='float'),
                       iterfield=['in_file', 'in_matrix_file'],
                       name='flirt')

    wf.connect(split, 'out_files', flirt, 'in_file')
    wf.connect(resample_T1, 'out_file', flirt, 'reference')
    wf.connect(concat_xfm, 'out_file', flirt, 'in_matrix_file')

    #merge the files to have 4d dataset motion corrected and co-registered to T1
    merge = pe.Node(fsl.Merge(dimension='t',
                              merged_file='func_data_stc_moco2anat.nii.gz'),
                    name='merge')

    wf.connect(flirt, 'out_file', merge, 'in_files')
    wf.connect(merge, 'merged_file', ds, 'TEMP.@merged')

    ###########################################################################
    ########################    No. 9   ######################################

    #run BET on co-registered EPI in 1mm and get the mask
    bet = pe.Node(fsl.BET(mask=True,
                          functional=True,
                          out_file='moco_Tmean_restored2anat_BET.nii.gz'),
                  name='bet')

    wf.connect(bbregister, 'registered_file', bet, 'in_file')
    wf.connect(bet, 'out_file', ds, 'TEMP.@func_data_example')
    wf.connect(bet, 'mask_file', ds, 'TEMP.@func_data_mask')

    #resample BET mask to EPI resolution
    resample_mask = pe.Node(fsl.FLIRT(
        datatype='int',
        apply_isoxfm=EPI_resolution,
        interp='nearestneighbour',
        out_file='prefiltered_func_data_mask.nii.gz'),
                            name='resample_mask')

    wf.connect(bet, 'mask_file', resample_mask, 'in_file')
    wf.connect(resample_T1, 'out_file', resample_mask, 'reference')
    wf.connect(resample_mask, 'out_file', ds, '@mask')

    #apply the mask to 4D data to get rid of the "eyes and the rest"
    mask4D = pe.Node(fsl.maths.ApplyMask(), name='mask')

    wf.connect(merge, 'merged_file', mask4D, 'in_file')
    wf.connect(resample_mask, 'out_file', mask4D, 'mask_file')

    ###########################################################################
    ########################    No. 10   ######################################

    #get the values necessary for intensity normalization
    median = pe.Node(fsl.utils.ImageStats(op_string='-k %s -p 50'),
                     name='median')

    wf.connect(resample_mask, 'out_file', median, 'mask_file')
    wf.connect(mask4D, 'out_file', median, 'in_file')

    #compute the scaling factor
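    # The in-mask median (50th percentile from the `median` node above) is
    # mapped to 10000, analogous to FSL FEAT's grand-mean intensity
    # normalization of the 4D dataset by a single multiplicative factor.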
    def get_factor(val):

        factor = 10000 / val
        return factor

    get_scaling_factor = pe.Node(util.Function(input_names=['val'],
                                               output_names=['out_val'],
                                               function=get_factor),
                                 name='scaling_factor')

    #normalize the 4D func data with one scaling factor
    multiplication = pe.Node(fsl.maths.BinaryMaths(
        operation='mul', out_file='prefiltered_func_data.nii.gz'),
                             name='multiplication')

    wf.connect(median, 'out_stat', get_scaling_factor, 'val')
    wf.connect(get_scaling_factor, 'out_val', multiplication, 'operand_value')
    wf.connect(mask4D, 'out_file', multiplication, 'in_file')
    wf.connect(multiplication, 'out_file', ds, '@prefiltered_func_data')

    ###########################################################################
    ########################    No. 11   ######################################

    #calculate tSNR and the mean of the new prefiltered and detrended dataset

    tsnr_detrend = pe.Node(misc.TSNR(
        regress_poly=1,
        detrended_file='prefiltered_func_data_detrend.nii.gz',
        mean_file='prefiltered_func_data_detrend_Tmean.nii.gz',
        tsnr_file='prefiltered_func_data_detrend_tSNR.nii.gz'),
                           name='tsnr_detrend')

    wf.connect(multiplication, 'out_file', tsnr_detrend, 'in_file')
    wf.connect(tsnr_detrend, 'tsnr_file', ds, 'QC.@tsnr_detrend')
    wf.connect(tsnr_detrend, 'mean_file', ds, 'QC.@detrend_mean_file')
    wf.connect(tsnr_detrend, 'detrended_file', ds, '@detrend_file')

    #resample the EPI mask to original EPI dimensions
    convert2func = pe.Node(fsl.FLIRT(apply_xfm=True,
                                     interp='nearestneighbour',
                                     out_file='func_data_mask2func.nii.gz'),
                           name='convert2func')

    wf.connect(resample_mask, 'out_file', convert2func, 'in_file')
    wf.connect(bias, 'restored_image', convert2func, 'reference')
    wf.connect(inverseXFM, 'out_file', convert2func, 'in_matrix_file')
    wf.connect(convert2func, 'out_file', ds, 'QC.@inv')

    ###########################################################################
    ########################    RUN   ######################################
    wf.write_graph(dotfilename='wf.dot',
                   graph2use='colored',
                   format='pdf',
                   simple_form=True)
    wf.run(plugin='MultiProc', plugin_args={'n_procs': 2})
    #wf.run()
    return
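
# Note on matrix order in the `concat_xfm` step above: with nipype's
# fsl.ConvertXFM, `in_file` holds the transform applied first (the per-volume
# MCFLIRT matrix) and `in_file2` the transform applied second (the BBRegister
# func-to-anat matrix). A minimal standalone sketch with hypothetical file
# names, not part of the original workflow:
from nipype.interfaces import fsl

concat = fsl.ConvertXFM(concat_xfm=True,
                        in_file='vol2ref.mat',     # hypothetical: MCFLIRT matrix, applied first
                        in_file2='func2anat.mat',  # hypothetical: BBRegister matrix, applied second
                        out_file='vol2anat.mat')
# equivalent command line: convert_xfm -omat vol2anat.mat -concat func2anat.mat vol2ref.mat
concat.run()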
Example #11
File: preproc.py  Project: sgagnon/lyman
def create_realignment_workflow(name="realignment",
                                temporal_interp=True,
                                TR=2,
                                slice_order="up",
                                interleaved=True):
    """Motion and slice-time correct the timeseries and summarize."""
    inputnode = Node(IdentityInterface(["timeseries"]), "inputs")

    # Get the middle volume of each run for motion correction
    extractref = MapNode(ExtractRealignmentTarget(), "in_file", "extractref")

    # Motion correct to middle volume of each run
    mcflirt = MapNode(
        fsl.MCFLIRT(cost="normcorr",
                    interpolation="spline",
                    save_mats=True,
                    save_rms=True,
                    save_plots=True), ["in_file", "ref_file"], "mcflirt")

    # Optionally temporally interpolate to correct for slice time differences
    if temporal_interp:
        slicetime = MapNode(fsl.SliceTimer(time_repetition=TR), "in_file",
                            "slicetime")

        if slice_order == "down":
            slicetime.inputs.index_dir = True
        elif slice_order != "up":
            raise ValueError("slice_order must be 'up' or 'down'")

        if interleaved:
            slicetime.inputs.interleaved = True

    # Generate a report on the motion correction
    mcreport = MapNode(RealignmentReport(),
                       ["target_file", "realign_params", "displace_params"],
                       "mcreport")

    # Define the outputs
    outputnode = Node(
        IdentityInterface(
            ["timeseries", "example_func", "report", "motion_file"]),
        "outputs")

    # Define and connect the sub workflow
    realignment = Workflow(name)

    realignment.connect([
        (inputnode, extractref, [("timeseries", "in_file")]),
        (inputnode, mcflirt, [("timeseries", "in_file")]),
        (extractref, mcflirt, [("out_file", "ref_file")]),
        (extractref, mcreport, [("out_file", "target_file")]),
        (mcflirt, mcreport, [("par_file", "realign_params"),
                             ("rms_files", "displace_params")]),
        (extractref, outputnode, [("out_file", "example_func")]),
        (mcreport, outputnode, [("realign_report", "report"),
                                ("motion_file", "motion_file")]),
    ])

    if temporal_interp:
        realignment.connect([(mcflirt, slicetime, [("out_file", "in_file")]),
                             (slicetime, outputnode,
                              [("slice_time_corrected_file", "timeseries")])])
    else:
        realignment.connect([(mcflirt, outputnode, [("out_file", "timeseries")
                                                    ])])

    return realignment
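
# A minimal usage sketch for the sub-workflow above; the run file names and
# working directory are hypothetical. Inputs are addressed through the node
# named "inputs".
realign = create_realignment_workflow(temporal_interp=True, TR=2,
                                      slice_order="up", interleaved=True)
realign.inputs.inputs.timeseries = ["run1.nii.gz", "run2.nii.gz"]  # hypothetical
realign.base_dir = "/tmp/realign_work"  # hypothetical
realign.run()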
Example #12
def diagnose(measurements_base,
	functional_scan_types=[],
	structural_scan_types=[],
	sessions=[],
	subjects=[],
	measurements=[],
	exclude_subjects=[],
	exclude_measurements=[],
	actual_size=False,
	components=None,
	keep_work=False,
	loud=False,
	n_procs=N_PROCS,
	realign="time",
	tr=1,
	workflow_name="diagnostic",
	):
	'''

	realign: {"space","time","spacetime",""}
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is the default, since it works safely. Use the others only with caution!

	'''
	
	measurements_base = path.abspath(path.expanduser(measurements_base))

	#select all functional/structural scan types unless specified
	if not functional_scan_types or not structural_scan_types:
		scan_classification = pd.read_csv(scan_classification_file_path)
		if not functional_scan_types:
			functional_scan_types = list(scan_classification[(scan_classification["categories"] == "functional")]["scan_type"])
		if not structural_scan_types:
			structural_scan_types = list(scan_classification[(scan_classification["categories"] == "structural")]["scan_type"])

	#hack to allow structural scan type disabling:
	if structural_scan_types == ["none"]:
		structural_scan_types = []

	# define measurement directories to be processed, and populate the list either with the given `measurements`, or with an intelligent selection
	scan_types = deepcopy(functional_scan_types)
	scan_types.extend(structural_scan_types)
	data_selection=get_data_selection(measurements_base, sessions, scan_types=scan_types, subjects=subjects, exclude_subjects=exclude_subjects, measurements=measurements, exclude_measurements=exclude_measurements)
	if not subjects:
		subjects = set(list(data_selection["subject"]))
	if not sessions:
		sessions = set(list(data_selection["session"]))

	# here we start to define the nipype workflow elements (nodes, connections, meta)
	subjects_sessions = data_selection[["subject","session"]].drop_duplicates().values.tolist()
	infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_session']), name="infosource")
	infosource.iterables = [('subject_session', subjects_sessions)]

	get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(function=get_scan,input_names=inspect.getargspec(get_scan)[0], output_names=['scan_path','scan_type']))
	get_f_scan.inputs.data_selection = data_selection
	get_f_scan.inputs.measurements_base = measurements_base
	get_f_scan.iterables = ("scan_type", functional_scan_types)

	f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
	f_bru2nii.inputs.actual_size=actual_size

	dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(function=force_dummy_scans,input_names=inspect.getargspec(force_dummy_scans)[0], output_names=['out_file']))
	dummy_scans.inputs.desired_dummy_scans = 10

	bids_filename = pe.Node(name='bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
	bids_filename.inputs.suffix = "MELODIC"
	bids_filename.inputs.extension = ""

	datasink = pe.Node(nio.DataSink(), name='datasink')
	datasink.inputs.base_directory = path.join(measurements_base,workflow_name)
	datasink.inputs.parameterization = False

	melodic = pe.Node(interface=fsl.model.MELODIC(), name="melodic")
	melodic.inputs.tr_sec = tr
	melodic.inputs.report = True
	if components:
		melodic.inputs.dim = int(components)

	workflow_connections = [
		(infosource, get_f_scan, [('subject_session', 'selector')]),
		(get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
		(f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
		(get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
		(infosource, datasink, [(('subject_session',ss_to_path), 'container')]),
		(infosource, bids_filename, [('subject_session', 'subject_session')]),
		(get_f_scan, bids_filename, [('scan_type', 'scan')]),
		(bids_filename, melodic, [('filename', 'out_dir')]),
		(melodic, datasink, [('out_dir', 'func')]),
		]

	#ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
	if structural_scan_types:
		get_s_scan = pe.Node(name='get_s_scan', interface=util.Function(function=get_scan,input_names=inspect.getargspec(get_scan)[0], output_names=['scan_path','scan_type']))
		get_s_scan.inputs.data_selection = data_selection
		get_s_scan.inputs.measurements_base = measurements_base
		get_s_scan.iterables = ("scan_type", structural_scan_types)

		s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
		s_bru2nii.inputs.force_conversion=True
		s_bru2nii.inputs.actual_size=actual_size

		s_bids_filename = pe.Node(name='s_bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
		s_bids_filename.inputs.extension = ""
		s_bids_filename.inputs.scan_prefix = False

		workflow_connections.extend([
			(infosource, get_s_scan, [('subject_session', 'selector')]),
			(infosource, s_bids_filename, [('subject_session', 'subject_session')]),
			(get_s_scan, s_bru2nii, [('scan_path','input_dir')]),
			(get_s_scan, s_bids_filename, [('scan_type', 'scan')]),
			(s_bids_filename, s_bru2nii, [('filename','output_filename')]),
			(s_bru2nii, datasink, [('nii_file', 'anat')]),
			])

	if realign == "space":
		realigner = pe.Node(interface=spm.Realign(), name="realigner")
		realigner.inputs.register_to_mean = True
		workflow_connections.extend([
			(dummy_scans, realigner, [('out_file', 'in_file')]),
			(realigner, melodic, [('out_file', 'in_files')]),
			])

	elif realign == "spacetime":
		realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
		realigner.inputs.slice_times = "asc_alt_2"
		realigner.inputs.tr = tr
		realigner.inputs.slice_info = 3 #3 for coronal slices (2 for horizontal, 1 for sagittal)
		workflow_connections.extend([
			(dummy_scans, realigner, [('out_file', 'in_file')]),
			(realigner, melodic, [('out_file', 'in_files')]),
			])
	
	elif realign == "time":
		realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
		realigner.inputs.time_repetition = tr
		workflow_connections.extend([
			(dummy_scans, realigner, [('out_file', 'in_file')]),
			(realigner, melodic, [('slice_time_corrected_file', 'in_files')]),
			])
	else:
		workflow_connections.extend([
			(dummy_scans, melodic, [('out_file', 'in_files')]),
			])

	workdir_name = workflow_name+"_work"
	workflow = pe.Workflow(name=workdir_name)
	workflow.connect(workflow_connections)
	workflow.base_dir = path.join(measurements_base)
	workflow.write_graph(dotfilename=path.join(workflow.base_dir,workdir_name,"graph.dot"), graph2use="hierarchical", format="png")
	if not loud:
		try:
			workflow.run(plugin="MultiProc",  plugin_args={'n_procs' : n_procs})
		except RuntimeError:
			print("WARNING: Some expected scans have not been found (or another TypeError has occured).")
		for f in listdir(getcwd()):
			if re.search("crash.*?get_s_scan|get_f_scan.*?pklz", f):
				remove(path.join(getcwd(), f))
	else:
		workflow.run(plugin="MultiProc",  plugin_args={'n_procs' : n_procs})
	if not keep_work:
		shutil.rmtree(path.join(workflow.base_dir,workdir_name))
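
# For reference, a hedged example invocation of the entry point above; the
# measurements path and parameter values are assumptions.
diagnose(
    "~/ni_data/measurements",  # hypothetical measurements base
    realign="time",
    tr=1.0,
    components=10,
    workflow_name="diagnostic",
)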
Example #13
	########## 7: Motion Correction
	mcflt = Node(fsl.MCFLIRT(
			in_file = '%s/rest_delvol_bet.nii.gz'%(output_dir),
			cost = 'mutualinfo',
			out_file = '%s/rest_delvol_bet_mc.nii.gz'%(output_dir),
			save_plots = True,
			save_rms = False,
			output_type = 'NIFTI_GZ'
			), name = 'mcflt')
	res3 = mcflt.run()

	########## 8: Slice Time Correction
	tshiftfsl = Node(fsl.SliceTimer(
			in_file = '%s/rest_delvol_bet_mc.nii.gz'%(output_dir),
			out_file = '%s/rest_delvol_bet_mc_tshift.nii.gz'%(output_dir),
			interleaved = interleaved_val,
			slice_direction = slice_direction_val,
			time_repetition = settr_num,
			index_dir = index_dir_val
			), name = 'tshiftfsl')
	res5 = tshiftfsl.run()

	########## 9: Coregistration, Normalization, Smoothing
	data4d = '%s/rest_delvol_bet_mc_tshift.nii.gz'%(output_dir)
	data3d = '%s/mprage_inu_bet'%(output_dir)
	outpath = '%s/reg'%(output_dir)

	settotvol = str(nib.load('%s/rest_delvol_bet_mc_tshift.nii.gz'%(output_dir)).get_fdata().shape[3])
	replacements = {'OUTPUTSUBNUM': outpath,
					'DATA3D': data3d,
					'DATA4D': data4d,
					'SETTR': settr,
Example #14
def create_resting_preproc(name='restpreproc'):
    """Create a "resting" time series preprocessing workflow

    The noise removal is based on Behzadi et al. (2007)

    Parameters
    ----------

    name : name of workflow (default: restpreproc)

    Inputs::

        inputspec.func : functional run (filename or list of filenames)

    Outputs::

        outputspec.noise_mask_file : voxels used for PCA to derive noise components
        outputspec.filtered_file : bandpass filtered and noise-reduced time series

    Example
    -------

    >>> TR = 3.0
    >>> wf = create_resting_preproc()
    >>> wf.inputs.inputspec.func = 'f3.nii'
    >>> wf.inputs.inputspec.num_noise_components = 6
    >>> wf.inputs.inputspec.highpass_sigma = 100/(2*TR)
    >>> wf.inputs.inputspec.lowpass_sigma = 12.5/(2*TR)
    >>> wf.run() # doctest: +SKIP

    """

    restpreproc = pe.Workflow(name=name)

    # Define nodes
    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'func', 'num_noise_components', 'highpass_sigma', 'lowpass_sigma'
    ]),
                        name='inputspec')
    outputnode = pe.Node(interface=util.IdentityInterface(fields=[
        'noise_mask_file', 'filtered_file', 'motion_rms_files',
        'motion_par_file', 'realigned_file', 'mask_file', 'outlier_files',
        'intensity_files', 'outlier_plots'
    ]),
                         name='outputspec')
    slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer')
    realigner = create_realign_flow()

    art_detector = pe.Node(ArtifactDetect(), name='art_detector')
    art_detector.inputs.parameter_source = 'FSL'
    art_detector.inputs.mask_type = 'spm_global'
    art_detector.inputs.global_threshold = .5
    art_detector.inputs.norm_threshold = .6
    art_detector.inputs.use_differences = [True,
                                           True]  ## [Movement, Intensity]
    art_detector.inputs.zintensity_threshold = 3
    art_detector.inputs.intersect_mask = True
    '''Mask smoother node, added by Pablo Polosecki to use EPI mask'''
    mask_smoother = pe.Node(util.Function(input_names=['vol_in'],
                                          output_names=['out_vol'],
                                          function=morph_open_close),
                            name='mask_smoother')
    tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr')
    getthresh = pe.Node(interface=fsl.ImageStats(op_string='-k %s -p 98'),
                        name='getthreshold')
    threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
    ''' Mask conjunction, to limit noisy voxels to those inside brain mask'''
    conj_masker = pe.Node(fsl.BinaryMaths(operation='mul'), name='conj_masker')

    compcor = pe.Node(util.Function(
        input_names=['realigned_file', 'noise_mask_file', 'num_components'],
        output_names=['noise_components'],
        function=extract_noise_components),
                      name='compcorr')
    #   cat_regressors = pe.Node(util.Function(input_names=['file1',
    #                                                       'file2'],
    #                                          output_names=['out_fn'],
    #                                          function=concatetante_reg_files),
    #                            name='cat_regressors')
    remove_noise = pe.Node(fsl.FilterRegressor(filter_all=True),
                           name='remove_noise')
    bandpass_filter = pe.Node(fsl.TemporalFilter(), name='bandpass_filter')

    # Define connections
    restpreproc.connect(inputnode, 'func', slicetimer, 'in_file')
    restpreproc.connect(slicetimer, 'slice_time_corrected_file', realigner,
                        'inputspec.func')
    restpreproc.connect(realigner, 'outputspec.realigned_file', tsnr,
                        'in_file')
    restpreproc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file')
    restpreproc.connect(tsnr, 'stddev_file', getthresh, 'in_file')
    restpreproc.connect(mask_smoother, 'out_vol', getthresh, 'mask_file')
    restpreproc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh')
    restpreproc.connect(realigner, 'outputspec.realigned_file', compcor,
                        'realigned_file')
    restpreproc.connect(inputnode, 'num_noise_components', compcor,
                        'num_components')
    restpreproc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
    # Combining compcor with motion regressors:
    #restpreproc.connect(compcor, 'noise_components',
    #                    cat_regressors, 'file1')
    #restpreproc.connect(realigner, 'outputspec.par_file',
    #                    cat_regressors, 'file2')
    #restpreproc.connect(cat_regressors, 'out_fn',
    #                    remove_noise, 'design_file')
    restpreproc.connect(compcor, 'noise_components', remove_noise,
                        'design_file')
    restpreproc.connect(inputnode, 'highpass_sigma', bandpass_filter,
                        'highpass_sigma')
    restpreproc.connect(inputnode, 'lowpass_sigma', bandpass_filter,
                        'lowpass_sigma')
    restpreproc.connect(remove_noise, 'out_file', bandpass_filter, 'in_file')
    restpreproc.connect(conj_masker, 'out_file', outputnode, 'noise_mask_file')
    restpreproc.connect(bandpass_filter, 'out_file', outputnode,
                        'filtered_file')
    restpreproc.connect(realigner, 'outputspec.rms_files', outputnode,
                        'motion_rms_files')
    restpreproc.connect(realigner, 'outputspec.par_file', outputnode,
                        'motion_par_file')
    restpreproc.connect(realigner, 'outputspec.realigned_file', outputnode,
                        'realigned_file')
    restpreproc.connect(realigner, 'outputspec.realigned_file', art_detector,
                        'realigned_files')
    restpreproc.connect(realigner, 'outputspec.par_file', art_detector,
                        'realignment_parameters')
    restpreproc.connect(art_detector, 'mask_files', mask_smoother, 'vol_in')
    restpreproc.connect(mask_smoother, 'out_vol', outputnode, 'mask_file')
    restpreproc.connect(art_detector, 'outlier_files', outputnode,
                        'outlier_files')
    restpreproc.connect(art_detector, 'intensity_files', outputnode,
                        'intensity_files')
    #restpreproc.connect(art_detector, 'plot_files',
    #                    outputnode, 'outlier_plots')
    restpreproc.connect(mask_smoother, 'out_vol', conj_masker, 'in_file')
    restpreproc.connect(threshold_stddev, 'out_file', conj_masker,
                        'operand_file')
    restpreproc.connect(conj_masker, 'out_file', compcor, 'noise_mask_file')
    return restpreproc
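
# The `extract_noise_components` function wired into the `compcorr` node is
# not shown in this excerpt. Below is a minimal CompCor-style sketch
# consistent with its declared signature (PCA over voxels in the noise mask,
# per Behzadi et al., 2007); the implementation details are assumptions.
def extract_noise_components(realigned_file, noise_mask_file, num_components):
    """Hypothetical sketch: derive noise regressors via PCA over the noise mask."""
    import os
    import nibabel as nib
    import numpy as np

    data = nib.load(realigned_file).get_fdata()
    mask = nib.load(noise_mask_file).get_fdata() > 0
    voxel_timecourses = data[mask].T  # time x voxels
    # demean and variance-normalize each voxel before the SVD
    X = voxel_timecourses - voxel_timecourses.mean(axis=0)
    X = X / (X.std(axis=0) + 1e-8)
    u, _, _ = np.linalg.svd(X, full_matrices=False)
    noise_components = os.path.join(os.getcwd(), 'noise_components.txt')
    np.savetxt(noise_components, u[:, :num_components])
    return noise_components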
Example #15
    def __init__(self, func_source, struct_source, datasink):
        # specify input and output nodes
        self.func_source = func_source
        self.struct_source = struct_source
        self.datasink = datasink

        # specify nodes
        # structual process
        self.bet_struct = pe.Node(interface=fsl.BET(),
                                  name='non_brain_removal_BET_struct')
        self.bet_struct.inputs.output_type = "NIFTI"

        # functional process
        self.slice_timer = pe.Node(interface=fsl.SliceTimer(),
                                   name='time_slice_correction')

        self.mcflirt = pe.Node(interface=fsl.MCFLIRT(),
                               name='motion_correction')
        self.mcflirt.inputs.output_type = "NIFTI"
        self.mcflirt.inputs.mean_vol = True

        self.fslsplit = pe.Node(interface=fsl.Split(), name='fslsplit')
        self.fslsplit.inputs.dimension = 't'
        self.fslsplit.inputs.output_type = "NIFTI"

        self.fslmerge = pe.Node(interface=fsl.Merge(), name='fslmerge')
        self.fslmerge.inputs.dimension = 't'
        self.fslmerge.inputs.output_type = "NIFTI"

        self.bet_mean = pe.Node(interface=fsl.BET(),
                                name='non_brain_removal_BET_mean')
        self.bet_mean.inputs.output_type = "NIFTI"

        # helper function(s)
        def bet_each(in_files):
            '''
            @param in_files: list of image files
            @return out_files: list of image files after applied fsl.BET on it
            '''
            from nipype.interfaces import fsl
            import nipype.pipeline.engine as pe

            out_files = list()
            step_no = 0
            for file_ in in_files:
                bet = pe.Node(interface=fsl.BET(),
                              name='BET_for_step_{}'.format(step_no))
                bet.inputs.in_file = file_
                bet.inputs.out_file = file_[:len(file_) - 4] + '_bet.nii'
                bet.inputs.output_type = "NIFTI"

                bet.run()
                out_files.append(bet.inputs.out_file)

                step_no += 1
            return out_files

        # bet_func returns a list of NIFTI files
        self.bet_func = pe.Node(interface=Function(input_names=['in_files'],
                                                   output_names=['out_files'],
                                                   function=bet_each),
                                name='non_brain_removal_BET_func')

        self.coregister = pe.Node(interface=spm.Coregister(),
                                  name="coregister")
        self.coregister.inputs.jobtype = 'estimate'

        self.segment = pe.Node(interface=spm.Segment(), name="segment")
        self.segment.inputs.affine_regularization = 'mni'

        self.normalize_func = pe.Node(interface=spm.Normalize(),
                                      name="normalize_func")
        self.normalize_func.inputs.jobtype = "write"

        # self.fourier = pe.Node(interface=afni.Fourier(), name='temporal_filtering')
        # self.fourier.inputs.highpass = 0.01
        # self.fourier.inputs.lowpass = 0.1

        self.smooth = pe.Node(interface=spm.Smooth(), name="smooth")
        self.smooth.inputs.fwhm = [8, 8, 8]

        # specify workflow instance
        self.workflow = pe.Workflow(name='preprocessing_workflow')

        # connect nodes
        self.workflow.connect([
            (self.struct_source, self.bet_struct, [('outfiles', 'in_file')]),
            (self.func_source, self.slice_timer, [('outfiles', 'in_file')]),
            (self.slice_timer, self.mcflirt, [('slice_time_corrected_file',
                                               'in_file')]),
            (self.mcflirt, self.bet_mean, [('mean_img', 'in_file')]),
            (self.mcflirt, self.fslsplit, [('out_file', 'in_file')]),
            (self.fslsplit, self.bet_func, [('out_files', 'in_files')]),
            (self.bet_func, self.fslmerge, [('out_files', 'in_files')
                                            ]),  # intersect
            (self.bet_struct, self.coregister, [('out_file', 'source')]),
            (self.bet_mean, self.coregister, [('out_file', 'target')]),
            (self.coregister, self.segment, [('coregistered_source', 'data')]),
            (self.segment, self.normalize_func, [('transformation_mat',
                                                  'parameter_file')]),
            (self.fslmerge, self.normalize_func, [('merged_file',
                                                   'apply_to_files')]),
            (self.normalize_func, self.smooth, [('normalized_files',
                                                 'in_files')]),
            (self.coregister, self.datasink, [('coregistered_source',
                                               'registered_file')]),
            (self.normalize_func, self.datasink, [('normalized_files',
                                                   'before_smooth')]),
            (self.smooth, self.datasink, [('smoothed_files', 'final_out')])
        ])
Example #16
def create_resting_workflow(name="resting_state"):
    """Return a preprocessing workflow.

    Input spec node takes these four inputs:
        - Timeseries (image files)
        - FWHM of the smoothing kernel (in mm)
        - FNIRT warp coefficient image
        - Freesurfer Subject ID
 
    Output node returns these files:
        - Smoothed timeseries (fully preprocessed and smoothed timeseries in native space)
        - Unsmoothed timeseries (identical steps except no smoothing in the volume)
        - Example func (target volume for MCFLIRT realignment)
        - Mean func (unsmoothed mean functional)
        - Functional mask (binary dilated brainmask in functional space)
        - Realignment parameters (text files from MCFLIRT)
        - Outlier Files (outlier text files from ART)
        - Plotted estimated rotations from MCFLIRT
        - Plotted estimated translations from MCFLIRT
        - Plotted estimated relative and absolute displacement from MCFLIRT
        - Plotted global mean intensity value
        - Sliced png of the example func (MCFLIRT target)
        - Sliced png of the unsmoothed mean functional volume
        - Tkregister-style affine matrix
        - FSL-style affine matrix
        - Sliced png summarizing the functional to anatomical transform
        - Optimization cost file quantitatively summarizing the transformation

    """
    resting = pe.Workflow(name=name)

    # Define the inputs for the preprocessing workflow
    inputnode = pe.Node(util.IdentityInterface(
        fields=["timeseries", "subject_id", "warpfield", "smooth_fwhm"]),
                        name="inputspec")

    # Remove the first six frames (t_min=6) to account for T1 stabilization
    trimmer = pe.MapNode(fsl.ExtractROI(t_min=6),
                         iterfield=["in_file"],
                         name="trimmer")

    # Convert functional images to float representation
    img2float = pe.MapNode(fsl.ChangeDataType(output_datatype="float"),
                           iterfield=["in_file"],
                           name="img2float")

    # Perform slice-timing correction
    slicetime = pe.MapNode(fsl.SliceTimer(interleaved=True, time_repetition=6),
                           iterfield=["in_file"],
                           name="slicetime")

    # Motion correct
    realign = create_realignment_workflow()

    skullstrip = create_skullstrip_workflow()

    art = create_art_workflow(make_movie=False)

    func2anat = create_bbregister_workflow()

    confounds = create_confound_removal_workflow()

    susan = create_susan_smooth()

    normalize = create_normalize_workflow()

    tosurf = create_surface_projection_workflow()

    rename = pe.MapNode(util.Rename(format_string="timeseries", keep_ext=True),
                        iterfield=["in_file"],
                        name="rename")

    resting.connect([
        (inputnode, trimmer, [("timeseries", "in_file"),
                              (("timeseries", get_trimmed_length), "t_size")]),
        (trimmer, img2float, [("roi_file", "in_file")]),
        (img2float, slicetime, [("out_file", "in_file")]),
        (slicetime, realign, [("slice_time_corrected_file",
                               "inputs.timeseries")]),
        (realign, skullstrip, [("outputs.timeseries", "inputs.timeseries")]),
        (realign, art, [("outputs.realign_parameters",
                         "inputs.realignment_parameters")]),
        (img2float, art, [("out_file", "inputs.raw_timeseries")]),
        (skullstrip, art, [("outputs.timeseries",
                            "inputs.realigned_timeseries"),
                           ("outputs.mask_file", "inputs.mask_file")]),
        (skullstrip, func2anat, [("outputs.mean_func", "inputs.source_file")]),
        (inputnode, func2anat, [("subject_id", "inputs.subject_id")]),
        (inputnode, confounds, [("subject_id", "inputs.subject_id")]),
        (skullstrip, confounds, [("outputs.timeseries", "inputs.timeseries")]),
        (realign, confounds, [("outputs.realign_parameters",
                               "inputs.motion_parameters")]),
        (func2anat, confounds, [("outputs.tkreg_mat", "inputs.reg_file")]),
        (confounds, susan, [("outputs.timeseries", "inputnode.in_files")]),
        (skullstrip, susan, [("outputs.mask_file", "inputnode.mask_file")]),
        (inputnode, susan, [("smooth_fwhm", "inputnode.fwhm")]),
        (susan, rename, [("outputnode.smoothed_files", "in_file")]),
        (susan, normalize, [("outputnode.smoothed_files", "inputs.timeseries")
                            ]),
        (inputnode, normalize, [("warpfield", "inputs.warpfield")]),
        (func2anat, normalize, [("outputs.flirt_mat", "inputs.flirt_affine")]),
        (confounds, tosurf, [("outputs.timeseries", "inputs.timeseries")]),
        (func2anat, tosurf, [("outputs.tkreg_mat", "inputs.tkreg_affine")]),
        (inputnode, tosurf, [("subject_id", "inputs.subject_id"),
                             ("smooth_fwhm", "inputs.smooth_fwhm")]),
    ])

    # Define the outputs of the top-level workflow
    output_fields = [
        "volume_timeseries", "surface_timeseries", "native_timeseries",
        "example_func", "mean_func", "functional_mask", "realign_parameters",
        "mean_func_slices", "intensity_plot", "outlier_volumes",
        "realign_report", "flirt_affine", "tkreg_affine", "coreg_report",
        "confound_sources"
    ]

    outputnode = pe.Node(util.IdentityInterface(fields=output_fields),
                         name="outputspec")

    resting.connect([
        (realign, outputnode, [("outputs.realign_report", "realign_report"),
                               ("outputs.realign_parameters",
                                "realign_parameters"),
                               ("outputs.example_func", "example_func")]),
        (skullstrip, outputnode, [("outputs.mean_func", "mean_func"),
                                  ("outputs.mask_file", "functional_mask"),
                                  ("outputs.report_png", "mean_func_slices")]),
        (art, outputnode, [("outputs.intensity_plot", "intensity_plot"),
                           ("outputs.outlier_volumes", "outlier_volumes")]),
        (func2anat, outputnode, [("outputs.tkreg_mat", "tkreg_affine"),
                                 ("outputs.flirt_mat", "flirt_affine"),
                                 ("outputs.report", "coreg_report")]),
        (confounds, outputnode, [("outputs.confound_sources",
                                  "confound_sources")]),
        (tosurf, outputnode, [("outputs.timeseries", "surface_timeseries")]),
        (normalize, outputnode, [("outputs.timeseries", "volume_timeseries")]),
        (rename, outputnode, [("out_file", "native_timeseries")]),
    ])

    return resting, inputnode, outputnode
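
# `get_trimmed_length`, used as a connection modifier for the `trimmer` node
# above, is not defined in this excerpt. A plausible sketch, assuming it
# returns the number of volumes remaining after the six trimmed frames and
# that all runs have equal length:
def get_trimmed_length(timeseries):
    """Hypothetical helper: frames remaining after trimming t_min=6."""
    import nibabel as nib
    if isinstance(timeseries, list):
        timeseries = timeseries[0]  # assume equal run lengths
    return nib.load(timeseries).shape[-1] - 6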
Example #17
#funcpreproc Nodes

func_calc = pe.MapNode(interface=e_afni.Threedcalc(),
                       name='func_calc',
                       iterfield=["infile_a", "stop_idx"])
func_calc.inputs.start_idx = 0
func_calc.inputs.expr = '\'a\''
func_calc.inputs.out_file = 'rest_dr.nii.gz'

func_refit = pe.MapNode(interface=e_afni.Threedrefit(),
                        name='func_refit',
                        iterfield=["in_file"])
func_refit.inputs.deoblique = True

func_slice_time_correction = pe.MapNode(
    interface=fsl.SliceTimer(),
    name='func_slice_time_correction',
    iterfield=["in_file", "time_repetition"])

func_slice_time_correction.inputs.interleaved = True

func_reorient = pe.MapNode(interface=e_afni.Threedresample(),
                           name='func_reorient',
                           iterfield=["in_file"])
func_reorient.inputs.orientation = 'RPI'

func_tstat = pe.MapNode(interface=e_afni.ThreedTstat(),
                        name='func_tstat',
                        iterfield=["in_file"])
func_tstat.inputs.args = "-mean"
func_tstat.inputs.out_file = 'rest_ro_mean.nii.gz'
Example #18
def mod_realign(node, in_file, tr, do_slicetime, sliceorder, parameters={}):
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.spm as spm
    import nipype.interfaces.nipy as nipy
    import os
    parameter_source = "FSL"
    keys = parameters.keys()
    if node == "nipy":
        realign = nipy.FmriRealign4d()
        realign.inputs.in_file = in_file
        realign.inputs.tr = tr
        if "loops" in keys:
            realign.inputs.loops = parameters["loops"]
        if "speedup" in keys:
            realign.inputs.speedup = parameters["speedup"]
        if "between_loops" in keys:
            realign.inputs.between_loops = parameters["between_loops"]
        if do_slicetime:
            realign.inputs.slice_order = sliceorder
            realign.inputs.time_interp = True

        res = realign.run()
        out_file = res.outputs.out_file
        par_file = res.outputs.par_file

    elif node == "fsl":
        if not isinstance(in_file, list):
            in_file = [in_file]
        out_file = []
        par_file = []
        # get the first volume of first run as ref file
        if not do_slicetime:
            extract = fsl.ExtractROI()
            extract.inputs.t_min = 0
            extract.inputs.t_size = 1
            extract.inputs.in_file = in_file[0]
            ref_vol = extract.run().outputs.roi_file

        for idx, file in enumerate(in_file):
            if do_slicetime:
                slicetime = fsl.SliceTimer()
                slicetime.inputs.in_file = file
                sliceorder_file = os.path.abspath('FSL_custom_order.txt')
                with open(sliceorder_file, 'w') as custom_order_fp:
                    for t in sliceorder:
                        custom_order_fp.write('%d\n' % (t + 1))
                slicetime.inputs.custom_order = sliceorder_file
                slicetime.inputs.time_repetition = tr
                res = slicetime.run()
                file_to_realign = res.outputs.slice_time_corrected_file
                if not idx:
                    extract = fsl.ExtractROI()
                    extract.inputs.t_min = 0
                    extract.inputs.t_size = 1
                    extract.inputs.in_file = file_to_realign
                    ref_vol = extract.run().outputs.roi_file
            else:
                file_to_realign = file
            realign = fsl.MCFLIRT(interpolation='spline', ref_file=ref_vol)
            realign.inputs.save_plots = True
            realign.inputs.mean_vol = True
            realign.inputs.in_file = file_to_realign
            realign.inputs.out_file = 'fsl_corr_' + \
                                      os.path.split(file_to_realign)[1]
            Realign_res = realign.run()
            out_file.append(Realign_res.outputs.out_file)
            par_file.append(Realign_res.outputs.par_file)

    elif node == 'spm':
        import numpy as np
        import nibabel as nib
        import nipype.interfaces.freesurfer as fs
        if not isinstance(in_file, list):
            in_file = [in_file]
        new_in_file = []
        for f in in_file:
            if f.endswith('.nii.gz'):
                convert = fs.MRIConvert()
                convert.inputs.in_file = f
                convert.inputs.out_type = 'nii'
                convert.inputs.in_type = 'niigz'
                f = convert.run().outputs.out_file
                new_in_file.append(f)
            else:
                new_in_file.append(f)
        if do_slicetime:
            img = nib.load(new_in_file[0])
            num_slices = img.shape[2]
            st = spm.SliceTiming()
            st.inputs.in_files = new_in_file
            st.inputs.num_slices = num_slices
            st.inputs.time_repetition = tr
            st.inputs.time_acquisition = tr - tr / num_slices
            st.inputs.slice_order = (np.asarray(sliceorder) +
                                     1).astype(int).tolist()
            st.inputs.ref_slice = 1
            res_st = st.run()
            file_to_realign = res_st.outputs.timecorrected_files
        else:
            file_to_realign = new_in_file
        par_file = []
        realign = spm.Realign()
        realign.inputs.in_files = file_to_realign
        #realign.inputs.out_prefix = 'spm_corr_'
        res = realign.run()
        parameters = res.outputs.realignment_parameters
        if not isinstance(parameters, list):
            parameters = [parameters]
        par_file = parameters
        parameter_source = 'SPM'
        fsl.ImageMaths(in_file=res.outputs.realigned_files,
                       out_file=res.outputs.realigned_files,
                       op_string='-nan').run()
        out_file = res.outputs.realigned_files
    elif node == 'afni':
        import nipype.interfaces.afni as afni
        import nibabel as nib
        import numpy as np
        if not isinstance(in_file, list):
            in_file = [in_file]
        img = nib.load(in_file[0])
        Nz = img.shape[2]
        out_file = []
        par_file = []
        # get the first volume of first run as ref file
        if not do_slicetime:
            extract = fsl.ExtractROI()
            extract.inputs.t_min = 0
            extract.inputs.t_size = 1
            extract.inputs.in_file = in_file[0]
            ref_vol = extract.run().outputs.roi_file

        for idx, file in enumerate(in_file):
            if do_slicetime:
                slicetime = afni.TShift()
                slicetime.inputs.in_file = file
                custom_order = open(
                    os.path.abspath('afni_custom_order_file.txt'), 'w')
                tpattern = []
                for i in range(len(sliceorder)):
                    tpattern.append((i * tr / float(Nz), sliceorder[i]))
                tpattern.sort(key=lambda x: x[1])
                for i, t in enumerate(tpattern):
                    print('%f' % (t[0]))
                    custom_order.write('%f\n' % (t[0]))
                custom_order.close()

                slicetime.inputs.args = '-tpattern @%s' % os.path.abspath(
                    'afni_custom_order_file.txt')
                slicetime.inputs.tr = str(tr) + 's'
                slicetime.inputs.outputtype = 'NIFTI_GZ'
                res = slicetime.run()
                file_to_realign = res.outputs.out_file

                if not idx:
                    extract = fsl.ExtractROI()
                    extract.inputs.t_min = 0
                    extract.inputs.t_size = 1
                    extract.inputs.in_file = file_to_realign
                    ref_vol = extract.run().outputs.roi_file

            else:
                file_to_realign = file

            realign = afni.Volreg()
            realign.inputs.in_file = file_to_realign
            realign.inputs.out_file = "afni_corr_" + os.path.split(
                file_to_realign)[1]
            realign.inputs.oned_file = "afni_realignment_parameters.par"
            realign.inputs.basefile = ref_vol
            Realign_res = realign.run()
            out_file.append(Realign_res.outputs.out_file)

            parameters = Realign_res.outputs.oned_file
            if not isinstance(parameters, list):
                parameters = [parameters]
            for i, p in enumerate(parameters):
                foo = np.genfromtxt(p)
                boo = foo[:, [1, 2, 0, 4, 5, 3]]
                boo[:, :3] = boo[:, :3] * np.pi / 180
                np.savetxt(os.path.abspath('realignment_parameters_%d.par' %
                                           i),
                           boo,
                           delimiter='\t')
                par_file.append(
                    os.path.abspath('realignment_parameters_%d.par' % i))

            #par_file.append(Realign_res.outputs.oned_file)

    return out_file, par_file, parameter_source
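
# An example call for the FSL branch of the function above; the input file
# name and slice count are hypothetical. The slice order encodes a 30-slice
# interleaved acquisition (evens first, then odds).
out_files, par_files, source = mod_realign(
    node="fsl",
    in_file="rest.nii.gz",  # hypothetical input
    tr=2.0,
    do_slicetime=True,
    sliceorder=list(range(0, 30, 2)) + list(range(1, 30, 2)),
)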
Example #19
    print("\n    Motion outliers")
    motion_out.inputs.in_file = opj(motion_folder, subj, '4D_static.nii.gz')
    motion_out.inputs.dummy = 4
    motion_out.inputs.metric = 'fd'
    motion_out.inputs.out_file = opj(motion_folder, subj, 'outfile.txt')
    motion_out.inputs.output_type = 'NIFTI_GZ'
    motion_out.inputs.out_metric_plot = opj(motion_folder, subj,
                                            'metricplot.png')
    motion_out.inputs.out_metric_values = opj(motion_folder, subj,
                                              'metrics.txt')
    print(motion_out.cmdline)
    motion_out.run()


# 2.2 Slice timing correction with FSL
slice_timing = fsl.SliceTimer()


def run_slice_timing(subj, motion_folder, slicet_folder):
    print("\n    2.Slice timing correction")
    if not os.path.exists(opj(slicet_folder, subj)):
        os.mkdir(opj(slicet_folder, subj))
    slice_timing.inputs.in_file = opj(motion_folder, subj, '4D_static.nii.gz')
    slice_timing.inputs.out_file = opj(slicet_folder, subj, '4D_st.nii.gz')
    slice_timing.inputs.output_type = 'NIFTI_GZ'
    slice_timing.inputs.time_repetition = 2
    print(slice_timing.cmdline)
    slice_timing.run()
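
# A hypothetical driver loop for the helper above; `motion_folder` and
# `slicet_folder` are assumed to be defined earlier in the script.
for subj in ['sub-01', 'sub-02']:  # hypothetical subject IDs
    run_slice_timing(subj, motion_folder, slicet_folder)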


# 2.3 Brain extraction of the fMRIs with FSL-BET
Example #20
def generic(
    bids_base,
    template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    functional_registration_method="composite",
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask="",
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='generic',
    params={},
    phase_dictionary=GENERIC_PHASES,
):
    '''
	Generic preprocessing and registration workflow for small animal data in BIDS format.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	autorotate : bool, optional
		Whether to use a multi-rotation-state transformation start.
		This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	functional_registration_method : {'composite','functional','structural'}, optional
		How to register the functional scan to the template.
		Values mean the following: 'composite' that it will be registered to the structural scan which will in turn be registered to the template, 'functional' that it will be registered directly, 'structural' that it will be registered exactly as the structural scan.
	keep_work : bool, str
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified, a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overriden by `n_jobs`).
	out_base : str, optional
		Output base directory --- inside which a directory named `workflow_name`(as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment of slices. "time" (FSL.SliceTimer) is the default, since it works safely. Use the others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow, if the list is empty there is no whitelist and all sessions will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
    )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    s_biascorrect, f_biascorrect = real_size_nodes()

    if structural_scan_types.any():
        s_data_selection = deepcopy(data_selection)
        for match in structural_match.keys():
            s_data_selection = s_data_selection.loc[
                s_data_selection[match].isin(structural_match[match])]

        get_s_scan = pe.Node(
            name='get_s_scan',
            interface=util.Function(
                function=get_bids_scan,
                input_names=inspect.getargspec(get_bids_scan)[0],
                output_names=[
                    'scan_path', 'scan_type', 'task', 'nii_path', 'nii_name',
                    'events_name', 'subject_session', 'metadata_filename',
                    'dict_slice'
                ]))
        get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = s_data_selection
        get_s_scan.inputs.bids_base = bids_base

        s_register, s_warp, f_register, f_warp = generic_registration(
            template,
            structural_mask=registration_mask,
            phase_dictionary=phase_dictionary,
        )
        #TODO: incl. in func registration
        if autorotate:
            s_rotated = autorotate(template)
            workflow_connections.extend([
                (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                (s_rotated, s_register, [('out_file', 'moving_image')]),
            ])
        else:
            workflow_connections.extend([
                (s_biascorrect, s_register, [('output_image', 'moving_image')
                                             ]),
                (s_register, s_warp, [('composite_transform', 'transforms')]),
                (get_s_scan, s_warp, [('nii_path', 'input_image')]),
                (s_warp, datasink, [('output_image', 'anat')]),
            ])

        workflow_connections.extend([
            (get_f_scan, get_s_scan, [('subject_session', 'selector')]),
            (get_s_scan, s_warp, [('nii_name', 'output_image')]),
            (get_s_scan, s_biascorrect, [('nii_path', 'input_image')]),
        ])

    if functional_registration_method == "structural":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="structural"` requires there to be a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="composite"` requires there to be a structural scan type.'
            )
        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (s_register, merge, [('composite_transform', 'in1')]),
            (f_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

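    # Optional in-plane smoothing to the requested FWHM before sinking; note
    # that the blur node also takes its output name from `nii_name`.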
    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:
        workflow_connections.extend([
            (get_f_scan, f_warp, [('nii_name', 'output_image')]),
            (f_warp, datasink, [('output_image', 'func')]),
        ])

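    # Execution configuration: always write crash dumps, but enable verbose
    # logging only in debug mode.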
    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    # this sets the name of the workdir; the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'Could not write the DOT file for visualization (this requires the `dot` executable from the graphviz package). This is non-critical for processing, but you may want to get it fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top-level workdir (`{}`) since it is a symlink; deleting only its contents instead.'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise
Example #21
def bruker(
    measurements_base,
    functional_scan_types=[],
    structural_scan_types=[],
    sessions=[],
    subjects=[],
    measurements=[],
    exclude_subjects=[],
    exclude_measurements=[],
    actual_size=False,
    functional_blur_xy=False,
    functional_registration_method="structural",
    highpass_sigma=225,
    lowpass_sigma=None,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    realign="time",
    registration_mask=False,
    template="/home/chymera/ni_data/templates/ds_QBI_chr.nii.gz",
    tr=1,
    very_nasty_bruker_delay_hack=False,
    workflow_name="generic",
    keep_work=False,
    autorotate=False,
    strict=False,
    verbose=False,
):
    '''
    realign: {"space", "time", "spacetime", ""}
        Parameter dictating slice-timing correction and realignment of slices.
        "time" (fsl.SliceTimer) is the default, since it works safely. Use the
        others only with caution!
    '''

    measurements_base = path.abspath(path.expanduser(measurements_base))

    #select all functional/structural scan types unless specified
    if not functional_scan_types or not structural_scan_types:
        scan_classification = pd.read_csv(scan_classification_file_path)
        if not functional_scan_types:
            functional_scan_types = list(
                scan_classification[(scan_classification["categories"] ==
                                     "functional")]["scan_type"])
        if not structural_scan_types:
            structural_scan_types = list(
                scan_classification[(scan_classification["categories"] ==
                                     "structural")]["scan_type"])

    #hack to allow structural scan type disabling:
    if structural_scan_types == -1:
        structural_scan_types = []

    # define the measurement directories to be processed, populating the list either with the given `measurements`, or with an intelligent selection
    scan_types = deepcopy(functional_scan_types)
    scan_types.extend(structural_scan_types)
    data_selection = get_data_selection(
        measurements_base,
        sessions,
        scan_types=scan_types,
        subjects=subjects,
        exclude_subjects=exclude_subjects,
        measurements=measurements,
        exclude_measurements=exclude_measurements)
    if not subjects:
        subjects = set(list(data_selection["subject"]))
    if not sessions:
        sessions = set(list(data_selection["session"]))

    # we currently only support one structural scan type per session
    if functional_registration_method in (
            "structural", "composite") and structural_scan_types:
        structural_scan_types = [structural_scan_types[0]]

    # we start to define nipype workflow elements (nodes, connections, meta)
    subjects_sessions = data_selection[["subject", "session"
                                        ]].drop_duplicates().values.tolist()
    infosource = pe.Node(
        interface=util.IdentityInterface(fields=['subject_session']),
        name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]
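    # The `iterables` above fan the workflow out into one parallel branch per
    # (subject, session) pair.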

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_scan,
                             input_names=inspect.getargspec(get_scan)[0],
                             output_names=['scan_path', 'scan_type']))
    if not strict:
        get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

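    # Enforce a consistent number of leading (dummy) volumes across scans.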
    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

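    # NB: fslmaths' temporal filter (`-bptf`) expects Gaussian sigmas in
    # volumes rather than seconds; with no explicit low-pass, a sigma of one
    # TR is used.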
    bandpass = pe.Node(interface=fsl.maths.TemporalFilter(), name="bandpass")
    bandpass.inputs.highpass_sigma = highpass_sigma
    if lowpass_sigma:
        bandpass.inputs.lowpass_sigma = lowpass_sigma
    else:
        bandpass.inputs.lowpass_sigma = tr

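    # Filename-construction nodes, building subject/session/scan-style names
    # which are wired into the corresponding sink inputs below.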
    bids_filename = pe.Node(
        name='bids_filename',
        interface=util.Function(
            function=sss_filename,
            input_names=inspect.getargspec(sss_filename)[0],
            output_names=['filename']))

    bids_stim_filename = pe.Node(
        name='bids_stim_filename',
        interface=util.Function(
            function=sss_filename,
            input_names=inspect.getargspec(sss_filename)[0],
            output_names=['filename']))
    bids_stim_filename.inputs.suffix = "events"
    bids_stim_filename.inputs.extension = ".tsv"

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_events_file,
            input_names=inspect.getargspec(write_events_file)[0],
            output_names=['out_file']))
    events_file.inputs.dummy_scans_ms = DUMMY_SCANS * tr * 1000
    events_file.inputs.stim_protocol_dictionary = STIM_PROTOCOL_DICTIONARY
    events_file.inputs.very_nasty_bruker_delay_hack = very_nasty_bruker_delay_hack
    if not (strict or verbose):
        events_file.inputs.ignore_exception = True

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base,
                                               "preprocessing", workflow_name)
    datasink.inputs.parameterization = False
    if not (strict or verbose):
        datasink.inputs.ignore_exception = True

    workflow_connections = [
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (infosource, bids_stim_filename, [('subject_session',
                                           'subject_session')]),
        (get_f_scan, bids_stim_filename, [('scan_type', 'scan')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (get_f_scan, events_file, [('scan_type', 'scan_type'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (bids_stim_filename, events_file, [('filename', 'out_file')]),
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (infosource, bids_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_filename, [('scan_type', 'scan')]),
        (bids_filename, bandpass, [('filename', 'out_file')]),
        (bandpass, datasink, [('out_file', 'func')]),
    ]

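    # Optional realignment and/or slice-timing correction; each interface
    # exposes the corrected series under a different output field, which the
    # registration branches below account for.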
    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if actual_size:
        s_biascorrect, f_biascorrect = real_size_nodes()
    else:
        s_biascorrect, f_biascorrect = inflated_size_nodes()

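    # Structural pipeline: scan retrieval, Bruker-to-NIfTI conversion, bias
    # correction, optional skull stripping, and registration to the template.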
    if structural_scan_types:
        get_s_scan = pe.Node(name='get_s_scan',
                             interface=util.Function(
                                 function=get_scan,
                                 input_names=inspect.getargspec(get_scan)[0],
                                 output_names=['scan_path', 'scan_type']))
        if not strict:
            get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.measurements_base = measurements_base
        get_s_scan.iterables = ("scan_type", structural_scan_types)

        s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
        s_bru2nii.inputs.force_conversion = True
        s_bru2nii.inputs.actual_size = actual_size

        s_bids_filename = pe.Node(
            name='s_bids_filename',
            interface=util.Function(
                function=sss_filename,
                input_names=inspect.getargspec(sss_filename)[0],
                output_names=['filename']))
        s_bids_filename.inputs.scan_prefix = False

        # Define the rotation node before the registration connections below
        # reference it.
        if autorotate:
            s_rotated = autorotate(template)

        if actual_size:
            s_register, s_warp, _, _ = DSURQEc_structural_registration(
                template, registration_mask)
            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_biascorrect, s_register, [('output_image',
                                                  'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])
        else:
            s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(),
                                        name="s_reg_biascorrect")
            s_reg_biascorrect.inputs.dimension = 3
            s_reg_biascorrect.inputs.bspline_fitting_distance = 95
            s_reg_biascorrect.inputs.shrink_factor = 2
            s_reg_biascorrect.inputs.n_iterations = [500, 500, 500, 500]
            s_reg_biascorrect.inputs.convergence_threshold = 1e-14

            s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
            s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

            s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
            s_BET.inputs.mask = True
            s_BET.inputs.frac = 0.3
            s_BET.inputs.robust = True

            s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
            s_register, s_warp, f_warp = structural_registration(template)

            workflow_connections.extend([
                (s_bru2nii, s_reg_biascorrect, [('nii_file', 'input_image')]),
                (s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
                (s_cutoff, s_BET, [('out_file', 'in_file')]),
                (s_biascorrect, s_mask, [('output_image', 'in_file')]),
                (s_BET, s_mask, [('mask_file', 'mask_file')]),
            ])

            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_mask, s_rotated, [('out_file', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_mask, s_register, [('out_file', 'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])

        workflow_connections.extend([
            (infosource, get_s_scan, [('subject_session', 'selector')]),
            (infosource, s_bids_filename, [('subject_session',
                                            'subject_session')]),
            (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
            (get_s_scan, s_bids_filename, [('scan_type', 'scan')]),
            (s_bids_filename, s_warp, [('filename', 'output_image')]),
            (s_bru2nii, s_biascorrect, [('nii_file', 'input_image')]),
        ])

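    # Functional-to-template registration: reuse the structural transform
    # ("structural"), chain the functional and structural transforms
    # ("composite"), or register the functional scan directly ("functional").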
    if functional_registration_method == "structural":
        if not structural_scan_types:
            raise ValueError(
                'The option `functional_registration_method="structural"` requires a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_registration_method == "composite":
        if not structural_scan_types:
            raise ValueError(
                'The option `functional_registration_method="composite"` requires a structural scan type.'
            )
        _, _, f_register, f_warp = DSURQEc_structural_registration(
            template, registration_mask)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (f_register, merge, [('composite_transform', 'in1')]),
            (s_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

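    # Post-warp intensity handling: optionally blur in-plane and invert voxel
    # values (for negative-contrast agents) before temporal filtering.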
    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    if functional_blur_xy and negative_contrast_agent:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    elif functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, bandpass, [('out_file', 'in_file')]),
        ])
    elif negative_contrast_agent:
        workflow_connections.extend([
            (f_warp, invert, [(('output_image', fslmaths_invert_values),
                               'op_string')]),
            (f_warp, invert, [('output_image', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    else:
        workflow_connections.extend([
            (f_warp, bandpass, [('output_image', 'in_file')]),
        ])

    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base, "preprocessing")
    workflow.config = {
        "execution": {
            "crashdump_dir":
            path.join(measurements_base, "preprocessing/crashdump")
        }
    }
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
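
# A minimal usage sketch for the `bruker` workflow above. The measurements
# path and scan type label are hypothetical placeholders; every keyword shown
# is a parameter defined in the signature.
if __name__ == '__main__':
    bruker(
        '~/ni_data/my_bruker_data',  # hypothetical measurements_base
        functional_scan_types=['EPI_CBV'],  # hypothetical scan type label
        realign="time",  # the default and safest realignment option
        workflow_name="example_preprocessing",
        n_procs=4,
    )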