Example 1
def create_within_run_align_workflow(name='within_run_align',
                                     slice_timing_correction=True):

    within_run_align = pe.Workflow(name=name)

    inputs = pe.Node(interface=util.IdentityInterface(fields=['in_file']),
                     name='inputs')

    if slice_timing_correction:
        get_meta = pe.Node(interface=ds.LookupMeta(), name='get_meta')
        get_meta.inputs.meta_keys = {
            'RepetitionTime': 'tr',
            'CsaImage.MosaicRefAcqTimes': 'slice_times'
        }
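        # LookupMeta (nipype's dcmstack interface, aliased `ds` here) maps the
        # listed metadata keys onto node outputs named 'tr' and 'slice_times'.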

        select_slice_times = pe.Node(
            interface=util.Select(), name='select_slice_times'
        )  # FIXME: sometimes required, depending on the DICOM
        select_slice_times.inputs.index = [0]

    space_time_align = pe.Node(interface=nipy.SpaceTimeRealigner(),
                               name='space_time_align')

    if slice_timing_correction:
        space_time_align.inputs.slice_info = 2

    outputs = pe.Node(interface=util.IdentityInterface(fields=['out_file']),
                      name='outputs')

    within_run_align.connect(inputs, 'in_file', space_time_align, 'in_file')

    if slice_timing_correction:
        within_run_align.connect(inputs, 'in_file', get_meta, 'in_file')
        within_run_align.connect(get_meta, 'tr', space_time_align, 'tr')
        within_run_align.connect(get_meta, 'slice_times', select_slice_times,
                                 'inlist')  #see above
        within_run_align.connect(select_slice_times, 'out', space_time_align,
                                 'slice_times')
        #within_run_align.connect(get_meta, 'slice_times', space_time_align, 'slice_times')

    within_run_align.connect(space_time_align, 'out_file', outputs, 'out_file')

    return within_run_align
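
A minimal usage sketch for the workflow above (the input path is a placeholder; the imports the snippet relies on, namely nipype's `pe`, `util`, `nipy`, and the dcmstack interface aliased as `ds`, are assumed from its surrounding module):

wra = create_within_run_align_workflow(slice_timing_correction=True)
# The identity node is named 'inputs', hence the repeated attribute access.
wra.inputs.inputs.in_file = 'func_run1.nii.gz'  # hypothetical input file
wra.run()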
Example 2
def generic(
    bids_base,
    template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    functional_registration_method="composite",
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask="",
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='generic',
    params={},
    phase_dictionary=GENERIC_PHASES,
    enforce_dummy_scans=DUMMY_SCANS,
    masking_config_path='',
    exclude={},
):
    '''
	Generic preprocessing and registration workflow for small animal data in BIDS format.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	autorotate : bool, optional
		Whether to use a multi-rotation-state transformation start.
		This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	exclude : dict, optional
		A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
		If this is specified, matching entries will be excluded from the analysis.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if the dictionary is empty, no whitelist is applied and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	functional_registration_method : {'composite','functional','structural'}, optional
		How to register the functional scan to the template.
		'composite' means the functional scan is registered to the structural scan, which is in turn registered to the template; 'functional' means it is registered directly to the template; 'structural' means it is registered exactly as the structural scan is.
	keep_work : bool, optional
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overridden by `n_jobs`).
	out_base : str, optional
		Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Dictates the slice-timing correction and realignment of slices. "time" (FSL SliceTimer) is the default, since it works reliably. Use the other options only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty, there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if the dictionary is empty, no whitelist is applied and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty, there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	masking_config_path : str, optional
		Path to the JSON configuration file that will be read by the MLEBE `predict_mask` function. If it is set, the segmentation models from the MLEBE package are used to extract the brain region in an additional masking node.
	'''

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
    )

    if not n_jobs:
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    find_physio = pe.Node(
        name='find_physio',
        interface=util.Function(
            function=corresponding_physiofile,
            input_names=inspect.getargspec(corresponding_physiofile)[0],
            output_names=['physiofile', 'meta_physiofile']))

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice', 'ind_type'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False
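    # parameterization=False keeps DataSink from nesting outputs inside
    # subdirectories derived from iterable parameters.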

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (get_f_scan, find_physio, [('nii_path', 'nii_path')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (find_physio, datasink, [('physiofile', 'func.@physio')]),
        (find_physio, datasink, [('meta_physiofile', 'func.@meta_physio')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
    ]
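    # Each entry is (source_node, destination_node, [(source_field,
    # destination_field), ...]); the accumulated list is passed to
    # Workflow.connect() once all conditional branches have extended it.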

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    s_biascorrect, f_biascorrect = real_size_nodes()

    if structural_scan_types.any():
        s_data_selection = deepcopy(data_selection)
        for match in structural_match.keys():
            s_data_selection = s_data_selection.loc[
                s_data_selection[match].isin(structural_match[match])]

        get_s_scan = pe.Node(
            name='get_s_scan',
            interface=util.Function(
                function=get_bids_scan,
                input_names=inspect.getargspec(get_bids_scan)[0],
                output_names=[
                    'scan_path', 'scan_type', 'task', 'nii_path', 'nii_name',
                    'events_name', 'subject_session', 'metadata_filename',
                    'dict_slice', 'ind_type'
                ]))
        get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = s_data_selection
        get_s_scan.inputs.bids_base = bids_base

        s_register, s_warp, f_register, f_warp = generic_registration(
            template,
            template_mask=registration_mask,
            phase_dictionary=phase_dictionary,
        )
        #TODO: incl. in func registration
        if autorotate:
            s_rotated = autorotate(template)
            workflow_connections.extend([
                (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                (s_rotated, s_register, [('out_file', 'moving_image')]),
            ])
        else:
            workflow_connections.extend([
                (s_biascorrect, s_register, [('output_image', 'moving_image')
                                             ]),
                (s_register, s_warp, [('composite_transform', 'transforms')]),
                (get_s_scan, s_warp, [('nii_path', 'input_image')]),
                (s_warp, datasink, [('output_image', 'anat')]),
            ])
        if masking_config_path:
            from mlebe.masking.predict_mask import predict_mask
            s_mask = pe.Node(
                name='s_mask',
                interface=util.Function(
                    function=predict_mask,
                    input_names=inspect.getfullargspec(predict_mask)[0],
                    output_names=['out_file', 'mask_list', 'mask']))
            f_mask = pe.Node(
                name='f_mask',
                interface=util.Function(
                    function=predict_mask,
                    input_names=inspect.getfullargspec(predict_mask)[0],
                    output_names=['out_file', 'mask_list', 'mask']))
            s_mask.inputs.masking_config_path = masking_config_path
            f_mask.inputs.masking_config_path = masking_config_path
            f_mask.inputs.input_type = 'func'
            workflow_connections.extend([
                (get_f_scan, get_s_scan, [('subject_session', 'selector')]),
                (get_f_scan, f_mask, [('nii_path', 'in_file')]),
                (f_mask, f_biascorrect, [('mask', 'mask_image')]),
                (get_s_scan, s_warp, [('nii_name', 'output_image')]),
                (get_s_scan, s_mask, [('nii_path', 'in_file')]),
                (s_mask, s_biascorrect, [('out_file', 'input_image')]),
                (s_mask, s_biascorrect, [('mask', 'mask_image')]),
                (s_mask, s_register, [('mask_list', 'moving_image_masks')]),
                (f_mask, f_register, [('mask_list', 'moving_image_masks')]),
            ])

        else:
            workflow_connections.extend([
                (get_f_scan, get_s_scan, [('subject_session', 'selector')]),
                (get_s_scan, s_warp, [('nii_name', 'output_image')]),
                (get_s_scan, s_biascorrect, [('nii_path', 'input_image')]),
            ])

    if functional_registration_method == "structural":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `functional_registration_method="structural"` requires there to be a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `functional_registration_method="composite"` requires there to be a structural scan type.'
            )
        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')
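    # Merging the two composite transforms lets f_warp apply both in one
    # resampling step; ants.ApplyTransforms treats the transform list as a
    # stack, applying the last-listed transform (functional-to-structural)
    # first, then structural-to-template.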

        if masking_config_path:
            additional_biascorrect = additional_s_biascorrect()
            workflow_connections.extend([
                (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
                (f_biascorrect, f_register, [('output_image', 'moving_image')
                                             ]),
                (get_s_scan, additional_biascorrect, [('nii_path',
                                                       'input_image')]),
                (additional_biascorrect, f_register, [('output_image',
                                                       'fixed_image')]),
                (s_register, merge, [('composite_transform', 'in1')]),
                (f_register, merge, [('composite_transform', 'in2')]),
                (merge, f_warp, [('out', 'transforms')]),
            ])
        else:
            workflow_connections.extend([
                (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
                (f_biascorrect, f_register, [('output_image', 'moving_image')
                                             ]),
                (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
                (s_register, merge, [('composite_transform', 'in1')]),
                (f_register, merge, [('composite_transform', 'in2')]),
                (merge, f_warp, [('out', 'transforms')]),
            ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:
        workflow_connections.extend([
            (get_f_scan, f_warp, [('nii_name', 'output_image')]),
            (f_warp, datasink, [('output_image', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    # This gives the name of the workdir; the output name is passed to the datasink.
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'We could not write the DOT file for visualization (`dot` function from the graphviz package). This is non-critical to the processing, but you should get this fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    copy_bids_files(bids_base, os.path.join(out_base, workflow_name))
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise
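
A hedged invocation sketch for `generic()` above (the dataset and template paths are illustrative placeholders; note that the function runs the workflow immediately rather than returning it):

generic('/data/my_bids_dataset',            # hypothetical BIDS root
        '/data/templates/template.nii.gz',  # hypothetical template path
        functional_match={'task': ['rest']},
        out_base='/data/preprocessing',
        realign='time',
        )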
Example 3
remove_vol = Node(util.Function(input_names=['in_file', 't_min'],
                                output_names=["out_file"],
                                function=strip_rois_func),
                  name='remove_vol')
remove_vol.inputs.t_min = vol_to_remove
preproc_func.connect([(selectfiles, remove_vol, [('func', 'in_file')])])

# Apply mask (to make mag and real comparable)
apply_func = Node(fsl.ApplyMask(), name='apply_func')

preproc_func.connect([(selectfiles, apply_func, [('mask', 'mask_file')]),
                      (remove_vol, apply_func, [('out_file', 'in_file')])])

# simultaneous slice-time and motion correction
moco = Node(nipy.SpaceTimeRealigner(slice_times='asc_alt_2',
                                    tr=TR,
                                    slice_info=[2, 1]),
            name="moco")
preproc_func.connect([(apply_func, moco, [('out_file', 'in_file')])])

# compute median
median = Node(util.Function(input_names=['in_files'],
                            output_names=['median_file'],
                            function=median),
              name='median')

preproc_func.connect([(moco, median, [('out_file', 'in_files')])])

# artefact detection
artefact = Node(ra.ArtifactDetect(save_plot=True,
                                  use_norm=True,
# remove first volumes
remove_vol = Node(util.Function(input_names=['in_file', 't_min'],
                                output_names=["out_file"],
                                function=strip_rois_func),
                  name='remove_vol')
remove_vol.inputs.t_min = vol_to_remove

preproc.connect([(selectfiles, remove_vol, [('rest', 'in_file')])])

brain_extract_mag1 = Node(fsl.BET(), name='brain_extract_mag1')

preproc.connect([(selectfiles, brain_extract_mag1, [('mag1', 'in_file')])])

# simultaneous slice time and motion correction
slicemoco = Node(nipy.SpaceTimeRealigner(), name="spacetime_realign")

preproc.connect([(remove_vol, slicemoco, [('out_file', 'in_file')])])

# compute first tsnr and detrend
tsnr = Node(TSNR(regress_poly=2), name='tsnr')
preproc.connect([(slicemoco, tsnr, [('out_file', 'in_file')])])

# compute median of realigned timeseries in preparation for fieldmap correction

median1 = Node(util.Function(input_names=['in_files'],
                             output_names=['median_file'],
                             function=median),
               name='median1')

#median = Node(SpatialFilter(operation='median'),
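
The `median` helper passed to `util.Function` above is not shown in these snippets; a plausible implementation (an assumption modeled on the nipype resting-state examples, not necessarily the original author's code):

def median(in_files):
    """Average over runs, then take the voxel-wise temporal median."""
    import os
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import filename_to_list
    average = None
    for idx, filename in enumerate(filename_to_list(in_files)):
        img = nb.load(filename)
        # voxel-wise median over the time axis of this run
        data = np.median(img.get_fdata(), axis=3)
        average = data if average is None else average + data
    median_img = nb.Nifti1Image(average / (idx + 1), img.affine, img.header)
    median_file = os.path.join(os.getcwd(), 'median.nii.gz')
    median_img.to_filename(median_file)
    return median_file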
Example 5
                  name='getsubs')
getsubs.inputs.ignore_exception = False
preproc_wf.connect(subj_iterable, 'subject_id', getsubs, 'subject_id')
preproc_wf.connect(datasource, 'mri_files', getsubs, 'mri_files')

# Extract the first volume of the first run as the reference
extractref = pe.Node(fsl.ExtractROI(t_size=1),
                     iterfield=['in_file'],
                     name='extractref')
preproc_wf.connect(datasource, ('mri_files', pickfirst), extractref, 'in_file')
preproc_wf.connect(datasource, ('mri_files', pickvol, 0, 'middle'), extractref,
                   't_min')
preproc_wf.connect(extractref, 'roi_file', outputspec, 'reference')

# Motion correction with Nipy algorithm
motion_correct = pe.Node(nipy.SpaceTimeRealigner(), name='motion_correct')
# Request 12 slots on a single host when submitting via LSF; both options
# must live in a single plugin_args dict, since reassignment overwrites it.
motion_correct.plugin_args = {'bsub_args': '-n 12 -R "span[hosts=1]"'}
preproc_wf.connect(datasource, 'mri_files', motion_correct, 'in_file')
preproc_wf.connect(motion_correct, 'par_file', outputspec, 'motion_parameters')
preproc_wf.connect(motion_correct, 'out_file', outputspec,
                   'motion_corrected_files')

# Calculate the transformation matrix from EPI space to FreeSurfer space
# using the BBRegister command
coregister = pe.Node(fs.BBRegister(subjects_dir=subjects_dir,
                                   contrast_type='t2',
                                   init='fsl',
                                   out_fsl_file=True),
                     name='coregister')
preproc_wf.connect(subj_iterable, 'subject_id', coregister, 'subject_id')
def create_workflow(files,
                    target_file,
                    subject_id,
                    TR,
                    slice_times,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    subjects_dir=None,
                    sink_directory=os.getcwd(),
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(nipy.SpaceTimeRealigner(), name="spacetime_realign")
    realign.inputs.slice_times = slice_times
    realign.inputs.tr = TR
    realign.inputs.slice_info = 2
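    # slice_info=2 marks the third array axis as the slice axis; the interface
    # also accepts a two-element (axis, direction) form, as with [2, 1] above.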

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(realign, "out_file", tsnr, "in_file")

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """Segment and Register
    """
    registration = create_reg_workflow(name='registration')
    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    registration.inputs.inputspec.subject_id = subject_id
    registration.inputs.inputspec.subjects_dir = subjects_dir
    registration.inputs.inputspec.target_image = target_file
    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'NiPy'
    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([
        (name_unique, realign, [('out_file', 'in_file')]),
        (realign, art, [('out_file', 'realigned_files')]),
        (realign, art, [('par_file', 'realignment_parameters')]),
    ])

    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())
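
    # selectindex is used further down to pick individual segmentation maps
    # (e.g. indices [0, 2] for the CSF and white-matter probability maps)
    # out of the registration outputs.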

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(calc_median, 'median_file', mask, 'in_file')

    # get segmentation in normalized functional space

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    # filter some noise

    # Compute motion regressors
    motreg = Node(Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors,
        imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'par_file', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1,
        imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii.gz',
                              out_pf_name='pF_mcart.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(realign, 'out_file', filter1, 'in_file')
    wf.connect(realign, ('out_file', rename, '_filtermotart'), filter1,
               'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')

    createfilter2 = MapNode(Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration,
               ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')

    filter2 = MapNode(fsl.GLM(out_f_name='F.nii.gz',
                              out_pf_name='pF.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2,
               'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(
        input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'],
        output_names=['out_files'],
        function=bandpass_filter,
        imports=imports),
                    name='bandpass_unsmooth')
    bandpass.inputs.fs = 1. / TR
    bandpass.inputs.highpass_freq = highpass_freq
    bandpass.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass, 'files')
    """Smooth the functional data using
    :class:`nipype.interfaces.fsl.IsotropicSmooth`.
    """

    smooth = MapNode(interface=fsl.IsotropicSmooth(),
                     name="smooth",
                     iterfield=["in_file"])
    smooth.inputs.fwhm = vol_fwhm

    wf.connect(bandpass, 'out_files', smooth, 'in_file')

    collector = Node(Merge(2), name='collect_streams')
    wf.connect(smooth, 'out_file', collector, 'in1')
    wf.connect(bandpass, 'out_files', collector, 'in2')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(),
                      iterfield=['input_image'],
                      name='warpall')
    warpall.inputs.input_image_type = 3
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.reference_image = target_file
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 1

    # transform to target
    wf.connect(collector, 'out', warpall, 'input_image')
    wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')

    mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')

    wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')

    maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
    wf.connect(warpall, 'output_image', maskts, 'in_file')
    wf.connect(mask_target, 'out_file', maskts, 'mask_file')

    # map to surface
    # extract aparc+aseg ROIs
    # extract subcortical ROIs
    # extract target space ROIs
    # combine subcortical and cortical rois into a single cifti file

    #######
    # Convert aparc to subject functional space

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(
        freesurfer.SegStats(default_color_table=True),
        iterfield=['in_file', 'summary_file', 'avgwf_txt_file'],
        name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) +
                                     [17, 18, 26, 47] + list(range(49, 55)) +
                                     [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(registration, 'outputspec.aparc', sampleaparc,
               'segmentation_file')
    wf.connect(collector, 'out', sampleaparc, 'in_file')

    def get_names(files, suffix):
        """Generate appropriate names for output files
        """
        from nipype.utils.filemanip import (split_filename, filename_to_list,
                                            list_to_filename)
        import os
        out_names = []
        for filename in files:
            path, name, _ = split_filename(filename)
            out_names.append(os.path.join(path, name + suffix))
        return list_to_filename(out_names)

    wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc,
               'avgwf_txt_file')
    wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc,
               'summary_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    samplerlh.inputs.smooth_surf = surf_fwhm
    #samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = subjects_dir

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(collector, 'out', samplerlh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(collector, 'out', samplerrh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(
        input_names=['timeseries_file', 'label_file', 'indices'],
        output_names=['out_file'],
        function=extract_subrois,
        imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                             list(range(49, 55)) + [58])
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm_v2.nii.gz'))
    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')

    ######

    substitutions = [
        ('_target_subject_', ''),
        ('_filtermotart_cleaned_bp_trans_masked', ''),
        ('_filtermotart_cleaned_bp', ''),
    ]
    substitutions += [("_smooth%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_ts_masker%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_getsubcortts%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_combiner%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_filtermotion%d" % i, "") for i in range(11)[::-1]]
    substitutions += [("_filter_noise_nosmooth%d" % i, "")
                      for i in range(11)[::-1]]
    substitutions += [("_makecompcorfilter%d" % i, "")
                      for i in range(11)[::-1]]

    substitutions += [("T1_out_brain_pve_0_maths_warped", "compcor_csf"),
                      ("T1_out_brain_pve_1_maths_warped", "compcor_gm"),
                      ("T1_out_brain_pve_2_maths_warped", "compcor_wm"),
                      ("output_warped_image_maths", "target_brain_mask"),
                      ("median_brain_mask", "native_brain_mask"),
                      ("corr_", "")]

    regex_subs = [
        ('_combiner.*/sar', '/smooth/'),
        ('_combiner.*/ar', '/unsmooth/'),
        ('_aparc_ts.*/sar', '/smooth/'),
        ('_aparc_ts.*/ar', '/unsmooth/'),
        ('_getsubcortts.*/sar', '/smooth/'),
        ('_getsubcortts.*/ar', '/unsmooth/'),
        ('series/sar', 'series/smooth/'),
        ('series/ar', 'series/unsmooth/'),
        ('_inverse_transform./', ''),
    ]
    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = substitutions
    datasink.inputs.regexp_substitutions = regex_subs  #(r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'par_file', datasink, 'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.segmentation_files', datasink,
               'resting.mask_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'resting.qa.ants')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(bandpass, 'out_files', datasink,
               'resting.timeseries.@bandpassed')
    wf.connect(smooth, 'out_file', datasink, 'resting.timeseries.@smoothed')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = substitutions
    datasink2.inputs.regexp_substitutions = regex_subs  #(r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
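
A hedged invocation sketch for `create_workflow` above (file names, TR, and slice times are illustrative placeholders; module-level helpers it references, such as `imports`, `median`, and `create_reg_workflow`, must already be defined):

wf = create_workflow(files=['rest_run1.nii.gz', 'rest_run2.nii.gz'],  # hypothetical runs
                     target_file='target_template.nii.gz',            # hypothetical template
                     subject_id='sub001',
                     TR=2.0,
                     slice_times=[0.0, 1.0, 0.5, 1.5],  # illustrative interleaved timing
                     subjects_dir='/data/freesurfer_subjects',
                     sink_directory='/data/output')
wf.base_dir = '/data/work'
wf.run(plugin='MultiProc')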
Example 7
def generic(
    bids_base,
    template,
    actual_size=True,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    functional_registration_method="composite",
    keep_work=False,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    out_base=None,
    realign="time",
    registration_mask="",
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='generic',
    params={},
    phase_dictionary=GENERIC_PHASES,
):
    '''
	Generic preprocessing and registration workflow for small animal data in BIDS format.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	actual_size : bool, optional
		Whether to keep the data at its original scale; if `False`, the spatial representation will be stretched 10-fold in each dimension.
	autorotate : bool, optional
		Whether to use a multi-rotation-state transformation start.
		This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if the dictionary is empty, no whitelist is applied and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	functional_registration_method : {'composite','functional','structural'}, optional
		How to register the functional scan to the template.
		'composite' means the functional scan is registered to the structural scan, which is in turn registered to the template; 'functional' means it is registered directly to the template; 'structural' means it is registered exactly as the structural scan is.
	keep_work : bool, optional
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	negative_contrast_agent : bool, optional
		Whether the scan was acquired with a negative contrast agent, given the imaging modality; if true, the values will be inverted with respect to zero.
		This is commonly used for iron nano-particle Cerebral Blood Volume (CBV) measurements.
	n_procs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on hardware (but not on current load).
	out_base : str, optional
		Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Dictates the slice-timing correction and realignment of slices. "time" (FSL SliceTimer) is the default, since it works reliably. Use the other options only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty, there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if the dictionary is empty, no whitelist is applied and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty, there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
    )

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'file_name', 'events_name',
                                 'subject_session'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if actual_size:
        s_biascorrect, f_biascorrect = real_size_nodes()
    else:
        s_biascorrect, f_biascorrect = inflated_size_nodes()

    if structural_scan_types.any():
        get_s_scan = pe.Node(
            name='get_s_scan',
            interface=util.Function(
                function=get_bids_scan,
                input_names=inspect.getargspec(get_bids_scan)[0],
                output_names=[
                    'scan_path', 'scan_type', 'task', 'nii_path', 'nii_name',
                    'file_name', 'events_name', 'subject_session'
                ]))
        get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.bids_base = bids_base

        if actual_size:
            s_register, s_warp, _, _ = DSURQEc_structural_registration(
                template,
                registration_mask,
                parameters=params,
                phase_dictionary=phase_dictionary,
            )
            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_biascorrect, s_register, [('output_image',
                                                  'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (get_s_scan, s_warp, [('nii_path', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])
        else:
            s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(),
                                        name="s_reg_biascorrect")
            s_reg_biascorrect.inputs.dimension = 3
            s_reg_biascorrect.inputs.bspline_fitting_distance = 95
            s_reg_biascorrect.inputs.shrink_factor = 2
            s_reg_biascorrect.inputs.n_iterations = [500, 500, 500, 500]
            s_reg_biascorrect.inputs.convergence_threshold = 1e-14

            s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
            s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

            s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
            s_BET.inputs.mask = True
            s_BET.inputs.frac = 0.3
            s_BET.inputs.robust = True

            s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
            s_register, s_warp, f_warp = structural_registration(template)

            workflow_connections.extend([
                (get_s_scan, s_reg_biascorrect, [('nii_path', 'input_image')]),
                (s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
                (s_cutoff, s_BET, [('out_file', 'in_file')]),
                (s_biascorrect, s_mask, [('output_image', 'in_file')]),
                (s_BET, s_mask, [('mask_file', 'mask_file')]),
            ])

            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_mask, s_rotated, [('out_file', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_mask, s_register, [('out_file', 'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (get_s_scan, s_warp, [('nii_path', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])

        if autorotate:
            s_rotated = autorotate(template)

        workflow_connections.extend([
            (get_f_scan, get_s_scan, [('subject_session', 'selector')]),
            (get_s_scan, s_warp, [('nii_name', 'output_image')]),
            (get_s_scan, s_biascorrect, [('nii_path', 'input_image')]),
        ])

    if functional_registration_method == "structural":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `functional_registration_method="structural"` requires there to be a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `functional_registration_method="composite"` requires there to be a structural scan type.'
            )
        _, _, f_register, f_warp = DSURQEc_structural_registration(
            template,
            registration_mask,
            parameters=params,
            phase_dictionary=phase_dictionary,
        )
        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')
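        # concatenating the functional->structural (in1) and structural->template
        # (in2) composite transforms lets f_warp resample the functional series
        # into template space in a single interpolation step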

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (f_register, merge, [('composite_transform', 'in1')]),
            (s_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])
    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
    blur.inputs.fwhmxy = functional_blur_xy
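    # `fslmaths_invert_values` presumably builds an fslmaths op_string which
    # multiplies the image by -1, so that negative-contrast (e.g. CBV) data end
    # up with positive activation values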
    if functional_blur_xy and negative_contrast_agent:
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (invert, datasink, [('out_file', 'func')]),
        ])

    elif functional_blur_xy:
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])

    elif negative_contrast_agent:
        workflow_connections.extend([
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (f_warp, invert, [(('output_image', fslmaths_invert_values),
                               'op_string')]),
            (f_warp, invert, [('output_image', 'in_file')]),
            (invert, datasink, [('out_file', 'func')]),
        ])
    else:
        workflow_connections.extend([
            (get_f_scan, f_warp, [('nii_name', 'output_image')]),
            (f_warp, datasink, [('output_image', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise
Exemplo n.º 8
0
                                output_names=["out_file"],
                                function=strip_rois_func),
                  name='remove_vol')
remove_vol.inputs.t_min = vol_to_remove
preproc.connect([(selectfiles, remove_vol, [('rest', 'in_file')])])

# Thermal noise removal
# func_denoise = Node(util.Function(input_names=['in_file'],
#                                     output_names=['denoised_data', 'sigmas',
#                                                   'preserved_components'],
#                                      function=pca_denoising),
#                                      name='func_denoise')
# preproc.connect([(remove_vol, func_denoise, [('out_file', 'in_file')])])

# motion correction
moco = Node(nipy.SpaceTimeRealigner(slice_times='asc_alt_2', tr=tr, slice_info=[2,1]),name="moco")
#preproc.connect([(func_denoise, moco, [('denoised_data', 'in_file')])])
preproc.connect([(remove_vol, moco, [('out_file', 'in_file')])])


# compute median
median = Node(util.Function(input_names=['in_files'],
                       output_names=['median_file'],
                       function=median),
              name='median')

preproc.connect([(moco, median, [('out_file', 'in_files')])])

# bias field correction
biasfield = Node(ants.N4BiasFieldCorrection(dimension=3,
Exemplo n.º 9
0
def legacy(
    bids_base,
    template,
    autorotate=False,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	autorotate : bool, optional
		Whether to use a multi-rotation-state transformation start.
		This allows the registration to commence with the best rotational fit, and may help if the orientation of the data is malformed with respect to the header.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, optional
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	negative_contrast_agent : bool, optional
		Whether the scan was acquired with a negative contrast agent; if true the values will be inverted with respect to zero.
		This is commonly used for iron nano-particle Cerebral Blood Volume (CBV) measurements.
	n_procs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on hardware (but not on current load).
	out_base : str, optional
		Output base directory --- inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment. "time" (fsl.SliceTimer) is the default, since it works reliably. Use the other options only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
    )

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'file_name', 'events_name',
                                 'subject_session'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS
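    # `force_dummy_scans` presumably trims leading volumes so that every scan
    # enters the workflow with the same number of pre-steady-state dummy scans;
    # the number of deleted volumes is also fed to the events file below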

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')
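    # the axis swap reorders the acquisition axes, intended to bring the
    # functional data into the template's orientation convention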

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_files')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    #if structural_scan_types.any():
    #	get_s_scan = pe.Node(name='get_s_scan', interface=util.Function(function=get_bids_scan, input_names=inspect.getargspec(get_bids_scan)[0], output_names=['scan_path','scan_type','task', 'nii_path', 'nii_name', 'file_name', 'events_name', 'subject_session']))
    #	get_s_scan.inputs.ignore_exception = True
    #	get_s_scan.inputs.data_selection = data_selection
    #	get_s_scan.inputs.bids_base = bids_base

    #	s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
    #	s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

    #	s_resize = pe.Node(interface=VoxelResize(), name="s_resize")

    #	s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
    #	s_BET.inputs.mask = True
    #	s_BET.inputs.frac = 0.3
    #	s_BET.inputs.robust = True

    #	ants_introduction = pe.Node(interface=legacy.antsIntroduction(), name='ants_introduction')
    #	ants_introduction.inputs.dimension = 3
    #	ants_introduction.inputs.reference_image = template
    #	#will need updating to `1`
    #	ants_introduction.inputs.bias_field_correction = True
    #	ants_introduction.inputs.transformation_model = 'GR'
    #	ants_introduction.inputs.max_iterations = [8,15,8]

    #	s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
    #	s_register, s_warp, f_warp = structural_registration(template)

    #	workflow_connections.extend([
    #		(get_s_scan, s_reg_biascorrect, [('nii_path', 'input_image')]),
    #		(s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
    #		(s_cutoff, s_BET, [('out_file', 'in_file')]),
    #		(s_biascorrect, s_mask, [('output_image', 'in_file')]),
    #		(s_BET, s_mask, [('mask_file', 'mask_file')]),
    #		])

    #	#TODO: incl. in func registration
    #	if autorotate:
    #		workflow_connections.extend([
    #			(s_mask, s_rotated, [('out_file', 'out_file')]),
    #			(s_rotated, s_register, [('out_file', 'moving_image')]),
    #			])
    #	else:
    #		workflow_connections.extend([
    #			(s_mask, s_register, [('out_file', 'moving_image')]),
    #			(s_register, s_warp, [('composite_transform', 'transforms')]),
    #			(get_s_scan, s_warp, [('nii_path', 'input_image')]),
    #			(s_warp, datasink, [('output_image', 'anat')]),
    #			])

    #	if autorotate:
    #		s_rotated = autorotate(template)

    #	workflow_connections.extend([
    #		(get_f_scan, get_s_scan, [('subject_session', 'selector')]),
    #		(get_s_scan, s_warp, [('nii_name','output_image')]),
    #		(get_s_scan, s_biascorrect, [('nii_path', 'input_image')]),
    #		])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'
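    # `fslorient -copysform2qform` overwrites the qform with the sform, making
    # the two NIfTI orientation headers consistent after warping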

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")

    blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
    blur.inputs.fwhmxy = functional_blur_xy

    if functional_blur_xy and negative_contrast_agent:
        workflow_connections.extend([
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (invert, datasink, [('out_file', 'func')]),
        ])

    elif functional_blur_xy:
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])

    elif negative_contrast_agent:
        workflow_connections.extend([
            (get_f_scan, invert, [('nii_name', 'out_file')]),
            (f_copysform2qform, invert, [(('out_file', fslmaths_invert_values),
                                          'op_string')]),
            (f_copysform2qform, invert, [('out_file', 'in_file')]),
            (invert, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(bids_base, 'preprocessing/crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise
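
A minimal sketch of how the workflow above might be invoked; the dataset path, template path, and match dictionary are illustrative placeholders, not part of the original example:

legacy(
    '~/ni_data/my_bids_dataset',
    '/usr/share/mouse-brain-atlases/dsurqec_200micron.nii',
    functional_match={'task': ['rest']},
    realign='time',
    negative_contrast_agent=True,
)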
Exemplo n.º 10
0
def bruker(measurements_base,
	functional_scan_types=[],
	structural_scan_types=[],
	sessions=[],
	subjects=[],
	measurements=[],
	exclude_subjects=[],
	exclude_measurements=[],
	actual_size=False,
	functional_blur_xy=False,
	functional_registration_method="structural",
	highpass_sigma=225,
	lowpass_sigma=None,
	negative_contrast_agent=False,
	n_procs=N_PROCS,
	realign=True,
	registration_mask=False,
	template="/home/chymera/ni_data/templates/ds_QBI_chr.nii.gz",
	tr=1,
	very_nasty_bruker_delay_hack=False,
	workflow_name="generic",
	keep_work=False,
	autorotate=False,
	strict=False,
	):

	measurements_base = os.path.abspath(os.path.expanduser(measurements_base))

	#select all functional/structural scan types unless specified
	if not functional_scan_types or not structural_scan_types:
		scan_classification = pd.read_csv(scan_classification_file_path)
		if not functional_scan_types:
			functional_scan_types = list(scan_classification[(scan_classification["categories"] == "functional")]["scan_type"])
		if not structural_scan_types:
			structural_scan_types = list(scan_classification[(scan_classification["categories"] == "structural")]["scan_type"])

	#hack to allow structural scan type disabling:
	if structural_scan_types == -1:
		structural_scan_types = []

	# define measurement directories to be processed, and populate the list either with the given include_measurements, or with an intelligent selection
	scan_types = deepcopy(functional_scan_types)
	scan_types.extend(structural_scan_types)
	data_selection=get_data_selection(measurements_base, sessions, scan_types=scan_types, subjects=subjects, exclude_subjects=exclude_subjects, measurements=measurements, exclude_measurements=exclude_measurements)
	if not subjects:
		subjects = set(list(data_selection["subject"]))
	if not sessions:
		sessions = set(list(data_selection["session"]))

	# when the structural scan serves as the registration target, only a single structural scan type is needed:
	if functional_registration_method == "structural":
		structural_scan_types = [structural_scan_types[0]]

	# here we start to define the nipype workflow elements (nodes, connections, meta)
	subjects_sessions = data_selection[["subject","session"]].drop_duplicates().values.tolist()
	infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_session']), name="infosource")
	infosource.iterables = [('subject_session', subjects_sessions)]

	get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(function=get_scan,input_names=inspect.getargspec(get_scan)[0], output_names=['scan_path','scan_type']))
	if not strict:
		get_f_scan.inputs.ignore_exception = True
	get_f_scan.inputs.data_selection = data_selection
	get_f_scan.inputs.measurements_base = measurements_base
	get_f_scan.iterables = ("scan_type", functional_scan_types)

	f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
	f_bru2nii.inputs.actual_size=actual_size

	dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(function=force_dummy_scans,input_names=inspect.getargspec(force_dummy_scans)[0], output_names=['out_file']))
	dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

	events_file = pe.Node(name='events_file', interface=util.Function(function=write_events_file,input_names=inspect.getargspec(write_events_file)[0], output_names=['out_file']))
	events_file.inputs.dummy_scans_ms = DUMMY_SCANS * tr * 1000
	events_file.inputs.stim_protocol_dictionary = STIM_PROTOCOL_DICTIONARY
	events_file.inputs.very_nasty_bruker_delay_hack = very_nasty_bruker_delay_hack

	if realign:
		realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
		realigner.inputs.slice_times = "asc_alt_2"
		realigner.inputs.tr = tr
		realigner.inputs.slice_info = 3 #3 for coronal slices (2 for horizontal, 1 for sagittal)

	bandpass = pe.Node(interface=fsl.maths.TemporalFilter(), name="bandpass")
	bandpass.inputs.highpass_sigma = highpass_sigma
	if lowpass_sigma:
		bandpass.inputs.lowpass_sigma = lowpass_sigma
	else:
		bandpass.inputs.lowpass_sigma = tr
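	# note: fslmaths -bptf (which TemporalFilter wraps) expects its sigma values
	# in volumes, not seconds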

	bids_filename = pe.Node(name='bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))

	bids_stim_filename = pe.Node(name='bids_stim_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
	bids_stim_filename.inputs.suffix = "events"
	bids_stim_filename.inputs.extension = ".tsv"

	datasink = pe.Node(nio.DataSink(), name='datasink')
	datasink.inputs.base_directory = path.join(measurements_base,"preprocessing",workflow_name)
	datasink.inputs.parameterization = False

	workflow_connections = [
		(infosource, get_f_scan, [('subject_session', 'selector')]),
		(infosource, bids_stim_filename, [('subject_session', 'subject_session')]),
		(get_f_scan, bids_stim_filename, [('scan_type', 'scan')]),
		(get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
		(f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
		(get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
		(get_f_scan, events_file, [
			('scan_type', 'scan_type'),
			('scan_path', 'scan_dir')
			]),
		(events_file, datasink, [('out_file', 'func.@events')]),
		(bids_stim_filename, events_file, [('filename', 'out_file')]),
		(infosource, datasink, [(('subject_session',ss_to_path), 'container')]),
		(infosource, bids_filename, [('subject_session', 'subject_session')]),
		(get_f_scan, bids_filename, [('scan_type', 'scan')]),
		(bids_filename, bandpass, [('filename', 'out_file')]),
		(bandpass, datasink, [('out_file', 'func')]),
		]

	if realign:
		workflow_connections.extend([
			(dummy_scans, realigner, [('out_file', 'in_file')]),
			])

	#ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
	if structural_scan_types:
		get_s_scan = pe.Node(name='get_s_scan', interface=util.Function(function=get_scan,input_names=inspect.getargspec(get_scan)[0], output_names=['scan_path','scan_type']))
		if not strict:
			get_s_scan.inputs.ignore_exception = True
		get_s_scan.inputs.data_selection = data_selection
		get_s_scan.inputs.measurements_base = measurements_base
		get_s_scan.iterables = ("scan_type", structural_scan_types)

		s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
		s_bru2nii.inputs.force_conversion=True
		s_bru2nii.inputs.actual_size=actual_size

		if "DSURQEc" in template:
			s_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_biascorrect")
			s_biascorrect.inputs.dimension = 3
			s_biascorrect.inputs.bspline_fitting_distance = 10
			s_biascorrect.inputs.bspline_order = 4
			s_biascorrect.inputs.shrink_factor = 2
			s_biascorrect.inputs.n_iterations = [150,100,50,30]
			s_biascorrect.inputs.convergence_threshold = 1e-16
			s_register, s_warp, _, f_warp = DSURQEc_structural_registration(template, registration_mask)
			#TODO: incl. in func registration
			if autorotate:
				workflow_connections.extend([
					(s_biascorrect, s_rotated, [('output_image', 'out_file')]),
					(s_rotated, s_register, [('out_file', 'moving_image')]),
					])
			else:
				workflow_connections.extend([
					(s_biascorrect, s_register, [('output_image', 'moving_image')]),
					(s_register, s_warp, [('composite_transform', 'transforms')]),
					(s_bru2nii, s_warp, [('nii_file', 'input_image')]),
					(s_warp, datasink, [('output_image', 'anat')]),
					])
		else:
			s_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_biascorrect")
			s_biascorrect.inputs.dimension = 3
			s_biascorrect.inputs.bspline_fitting_distance = 100
			s_biascorrect.inputs.shrink_factor = 2
			s_biascorrect.inputs.n_iterations = [200,200,200,200]
			s_biascorrect.inputs.convergence_threshold = 1e-11

			s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_reg_biascorrect")
			s_reg_biascorrect.inputs.dimension = 3
			s_reg_biascorrect.inputs.bspline_fitting_distance = 95
			s_reg_biascorrect.inputs.shrink_factor = 2
			s_reg_biascorrect.inputs.n_iterations = [500,500,500,500]
			s_reg_biascorrect.inputs.convergence_threshold = 1e-14

			s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
			s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

			s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
			s_BET.inputs.mask = True
			s_BET.inputs.frac = 0.3
			s_BET.inputs.robust = True

			s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
			s_register, s_warp, f_warp = structural_registration(template)

			workflow_connections.extend([
				(s_bru2nii, s_reg_biascorrect, [('nii_file', 'input_image')]),
				(s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
				(s_cutoff, s_BET, [('out_file', 'in_file')]),
				(s_biascorrect, s_mask, [('output_image', 'in_file')]),
				(s_BET, s_mask, [('mask_file', 'mask_file')]),
				])

			#TODO: incl. in func registration
			if autorotate:
				workflow_connections.extend([
					(s_mask, s_rotated, [('out_file', 'out_file')]),
					(s_rotated, s_register, [('out_file', 'moving_image')]),
					])
			else:
				workflow_connections.extend([
					(s_mask, s_register, [('out_file', 'moving_image')]),
					(s_register, s_warp, [('composite_transform', 'transforms')]),
					(s_bru2nii, s_warp, [('nii_file', 'input_image')]),
					(s_warp, datasink, [('output_image', 'anat')]),
					])



		s_bids_filename = pe.Node(name='s_bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
		s_bids_filename.inputs.scan_prefix = False

		workflow_connections.extend([
			(infosource, get_s_scan, [('subject_session', 'selector')]),
			(infosource, s_bids_filename, [('subject_session', 'subject_session')]),
			(get_s_scan, s_bru2nii, [('scan_path','input_dir')]),
			(get_s_scan, s_bids_filename, [('scan_type', 'scan')]),
			(s_bids_filename, s_warp, [('filename','output_image')]),
			(s_bru2nii, s_biascorrect, [('nii_file', 'input_image')]),
			])



	if functional_registration_method == "structural":
		if not structural_scan_types:
			raise ValueError('The option `registration="structural"` requires there to be a structural scan type.')
		workflow_connections.extend([
			(s_register, f_warp, [('composite_transform', 'transforms')]),
			])
		if realign:
			workflow_connections.extend([
				(realigner, f_warp, [('out_file', 'input_image')]),
				])
		else:
			workflow_connections.extend([
				(dummy_scans, f_warp, [('out_file', 'input_image')]),
				])


	if functional_registration_method == "composite":
		if not structural_scan_types:
			raise ValueError('The option `registration="composite"` requires there to be a structural scan type.')
		_, _, f_register, f_warp = DSURQEc_structural_registration(template, registration_mask)

		temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

		f_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="f_biascorrect")
		f_biascorrect.inputs.dimension = 3
		f_biascorrect.inputs.bspline_fitting_distance = 100
		f_biascorrect.inputs.shrink_factor = 2
		f_biascorrect.inputs.n_iterations = [200,200,200,200]
		f_biascorrect.inputs.convergence_threshold = 1e-11

		merge = pe.Node(util.Merge(2), name='merge')

		workflow_connections.extend([
			(temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
			(f_biascorrect, f_register, [('output_image', 'moving_image')]),
			(s_biascorrect, f_register, [('output_image', 'fixed_image')]),
			(f_register, merge, [('composite_transform', 'in1')]),
			(s_register, merge, [('composite_transform', 'in2')]),
			(merge, f_warp, [('out', 'transforms')]),
			])
		if realign:
			workflow_connections.extend([
				(realigner, temporal_mean, [('out_file', 'in_file')]),
				(realigner, f_warp, [('out_file', 'input_image')]),
				])
		else:
			workflow_connections.extend([
				(dummy_scans, temporal_mean, [('out_file', 'in_file')]),
				(dummy_scans, f_warp, [('out_file', 'input_image')]),
				])

	elif functional_registration_method == "functional":
		f_register, f_warp = functional_registration(template)

		temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

		f_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="f_biascorrect")
		f_biascorrect.inputs.dimension = 3
		f_biascorrect.inputs.bspline_fitting_distance = 100
		f_biascorrect.inputs.shrink_factor = 2
		f_biascorrect.inputs.n_iterations = [200,200,200,200]
		f_biascorrect.inputs.convergence_threshold = 1e-11

		f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
		f_cutoff.inputs.op_string = "-thrP 30"

		f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
		f_BET.inputs.mask = True
		f_BET.inputs.frac = 0.5

		workflow_connections.extend([
			(temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
			(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
			(f_cutoff, f_BET, [('out_file', 'in_file')]),
			(f_BET, f_register, [('out_file', 'moving_image')]),
			(f_register, f_warp, [('composite_transform', 'transforms')]),
			])
		if realign:
			workflow_connections.extend([
				(realigner, temporal_mean, [('out_file', 'in_file')]),
				(realigner, f_warp, [('out_file', 'input_image')]),
				])
		else:
			workflow_connections.extend([
				(dummy_scans, temporal_mean, [('out_file', 'in_file')]),
				(dummy_scans, f_warp, [('out_file', 'input_image')]),
				])


	invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
	if functional_blur_xy and negative_contrast_agent:
		blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
		blur.inputs.fwhmxy = functional_blur_xy
		workflow_connections.extend([
			(f_warp, blur, [('output_image', 'in_file')]),
			(blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')]),
			(blur, invert, [('out_file', 'in_file')]),
			(invert, bandpass, [('out_file', 'in_file')]),
			])
	elif functional_blur_xy:
		blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
		blur.inputs.fwhmxy = functional_blur_xy
		workflow_connections.extend([
			(f_warp, blur, [('output_image', 'in_file')]),
			(blur, bandpass, [('out_file', 'in_file')]),
			])
	elif negative_contrast_agent:
		blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
		blur.inputs.fwhmxy = functional_blur_xy
		workflow_connections.extend([
			(f_warp, invert, [(('output_image', fslmaths_invert_values), 'op_string')]),
			(f_warp, invert, [('output_image', 'in_file')]),
			(invert, bandpass, [('out_file', 'in_file')]),
			])
	else:
		workflow_connections.extend([
			(f_warp, bandpass, [('output_image', 'in_file')]),
			])

	workdir_name = workflow_name+"_work"
	workflow = pe.Workflow(name=workdir_name)
	workflow.connect(workflow_connections)
	workflow.base_dir = path.join(measurements_base,"preprocessing")
	workflow.config = {"execution": {"crashdump_dir": path.join(measurements_base,"preprocessing/crashdump")}}
	workflow.write_graph(dotfilename=path.join(workflow.base_dir,workdir_name,"graph.dot"), graph2use="hierarchical", format="png")

	workflow.run(plugin="MultiProc",  plugin_args={'n_procs' : n_procs})
	if not keep_work:
		shutil.rmtree(path.join(workflow.base_dir,workdir_name))
Exemplo n.º 11
0
                  name='getsubs')
getsubs.inputs.ignore_exception = False
motcor_sltimes_wf.connect(subj_iterable, 'subject_id', getsubs, 'subject_id')
motcor_sltimes_wf.connect(datasource, 'mri_files', getsubs, 'mri_files')

# Extract the first volume of the first run as the reference
extractref = pe.Node(fsl.ExtractROI(t_size=1, t_min=0),
                     name='extractref')
motcor_sltimes_wf.connect(datasource, ('mri_files', pickfirst), extractref,
                          'in_file')
motcor_sltimes_wf.connect(extractref, 'roi_file', outputspec, 'reference')

# NOTE: Committing to NIPY
# Simultaneous motion and slice timing correction with Nipy algorithm
motion_sltime_correct = pe.MapNode(nipy.SpaceTimeRealigner(),
                                   name='motion_sltime_correct',
                                   iterfield=['in_file', 'slice_times'])
motcor_sltimes_wf.connect(datasource, ('mri_files', calc_slicetimes, 2.),
                          motion_sltime_correct, 'slice_times')
motion_sltime_correct.inputs.tr = 2.
motion_sltime_correct.inputs.slice_info = 2
motion_sltime_correct.plugin_args = {
    'bsub_args': '-n {0} -R "span[hosts=1]"'.format(os.environ['MKL_NUM_THREADS'])
}
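# the LSF resource string '-R "span[hosts=1]"' keeps all requested slots on a
# single host, so the MKL threads of the nipy realignment share memory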
motcor_sltimes_wf.connect(datasource, 'mri_files', motion_sltime_correct,
                          'in_file')
motcor_sltimes_wf.connect(motion_sltime_correct, 'par_file', outputspec,
                          'motion_parameters')
motcor_sltimes_wf.connect(motion_sltime_correct, 'out_file', outputspec,
Exemplo n.º 12
0
def create_workflow(func_runs,
                    subject_id,
                    subjects_dir,
                    fwhm,
                    slice_times,
                    highpass_frequency,
                    lowpass_frequency,
                    TR,
                    sink_directory,
                    use_fsl_bp,
                    num_components,
                    whichvol,
                    name='wmaze'):
    
    wf = pe.Workflow(name=name)

    datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run'],
                                         outfields=['func']),
                         name='datasource')
    datasource.inputs.subject_id = subject_id
    datasource.inputs.run = func_runs
    datasource.inputs.template = '/home/data/madlab/data/mri/wmaze/%s/bold/bold_%03d/bold.nii.gz'
    datasource.inputs.sort_filelist = True
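    # the %s and %03d placeholders in the template are filled from the
    # 'subject_id' and 'run' infields, respectively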
    
    # Rename files in case they are named identically
    name_unique = pe.MapNode(util.Rename(format_string='wmaze_%(run)02d'),
                             iterfield = ['in_file', 'run'],
                             name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = func_runs
    wf.connect(datasource, 'func', name_unique, 'in_file')

    # Define the outputs for the preprocessing workflow
    output_fields = ['reference',
                     'motion_parameters',
                     'motion_parameters_plusDerivs',
                     'motionandoutlier_noise_file',
                     'noise_components',
                     'realigned_files',
                     'motion_plots',
                     'mask_file',
                     'smoothed_files',
                     'bandpassed_files',
                     'reg_file',
                     'reg_cost',
                     'reg_fsl_file',
                     'artnorm_files',
                     'artoutlier_files',
                     'artdisplacement_files',
                     'tsnr_file']
        
    outputnode = pe.Node(util.IdentityInterface(fields=output_fields),
                         name='outputspec')

    # Convert functional images to float representation
    img2float = pe.MapNode(fsl.ImageMaths(out_data_type='float',
                                        op_string = '',
                                        suffix='_dtype'),
                           iterfield=['in_file'],
                           name='img2float')
    wf.connect(name_unique, 'out_file', img2float, 'in_file')

    # Run AFNI's despike. This is always run, however, whether this is fed to
    # realign depends on the input configuration
    despiker = pe.MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                          iterfield=['in_file'],
                          name='despike')
    num_threads = 4
    despiker.inputs.environ = {'OMP_NUM_THREADS': '%d' % num_threads}
    despiker.plugin_args = {'bsub_args': '-n %d -R "span[hosts=1]"' % num_threads}
    wf.connect(img2float, 'out_file', despiker, 'in_file')

    # Extract the first volume of the first run as the reference 
    extractref = pe.Node(fsl.ExtractROI(t_size=1),
                         name="extractref")
    wf.connect(despiker, ('out_file', pickfirst), extractref, 'in_file')
    wf.connect(despiker, ('out_file', pickvol, 0, whichvol), extractref, 't_min')
    wf.connect(extractref, 'roi_file', outputnode, 'reference')

    if slice_times is not None:
        # Simultaneous motion and slice timing correction with Nipy algorithm
        motion_correct = pe.Node(nipy.SpaceTimeRealigner(), name='motion_correct')
        motion_correct.inputs.tr = TR
        motion_correct.inputs.slice_times = slice_times
        motion_correct.inputs.slice_info = 2
        motion_correct.plugin_args = {'bsub_args': '-n %s -R "span[hosts=1]"' % os.environ['MKL_NUM_THREADS']}
        wf.connect(despiker, 'out_file', motion_correct, 'in_file')
        wf.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
        wf.connect(motion_correct, 'out_file', outputnode, 'realigned_files')
    else:
        # Motion correct functional runs to the reference (1st volume of 1st run)
        motion_correct =  pe.MapNode(fsl.MCFLIRT(save_mats = True,
                                                 save_plots = True,
                                                 interpolation = 'sinc'),
                                     name = 'motion_correct',
                                     iterfield = ['in_file'])
        wf.connect(despiker, 'out_file', motion_correct, 'in_file')
        wf.connect(extractref, 'roi_file', motion_correct, 'ref_file')
        wf.connect(motion_correct, 'par_file', outputnode, 'motion_parameters')
        wf.connect(motion_correct, 'out_file', outputnode, 'realigned_files')

    # Compute TSNR on realigned data regressing polynomials upto order 2
    tsnr = pe.MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(motion_correct, 'out_file', tsnr, 'in_file')
    wf.connect(tsnr, 'tsnr_file', outputnode, 'tsnr_file')

    # Plot the estimated motion parameters
    plot_motion = pe.MapNode(fsl.PlotMotionParams(in_source='fsl'),
                             name='plot_motion',
                             iterfield=['in_file'])
    plot_motion.iterables = ('plot_type', ['rotations', 'translations'])
    wf.connect(motion_correct, 'par_file', plot_motion, 'in_file')
    wf.connect(plot_motion, 'out_file', outputnode, 'motion_plots')

    # Register a source file to fs space and create a brain mask in source space
    fssource = pe.Node(nio.FreeSurferSource(),
                       name ='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = subjects_dir

    # Extract aparc+aseg brain mask and binarize
    fs_threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'),
                           name ='fs_threshold')
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), fs_threshold, 'in_file')

    # Calculate the transformation matrix from EPI space to FreeSurfer space
    # using the BBRegister command
    fs_register = pe.MapNode(fs.BBRegister(init='fsl'),
                             iterfield=['source_file'],
                             name ='fs_register')
    fs_register.inputs.contrast_type = 't2'
    fs_register.inputs.out_fsl_file = True
    fs_register.inputs.subject_id = subject_id
    fs_register.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', fs_register, 'source_file')
    wf.connect(fs_register, 'out_reg_file', outputnode, 'reg_file')
    wf.connect(fs_register, 'min_cost_file', outputnode, 'reg_cost')
    wf.connect(fs_register, 'out_fsl_file', outputnode, 'reg_fsl_file')

    # Extract wm+csf, brain masks by eroding freesurfer lables
    wmcsf = pe.MapNode(fs.Binarize(), 
                       iterfield=['match', 'binary_file', 'erode'], name='wmcsfmask')
    #wmcsf.inputs.wm_ven_csf = True
    wmcsf.inputs.match = [[2, 41], [4, 5, 14, 15, 24, 31, 43, 44, 63]]
    wmcsf.inputs.binary_file = ['wm.nii.gz', 'csf.nii.gz']
    wmcsf.inputs.erode = [2, 2] #int(np.ceil(slice_thickness))
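    # the match lists are FreeSurfer aseg label codes: 2/41 = left/right
    # cerebral white matter; 4, 5, 43, 44 = lateral and inferior lateral
    # ventricles; 14/15 = 3rd/4th ventricle; 24 = CSF; 31/63 = choroid plexus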
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file')

    # Now transform the wm and csf masks to 1st volume of 1st run
    wmcsftransform = pe.MapNode(fs.ApplyVolTransform(inverse=True,
                                                     interp='nearest'),
                                iterfield=['target_file'],
                                name='wmcsftransform')
    wmcsftransform.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', wmcsftransform, 'source_file')
    wf.connect(fs_register, ('out_reg_file', pickfirst), wmcsftransform, 'reg_file')
    wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file')

    # Transform the binarized aparc+aseg file to the 1st volume of 1st run space
    fs_voltransform = pe.MapNode(fs.ApplyVolTransform(inverse=True),
                                 iterfield = ['source_file', 'reg_file'],
                                 name='fs_transform')
    fs_voltransform.inputs.subjects_dir = subjects_dir
    wf.connect(extractref, 'roi_file', fs_voltransform, 'source_file')
    wf.connect(fs_register, 'out_reg_file', fs_voltransform, 'reg_file')
    wf.connect(fs_threshold, 'binary_file', fs_voltransform, 'target_file')

    # Dilate the binarized mask by 1 voxel that is now in the EPI space
    fs_threshold2 = pe.MapNode(fs.Binarize(min=0.5, out_type='nii'),
                               iterfield=['in_file'],
                               name='fs_threshold2')
    fs_threshold2.inputs.dilate = 1
    wf.connect(fs_voltransform, 'transformed_file', fs_threshold2, 'in_file')
    wf.connect(fs_threshold2, 'binary_file', outputnode, 'mask_file')
    
    # Use RapidART to detect motion/intensity outliers
    art = pe.MapNode(ra.ArtifactDetect(use_differences = [True, False],
                                       use_norm = True,
                                       zintensity_threshold = 3,
                                       norm_threshold = 1,
                                       bound_by_brainmask=True,
                                       mask_type = "file"),
                     iterfield=["realignment_parameters","realigned_files"],
                     name="art")
    if slice_times is not None:
        art.inputs.parameter_source = "NiPy"
    else:
        art.inputs.parameter_source = "FSL"
    wf.connect(motion_correct, 'par_file', art, 'realignment_parameters')
    wf.connect(motion_correct, 'out_file', art, 'realigned_files')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), art, 'mask_file')
    wf.connect(art, 'norm_files', outputnode, 'artnorm_files')
    wf.connect(art, 'outlier_files', outputnode, 'artoutlier_files')
    wf.connect(art, 'displacement_files', outputnode, 'artdisplacement_files')

    # Compute motion regressors (save file with 1st and 2nd derivatives)
    motreg = pe.Node(util.Function(input_names=['motion_params', 'order',
                                                'derivatives'],
                                   output_names=['out_files'],
                                   function=motion_regressors,
                                   imports=imports),
                     name='getmotionregress')
    wf.connect(motion_correct, 'par_file', motreg, 'motion_params')
    wf.connect(motreg, 'out_files', outputnode, 'motion_parameters_plusDerivs')

    # Create a filter text file to remove motion (+ derivatives), art confounds,
    # and 1st, 2nd, and 3rd order legendre polynomials.
    createfilter1 = pe.Node(util.Function(input_names=['motion_params', 'comp_norm',
                                                       'outliers', 'detrend_poly'],
                                          output_names=['out_files'],
                                          function=build_filter1,
                                          imports=imports),
                            name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 3
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')
    wf.connect(createfilter1, 'out_files', outputnode, 'motionandoutlier_noise_file')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = pe.MapNode(util.Function(input_names=['realigned_file', 'mask_file',
                                                          'num_components',
                                                          'extra_regressors'],
                                             output_names=['out_files'],
                                             function=extract_noise_components,
                                             imports=imports),
                               iterfield=['realigned_file', 'extra_regressors'],
                               name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(motion_correct, 'out_file', createfilter2, 'realigned_file')
    wf.connect(wmcsftransform, 'transformed_file', createfilter2, 'mask_file')
    wf.connect(createfilter2, 'out_files', outputnode, 'noise_components')

    # Mask the functional runs with the extracted mask
    maskfunc = pe.MapNode(fsl.ImageMaths(suffix='_bet',
                                         op_string='-mas'),
                          iterfield=['in_file'],
                          name = 'maskfunc')
    wf.connect(motion_correct, 'out_file', maskfunc, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), maskfunc, 'in_file2')
    
    # Smooth each run using SUSAn with the brightness threshold set to 75%
    # of the median value for each run and a mask constituting the mean functional
    smooth_median = pe.MapNode(fsl.ImageStats(op_string='-k %s -p 50'),
                               iterfield = ['in_file'],
                               name='smooth_median')
    wf.connect(maskfunc, 'out_file', smooth_median, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), smooth_median, 'mask_file')
    
    smooth_meanfunc = pe.MapNode(fsl.ImageMaths(op_string='-Tmean',
                                                suffix='_mean'),
                                 iterfield=['in_file'],
                                 name='smooth_meanfunc')
    wf.connect(maskfunc, 'out_file', smooth_meanfunc, 'in_file')

    smooth_merge = pe.Node(util.Merge(2, axis='hstack'),
                           name='smooth_merge')
    wf.connect(smooth_meanfunc, 'out_file', smooth_merge, 'in1')
    wf.connect(smooth_median, 'out_stat', smooth_merge, 'in2')

    smooth = pe.MapNode(fsl.SUSAN(),
                        iterfield=['in_file', 'brightness_threshold', 'usans'],
                        name='smooth')
    smooth.inputs.fwhm=fwhm
    wf.connect(maskfunc, 'out_file', smooth, 'in_file')
    wf.connect(smooth_median, ('out_stat', getbtthresh), smooth, 'brightness_threshold')
    wf.connect(smooth_merge, ('out', getusans), smooth, 'usans')
    
    # Mask the smoothed data with the dilated mask
    maskfunc2 = pe.MapNode(fsl.ImageMaths(suffix='_mask',
                                          op_string='-mas'),
                           iterfield=['in_file'],
                           name='maskfunc2')
    wf.connect(smooth, 'smoothed_file', maskfunc2, 'in_file')
    wf.connect(fs_threshold2, ('binary_file', pickfirst), maskfunc2, 'in_file2')
    wf.connect(maskfunc2, 'out_file', outputnode, 'smoothed_files')

    # Band-pass filter the timeseries
    if use_fsl_bp == 'True':
        determine_bp_sigmas = pe.Node(util.Function(input_names=['tr',
                                                                 'highpass_freq',
                                                                 'lowpass_freq'],
                                                    output_names = ['out_sigmas'],
                                                    function=calc_fslbp_sigmas),
                                      name='determine_bp_sigmas')
        determine_bp_sigmas.inputs.tr = float(TR)
        determine_bp_sigmas.inputs.highpass_freq = float(highpass_frequency)
        determine_bp_sigmas.inputs.lowpass_freq = float(lowpass_frequency)

        bandpass = pe.MapNode(fsl.ImageMaths(suffix='_tempfilt'),
                              iterfield=["in_file"],
                              name="bandpass")
        wf.connect(determine_bp_sigmas, ('out_sigmas', highpass_operand), bandpass, 'op_string')
        wf.connect(maskfunc2, 'out_file', bandpass, 'in_file')
        wf.connect(bandpass, 'out_file', outputnode, 'bandpassed_files')
    else:
        bandpass = pe.Node(util.Function(input_names=['files',
                                                      'lowpass_freq',
                                                      'highpass_freq',
                                                      'fs'],
                                         output_names=['out_files'],
                                         function=bandpass_filter,
                                         imports=imports),
                           name='bandpass')
        bandpass.inputs.fs = 1./TR
        if highpass_frequency < 0:
            bandpass.inputs.highpass_freq = -1
        else:
            bandpass.inputs.highpass_freq = highpass_frequency
        if lowpass_frequency < 0:
            bandpass.inputs.lowpass_freq = -1
        else:
            bandpass.inputs.lowpass_freq = lowpass_frequency
        wf.connect(maskfunc2, 'out_file', bandpass, 'files')
        wf.connect(bandpass, 'out_files', outputnode, 'bandpassed_files')
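    # `calc_fslbp_sigmas` and `highpass_operand` are defined elsewhere; a
    # minimal sketch, assuming the standard conversion from a cutoff frequency
    # f (in Hz) to an `fslmaths -bptf` sigma (in volumes): sigma = 1 / (2 * TR * f).
    #
    # def calc_fslbp_sigmas(tr, highpass_freq, lowpass_freq):
    #     highpass_sigma = 1. / (2 * tr * highpass_freq) if highpass_freq > 0 else -1
    #     lowpass_sigma = 1. / (2 * tr * lowpass_freq) if lowpass_freq > 0 else -1
    #     return highpass_sigma, lowpass_sigma
    #
    # def highpass_operand(out_sigmas):
    #     return '-bptf %.10f %.10f' % (out_sigmas[0], out_sigmas[1])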

    # Save the relevant data into an output directory
    datasink = pe.Node(nio.DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    wf.connect(outputnode, 'reference', datasink, 'ref')
    wf.connect(outputnode, 'motion_parameters', datasink, 'motion')
    wf.connect(outputnode, 'realigned_files', datasink, 'func.realigned')
    wf.connect(outputnode, 'motion_plots', datasink, 'motion.@plots')
    wf.connect(outputnode, 'mask_file', datasink, 'ref.@mask')
    wf.connect(outputnode, 'smoothed_files', datasink, 'func.smoothed_fullspectrum')
    wf.connect(outputnode, 'bandpassed_files', datasink, 'func.smoothed_bandpassed')
    wf.connect(outputnode, 'reg_file', datasink, 'bbreg.@reg')
    wf.connect(outputnode, 'reg_cost', datasink, 'bbreg.@cost')
    wf.connect(outputnode, 'reg_fsl_file', datasink, 'bbreg.@regfsl')
    wf.connect(outputnode, 'artnorm_files', datasink, 'art.@norm_files')
    wf.connect(outputnode, 'artoutlier_files', datasink, 'art.@outlier_files')
    wf.connect(outputnode, 'artdisplacement_files', datasink, 'art.@displacement_files')
    wf.connect(outputnode, 'motion_parameters_plusDerivs', datasink, 'noise.@motionplusDerivs')
    wf.connect(outputnode, 'motionandoutlier_noise_file', datasink, 'noise.@motionplusoutliers')
    wf.connect(outputnode, 'noise_components', datasink, 'compcor')
    wf.connect(outputnode, 'tsnr_file', datasink, 'tsnr')    

    return wf
Exemplo n.º 13
                                output_names=["out_file"],
                                function=strip_rois_func),
                  name='remove_vol')
remove_vol.inputs.t_min = vol_to_remove
preproc.connect([(selectfiles, remove_vol, [('rest', 'in_file')])])

# Thermal noise removal
# func_denoise = Node(util.Function(input_names=['in_file'],
#                                     output_names=['denoised_data', 'sigmas',
#                                                   'preserved_components'],
#                                      function=pca_denoising),
#                                      name='func_denoise')
# preproc.connect([(remove_vol, func_denoise, [('out_file', 'in_file')])])

# motion correction
moco = Node(nipy.SpaceTimeRealigner(), name="moco")
#preproc.connect([(func_denoise, moco, [('denoised_data', 'in_file')])])
preproc.connect([(remove_vol, moco, [('out_file', 'in_file')])])

# compute median
median = Node(util.Function(input_names=['in_files'],
                            output_names=['median_file'],
                            function=median),
              name='median')

preproc.connect([(moco, median, [('out_file', 'in_files')])])

# bias field correction
biasfield = Node(ants.segmentation.N4BiasFieldCorrection(
    dimension=3,
    n_iterations=[150, 100, 50, 30],
Exemplo n.º 14
    import os
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import split_filename

    nii = nb.load(in_file)
    # drop the first t_min volumes; nibabel's deprecated get_data()/get_affine()/
    # get_header() are replaced with the current API
    new_nii = nb.Nifti1Image(np.asanyarray(nii.dataobj)[:, :, :, t_min:],
                             nii.affine, nii.header)
    new_nii.set_data_dtype(np.float32)
    _, base, _ = split_filename(in_file)
    nb.save(new_nii, base + "_roi.nii.gz")
    return os.path.abspath(base + "_roi.nii.gz")


n_vol_remove = 5
img_rois = strip_rois_func(img_rest, n_vol_remove)

# Step#2 simultaneous slice-time & motion correction
realigner = nipy.SpaceTimeRealigner()
realigner.inputs.in_file = img_rois
realigner.inputs.tr = 2.3

# get slice time sequence depending on subject_id
# reads the sequence from text file for stroke data
# assigns it to "asc_alt_2_1" for healthy controls
if subject_id[0:2] == 'sd':
    # find slice sequence text file
    filename = os.path.join(data_dir, subject_id, 'nifti/resting',
                            'slice_timing.txt')
    print "getting slice time sequence from", filename
    with open(filename) as f:
        st = map(float, f)
    print st
    realigner.inputs.slice_times = st
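# Hedged completion of the truncated excerpt: per the comment above, healthy
# controls are assumed to use the "asc_alt_2_1" interleaved ascending scheme.
else:
    realigner.inputs.slice_times = 'asc_alt_2_1'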
Exemplo n.º 15
def legacy(
    bids_base,
    template,
    debug=False,
    functional_blur_xy=False,
    functional_match={},
    keep_work=False,
    n_jobs=False,
    n_jobs_percentage=0.8,
    out_base=None,
    realign="time",
    registration_mask=False,
    sessions=[],
    structural_match={},
    subjects=[],
    tr=1,
    workflow_name='legacy',
    enforce_dummy_scans=DUMMY_SCANS,
    exclude={},
):
    '''
	Legacy realignment and registration workflow representative of the tweaks and workarounds commonly used in the pre-SAMRI period.

	Parameters
	----------
	bids_base : str
		Path to the BIDS data set root.
	template : str
		Path to the template to register the data to.
	debug : bool, optional
		Whether to enable nipype debug mode.
		This increases logging.
	exclude : dict
		A dictionary with any combination of "sessions", "subjects", "tasks" as keys and corresponding identifiers as values.
		If this is specified matching entries will be excluded in the analysis.
	functional_blur_xy : float, optional
		Factor by which to smooth data in the xy-plane; if parameter evaluates to false, no smoothing will be applied.
		Ideally this value should correspond to the resolution or smoothness in the z-direction (assuming z represents the lower-resolution slice-encoding direction).
	functional_match : dict, optional
		Dictionary specifying a whitelist to use for functional data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', 'task', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	keep_work : bool, optional
		Whether to keep the work directory after workflow conclusion (this directory contains all the intermediary processing commands, inputs, and outputs --- it is invaluable for debugging but many times larger in size than the actual output).
	n_jobs : int, optional
		Number of processors to maximally use for the workflow; if unspecified a best guess will be estimated based on `n_jobs_percentage` and hardware (but not on current load).
	n_jobs_percentage : float, optional
		Percentage of available processors (as in available hardware, not available free load) to maximally use for the workflow (this is overridden by `n_jobs`).
	out_base : str, optional
		Output base directory - inside which a directory named `workflow_name` (as well as associated directories) will be created.
	realign : {"space","time","spacetime",""}, optional
		Parameter that dictates slice-timing correction and realignment. "time" (FSL.SliceTimer) is the default, since it works safely. Use the others only with caution!
	registration_mask : str, optional
		Mask to use for the registration process.
		This mask will constrain the area for similarity metric evaluation, but the data will not be cropped.
	sessions : list, optional
		A whitelist of sessions to include in the workflow; if the list is empty there is no whitelist and all sessions will be considered.
	structural_match : dict, optional
		Dictionary specifying a whitelist to use for structural data inclusion into the workflow; if dictionary is empty no whitelist is present and all data will be considered.
		The dictionary should have keys which are 'acquisition', or 'modality', and values which are lists of acceptable strings for the respective BIDS field.
	subjects : list, optional
		A whitelist of subjects to include in the workflow; if the list is empty there is no whitelist and all subjects will be considered.
	tr : float, optional
		Repetition time, explicitly.
		WARNING! This is a parameter waiting for deprecation.
	workflow_name : str, optional
		Top level name for the output directory.
	'''

    try:
        import nipype.interfaces.ants.legacy as antslegacy
    except ModuleNotFoundError:
        print('''
			The `nipype.interfaces.ants.legacy` module was not found on this system.
			You may want to downgrade nipype to e.g. 1.1.1, as this module has been removed in more recent versions:
			https://github.com/nipy/nipype/issues/3197
		''')

    bids_base, out_base, out_dir, template, registration_mask, data_selection, functional_scan_types, structural_scan_types, subjects_sessions, func_ind, struct_ind = common_select(
        bids_base,
        out_base,
        workflow_name,
        template,
        registration_mask,
        functional_match,
        structural_match,
        subjects,
        sessions,
        exclude,
    )

    if not n_jobs:
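        # default to `n_jobs_percentage` of the hardware cores, but never
        # fewer than 2 processes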
        n_jobs = max(int(round(mp.cpu_count() * n_jobs_percentage)), 2)

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_bids_scan,
                             input_names=inspect.getargspec(get_bids_scan)[0],
                             output_names=[
                                 'scan_path', 'scan_type', 'task', 'nii_path',
                                 'nii_name', 'events_name', 'subject_session',
                                 'metadata_filename', 'dict_slice', 'ind_type'
                             ]))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.bids_base = bids_base
    get_f_scan.iterables = ("ind_type", func_ind)

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = enforce_dummy_scans

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_bids_events_file,
            input_names=inspect.getargspec(write_bids_events_file)[0],
            output_names=['out_file']))

    temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

    f_resize = pe.Node(interface=VoxelResize(), name="f_resize")
    f_resize.inputs.resize_factors = [10, 10, 10]

    f_percentile = pe.Node(interface=fsl.ImageStats(), name="f_percentile")
    f_percentile.inputs.op_string = '-p 98'

    f_threshold = pe.Node(interface=fsl.Threshold(), name="f_threshold")

    f_fast = pe.Node(interface=fsl.FAST(), name="f_fast")
    f_fast.inputs.no_pve = True
    f_fast.inputs.output_biascorrected = True
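    # FAST is used here purely for bias-field correction: PVE estimation is
    # disabled and only the bias-corrected image is passed on to BET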

    f_bet = pe.Node(interface=fsl.BET(), name="f_BET")

    f_swapdim = pe.Node(interface=fsl.SwapDimensions(), name="f_swapdim")
    f_swapdim.inputs.new_dims = ('x', '-z', '-y')

    f_deleteorient = pe.Node(interface=FSLOrient(), name="f_deleteorient")
    f_deleteorient.inputs.main_option = 'deleteorient'

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = out_dir
    datasink.inputs.parameterization = False

    workflow_connections = [
        (get_f_scan, dummy_scans, [('nii_path', 'in_file')]),
        (dummy_scans, events_file, [('deleted_scans', 'forced_dummy_scans')]),
        (dummy_scans, f_resize, [('out_file', 'in_file')]),
        (get_f_scan, events_file, [('nii_path', 'timecourse_file'),
                                   ('task', 'task'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (get_f_scan, events_file, [('events_name', 'out_file')]),
        (get_f_scan, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (temporal_mean, f_percentile, [('out_file', 'in_file')]),
        # here we divide by 10 assuming 10 percent noise
        (f_percentile, f_threshold, [(('out_stat', divideby_10), 'thresh')]),
        (temporal_mean, f_threshold, [('out_file', 'in_file')]),
        (f_threshold, f_fast, [('out_file', 'in_files')]),
        (f_fast, f_bet, [('restored_image', 'in_file')]),
        (f_resize, f_deleteorient, [('out_file', 'in_file')]),
        (f_deleteorient, f_swapdim, [('out_file', 'in_file')]),
    ]
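    # `divideby_10` is defined elsewhere; a minimal sketch, given the
    # ten-percent-noise assumption noted in the connection list above:
    #
    # def divideby_10(x):
    #     return x / 10.0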

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_files')]),  # SPM Realign's input is 'in_files'
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (f_swapdim, realigner, [('out_file', 'in_file')]),
        ])

    f_antsintroduction = pe.Node(interface=antslegacy.antsIntroduction(),
                                 name='ants_introduction')
    f_antsintroduction.inputs.dimension = 3
    f_antsintroduction.inputs.reference_image = template
    #will need updating to `1`
    f_antsintroduction.inputs.bias_field_correction = True
    f_antsintroduction.inputs.transformation_model = 'GR'
    f_antsintroduction.inputs.max_iterations = [8, 15, 8]

    f_warp = pe.Node(interface=ants.WarpTimeSeriesImageMultiTransform(),
                     name='f_warp')
    f_warp.inputs.reference_image = template
    f_warp.inputs.dimension = 4

    f_copysform2qform = pe.Node(interface=FSLOrient(),
                                name='f_copysform2qform')
    f_copysform2qform.inputs.main_option = 'copysform2qform'

    warp_merge = pe.Node(util.Merge(2), name='warp_merge')

    workflow_connections.extend([
        (f_bet, f_antsintroduction, [('out_file', 'input_image')]),
        (f_antsintroduction, warp_merge, [('warp_field', 'in1')]),
        (f_antsintroduction, warp_merge, [('affine_transformation', 'in2')]),
        (warp_merge, f_warp, [('out', 'transformation_series')]),
        (f_warp, f_copysform2qform, [('output_image', 'in_file')]),
    ])
    if realign == "space":
        workflow_connections.extend([
            (realigner, temporal_mean, [('realigned_files', 'in_file')]),
            (realigner, f_warp, [('realigned_files', 'input_image')]),
        ])
    elif realign == "spacetime":
        workflow_connections.extend([
            (realigner, temporal_mean, [('out_file', 'in_file')]),
            (realigner, f_warp, [('out_file', 'input_image')]),
        ])
    elif realign == "time":
        workflow_connections.extend([
            (realigner, temporal_mean, [('slice_time_corrected_file',
                                         'in_file')]),
            (realigner, f_warp, [('slice_time_corrected_file', 'input_image')
                                 ]),
        ])
    else:
        workflow_connections.extend([
            (f_resize, temporal_mean, [('out_file', 'in_file')]),
            (f_swapdim, f_warp, [('out_file', 'input_image')]),
        ])

    if functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (get_f_scan, blur, [('nii_name', 'out_file')]),
            (f_copysform2qform, blur, [('out_file', 'in_file')]),
            (blur, datasink, [('out_file', 'func')]),
        ])
    else:

        f_rename = pe.Node(util.Rename(), name='f_rename')

        workflow_connections.extend([
            (get_f_scan, f_rename, [('nii_name', 'format_string')]),
            (f_copysform2qform, f_rename, [('out_file', 'in_file')]),
            (f_rename, datasink, [('out_file', 'func')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir': path.join(out_base, 'crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    #this gives the name of the workdir, the output name is passed to the datasink
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = out_base
    workflow.config = workflow_config
    try:
        workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                   workdir_name, "graph.dot"),
                             graph2use="hierarchical",
                             format="png")
    except OSError:
        print(
            'We could not write the DOT file for visualization (`dot` command from the graphviz package). This is non-critical to the processing, but you should get it fixed.'
        )

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_jobs})
    copy_bids_files(bids_base, os.path.join(out_base, workflow_name))
    if not keep_work:
        workdir = path.join(workflow.base_dir, workdir_name)
        try:
            shutil.rmtree(workdir)
        except OSError as e:
            if str(e) == 'Cannot call rmtree on a symbolic link':
                print(
                    'Not deleting top level workdir (`{}`), as it is a symlink. Deleting only contents instead'
                    .format(workdir))
                for file_object in os.listdir(workdir):
                    file_object_path = os.path.join(workdir, file_object)
                    if os.path.isfile(file_object_path):
                        os.unlink(file_object_path)
                    else:
                        shutil.rmtree(file_object_path)
            else:
                raise OSError(str(e))
Exemplo n.º 16
def diagnose(
    measurements_base,
    functional_scan_types=[],
    structural_scan_types=[],
    sessions=[],
    subjects=[],
    measurements=[],
    exclude_subjects=[],
    exclude_measurements=[],
    actual_size=False,
    components=None,
    keep_work=False,
    loud=False,
    n_procs=N_PROCS,
    realign=False,
    tr=1,
    workflow_name="diagnostic",
):

    measurements_base = path.abspath(path.expanduser(measurements_base))

    #select all functional/structural scan types unless specified
    if not functional_scan_types or not structural_scan_types:
        scan_classification = pd.read_csv(scan_classification_file_path)
        if not functional_scan_types:
            functional_scan_types = list(
                scan_classification[(scan_classification["categories"] ==
                                     "functional")]["scan_type"])
        if not structural_scan_types:
            structural_scan_types = list(
                scan_classification[(scan_classification["categories"] ==
                                     "structural")]["scan_type"])

    #hack to allow structural scan type disabling:
    if structural_scan_types == ["none"]:
        structural_scan_types = []

    # define measurement directories to be processed, and populate the list either with the given include_measurements, or with an intelligent selection
    scan_types = deepcopy(functional_scan_types)
    scan_types.extend(structural_scan_types)
    data_selection = get_data_selection(
        measurements_base,
        sessions,
        scan_types=scan_types,
        subjects=subjects,
        exclude_subjects=exclude_subjects,
        measurements=measurements,
        exclude_measurements=exclude_measurements)
    if not subjects:
        subjects = set(list(data_selection["subject"]))
    if not sessions:
        sessions = set(list(data_selection["session"]))

    # here we start to define the nipype workflow elements (nodes, connections, meta)
    subjects_sessions = data_selection[["subject", "session"
                                        ]].drop_duplicates().values.tolist()
    infosource = pe.Node(
        interface=util.IdentityInterface(fields=['subject_session']),
        name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_scan,
                             input_names=inspect.getargspec(get_scan)[0],
                             output_names=['scan_path', 'scan_type']))
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file']))
    dummy_scans.inputs.desired_dummy_scans = 10

    bids_filename = pe.Node(
        name='bids_filename',
        interface=util.Function(
            function=sss_filename,
            input_names=inspect.getargspec(sss_filename)[0],
            output_names=['filename']))
    bids_filename.inputs.suffix = "MELODIC"
    bids_filename.inputs.extension = ""

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base,
                                               workflow_name)
    datasink.inputs.parameterization = False

    melodic = pe.Node(interface=fsl.model.MELODIC(), name="melodic")
    melodic.inputs.tr_sec = tr
    melodic.inputs.report = True
    if components:
        melodic.inputs.dim = int(components)

    workflow_connections = [
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (infosource, bids_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_filename, [('scan_type', 'scan')]),
        (bids_filename, melodic, [('filename', 'out_dir')]),
        (melodic, datasink, [('out_dir', 'func')]),
    ]

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if structural_scan_types:
        get_s_scan = pe.Node(name='get_s_scan',
                             interface=util.Function(
                                 function=get_scan,
                                 input_names=inspect.getargspec(get_scan)[0],
                                 output_names=['scan_path', 'scan_type']))
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.measurements_base = measurements_base
        get_s_scan.iterables = ("scan_type", structural_scan_types)

        s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
        s_bru2nii.inputs.force_conversion = True
        s_bru2nii.inputs.actual_size = actual_size

        s_bids_filename = pe.Node(
            name='s_bids_filename',
            interface=util.Function(
                function=sss_filename,
                input_names=inspect.getargspec(sss_filename)[0],
                output_names=['filename']))
        s_bids_filename.inputs.extension = ""
        s_bids_filename.inputs.scan_prefix = False

        workflow_connections.extend([
            (infosource, get_s_scan, [('subject_session', 'selector')]),
            (infosource, s_bids_filename, [('subject_session',
                                            'subject_session')]),
            (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
            (get_s_scan, s_bids_filename, [('scan_type', 'scan')]),
            (s_bids_filename, s_bru2nii, [('filename', 'output_filename')]),
            (s_bru2nii, datasink, [('nii_file', 'anat')]),
        ])

    #TODO: case: "space" - melodic ICAs don't break, case: "spacetime" - melodic ICAs break
    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_files')]),
            (realigner, melodic, [('realigned_files', 'in_files')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            (realigner, melodic, [('out_file', 'in_files')]),
        ])

    else:
        workflow_connections.extend([
            (dummy_scans, melodic, [('out_file', 'in_files')]),
        ])

    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base)
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")
    if not loud:
        try:
            workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
        except RuntimeError:
            print(
                "WARNING: Some expected scans have not been found (or another TypeError has occured)."
            )
        for f in listdir(getcwd()):
            if re.search("crash.*?get_s_scan|get_f_scan.*?pklz", f):
                remove(path.join(getcwd(), f))
    else:
        workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
Exemplo n.º 17
def bruker(
    measurements_base,
    template,
    debug=False,
    exclude={},
    functional_match={},
    structural_match={},
    sessions=[],
    subjects=[],
    actual_size=True,
    functional_blur_xy=False,
    functional_registration_method="structural",
    highpass_sigma=225,
    lowpass_sigma=None,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    realign="time",
    registration_mask=False,
    tr=1,
    very_nasty_bruker_delay_hack=False,
    workflow_name="generic",
    keep_work=False,
    autorotate=False,
    strict=False,
    verbose=False,
):
    '''

	realign: {"space","time","spacetime",""}
		Parameter that dictates slice-timing correction and realignment. "time" (FSL.SliceTimer) is the default, since it works safely. Use the others only with caution!

	'''
    if template:
        if template == "mouse":
            template = fetch_mouse_DSURQE()['template']
            registration_mask = fetch_mouse_DSURQE()['mask']
        elif template == "rat":
            template = fetch_rat_waxholm()['template']
            registration_mask = fetch_rat_waxholm()['mask']
        else:
            pass
    else:
        raise ValueError("No species or template specified")
        return -1

    measurements_base = path.abspath(path.expanduser(measurements_base))

    # add subject and session filters if present
    if subjects:
        structural_match['subject'] = subjects
        functional_match['subject'] = subjects
    if sessions:
        structural_match['session'] = sessions
        functional_match['session'] = sessions

    # define measurement directories to be processed, and populate the list either with the given include_measurements, or with an intelligent selection
    data_selection = pd.DataFrame([])
    if structural_match:
        s_data_selection = get_data_selection(
            measurements_base,
            match=structural_match,
            exclude=exclude,
        )
        structural_scan_types = s_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, s_data_selection])
    if functional_match:
        f_data_selection = get_data_selection(
            measurements_base,
            match=functional_match,
            exclude=exclude,
        )
        functional_scan_types = f_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, f_data_selection])

    # we currently only support one structural scan type per session
    #if functional_registration_method in ("structural", "composite") and structural_scan_types:
    #	structural_scan_types = [structural_scan_types[0]]

    # we start to define nipype workflow elements (nodes, connections, meta)
    subjects_sessions = data_selection[["subject", "session"
                                        ]].drop_duplicates().values.tolist()
    if debug:
        print('Data selection:')
        print(data_selection)
        print('Iterating over:')
        print(subjects_sessions)
    infosource = pe.Node(interface=util.IdentityInterface(
        fields=['subject_session'], mandatory_inputs=False),
                         name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_scan,
                             input_names=inspect.getargspec(get_scan)[0],
                             output_names=['scan_path', 'scan_type', 'trial']))
    if not strict:
        get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    bandpass = pe.Node(interface=fsl.maths.TemporalFilter(), name="bandpass")
    bandpass.inputs.highpass_sigma = highpass_sigma
    if lowpass_sigma:
        bandpass.inputs.lowpass_sigma = lowpass_sigma
    else:
        bandpass.inputs.lowpass_sigma = tr
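    # note that `fsl.maths.TemporalFilter` passes these sigmas to
    # `fslmaths -bptf`, which interprets them in volumes, not seconds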

    #bids_filename = pe.Node(name='bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
    bids_filename = pe.Node(name='bids_filename',
                            interface=util.Function(
                                function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    bids_filename.inputs.metadata = data_selection

    #bids_stim_filename = pe.Node(name='bids_stim_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
    bids_stim_filename = pe.Node(
        name='bids_stim_filename',
        interface=util.Function(function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    bids_stim_filename.inputs.suffix = "events"
    bids_stim_filename.inputs.extension = ".tsv"
    bids_stim_filename.inputs.metadata = data_selection

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_events_file,
            input_names=inspect.getargspec(write_events_file)[0],
            output_names=['out_file']))
    events_file.inputs.dummy_scans_ms = DUMMY_SCANS * tr * 1000
    events_file.inputs.stim_protocol_dictionary = STIM_PROTOCOL_DICTIONARY
    events_file.inputs.very_nasty_bruker_delay_hack = very_nasty_bruker_delay_hack
    if not (strict or verbose):
        events_file.inputs.ignore_exception = True

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base,
                                               "preprocessing", workflow_name)
    datasink.inputs.parameterization = False
    if not (strict or verbose):
        datasink.inputs.ignore_exception = True

    workflow_connections = [
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (infosource, bids_stim_filename, [('subject_session',
                                           'subject_session')]),
        (get_f_scan, bids_stim_filename, [('scan_type', 'scan_type')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (get_f_scan, events_file, [('trial', 'trial'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (bids_stim_filename, events_file, [('filename', 'out_file')]),
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (infosource, bids_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_filename, [('scan_type', 'scan_type')]),
        (bids_filename, bandpass, [('filename', 'out_file')]),
        (bandpass, datasink, [('out_file', 'func')]),
    ]
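    # note: the BIDS-compliant filename is wired into the `out_file` input of
    # `bandpass` above, so the final functional image reaches the datasink
    # under its proper BIDS name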

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_files')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if actual_size:
        s_biascorrect, f_biascorrect = real_size_nodes()
    else:
        s_biascorrect, f_biascorrect = inflated_size_nodes()

    if structural_scan_types.any():
        get_s_scan = pe.Node(
            name='get_s_scan',
            interface=util.Function(
                function=get_scan,
                input_names=inspect.getargspec(get_scan)[0],
                output_names=['scan_path', 'scan_type', 'trial']))
        if not strict:
            get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.measurements_base = measurements_base
        get_s_scan.iterables = ("scan_type", structural_scan_types)

        s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
        s_bru2nii.inputs.force_conversion = True
        s_bru2nii.inputs.actual_size = actual_size

        #s_bids_filename = pe.Node(name='s_bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
        s_bids_filename = pe.Node(
            name='s_bids_filename',
            interface=util.Function(
                function=bids_naming,
                input_names=inspect.getargspec(bids_naming)[0],
                output_names=['filename']))
        s_bids_filename.inputs.metadata = data_selection

        if actual_size:
            s_register, s_warp, _, _ = DSURQEc_structural_registration(
                template, registration_mask)
            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_biascorrect, s_register, [('output_image',
                                                  'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])
        else:
            s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(),
                                        name="s_reg_biascorrect")
            s_reg_biascorrect.inputs.dimension = 3
            s_reg_biascorrect.inputs.bspline_fitting_distance = 95
            s_reg_biascorrect.inputs.shrink_factor = 2
            s_reg_biascorrect.inputs.n_iterations = [500, 500, 500, 500]
            s_reg_biascorrect.inputs.convergence_threshold = 1e-14

            s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
            s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

            s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
            s_BET.inputs.mask = True
            s_BET.inputs.frac = 0.3
            s_BET.inputs.robust = True

            s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
            s_register, s_warp, f_warp = structural_registration(template)

            workflow_connections.extend([
                (s_bru2nii, s_reg_biascorrect, [('nii_file', 'input_image')]),
                (s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
                (s_cutoff, s_BET, [('out_file', 'in_file')]),
                (s_biascorrect, s_mask, [('output_image', 'in_file')]),
                (s_BET, s_mask, [('mask_file', 'mask_file')]),
            ])

            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_mask, s_rotated, [('out_file', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_mask, s_register, [('out_file', 'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])

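        # FIXME: `s_rotated` is referenced in the autorotate branches above
        # before the assignment below runs, and the boolean `autorotate`
        # parameter shadows the rotation helper of the same name.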
        if autorotate:
            s_rotated = autorotate(template)

        workflow_connections.extend([
            (infosource, get_s_scan, [('subject_session', 'selector')]),
            (infosource, s_bids_filename, [('subject_session',
                                            'subject_session')]),
            (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
            (get_s_scan, s_bids_filename, [('scan_type', 'scan_type')]),
            (s_bids_filename, s_warp, [('filename', 'output_image')]),
            (s_bru2nii, s_biascorrect, [('nii_file', 'input_image')]),
        ])

    if functional_registration_method == "structural":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="structural"` requires there to be a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="composite"` requires there to be a structural scan type.'
            )
        _, _, f_register, f_warp = DSURQEc_structural_registration(
            template, registration_mask)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (f_register, merge, [('composite_transform', 'in1')]),
            (s_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    if functional_blur_xy and negative_contrast_agent:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    elif functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, bandpass, [('out_file', 'in_file')]),
        ])
    elif negative_contrast_agent:
        workflow_connections.extend([
            (f_warp, invert, [(('output_image', fslmaths_invert_values),
                               'op_string')]),
            (f_warp, invert, [('output_image', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    else:
        workflow_connections.extend([
            (f_warp, bandpass, [('output_image', 'in_file')]),
        ])
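    # `fslmaths_invert_values` (defined elsewhere) presumably constructs an
    # fslmaths op_string that flips the sign of the data, so that the signal
    # drops caused by a negative contrast agent register as positive changes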

    workflow_config = {
        'execution': {
            'crashdump_dir':
            path.join(measurements_base, 'preprocessing/crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base, "preprocessing")
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
Exemplo n.º 18
def diagnose(
    bids_base,
    components=None,
    debug=False,
    exclude={},
    include={},
    keep_crashdump=False,
    keep_work=False,
    match_regex='.+/sub-(?P<sub>[a-zA-Z0-9]+)/ses-(?P<ses>[a-zA-Z0-9]+)/.*?_task-(?P<task>[a-zA-Z0-9]+)_acq-(?P<acq>[a-zA-Z0-9]+)_run-(?P<run>[a-zA-Z0-9]+)_(?P<mod>[a-zA-Z0-9]+).(?:nii|nii\.gz)',
    n_procs=N_PROCS,
    realign="time",
    tr=None,
    workflow_name="diagnostic",
):
    '''Run a basic independent component analysis diagnostic (using FSL's MELODIC) on functional MRI data stored in a BIDS directory tree.

	Parameters
	----------

	bids_base : str
		Path to the top level of a BIDS directory tree for which to perform the diagnostic.
	components : int, optional
		Number of independent components to produce for each functional measurement; if evaluated as False, the number of components is automatically optimized for the given data by FSL's MELODIC.
	debug : bool, optional
		Enable full nipype debugging support for the workflow construction and execution.
	exclude : dict, optional
		A dictionary with any subset of 'subject', 'session', 'acquisition', 'task', 'modality', and 'path' as keys and corresponding identifiers as values.
		This is a blacklist: if this is specified only non-matching entries will be included in the analysis.
	include : dict, optional
		A dictionary with any subset of 'subject', 'session', 'acquisition', 'task', 'modality', and 'path' as keys and corresponding identifiers as values.
		This is a whitelist: if this is specified only matching entries will be included in the analysis.
	keep_crashdump : bool, optional
		Whether to keep the crashdump directory (containing all the crash reports for intermediary workflow steps, as managed by nipype).
		This is useful for debugging and quality control.
	keep_work : bool, optional
		Whether to keep the work directory (containing all the intermediary workflow steps, as managed by nipype).
		This is useful for debugging and quality control.
	match_regex : str, optional
		Regex matching pattern by which to select input files. Has to contain groups named "sub", "ses", "acq", "task", and "mod".
	n_procs : int, optional
		Maximum number of processes which to simultaneously spawn for the workflow.
		If not explicitly defined, this is automatically calculated from the number of available cores, under the assumption that the workflow will be the main process running for its duration.
	realign : {"space","time","spacetime",""}
		Parameter that dictates slice-timing correction and realignment. "time" (FSL.SliceTimer) is the default, since it works safely. Use the others only with caution!
	tr : int, optional
		Repetition time (in seconds); if evaluated as False, the TR will be read from the NIfTI header of each file individually.
	workflow_name : string, optional
		Name of the workflow execution. The output will be saved one level above the bids_base, under a directory bearing the name given here.
	'''

    bids_base = path.abspath(path.expanduser(bids_base))

    datafind = nio.DataFinder()
    datafind.inputs.root_paths = bids_base
    datafind.inputs.match_regex = match_regex
    datafind_res = datafind.run()

    data_selection = zip(*[
        datafind_res.outputs.sub, datafind_res.outputs.ses,
        datafind_res.outputs.acq, datafind_res.outputs.task,
        datafind_res.outputs.mod, datafind_res.outputs.out_paths
    ])
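    # transpose DataFinder's per-field output lists into one record per scan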
    data_selection = [list(i) for i in data_selection]
    data_selection = pd.DataFrame(data_selection,
                                  columns=('subject', 'session', 'acquisition',
                                           'task', 'modality', 'path'))

    data_selection = data_selection.sort_values(['session', 'subject'],
                                                ascending=[1, 1])
    if exclude:
        for key in exclude:
            data_selection = data_selection[~data_selection[key].
                                            isin(exclude[key])]
    if include:
        for key in include:
            data_selection = data_selection[data_selection[key].isin(
                include[key])]

    data_selection['out_path'] = ''
    if data_selection['path'].str.contains('.nii.gz', regex=False).any():
        data_selection['out_path'] = data_selection['path'].apply(
            lambda x: path.basename(
                path.splitext(path.splitext(x)[0])[0] + '_MELODIC'))
    else:
        data_selection['out_path'] = data_selection['path'].apply(
            lambda x: path.basename(path.splitext(x)[0] + '_MELODIC'))
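    # the output name strips the .nii/.nii.gz extension and appends a
    # _MELODIC suffix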

    paths = data_selection['path']

    infosource = pe.Node(interface=util.IdentityInterface(
        fields=['path'], mandatory_inputs=False),
                         name="infosource")
    infosource.iterables = [('path', paths)]

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file', 'deleted_scans']))
    dummy_scans.inputs.desired_dummy_scans = 10

    bids_filename = pe.Node(name='bids_filename',
                            interface=util.Function(
                                function=out_path,
                                input_names=inspect.getargspec(out_path)[0],
                                output_names=['filename']))
    bids_filename.inputs.selection_df = data_selection

    bids_container = pe.Node(name='path_container',
                             interface=util.Function(
                                 function=container,
                                 input_names=inspect.getargspec(container)[0],
                                 output_names=['container']))
    bids_container.inputs.selection_df = data_selection

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.abspath(
        path.join(bids_base, '..', workflow_name))
    datasink.inputs.parameterization = False

    melodic = pe.Node(interface=fsl.model.MELODIC(), name="melodic")
    if tr:
        melodic.inputs.tr_sec = tr
    melodic.inputs.report = True
    if components:
        melodic.inputs.dim = int(components)

    workflow_connections = [
        (infosource, dummy_scans, [('path', 'in_file')]),
        (infosource, bids_filename, [('path', 'in_path')]),
        (bids_filename, bids_container, [('filename', 'out_path')]),
        (bids_filename, melodic, [('filename', 'out_dir')]),
        (bids_container, datasink, [('container', 'container')]),
        (melodic, datasink, [('out_dir', 'func')]),
    ]

    if not tr:
        report_tr = pe.Node(name='report_tr',
                            interface=util.Function(
                                function=get_tr,
                                input_names=inspect.getargspec(get_tr)[0],
                                output_names=['tr']))
        report_tr.inputs.ndim = 4

        workflow_connections.extend([
            (infosource, report_tr, [('path', 'in_file')]),
            (report_tr, melodic, [('tr', 'tr_sec')]),
        ])
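    # `get_tr` is defined elsewhere; a plausible sketch, assuming it reads the
    # repetition time from the NIfTI header zooms:
    #
    # def get_tr(in_file, ndim):
    #     import nibabel as nb
    #     return nb.load(in_file).header.get_zooms()[ndim - 1]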

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_files')]),
            (realigner, melodic, [('realigned_files', 'in_files')]),
        ])
    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        if tr:
            realigner.inputs.tr = tr
        else:
            workflow_connections.extend([
                (report_tr, realigner, [('tr', 'tr')]),
            ])
        #3 for coronal slices (2 for horizontal, 1 for sagittal)
        realigner.inputs.slice_info = 3
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            (realigner, melodic, [('out_file', 'in_files')]),
        ])
    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        if tr:
            realigner.inputs.time_repetition = tr
        else:
            workflow_connections.extend([
                (report_tr, realigner, [('tr', 'time_repetition')]),
            ])
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            (realigner, melodic, [('slice_time_corrected_file', 'in_files')]),
        ])
    else:
        workflow_connections.extend([
            (dummy_scans, melodic, [('out_file', 'in_files')]),
        ])

    crashdump_dir = path.abspath(
        path.join(bids_base, '..', workflow_name + '_crashdump'))
    workflow_config = {'execution': {'crashdump_dir': crashdump_dir}}
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + '_work'
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.abspath(path.join(bids_base, '..'))
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

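    # if cleanup is requested, swallow workflow failures so that the work and
    # crashdump directories can still be removed below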
    if not keep_work or not keep_crashdump:
        try:
            workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
        except RuntimeError:
            pass
    else:
        workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
    if not keep_crashdump:
        try:
            shutil.rmtree(crashdump_dir)
        except (FileNotFoundError, OSError):
            pass

    return