Example #1
def bruker(
    measurements_base,
    template,
    debug=False,
    exclude={},
    functional_match={},
    structural_match={},
    sessions=[],
    subjects=[],
    actual_size=True,
    functional_blur_xy=False,
    functional_registration_method="structural",
    highpass_sigma=225,
    lowpass_sigma=None,
    negative_contrast_agent=False,
    n_procs=N_PROCS,
    realign="time",
    registration_mask=False,
    tr=1,
    very_nasty_bruker_delay_hack=False,
    workflow_name="generic",
    keep_work=False,
    autorotate=False,
    strict=False,
    verbose=False,
):
    '''
    realign: {"space", "time", "spacetime", ""}
        Parameter that dictates slice-timing correction and realignment of slices.
        "time" (FSL SliceTimer) is the default, since it works safely. Use the
        other options only with caution!
    '''
    if template:
        if template == "mouse":
            atlas = fetch_mouse_DSURQE()
            template = atlas['template']
            registration_mask = atlas['mask']
        elif template == "rat":
            atlas = fetch_rat_waxholm()
            template = atlas['template']
            registration_mask = atlas['mask']
    else:
        raise ValueError("No species or template specified")

    measurements_base = path.abspath(path.expanduser(measurements_base))

    # narrow the given match criteria (whitelists) by subject and session, if provided
    for key, values in (('subject', subjects), ('session', sessions)):
        if values:
            if structural_match:
                structural_match[key] = values
            if functional_match:
                functional_match[key] = values

    # define measurement directories to be processed, and populate the selection from the given match criteria, or with an intelligent default
    # initializing the scan-type arrays keeps the downstream `.any()` checks safe when one of the whitelists is empty
    structural_scan_types = functional_scan_types = pd.Series(dtype=object).unique()
    data_selection = pd.DataFrame([])
    if structural_match:
        s_data_selection = get_data_selection(
            measurements_base,
            match=structural_match,
            exclude=exclude,
        )
        structural_scan_types = s_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, s_data_selection])
    if functional_match:
        f_data_selection = get_data_selection(
            measurements_base,
            match=functional_match,
            exclude=exclude,
        )
        functional_scan_types = f_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, f_data_selection])

    # we currently only support one structural scan type per session
    #if functional_registration_method in ("structural", "composite") and structural_scan_types:
    #	structural_scan_types = [structural_scan_types[0]]

    # we start to define nipype workflow elements (nodes, connections, meta)
    subjects_sessions = data_selection[["subject", "session"]].drop_duplicates().values.tolist()
    if debug:
        print('Data selection:')
        print(data_selection)
        print('Iterating over:')
        print(subjects_sessions)
    infosource = pe.Node(interface=util.IdentityInterface(
        fields=['subject_session'], mandatory_inputs=False),
                         name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_scan,
                             input_names=inspect.getargspec(get_scan)[0],
                             output_names=['scan_path', 'scan_type', 'trial']))
    if not strict:
        get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file']))
    dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

    bandpass = pe.Node(interface=fsl.maths.TemporalFilter(), name="bandpass")
    bandpass.inputs.highpass_sigma = highpass_sigma
    if lowpass_sigma:
        bandpass.inputs.lowpass_sigma = lowpass_sigma
    else:
        bandpass.inputs.lowpass_sigma = tr

    bids_filename = pe.Node(name='bids_filename',
                            interface=util.Function(
                                function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    bids_filename.inputs.metadata = data_selection

    bids_stim_filename = pe.Node(
        name='bids_stim_filename',
        interface=util.Function(function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    bids_stim_filename.inputs.suffix = "events"
    bids_stim_filename.inputs.extension = ".tsv"
    bids_stim_filename.inputs.metadata = data_selection

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_events_file,
            input_names=inspect.getargspec(write_events_file)[0],
            output_names=['out_file']))
    events_file.inputs.dummy_scans_ms = DUMMY_SCANS * tr * 1000
    events_file.inputs.stim_protocol_dictionary = STIM_PROTOCOL_DICTIONARY
    events_file.inputs.very_nasty_bruker_delay_hack = very_nasty_bruker_delay_hack
    if not (strict or verbose):
        events_file.inputs.ignore_exception = True

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base,
                                               "preprocessing", workflow_name)
    datasink.inputs.parameterization = False
    if not (strict or verbose):
        datasink.inputs.ignore_exception = True
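
    # Each connection below is a (source_node, target_node,
    # [(source_field, target_field), ...]) tuple, as accepted by nipype's
    # Workflow.connect(); where a source is given as a (field, function) pair,
    # the function is applied to the field's value before it is passed on
    # (e.g. `ss_to_path`).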

    workflow_connections = [
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (infosource, bids_stim_filename, [('subject_session',
                                           'subject_session')]),
        (get_f_scan, bids_stim_filename, [('scan_type', 'scan_type')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (get_f_scan, events_file, [('trial', 'trial'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (bids_stim_filename, events_file, [('filename', 'out_file')]),
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (infosource, bids_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_filename, [('scan_type', 'scan_type')]),
        (bids_filename, bandpass, [('filename', 'out_file')]),
        (bandpass, datasink, [('out_file', 'func')]),
    ]

    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_files')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])

    elif realign == "time":
        realigner = pe.Node(interface=fsl.SliceTimer(), name="slicetimer")
        realigner.inputs.time_repetition = tr
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
        ])
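
    # Each realignment interface exposes its output under a different name
    # (spm.Realign: 'realigned_files'; nipy.SpaceTimeRealigner: 'out_file';
    # fsl.SliceTimer: 'slice_time_corrected_file'); the registration branches
    # below pick the matching field.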

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if actual_size:
        s_biascorrect, f_biascorrect = real_size_nodes()
    else:
        s_biascorrect, f_biascorrect = inflated_size_nodes()

    if structural_scan_types.any():
        get_s_scan = pe.Node(
            name='get_s_scan',
            interface=util.Function(
                function=get_scan,
                input_names=inspect.getargspec(get_scan)[0],
                output_names=['scan_path', 'scan_type', 'trial']))
        if not strict:
            get_s_scan.inputs.ignore_exception = True
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.measurements_base = measurements_base
        get_s_scan.iterables = ("scan_type", structural_scan_types)

        s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
        s_bru2nii.inputs.force_conversion = True
        s_bru2nii.inputs.actual_size = actual_size

        s_bids_filename = pe.Node(
            name='s_bids_filename',
            interface=util.Function(
                function=bids_naming,
                input_names=inspect.getargspec(bids_naming)[0],
                output_names=['filename']))
        s_bids_filename.inputs.metadata = data_selection

        # NOTE: the `autorotate` flag shadows any helper function of the same
        # name; the rotation-node factory is assumed to be importable under an
        # alias (`autorotate_node` is a hypothetical name), and must be created
        # before it is wired into the connections below.
        if autorotate:
            s_rotated = autorotate_node(template)

        if actual_size:
            # also unpack the functional warp, which the "structural"
            # registration method below requires
            s_register, s_warp, _, f_warp = DSURQEc_structural_registration(
                template, registration_mask)
            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_biascorrect, s_rotated, [('output_image', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_biascorrect, s_register, [('output_image',
                                                  'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])
        else:
            s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(),
                                        name="s_reg_biascorrect")
            s_reg_biascorrect.inputs.dimension = 3
            s_reg_biascorrect.inputs.bspline_fitting_distance = 95
            s_reg_biascorrect.inputs.shrink_factor = 2
            s_reg_biascorrect.inputs.n_iterations = [500, 500, 500, 500]
            s_reg_biascorrect.inputs.convergence_threshold = 1e-14

            s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
            s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

            s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
            s_BET.inputs.mask = True
            s_BET.inputs.frac = 0.3
            s_BET.inputs.robust = True

            s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
            s_register, s_warp, f_warp = structural_registration(template)

            workflow_connections.extend([
                (s_bru2nii, s_reg_biascorrect, [('nii_file', 'input_image')]),
                (s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
                (s_cutoff, s_BET, [('out_file', 'in_file')]),
                (s_biascorrect, s_mask, [('output_image', 'in_file')]),
                (s_BET, s_mask, [('mask_file', 'mask_file')]),
            ])

            #TODO: incl. in func registration
            if autorotate:
                workflow_connections.extend([
                    (s_mask, s_rotated, [('out_file', 'out_file')]),
                    (s_rotated, s_register, [('out_file', 'moving_image')]),
                ])
            else:
                workflow_connections.extend([
                    (s_mask, s_register, [('out_file', 'moving_image')]),
                    (s_register, s_warp, [('composite_transform', 'transforms')
                                          ]),
                    (s_bru2nii, s_warp, [('nii_file', 'input_image')]),
                    (s_warp, datasink, [('output_image', 'anat')]),
                ])

        workflow_connections.extend([
            (infosource, get_s_scan, [('subject_session', 'selector')]),
            (infosource, s_bids_filename, [('subject_session',
                                            'subject_session')]),
            (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
            (get_s_scan, s_bids_filename, [('scan_type', 'scan_type')]),
            (s_bids_filename, s_warp, [('filename', 'output_image')]),
            (s_bru2nii, s_biascorrect, [('nii_file', 'input_image')]),
        ])

    if functional_registration_method == "structural":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="structural"` requires there to be a structural scan type.'
            )
        workflow_connections.extend([
            (s_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    if functional_registration_method == "composite":
        if not structural_scan_types.any():
            raise ValueError(
                'The option `registration="composite"` requires there to be a structural scan type.'
            )
        _, _, f_register, f_warp = DSURQEc_structural_registration(
            template, registration_mask)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        merge = pe.Node(util.Merge(2), name='merge')

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (s_biascorrect, f_register, [('output_image', 'fixed_image')]),
            (f_register, merge, [('composite_transform', 'in1')]),
            (s_register, merge, [('composite_transform', 'in2')]),
            (merge, f_warp, [('out', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    elif functional_registration_method == "functional":
        f_register, f_warp = functional_registration(template)

        temporal_mean = pe.Node(interface=fsl.MeanImage(),
                                name="temporal_mean")

        #f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
        #f_cutoff.inputs.op_string = "-thrP 30"

        #f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
        #f_BET.inputs.mask = True
        #f_BET.inputs.frac = 0.5

        workflow_connections.extend([
            (temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
            #(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
            #(f_cutoff, f_BET, [('out_file', 'in_file')]),
            #(f_BET, f_register, [('out_file', 'moving_image')]),
            (f_biascorrect, f_register, [('output_image', 'moving_image')]),
            (f_register, f_warp, [('composite_transform', 'transforms')]),
        ])
        if realign == "space":
            workflow_connections.extend([
                (realigner, temporal_mean, [('realigned_files', 'in_file')]),
                (realigner, f_warp, [('realigned_files', 'input_image')]),
            ])
        elif realign == "spacetime":
            workflow_connections.extend([
                (realigner, temporal_mean, [('out_file', 'in_file')]),
                (realigner, f_warp, [('out_file', 'input_image')]),
            ])
        elif realign == "time":
            workflow_connections.extend([
                (realigner, temporal_mean, [('slice_time_corrected_file',
                                             'in_file')]),
                (realigner, f_warp, [('slice_time_corrected_file',
                                      'input_image')]),
            ])
        else:
            workflow_connections.extend([
                (dummy_scans, temporal_mean, [('out_file', 'in_file')]),
                (dummy_scans, f_warp, [('out_file', 'input_image')]),
            ])

    invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
    if functional_blur_xy and negative_contrast_agent:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')
                            ]),
            (blur, invert, [('out_file', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    elif functional_blur_xy:
        blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
        blur.inputs.fwhmxy = functional_blur_xy
        workflow_connections.extend([
            (f_warp, blur, [('output_image', 'in_file')]),
            (blur, bandpass, [('out_file', 'in_file')]),
        ])
    elif negative_contrast_agent:
        workflow_connections.extend([
            (f_warp, invert, [(('output_image', fslmaths_invert_values),
                               'op_string')]),
            (f_warp, invert, [('output_image', 'in_file')]),
            (invert, bandpass, [('out_file', 'in_file')]),
        ])
    else:
        workflow_connections.extend([
            (f_warp, bandpass, [('output_image', 'in_file')]),
        ])

    workflow_config = {
        'execution': {
            'crashdump_dir':
            path.join(measurements_base, 'preprocessing/crashdump'),
        }
    }
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base, "preprocessing")
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
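
A minimal invocation of the preprocessing workflow above might look as follows. This is an illustrative sketch only: the base path and scan identifiers are placeholders, not values taken from the code above.

if __name__ == '__main__':
    bruker(
        '~/ni_data/test',                      # placeholder measurements base
        'mouse',                               # resolves the DSURQE template and mask
        functional_match={'trial': ['EPI']},   # placeholder functional whitelist
        realign='time',                        # the safe default (FSL SliceTimer)
        workflow_name='my_preproc',
    )
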
Example #2
def diagnose(
    measurements_base,
    functional_scan_types=[],
    structural_scan_types=[],
    sessions=[],
    subjects=[],
    measurements=[],
    exclude_subjects=[],
    exclude_measurements=[],
    actual_size=False,
    components=None,
    keep_work=False,
    loud=False,
    n_procs=N_PROCS,
    realign=False,
    tr=1,
    workflow_name="diagnostic",
):

    measurements_base = path.abspath(path.expanduser(measurements_base))

    #select all functional/structural scan types unless specified
    if not functional_scan_types or not structural_scan_types:
        scan_classification = pd.read_csv(scan_classification_file_path)
        if not functional_scan_types:
            functional_scan_types = list(
                scan_classification[(scan_classification["categories"] ==
                                     "functional")]["scan_type"])
        if not structural_scan_types:
            structural_scan_types = list(
                scan_classification[(scan_classification["categories"] ==
                                     "structural")]["scan_type"])

    #hack to allow structural scan type disabling:
    if structural_scan_types == ["none"]:
        structural_scan_types = []

    # define measurement directories to be processed, and populate the list either with the given measurements, or with an intelligent selection
    scan_types = deepcopy(functional_scan_types)
    scan_types.extend(structural_scan_types)
    data_selection = get_data_selection(
        measurements_base,
        sessions,
        scan_types=scan_types,
        subjects=subjects,
        exclude_subjects=exclude_subjects,
        measurements=measurements,
        exclude_measurements=exclude_measurements)
    if not subjects:
        subjects = set(list(data_selection["subject"]))
    if not sessions:
        sessions = set(list(data_selection["session"]))

    # here we start to define the nipype workflow elements (nodes, connections, meta)
    subjects_sessions = data_selection[["subject", "session"]].drop_duplicates().values.tolist()
    infosource = pe.Node(
        interface=util.IdentityInterface(fields=['subject_session']),
        name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_scan,
                             input_names=inspect.getargspec(get_scan)[0],
                             output_names=['scan_path', 'scan_type']))
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

    dummy_scans = pe.Node(
        name='dummy_scans',
        interface=util.Function(
            function=force_dummy_scans,
            input_names=inspect.getargspec(force_dummy_scans)[0],
            output_names=['out_file']))
    dummy_scans.inputs.desired_dummy_scans = 10

    bids_filename = pe.Node(
        name='bids_filename',
        interface=util.Function(
            function=sss_filename,
            input_names=inspect.getargspec(sss_filename)[0],
            output_names=['filename']))
    bids_filename.inputs.suffix = "MELODIC"
    bids_filename.inputs.extension = ""

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base,
                                               workflow_name)
    datasink.inputs.parameterization = False

    melodic = pe.Node(interface=fsl.model.MELODIC(), name="melodic")
    melodic.inputs.tr_sec = tr
    melodic.inputs.report = True
    if components:
        melodic.inputs.dim = int(components)

    workflow_connections = [
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
        (get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (infosource, bids_filename, [('subject_session', 'subject_session')]),
        (get_f_scan, bids_filename, [('scan_type', 'scan')]),
        (bids_filename, melodic, [('filename', 'out_dir')]),
        (melodic, datasink, [('out_dir', 'func')]),
    ]

    #ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
    if structural_scan_types:
        get_s_scan = pe.Node(name='get_s_scan',
                             interface=util.Function(
                                 function=get_scan,
                                 input_names=inspect.getargspec(get_scan)[0],
                                 output_names=['scan_path', 'scan_type']))
        get_s_scan.inputs.data_selection = data_selection
        get_s_scan.inputs.measurements_base = measurements_base
        get_s_scan.iterables = ("scan_type", structural_scan_types)

        s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
        s_bru2nii.inputs.force_conversion = True
        s_bru2nii.inputs.actual_size = actual_size

        s_bids_filename = pe.Node(
            name='s_bids_filename',
            interface=util.Function(
                function=sss_filename,
                input_names=inspect.getargspec(sss_filename)[0],
                output_names=['filename']))
        s_bids_filename.inputs.extension = ""
        s_bids_filename.inputs.scan_prefix = False

        workflow_connections.extend([
            (infosource, get_s_scan, [('subject_session', 'selector')]),
            (infosource, s_bids_filename, [('subject_session',
                                            'subject_session')]),
            (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
            (get_s_scan, s_bids_filename, [('scan_type', 'scan')]),
            (s_bids_filename, s_bru2nii, [('filename', 'output_filename')]),
            (s_bru2nii, datasink, [('nii_file', 'anat')]),
        ])

    #TODO: case: "space" - melodic ICAs don't break, case: "spacetime" - melodic ICAs break
    if realign == "space":
        realigner = pe.Node(interface=spm.Realign(), name="realigner")
        realigner.inputs.register_to_mean = True
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_files')]),
            (realigner, melodic, [('realigned_files', 'in_files')]),
        ])

    elif realign == "spacetime":
        realigner = pe.Node(interface=nipy.SpaceTimeRealigner(),
                            name="realigner")
        realigner.inputs.slice_times = "asc_alt_2"
        realigner.inputs.tr = tr
        realigner.inputs.slice_info = 3  #3 for coronal slices (2 for horizontal, 1 for sagittal)
        workflow_connections.extend([
            (dummy_scans, realigner, [('out_file', 'in_file')]),
            (realigner, melodic, [('out_file', 'in_files')]),
        ])

    else:
        workflow_connections.extend([
            (dummy_scans, melodic, [('out_file', 'in_files')]),
        ])

    workdir_name = workflow_name + "_work"
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base)
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")
    if not loud:
        try:
            workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
        except RuntimeError:
            print(
                "WARNING: Some expected scans have not been found (or another RuntimeError has occurred)."
            )
        for f in listdir(getcwd()):
            if re.search(r"crash.*?(get_s_scan|get_f_scan).*?\.pklz", f):
                remove(path.join(getcwd(), f))
    else:
        workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
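
With analogous placeholder values, the diagnostic workflow could be invoked as below; `components` fixes the MELODIC dimensionality, and realignment is left off per the TODO note above.

if __name__ == '__main__':
    diagnose(
        '~/ni_data/test',                # placeholder measurements base
        functional_scan_types=['EPI'],   # placeholder; defaults to the scan classification file
        components=20,
        realign=False,
    )
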
Example #3
def bru2bids(
    measurements_base,
    measurements=[],
    actual_size=True,
    debug=False,
    exclude={},
    functional_match={},
    keep_crashdump=False,
    keep_work=False,
    n_procs=N_PROCS,
    structural_match={},
):
    """
	Convert and reorganize Bruker "raw" directories (2dseq and ParaVision-formatted metadata files) into a BIDS-organized file hierarchy containing NIfTI files and associated metadata.
	If any exist, this workflow also reposits COSplay event files (already written according to BIDS) in the correct place in the output hierarchy.

	Parameters
	----------

	measurements_base : str
		Path of the top level directory containing all the Bruker scan directories to be converted and reformatted.
	actual_size : bool, optional
		Whether to conserve the voxel size reported by the scanner when converting the data to NIfTI.
		Setting this to `False` multiplies the voxel edge lengths by 10 (i.e. the volume by 1000); this is occasionally done in hackish small animal pipelines, which use routines designed exclusively for human data.
		Unless you are looking to reproduce such a workflow, this should be set to `True`.
	debug : bool, optional
		Whether to enable debug support.
		This prints the data selection before passing it to the nipype workflow management system, and turns on debug support in nipype (leading to more verbose logging).
	exclude : dict, optional
		A dictionary with any combination of "session", "subject", "trial", and "acquisition" as keys and corresponding identifiers as values.
		Only scans not matching any of the listed criteria will be included in the workflow - i.e. this is a blacklist (for functional and structural scans).
	functional_match : dict, optional
		A dictionary with any combination of "session", "subject", "trial", and "acquisition" as keys and corresponding lists of identifiers as values.
		Functional scans matching all identifiers will be included - i.e. this is a whitelist.
	keep_work : bool, optional
		Whether to keep the work directory (containing all the intermediary workflow steps, as managed by nipype).
		This is useful for debugging and quality control.
	keep_crashdump : bool, optional
		Whether to keep the crashdump directory (containing all the crash reports for intermediary workflow steps, as managed by nipype).
		This is useful for debugging and quality control.
	n_procs : int, optional
		Maximum number of processes to spawn simultaneously for the workflow.
		If not explicitly defined, this is automatically calculated from the number of available cores, under the assumption that the workflow will be the main process running for its entire duration.
	structural_match : dict, optional
		A dictionary with any combination of "session", "subject", "trial", and "acquisition" as keys and corresponding lists of identifiers as values.
		Structural scans matching all identifiers will be included - i.e. this is a whitelist.
	"""

    measurements_base = path.abspath(path.expanduser(measurements_base))

    # define measurement directories to be processed, and populate the list either with the given measurements, or with an intelligent selection
    # initializing the functional scan types keeps the workflow definition safe when no functional whitelist is given
    functional_scan_types = pd.Series(dtype=object).unique()
    data_selection = pd.DataFrame([])
    if structural_match:
        s_data_selection = get_data_selection(
            measurements_base,
            match=structural_match,
            exclude=exclude,
            measurements=measurements,
        )
        structural_scan_types = s_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, s_data_selection])
    if functional_match:
        f_data_selection = get_data_selection(
            measurements_base,
            match=functional_match,
            exclude=exclude,
            measurements=measurements,
        )
        functional_scan_types = f_data_selection['scan_type'].unique()
        data_selection = pd.concat([data_selection, f_data_selection])

    # we start to define nipype workflow elements (nodes, connections, meta)
    subjects_sessions = data_selection[["subject", "session"]].drop_duplicates().values.tolist()
    if debug:
        print('Data selection:')
        print(data_selection)
        print('Iterating over:')
        print(subjects_sessions)
    infosource = pe.Node(interface=util.IdentityInterface(
        fields=['subject_session'], mandatory_inputs=False),
                         name="infosource")
    infosource.iterables = [('subject_session', subjects_sessions)]

    get_f_scan = pe.Node(name='get_f_scan',
                         interface=util.Function(
                             function=get_scan,
                             input_names=inspect.getargspec(get_scan)[0],
                             output_names=['scan_path', 'scan_type', 'trial']))
    get_f_scan.inputs.ignore_exception = True
    get_f_scan.inputs.data_selection = data_selection
    get_f_scan.inputs.measurements_base = measurements_base
    get_f_scan.iterables = ("scan_type", functional_scan_types)

    f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
    f_bru2nii.inputs.actual_size = actual_size

    f_filename = pe.Node(name='f_filename',
                         interface=util.Function(
                             function=bids_naming,
                             input_names=inspect.getargspec(bids_naming)[0],
                             output_names=['filename']))
    f_filename.inputs.metadata = data_selection
    f_filename.inputs.extension = ''

    f_metadata_filename = pe.Node(
        name='f_metadata_filename',
        interface=util.Function(function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    f_metadata_filename.inputs.extension = ".json"
    f_metadata_filename.inputs.metadata = data_selection

    events_filename = pe.Node(
        name='bids_stim_filename',
        interface=util.Function(function=bids_naming,
                                input_names=inspect.getargspec(bids_naming)[0],
                                output_names=['filename']))
    events_filename.inputs.suffix = "events"
    events_filename.inputs.extension = ".tsv"
    events_filename.inputs.metadata = data_selection
    events_filename.inputs.ignore_exception = True

    f_metadata_file = pe.Node(
        name='metadata_file',
        interface=util.Function(
            function=write_bids_metadata_file,
            input_names=inspect.getargspec(write_bids_metadata_file)[0],
            output_names=['out_file']))
    f_metadata_file.inputs.extraction_dicts = BIDS_METADATA_EXTRACTION_DICTS

    events_file = pe.Node(
        name='events_file',
        interface=util.Function(
            function=write_events_file,
            input_names=inspect.getargspec(write_events_file)[0],
            output_names=['out_file']))
    events_file.inputs.ignore_exception = True

    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.base_directory = path.join(measurements_base, "bids")
    datasink.inputs.parameterization = False

    workflow_connections = [
        (infosource, datasink, [(('subject_session', ss_to_path), 'container')
                                ]),
        (infosource, get_f_scan, [('subject_session', 'selector')]),
        (infosource, f_metadata_filename, [('subject_session',
                                            'subject_session')]),
        (infosource, f_filename, [('subject_session', 'subject_session')]),
        (infosource, events_filename, [('subject_session', 'subject_session')
                                       ]),
        (get_f_scan, f_metadata_filename, [('scan_type', 'scan_type')]),
        (get_f_scan, f_filename, [('scan_type', 'scan_type')]),
        (get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
        (get_f_scan, f_metadata_file, [('scan_path', 'scan_dir')]),
        (f_metadata_filename, f_metadata_file, [('filename', 'out_file')]),
        (f_filename, f_bru2nii, [('filename', 'output_filename')]),
        (events_filename, events_file, [('filename', 'out_file')]),
        (f_metadata_file, events_file, [('out_file', 'metadata_file')]),
        (f_bru2nii, events_file, [('nii_file', 'timecourse_file')]),
        (get_f_scan, events_filename, [('scan_type', 'scan_type')]),
        (f_bru2nii, datasink, [('nii_file', 'func')]),
        (get_f_scan, events_file, [('trial', 'trial'),
                                   ('scan_path', 'scan_dir')]),
        (events_file, datasink, [('out_file', 'func.@events')]),
        (f_metadata_file, datasink, [('out_file', 'func.@metadata')]),
    ]

    crashdump_dir = path.join(measurements_base, 'bids_crashdump')
    workflow_config = {'execution': {'crashdump_dir': crashdump_dir}}
    if debug:
        workflow_config['logging'] = {
            'workflow_level': 'DEBUG',
            'utils_level': 'DEBUG',
            'interface_level': 'DEBUG',
            'filemanip_level': 'DEBUG',
            'log_to_file': 'true',
        }

    workdir_name = 'bids_work'
    workflow = pe.Workflow(name=workdir_name)
    workflow.connect(workflow_connections)
    workflow.base_dir = path.join(measurements_base)
    workflow.config = workflow_config
    workflow.write_graph(dotfilename=path.join(workflow.base_dir, workdir_name,
                                               "graph.dot"),
                         graph2use="hierarchical",
                         format="png")

    if not keep_work or not keep_crashdump:
        try:
            workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
        except RuntimeError:
            pass
    else:
        workflow.run(plugin="MultiProc", plugin_args={'n_procs': n_procs})
    if not keep_work:
        shutil.rmtree(path.join(workflow.base_dir, workdir_name))
    if not keep_crashdump:
        try:
            shutil.rmtree(crashdump_dir)
        except FileNotFoundError:
            pass
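
    # Structural scans are converted in a second, independent workflow run;
    # the UnboundLocalError guard below covers the case where no structural
    # match was given and `structural_scan_types` was never assigned.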

    try:
        if structural_scan_types.any():
            get_s_scan = pe.Node(
                name='get_s_scan',
                interface=util.Function(
                    function=get_scan,
                    input_names=inspect.getargspec(get_scan)[0],
                    output_names=['scan_path', 'scan_type', 'trial']))
            get_s_scan.inputs.ignore_exception = True
            get_s_scan.inputs.data_selection = data_selection
            get_s_scan.inputs.measurements_base = measurements_base
            get_s_scan.iterables = ("scan_type", structural_scan_types)

            s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
            s_bru2nii.inputs.force_conversion = True
            s_bru2nii.inputs.actual_size = actual_size

            s_filename = pe.Node(
                name='s_filename',
                interface=util.Function(
                    function=bids_naming,
                    input_names=inspect.getargspec(bids_naming)[0],
                    output_names=['filename']))
            s_filename.inputs.metadata = data_selection
            s_filename.inputs.extension = ''

            s_metadata_filename = pe.Node(
                name='s_metadata_filename',
                interface=util.Function(
                    function=bids_naming,
                    input_names=inspect.getargspec(bids_naming)[0],
                    output_names=['filename']))
            s_metadata_filename.inputs.extension = ".json"
            s_metadata_filename.inputs.metadata = data_selection

            s_metadata_file = pe.Node(name='s_metadata_file',
                                      interface=util.Function(
                                          function=write_bids_metadata_file,
                                          input_names=inspect.getargspec(
                                              write_bids_metadata_file)[0],
                                          output_names=['out_file']))
            s_metadata_file.inputs.extraction_dicts = BIDS_METADATA_EXTRACTION_DICTS

            workflow_connections = [
                (infosource, datasink, [(('subject_session', ss_to_path),
                                         'container')]),
                (infosource, get_s_scan, [('subject_session', 'selector')]),
                (infosource, s_filename, [('subject_session',
                                           'subject_session')]),
                (infosource, s_metadata_filename, [('subject_session',
                                                    'subject_session')]),
                (get_s_scan, s_bru2nii, [('scan_path', 'input_dir')]),
                (get_s_scan, s_filename, [('scan_type', 'scan_type')]),
                (get_s_scan, s_metadata_filename, [('scan_type', 'scan_type')
                                                   ]),
                (get_s_scan, s_metadata_file, [('scan_path', 'scan_dir')]),
                (s_filename, s_bru2nii, [('filename', 'output_filename')]),
                (s_metadata_filename, s_metadata_file, [('filename',
                                                         'out_file')]),
                (s_bru2nii, datasink, [('nii_file', 'anat')]),
                (s_metadata_file, datasink, [('out_file', 'anat.@metadata')]),
            ]
            crashdump_dir = path.join(measurements_base, 'bids_crashdump')
            workflow_config = {'execution': {'crashdump_dir': crashdump_dir}}
            if debug:
                workflow_config['logging'] = {
                    'workflow_level': 'DEBUG',
                    'utils_level': 'DEBUG',
                    'interface_level': 'DEBUG',
                    'filemanip_level': 'DEBUG',
                    'log_to_file': 'true',
                }

            workdir_name = 'bids_work'
            workflow = pe.Workflow(name=workdir_name)
            workflow.connect(workflow_connections)
            workflow.base_dir = path.join(measurements_base)
            workflow.config = workflow_config
            workflow.write_graph(dotfilename=path.join(workflow.base_dir,
                                                       workdir_name,
                                                       "graph.dot"),
                                 graph2use="hierarchical",
                                 format="png")

            if not keep_work or not keep_crashdump:
                try:
                    workflow.run(plugin="MultiProc",
                                 plugin_args={'n_procs': n_procs})
                except RuntimeError:
                    pass
            else:
                workflow.run(plugin="MultiProc",
                             plugin_args={'n_procs': n_procs})
            if not keep_work:
                shutil.rmtree(path.join(workflow.base_dir, workdir_name))
            if not keep_crashdump:
                try:
                    shutil.rmtree(crashdump_dir)
                except FileNotFoundError:
                    pass
    except UnboundLocalError:
        pass
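
Assuming a Bruker directory tree at a placeholder location, the conversion could be run as follows; the match dictionaries are illustrative whitelists, not values from the code above.

if __name__ == '__main__':
    bru2bids(
        '~/ni_data/test',                                  # placeholder measurements base
        functional_match={'trial': ['EPI']},               # placeholder functional whitelist
        structural_match={'acquisition': ['TurboRARE']},   # placeholder structural whitelist
        keep_crashdump=True,                               # retain crash reports for inspection
    )
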
Example #4
def bruker(measurements_base,
	functional_scan_types=[],
	structural_scan_types=[],
	sessions=[],
	subjects=[],
	measurements=[],
	exclude_subjects=[],
	exclude_measurements=[],
	actual_size=False,
	functional_blur_xy=False,
	functional_registration_method="structural",
	highpass_sigma=225,
	lowpass_sigma=None,
	negative_contrast_agent=False,
	n_procs=N_PROCS,
	realign=True,
	registration_mask=False,
	template="/home/chymera/ni_data/templates/ds_QBI_chr.nii.gz",
	tr=1,
	very_nasty_bruker_delay_hack=False,
	workflow_name="generic",
	keep_work=False,
	autorotate=False,
	strict=False,
	):

	measurements_base = path.abspath(path.expanduser(measurements_base))

	#select all functional/structural scan types unless specified
	if not functional_scan_types or not structural_scan_types:
		scan_classification = pd.read_csv(scan_classification_file_path)
		if not functional_scan_types:
			functional_scan_types = list(scan_classification[(scan_classification["categories"] == "functional")]["scan_type"])
		if not structural_scan_types:
			structural_scan_types = list(scan_classification[(scan_classification["categories"] == "structural")]["scan_type"])

	#hack to allow structural scan type disabling:
	if structural_scan_types == -1:
		structural_scan_types = []

	# define measurement directories to be processed, and populate the list either with the given measurements, or with an intelligent selection
	scan_types = deepcopy(functional_scan_types)
	scan_types.extend(structural_scan_types)
	data_selection=get_data_selection(measurements_base, sessions, scan_types=scan_types, subjects=subjects, exclude_subjects=exclude_subjects, measurements=measurements, exclude_measurements=exclude_measurements)
	if not subjects:
		subjects = set(list(data_selection["subject"]))
	if not sessions:
		sessions = set(list(data_selection["session"]))

	# we currently only support one structural scan type per session
	if functional_registration_method in ("structural", "composite") and structural_scan_types:
		structural_scan_types = [structural_scan_types[0]]

	# here we start to define the nipype workflow elements (nodes, connections, meta)
	subjects_sessions = data_selection[["subject","session"]].drop_duplicates().values.tolist()
	infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_session']), name="infosource")
	infosource.iterables = [('subject_session', subjects_sessions)]

	get_f_scan = pe.Node(name='get_f_scan', interface=util.Function(function=get_scan,input_names=inspect.getargspec(get_scan)[0], output_names=['scan_path','scan_type']))
	if not strict:
		get_f_scan.inputs.ignore_exception = True
	get_f_scan.inputs.data_selection = data_selection
	get_f_scan.inputs.measurements_base = measurements_base
	get_f_scan.iterables = ("scan_type", functional_scan_types)

	f_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="f_bru2nii")
	f_bru2nii.inputs.actual_size=actual_size

	dummy_scans = pe.Node(name='dummy_scans', interface=util.Function(function=force_dummy_scans,input_names=inspect.getargspec(force_dummy_scans)[0], output_names=['out_file']))
	dummy_scans.inputs.desired_dummy_scans = DUMMY_SCANS

	events_file = pe.Node(name='events_file', interface=util.Function(function=write_events_file,input_names=inspect.getargspec(write_events_file)[0], output_names=['out_file']))
	events_file.inputs.dummy_scans_ms = DUMMY_SCANS * tr * 1000
	events_file.inputs.stim_protocol_dictionary = STIM_PROTOCOL_DICTIONARY
	events_file.inputs.very_nasty_bruker_delay_hack = very_nasty_bruker_delay_hack

	if realign:
		realigner = pe.Node(interface=nipy.SpaceTimeRealigner(), name="realigner")
		realigner.inputs.slice_times = "asc_alt_2"
		realigner.inputs.tr = tr
		realigner.inputs.slice_info = 3 #3 for coronal slices (2 for horizontal, 1 for sagittal)

	bandpass = pe.Node(interface=fsl.maths.TemporalFilter(), name="bandpass")
	bandpass.inputs.highpass_sigma = highpass_sigma
	if lowpass_sigma:
		bandpass.inputs.lowpass_sigma = lowpass_sigma
	else:
		bandpass.inputs.lowpass_sigma = tr

	bids_filename = pe.Node(name='bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))

	bids_stim_filename = pe.Node(name='bids_stim_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
	bids_stim_filename.inputs.suffix = "events"
	bids_stim_filename.inputs.extension = ".tsv"

	datasink = pe.Node(nio.DataSink(), name='datasink')
	datasink.inputs.base_directory = path.join(measurements_base,"preprocessing",workflow_name)
	datasink.inputs.parameterization = False

	workflow_connections = [
		(infosource, get_f_scan, [('subject_session', 'selector')]),
		(infosource, bids_stim_filename, [('subject_session', 'subject_session')]),
		(get_f_scan, bids_stim_filename, [('scan_type', 'scan')]),
		(get_f_scan, f_bru2nii, [('scan_path', 'input_dir')]),
		(f_bru2nii, dummy_scans, [('nii_file', 'in_file')]),
		(get_f_scan, dummy_scans, [('scan_path', 'scan_dir')]),
		(get_f_scan, events_file, [
			('scan_type', 'scan_type'),
			('scan_path', 'scan_dir')
			]),
		(events_file, datasink, [('out_file', 'func.@events')]),
		(bids_stim_filename, events_file, [('filename', 'out_file')]),
		(infosource, datasink, [(('subject_session',ss_to_path), 'container')]),
		(infosource, bids_filename, [('subject_session', 'subject_session')]),
		(get_f_scan, bids_filename, [('scan_type', 'scan')]),
		(bids_filename, bandpass, [('filename', 'out_file')]),
		(bandpass, datasink, [('out_file', 'func')]),
		]

	if realign:
		workflow_connections.extend([
			(dummy_scans, realigner, [('out_file', 'in_file')]),
			])

	#ADDING SELECTABLE NODES AND EXTENDING WORKFLOW AS APPROPRIATE:
	if structural_scan_types:
		get_s_scan = pe.Node(name='get_s_scan', interface=util.Function(function=get_scan,input_names=inspect.getargspec(get_scan)[0], output_names=['scan_path','scan_type']))
		if not strict:
			get_s_scan.inputs.ignore_exception = True
		get_s_scan.inputs.data_selection = data_selection
		get_s_scan.inputs.measurements_base = measurements_base
		get_s_scan.iterables = ("scan_type", structural_scan_types)

		s_bru2nii = pe.Node(interface=bru2nii.Bru2(), name="s_bru2nii")
		s_bru2nii.inputs.force_conversion=True
		s_bru2nii.inputs.actual_size=actual_size

		if "DSURQEc" in template:
			s_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_biascorrect")
			s_biascorrect.inputs.dimension = 3
			s_biascorrect.inputs.bspline_fitting_distance = 10
			s_biascorrect.inputs.bspline_order = 4
			s_biascorrect.inputs.shrink_factor = 2
			s_biascorrect.inputs.n_iterations = [150,100,50,30]
			s_biascorrect.inputs.convergence_threshold = 1e-16
			# also unpack the functional warp, which the "structural" registration method below requires
			s_register, s_warp, _, f_warp = DSURQEc_structural_registration(template, registration_mask)
			#TODO: incl. in func registration
			if autorotate:
				workflow_connections.extend([
					(s_biascorrect, s_rotated, [('output_image', 'out_file')]),
					(s_rotated, s_register, [('out_file', 'moving_image')]),
					])
			else:
				workflow_connections.extend([
					(s_biascorrect, s_register, [('output_image', 'moving_image')]),
					(s_register, s_warp, [('composite_transform', 'transforms')]),
					(s_bru2nii, s_warp, [('nii_file', 'input_image')]),
					(s_warp, datasink, [('output_image', 'anat')]),
					])
		else:
			s_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_biascorrect")
			s_biascorrect.inputs.dimension = 3
			s_biascorrect.inputs.bspline_fitting_distance = 100
			s_biascorrect.inputs.shrink_factor = 2
			s_biascorrect.inputs.n_iterations = [200,200,200,200]
			s_biascorrect.inputs.convergence_threshold = 1e-11

			s_reg_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="s_reg_biascorrect")
			s_reg_biascorrect.inputs.dimension = 3
			s_reg_biascorrect.inputs.bspline_fitting_distance = 95
			s_reg_biascorrect.inputs.shrink_factor = 2
			s_reg_biascorrect.inputs.n_iterations = [500,500,500,500]
			s_reg_biascorrect.inputs.convergence_threshold = 1e-14

			s_cutoff = pe.Node(interface=fsl.ImageMaths(), name="s_cutoff")
			s_cutoff.inputs.op_string = "-thrP 20 -uthrp 98"

			s_BET = pe.Node(interface=fsl.BET(), name="s_BET")
			s_BET.inputs.mask = True
			s_BET.inputs.frac = 0.3
			s_BET.inputs.robust = True

			s_mask = pe.Node(interface=fsl.ApplyMask(), name="s_mask")
			s_register, s_warp, _, f_warp = structural_registration(template)

			workflow_connections.extend([
				(s_bru2nii, s_reg_biascorrect, [('nii_file', 'input_image')]),
				(s_reg_biascorrect, s_cutoff, [('output_image', 'in_file')]),
				(s_cutoff, s_BET, [('out_file', 'in_file')]),
				(s_biascorrect, s_mask, [('output_image', 'in_file')]),
				(s_BET, s_mask, [('mask_file', 'mask_file')]),
				])

			#TODO: incl. in func registration
			if autorotate:
				workflow_connections.extend([
					(s_mask, s_rotated, [('out_file', 'out_file')]),
					(s_rotated, s_register, [('out_file', 'moving_image')]),
					])
			else:
				workflow_connections.extend([
					(s_mask, s_register, [('out_file', 'moving_image')]),
					(s_register, s_warp, [('composite_transform', 'transforms')]),
					(s_bru2nii, s_warp, [('nii_file', 'input_image')]),
					(s_warp, datasink, [('output_image', 'anat')]),
					])

		s_bids_filename = pe.Node(name='s_bids_filename', interface=util.Function(function=sss_filename,input_names=inspect.getargspec(sss_filename)[0], output_names=['filename']))
		s_bids_filename.inputs.scan_prefix = False

		workflow_connections.extend([
			(infosource, get_s_scan, [('subject_session', 'selector')]),
			(infosource, s_bids_filename, [('subject_session', 'subject_session')]),
			(get_s_scan, s_bru2nii, [('scan_path','input_dir')]),
			(get_s_scan, s_bids_filename, [('scan_type', 'scan')]),
			(s_bids_filename, s_warp, [('filename','output_image')]),
			(s_bru2nii, s_biascorrect, [('nii_file', 'input_image')]),
			])



	if functional_registration_method == "structural":
		if not structural_scan_types:
			raise ValueError('The option `registration="structural"` requires there to be a structural scan type.')
		workflow_connections.extend([
			(s_register, f_warp, [('composite_transform', 'transforms')]),
			])
		if realign:
			workflow_connections.extend([
				(realigner, f_warp, [('out_file', 'input_image')]),
				])
		else:
			workflow_connections.extend([
				(dummy_scans, f_warp, [('out_file', 'input_image')]),
				])


	if functional_registration_method == "composite":
		if not structural_scan_types:
			raise ValueError('The option `registration="composite"` requires there to be a structural scan type.')
		_, _, f_register, f_warp = DSURQEc_structural_registration(template, registration_mask)

		temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

		f_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="f_biascorrect")
		f_biascorrect.inputs.dimension = 3
		f_biascorrect.inputs.bspline_fitting_distance = 100
		f_biascorrect.inputs.shrink_factor = 2
		f_biascorrect.inputs.n_iterations = [200,200,200,200]
		f_biascorrect.inputs.convergence_threshold = 1e-11

		merge = pe.Node(util.Merge(2), name='merge')

		workflow_connections.extend([
			(temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
			(f_biascorrect, f_register, [('output_image', 'moving_image')]),
			(s_biascorrect, f_register, [('output_image', 'fixed_image')]),
			(f_register, merge, [('composite_transform', 'in1')]),
			(s_register, merge, [('composite_transform', 'in2')]),
			(merge, f_warp, [('out', 'transforms')]),
			])
		if realign:
			workflow_connections.extend([
				(realigner, temporal_mean, [('out_file', 'in_file')]),
				(realigner, f_warp, [('out_file', 'input_image')]),
				])
		else:
			workflow_connections.extend([
				(dummy_scans, temporal_mean, [('out_file', 'in_file')]),
				(dummy_scans, f_warp, [('out_file', 'input_image')]),
				])

	elif functional_registration_method == "functional":
		f_register, f_warp = functional_registration(template)

		temporal_mean = pe.Node(interface=fsl.MeanImage(), name="temporal_mean")

		f_biascorrect = pe.Node(interface=ants.N4BiasFieldCorrection(), name="f_biascorrect")
		f_biascorrect.inputs.dimension = 3
		f_biascorrect.inputs.bspline_fitting_distance = 100
		f_biascorrect.inputs.shrink_factor = 2
		f_biascorrect.inputs.n_iterations = [200,200,200,200]
		f_biascorrect.inputs.convergence_threshold = 1e-11

		f_cutoff = pe.Node(interface=fsl.ImageMaths(), name="f_cutoff")
		f_cutoff.inputs.op_string = "-thrP 30"

		f_BET = pe.Node(interface=fsl.BET(), name="f_BET")
		f_BET.inputs.mask = True
		f_BET.inputs.frac = 0.5

		workflow_connections.extend([
			(temporal_mean, f_biascorrect, [('out_file', 'input_image')]),
			(f_biascorrect, f_cutoff, [('output_image', 'in_file')]),
			(f_cutoff, f_BET, [('out_file', 'in_file')]),
			(f_BET, f_register, [('out_file', 'moving_image')]),
			(f_register, f_warp, [('composite_transform', 'transforms')]),
			])
		if realign:
			workflow_connections.extend([
				(realigner, temporal_mean, [('out_file', 'in_file')]),
				(realigner, f_warp, [('out_file', 'input_image')]),
				])
		else:
			workflow_connections.extend([
				(dummy_scans, temporal_mean, [('out_file', 'in_file')]),
				(dummy_scans, f_warp, [('out_file', 'input_image')]),
				])


	invert = pe.Node(interface=fsl.ImageMaths(), name="invert")
	if functional_blur_xy and negative_contrast_agent:
		blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
		blur.inputs.fwhmxy = functional_blur_xy
		workflow_connections.extend([
			(f_warp, blur, [('output_image', 'in_file')]),
			(blur, invert, [(('out_file', fslmaths_invert_values), 'op_string')]),
			(blur, invert, [('out_file', 'in_file')]),
			(invert, bandpass, [('out_file', 'in_file')]),
			])
	elif functional_blur_xy:
		blur = pe.Node(interface=afni.preprocess.BlurToFWHM(), name="blur")
		blur.inputs.fwhmxy = functional_blur_xy
		workflow_connections.extend([
			(f_warp, blur, [('output_image', 'in_file')]),
			(blur, bandpass, [('out_file', 'in_file')]),
			])
	elif negative_contrast_agent:
		workflow_connections.extend([
			(f_warp, invert, [(('output_image', fslmaths_invert_values), 'op_string')]),
			(f_warp, invert, [('output_image', 'in_file')]),
			(invert, bandpass, [('out_file', 'in_file')]),
			])
	else:
		workflow_connections.extend([
			(f_warp, bandpass, [('output_image', 'in_file')]),
			])

	workdir_name = workflow_name+"_work"
	workflow = pe.Workflow(name=workdir_name)
	workflow.connect(workflow_connections)
	workflow.base_dir = path.join(measurements_base,"preprocessing")
	workflow.config = {"execution": {"crashdump_dir": path.join(measurements_base,"preprocessing/crashdump")}}
	workflow.write_graph(dotfilename=path.join(workflow.base_dir,workdir_name,"graph.dot"), graph2use="hierarchical", format="png")

	workflow.run(plugin="MultiProc",  plugin_args={'n_procs' : n_procs})
	if not keep_work:
		shutil.rmtree(path.join(workflow.base_dir,workdir_name))
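
This legacy variant selects scans via scan-type lists rather than match dictionaries, and `realign` is a boolean here. A sketch invocation with placeholder values might be:

if __name__ == '__main__':
	bruker(
		'~/ni_data/test',                # placeholder measurements base
		functional_scan_types=['EPI'],   # placeholder; defaults to the classification file
		realign=True,
		)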