def run_spm_preprocessing(funcfile,
                          outdir,
                          repetition_time,
                          ref_slice,
                          slice_order,
                          template,
                          timings_corr_algo,
                          normalization,
                          spm_bin,
                          fsl_config,
                          enable_display=False):
    """
    """
    print "Study_config init..."
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config=fsl_config,
        use_fsl=True,
        use_matlab=False,
        use_spm=True,
        spm_exec=spm_bin,
        spm_standalone=True,
        use_nipype=True,
        output_directory=outdir,
    )
    print "    ... done."

    # Processing definition: create the <clinfmri.preproc.FmriPreproc> pipeline
    # that defines the different steps of the processing.
    pipeline = get_process_instance(
        "clinfmri.preproc.converted_fmri_preproc.xml")

    # It is possible to display the pipeline.
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    # Now parametrize the pipeline.
    pipeline.fmri_file = funcfile
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = timings_corr_algo
    pipeline.select_normalization = normalization
    pipeline.force_repetition_time = repetition_time
    pipeline.force_slice_orders = slice_order
    pipeline.realign_wrap = [0, 1, 0]
    pipeline.realign_write_wrap = [0, 1, 0]
    pipeline.ref_slice = ref_slice
    if template is not None:
        pipeline.template_file = template

    # The pipeline is now ready to be executed.
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)
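

# A minimal usage sketch. All paths and acquisition parameters below are
# hypothetical placeholders; the 'select_slicer'/'select_normalization'
# switch values in particular are assumptions, not documented values.
run_spm_preprocessing(
    funcfile="/data/study/sub01/func.nii",
    outdir="/data/study/sub01/preproc",
    repetition_time=2.0,
    ref_slice=1,
    slice_order=list(range(1, 41)),
    template=None,
    timings_corr_algo="SliceTiming",
    normalization="fmri",
    spm_bin="/opt/spm12/run_spm12.sh",
    fsl_config="/etc/fsl/5.0/fsl.sh",
    enable_display=False)
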
# Create the subject output directory
soutdir = os.path.join(args.outdir, args.sid, "EPI_stop_signal")
capsulwd = os.path.join(soutdir, "capsul")
if args.erase and os.path.isdir(soutdir):
    shutil.rmtree(soutdir)
if not os.path.isdir(capsulwd):
    os.makedirs(capsulwd)

# Create the study configuration
print "Study_config init..."
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
    use_smart_caching=False,
    use_fsl=True,
    fsl_config=args.fslconfig,
    use_matlab=True,
    matlab_exec=args.matlabexec,
    use_spm=True,
    spm_directory=args.spmdir,
    use_nipype=True,
    output_directory=capsulwd)
print "    ... done."

# Get the pipeline
pipeline = get_process_instance(
    "clinfmri.statistics.spm_first_level_pipeline.xml")

# Unzip the NIfTI file (the decompressed copy is removed afterwards)
fmri_session_unzip = os.path.join(
    capsulwd,
    os.path.basename(args.inputvolume).replace(".gz", ""))
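
# The decompression step itself is truncated in this snippet. A minimal
# sketch using only the standard library (assuming args.inputvolume is a
# gzip-compressed NIfTI file) could be:
import gzip

with gzip.open(args.inputvolume, "rb") as gzfile:
    with open(fmri_session_unzip, "wb") as niifile:
        shutil.copyfileobj(gzfile, niifile)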
Example 3
# Create the subject output directory
soutdir = os.path.join(args.outdir, args.sid)
capsulwd = os.path.join(soutdir, "capsul")
if args.erase and os.path.isdir(soutdir):
    shutil.rmtree(soutdir)
if not os.path.isdir(capsulwd):
    os.makedirs(capsulwd)

# Create the study configuration
print "Study_config init..."
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "NipypeConfig"],
    use_smart_caching=False,
    use_matlab=False,
    use_spm=True,
    spm_exec=args.spmbin,
    spm_standalone=True,
    use_nipype=True,
    output_directory=capsulwd)
print "    ... done."

# Get the pipeline
pipeline = get_process_instance("clinfmri.utils.spm_new_segment_only.xml")

# Configure the pipeline
pipeline.channel_files = [args.t1file]
# To locate the template TPM.nii shipped with the standalone distribution
pipeline.spm_dir = args.spmdir

# Execute the pipeline
study_config.run(pipeline)
Example 4
def pilot(working_dir="/volatile/nsap/caps", **kwargs):
    """
    ===============================
    Diffusion Brain Extraction Tool
    ===============================
    .. topic:: Objective

        We propose to extract the brain mask from a diffusion sequence.

    Import
    ------

    First we load the function that enables us to access the toy datasets.
    """
    from caps.toy_datasets import get_sample_data
    """
    From capsul we then load the class to configure the study we want to
    perform
    """
    from capsul.study_config import StudyConfig
    """
    Here two utility tools are loaded. The first one enables the creation
    of ordered dictionaries and the second ensures that a directory exists.
    Note that the directory will be created if necessary.
    """
    from capsul.utils.sorted_dictionary import SortedDictionary
    from nsap.lib.base import ensure_is_dir
    """
    Load the toy dataset
    --------------------

    We want to perform BET on a diffusion sequence.
    To do so, we use the *get_sample_data* function to load this
    dataset.

    .. seealso::

        For a complete description of the *get_sample_data* function, see the
        :ref:`Toy Datasets documentation <toy_datasets_guide>`
    """
    toy_dataset = get_sample_data("dwi")
    """
    The *toy_dataset* is an Enum structure with two elements of interest,
    *dwi* and *bvals*, which contain the nifti diffusion image and the
    b-values file respectively.
    """
    print(toy_dataset.dwi, toy_dataset.bvals)
    """
    Will return:

    .. code-block:: python

        /home/ag239446/git/nsap-src/nsap/data/DTI30s010.nii
        /home/ag239446/git/nsap-src/nsap/data/DTI30s010.bval

    We can see that the image has been found in a local directory.

    Processing definition
    ---------------------

    Now we need to define the processing step that will perform BET on
    the diffusion sequence.
    """
    bet_pipeline = dBET()
    """
    It is possible to access the pipeline input specification.
    """
    print(bet_pipeline.get_input_spec())
    """
    Will return the input parameters the user can set:

    .. code-block:: python

        INPUT SPECIFICATIONS

        dw_image: ['File']
        bvals: ['File']
        specified_index_of_ref_image: ['Int']
        terminal_output: ['Enum']
        generate_binary_mask: ['Bool']
        use_4d_input: ['Bool']
        generate_mesh: ['Bool']
        generate_skull: ['Bool']
        bet_threshold: ['Float']

    We can now tune the pipeline parameters.
    We first set the input dwi file:
    """
    bet_pipeline.dw_image = toy_dataset.dwi
    """
    And set the b-values file
    """
    bet_pipeline.bvals = toy_dataset.bvals
    """
    Study Configuration
    -------------------

    The pipeline is now set up and ready to be executed.
    For a complete description of a study execution, see the
    :ref:`Study Configuration description <study_configuration_guide>`
    """
    import os

    bet_working_dir = os.path.join(working_dir, "diffusion_bet")
    ensure_is_dir(bet_working_dir)
    default_config = SortedDictionary(
        ("output_directory", bet_working_dir),
        ("fsl_config", "/etc/fsl/4.1/fsl.sh"), ("use_fsl", True),
        ("use_smart_caching", True), ("generate_logging", True))
    study = StudyConfig(default_config)
    study.run(bet_pipeline)
    """
    Results
    -------

    Finally, we print the pipeline outputs
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in bet_pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
    """