# Imports assumed from the CAPSUL API used below (not shown in the original
# snippet).
from capsul.study_config import StudyConfig
from capsul.api import get_process_instance


def run_spm_preprocessing(funcfile,
                          outdir,
                          repetition_time,
                          ref_slice,
                          slice_order,
                          template,
                          timings_corr_algo,
                          normalization,
                          spm_bin,
                          fsl_config,
                          enable_display=False):
    """
    """
    print "Study_config init..."
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config=fsl_config,
        use_fsl=True,
        use_matlab=False,
        use_spm=True,
        spm_exec=spm_bin,
        spm_standalone=True,
        use_nipype=True,
        output_directory=outdir,
    )
    print "    ... done."

    # Processing definition: create the <clinfmri.preproc.FmriPreproc>
    # pipeline that defines the different steps of the processing.
    pipeline = get_process_instance(
        "clinfmri.preproc.converted_fmri_preproc.xml")

    # It is possible to display the pipeline.
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    # Now parametrize the pipeline.
    pipeline.fmri_file = funcfile
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = timings_corr_algo
    pipeline.select_normalization = normalization
    pipeline.force_repetition_time = repetition_time
    pipeline.force_slice_orders = slice_order
    pipeline.realign_wrap = [0, 1, 0]
    pipeline.realign_write_wrap = [0, 1, 0]
    pipeline.ref_slice = ref_slice
    if template is not None:
        pipeline.template_file = template

    # The pipeline is now ready to be executed.
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)
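
# A hedged usage sketch of the function above. Every path and parameter
# value here is hypothetical; the slice order assumes 40 ascending slices.
if __name__ == "__main__":
    run_spm_preprocessing(
        funcfile="/data/sub01/fmri.nii",
        outdir="/data/sub01/preproc",
        repetition_time=2.4,
        ref_slice=1,
        slice_order=[index + 1 for index in range(40)],
        template=None,
        timings_corr_algo="spm",
        normalization="fmri",
        spm_bin="/i2bm/local/bin/spm8",
        fsl_config="/etc/fsl/4.1/fsl.sh",
        enable_display=False)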
Example #2
class TestQCNodes(unittest.TestCase):
    """ Test pipeline node types.
    """

    def setUp(self):
        """ Initialize the TestQCNodes class
        """
        self.pipeline = MyPipeline()
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(output_directory=self.output_directory)

    def tearDown(self):
        """ Remove temporary items.
        """
        shutil.rmtree(self.output_directory)

    def test_qc_active(self):
        """ Check that all nodes are executed when executer_qc_nodes is True.
        """
        # Execute all the pipeline nodes
        self.study_config.run(self.pipeline, executer_qc_nodes=True)

        # Get the list of all the nodes that have been executed
        execution_list = self.pipeline.workflow_ordered_nodes()

        # Go through all the executed nodes
        for process_node in execution_list:

            # Get the process instance that has been executed
            process_instance = process_node.process

            # Check that the node has been executed
            self.assertEqual(process_instance.log_file, "in")

    def test_qc_inactive(self):
        """ Check that view nodes are skipped when executer_qc_nodes is False.
        """
        # Execute all the pipeline nodes
        self.study_config.run(self.pipeline, executer_qc_nodes=False)

        # Get the list of all the nodes
        execution_list = self.pipeline.workflow_ordered_nodes()

        # Go through all the nodes
        for process_node in execution_list:

            # Get the process instance that may have been executed
            process_instance = process_node.process

            # Check that view nodes are not executed
            if process_node.node_type == "view_node":
                self.assertEqual(process_instance.log_file, None)
            else:
                self.assertEqual(process_instance.log_file, "in")
Example #3
    def setUp(self):
        default_config = SortedDictionary(
            ("use_soma_workflow", True)
        )
        # use a custom temporary soma-workflow dir to avoid concurrent
        # access problems
        tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
        os.close(tmpdb[0])
        os.unlink(tmpdb[1])
        self.soma_workflow_temp_dir = tmpdb[1]
        os.mkdir(self.soma_workflow_temp_dir)
        swf_conf = StringIO.StringIO('[%s]\nSOMA_WORKFLOW_DIR = %s\n'
                                     % (socket.gethostname(), tmpdb[1]))
        swconfig.Configuration.search_config_path \
            = staticmethod(lambda: swf_conf)
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()
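
    # A matching cleanup is not shown in this fragment; a sketch, mirroring
    # the TestSomaWorkflow example further down, would be:
    def tearDown(self):
        # Remove the custom temporary soma-workflow directory
        shutil.rmtree(self.soma_workflow_temp_dir)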
Example #4
def pilot(working_dir="/volatile/nsap/caps", **kwargs):
    """
    ===============================
    Diffusion Brain Extraction Tool
    ===============================
    .. topic:: Objective

        We propose to extract the brain mask from a diffusion sequence.

    Import
    ------

    First we load the function that enables us to access the toy datasets
    """
    from caps.toy_datasets import get_sample_data

    """
    From capsul we then load the class to configure the study we want to
    perform
    """
    from capsul.study_config import StudyConfig

    """
    Here two utility tools are loaded. The first enables the creation of
    ordered dictionaries and the second ensures that a directory exists.
    Note that the directory will be created if necessary.
    """
    from capsul.utils.sorted_dictionary import SortedDictionary
    from nsap.lib.base import ensure_is_dir

    """
    Load the toy dataset
    --------------------

    We want to perform BET on a diffusion sequence.
    To do so, we use the *get_sample_data* function to load this
    dataset.

    .. seealso::

        For a complete description of the *get_sample_data* function, see the
        :ref:`Toy Datasets documentation <toy_datasets_guide>`
    """
    toy_dataset = get_sample_data("dwi")

    """
    The *toy_dataset* is an Enum structure with two specific elements of
    interest, *dwi* and *bvals*, that contain the NIfTI diffusion image
    and the b-values respectively.
    """
    print(toy_dataset.dwi, toy_dataset.bvals)

    """
    Will return:

    .. code-block:: python

        /home/ag239446/git/nsap-src/nsap/data/DTI30s010.nii
        /home/ag239446/git/nsap-src/nsap/data/DTI30s010.bval

    We can see that the image has been found in a local directory

    Processing definition
    ---------------------

    Now we need to define the processing step that will perform BET on
    diffusion sequence.
    """
    bet_pipeline = dBET()

    """
    It is possible to access the pipeline input specification.
    """
    print(bet_pipeline.get_input_spec())

    """
    Will return the input parameters the user can set:

    .. code-block:: python

        INPUT SPECIFICATIONS

        dw_image: ['File']
        bvals: ['File']
        specified_index_of_ref_image: ['Int']
        terminal_output: ['Enum']
        generate_binary_mask: ['Bool']
        use_4d_input: ['Bool']
        generate_mesh: ['Bool']
        generate_skull: ['Bool']
        bet_threshold: ['Float']

    We can now tune the pipeline parameters.
    We first set the input dwi file:
    """
    bet_pipeline.dw_image = toy_dataset.dwi

    """
    And set the b-values file
    """
    bet_pipeline.bvals = toy_dataset.bvals

    """
    Study Configuration
    -------------------

    The pipeline is now set up and ready to be executed.
    For a complete description of a study execution, see the
    :ref:`Study Configuration description <study_configuration_guide>`
    """
    bet_working_dir = os.path.join(working_dir, "diffusion_bet")
    ensure_is_dir(bet_working_dir)
    default_config = SortedDictionary(
        ("output_directory", bet_working_dir),
        ("fsl_config", "/etc/fsl/4.1/fsl.sh"),
        ("use_fsl", True),
        ("use_smart_caching", True),
        ("generate_logging", True)
    )
    study = StudyConfig(default_config)
    study.run(bet_pipeline)

    """
    Results
    -------

    Finally, we print the pipeline outputs
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in bet_pipeline.get_outputs().iteritems():
        print("{0}: {1}".format(trait_name, trait_value))

    """
# Create the subject output directory
soutdir = os.path.join(args.outdir, args.sid, "EPI_stop_signal")
capsulwd = os.path.join(soutdir, "capsul")
if args.erase and os.path.isdir(soutdir):
    shutil.rmtree(soutdir)
if not os.path.isdir(capsulwd):
    os.makedirs(capsulwd)

# Create the study configuration
print "Study_config init..."
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
    use_smart_caching=False,
    use_fsl=True,
    fsl_config=args.fslconfig,
    use_matlab=True,
    matlab_exec=args.matlabexec,
    use_spm=True,
    spm_directory=args.spmdir,
    use_nipype=True,
    output_directory=capsulwd)
print "    ... done."

# Get the pipeline
pipeline = get_process_instance(
    "clinfmri.statistics.spm_first_level_pipeline.xml")

# Unzip the NIfTI file (to be deleted afterwards)
fmri_session_unzip = os.path.join(
    capsulwd,
    os.path.basename(args.inputvolume).replace(".gz", ""))
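
# The decompression itself is truncated in the original snippet; a minimal
# sketch using only the standard library (hypothetical completion) could be:
import gzip

with gzip.open(args.inputvolume, "rb") as gzfile:
    with open(fmri_session_unzip, "wb") as niftifile:
        shutil.copyfileobj(gzfile, niftifile)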
Example #6
    def __init__(self, pipeline_menu, ui_file, default_study_config=None):
        """ Method to initialize the Capsul main window class.

        Parameters
        ----------
        pipeline_menu: hierarchic dict
            each key is a sub module of the module. Leaves contain a list
            with the URL to the documentation.
        ui_file: str (mandatory)
            a filename containing the user interface description
        default_study_config: ordered dict (mandatory)
            some parameters for the study configuration
        """
        # Inheritance: load user interface window
        MyQUiLoader.__init__(self, ui_file)

        # Class parameters
        self.pipeline_menu = pipeline_menu
        self.pipelines = {}
        self.pipeline = None
        self.path_to_pipeline_doc = {}

        # Define dynamic controls
        self.controls = {
            QtGui.QAction: ["actionHelp", "actionQuit", "actionBrowse",
                            "actionLoad", "actionChangeView",
                            "actionParameters", "actionRun",
                            "actionStudyConfig", "actionQualityControl"],
            QtGui.QTabWidget: ["display", ],
            QtGui.QDockWidget: ["dockWidgetBrowse", "dockWidgetParameters",
                                "dockWidgetStudyConfig", "dockWidgetBoard"],
            QtGui.QWidget: ["dock_browse", "dock_parameters",
                            "dock_study_config", "dock_board"],
            QtGui.QTreeWidget: ["menu_treectrl", ],
            QtGui.QLineEdit: ["search", ],
        }

        # Add ui class parameter with the dynamic controls and initialize
        # default values
        self.add_controls_to_ui()
        self.ui.display.setTabsClosable(True)

        # Create the study configuration
        self.study_config = StudyConfig(default_study_config)

        # Create the controller widget associated to the study
        # configuration controller
        self.study_config_widget = ScrollControllerWidget(
            self.study_config, live=True)
        self.ui.dockWidgetStudyConfig.setWidget(self.study_config_widget)

        # Create the pipeline menu
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu)

        # Signal for window interface
        self.ui.actionHelp.triggered.connect(self.onHelpClicked)
        self.ui.actionChangeView.triggered.connect(self.onChangeViewClicked)

        # Signal for tab widget
        self.ui.display.currentChanged.connect(self.onCurrentTabChanged)
        self.ui.display.tabCloseRequested.connect(self.onCloseTabClicked)

        # Signal for dock widget
        self.ui.actionBrowse.triggered.connect(self.onBrowseClicked)
        self.ui.actionParameters.triggered.connect(self.onParametersClicked)
        self.ui.actionStudyConfig.triggered.connect(self.onStudyConfigClicked)
        self.ui.actionQualityControl.triggered.connect(self.onQualityControlClicked)

        # Initialize properly the visibility of each dock widget
        self.onBrowseClicked()
        self.onParametersClicked()
        self.onStudyConfigClicked()
        self.onQualityControlClicked()

        # Signal for the pipeline creation
        self.ui.search.textChanged.connect(self.onSearchClicked)
        self.ui.menu_treectrl.currentItemChanged.connect(
            self.onTreeSelectionChanged)
        self.ui.actionLoad.triggered.connect(self.onLoadClicked)

        # Signal for the execution
        self.ui.actionRun.triggered.connect(self.onRunClicked)
Example #7
class CapsulMainWindow(MyQUiLoader):
    """ Capsul main window.
    """
    def __init__(self, pipeline_menu, ui_file, default_study_config=None):
        """ Method to initialize the Capsul main window class.

        Parameters
        ----------
        pipeline_menu: hierarchic dict
            each key is a sub module of the module. Leaves contain a list
            with the URL to the documentation.
        ui_file: str (mandatory)
            a filename containing the user interface description
        default_study_config: ordered dict (mandatory)
            some parameters for the study configuration
        """
        # Inheritance: load user interface window
        MyQUiLoader.__init__(self, ui_file)

        # Class parameters
        self.pipeline_menu = pipeline_menu
        self.pipelines = {}
        self.pipeline = None
        self.path_to_pipeline_doc = {}

        # Define dynamic controls
        self.controls = {
            QtGui.QAction: ["actionHelp", "actionQuit", "actionBrowse",
                            "actionLoad", "actionChangeView",
                            "actionParameters", "actionRun",
                            "actionStudyConfig", "actionQualityControl"],
            QtGui.QTabWidget: ["display", ],
            QtGui.QDockWidget: ["dockWidgetBrowse", "dockWidgetParameters",
                                "dockWidgetStudyConfig", "dockWidgetBoard"],
            QtGui.QWidget: ["dock_browse", "dock_parameters",
                            "dock_study_config", "dock_board"],
            QtGui.QTreeWidget: ["menu_treectrl", ],
            QtGui.QLineEdit: ["search", ],
        }

        # Add ui class parameter with the dynamic controls and initialize
        # default values
        self.add_controls_to_ui()
        self.ui.display.setTabsClosable(True)

        # Create the study configuration
        self.study_config = StudyConfig(default_study_config)

        # Create the controller widget associated to the study
        # configuration controller
        self.study_config_widget = ScrollControllerWidget(
            self.study_config, live=True)
        self.ui.dockWidgetStudyConfig.setWidget(self.study_config_widget)

        # Create the pipeline menu
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu)

        # Signal for window interface
        self.ui.actionHelp.triggered.connect(self.onHelpClicked)
        self.ui.actionChangeView.triggered.connect(self.onChangeViewClicked)

        # Signal for tab widget
        self.ui.display.currentChanged.connect(self.onCurrentTabChanged)
        self.ui.display.tabCloseRequested.connect(self.onCloseTabClicked)

        # Signal for dock widget
        self.ui.actionBrowse.triggered.connect(self.onBrowseClicked)
        self.ui.actionParameters.triggered.connect(self.onParametersClicked)
        self.ui.actionStudyConfig.triggered.connect(self.onStudyConfigClicked)
        self.ui.actionQualityControl.triggered.connect(self.onQualityControlClicked)

        # Initialize properly the visibility of each dock widget
        self.onBrowseClicked()
        self.onParametersClicked()
        self.onStudyConfigClicked()
        self.onQualityControlClicked()

        # Signal for the pipeline creation
        self.ui.search.textChanged.connect(self.onSearchClicked)
        self.ui.menu_treectrl.currentItemChanged.connect(
            self.onTreeSelectionChanged)
        self.ui.actionLoad.triggered.connect(self.onLoadClicked)

        # Signal for the execution
        self.ui.actionRun.triggered.connect(self.onRunClicked)

        # Set default values

        # Set some tooltips

    def show(self):
        """ Shows the widget and its child widgets.
        """
        self.ui.show()

    def add_controls_to_ui(self):
        """ Method to find dynamic controls
        """
        # Error message template
        error_message = "{0} has no attribute '{1}'"

        # Go through the class dynamic controls
        for control_type, control_item in self.controls.iteritems():

            # Get the dynamic control name
            for control_name in control_item:

                # Try to set the control value to the ui class parameter
                try:
                    value = self.ui.findChild(control_type, control_name)
                    if value is None:
                        logger.error(error_message.format(
                            type(self.ui), control_name))
                    setattr(self.ui, control_name, value)
                except Exception:
                    logger.error(error_message.format(
                        type(self.ui), control_name))

    ###########################################################################
    # Slots   
    ###########################################################################

    def onRunClicked(self):
        """ Event to execute the process/pipeline.
        """
        self.study_config.run(self.pipeline, executer_qc_nodes=True, verbose=1)

    def onBrowseClicked(self):
        """ Event to show / hide the browse dock widget.
        """
        # Show browse dock widget
        if self.ui.actionBrowse.isChecked():
            self.ui.dockWidgetBrowse.show()

        # Hide browse dock widget
        else:
            self.ui.dockWidgetBrowse.hide()

    def onParametersClicked(self):
        """ Event to show / hide the parameters dock widget.
        """
        # Show parameters dock widget
        if self.ui.actionParameters.isChecked():
            self.ui.dockWidgetParameters.show()

        # Hide parameters dock widget
        else:
            self.ui.dockWidgetParameters.hide()

    def onStudyConfigClicked(self):
        """ Event to show / hide the study config dock widget.
        """
        # Show study configuration dock widget
        if self.ui.actionStudyConfig.isChecked():
            self.ui.dockWidgetStudyConfig.show()

        # Hide study configuration dock widget
        else:
            self.ui.dockWidgetStudyConfig.hide()

    def onQualityControlClicked(self):
        """ Event to show / hide the board dock widget.
        """
        # Create and show board dock widget
        if self.ui.actionQualityControl.isChecked():

            # Create the board widget associated to the pipeline controller
            # Create on the fly in order to get the last status
            # ToDo: add callbacks
            if self.pipeline is not None:
                # board_widget = BoardWidget(
                #     self.pipeline, parent=self.ui.dockWidgetParameters,
                #     name="board")
                board_widget = ScrollControllerWidget(
                    self.pipeline, name="outputs", live=True,
                    hide_labels=False, select_controls="outputs",
                    disable_controller_widget=True)
                #board_widget.setEnabled(False)
                self.ui.dockWidgetBoard.setWidget(board_widget)

            # Show the board widget
            self.ui.dockWidgetBoard.show()

        # Hide board dock widget
        else:
            self.ui.dockWidgetBoard.hide()

    def onSearchClicked(self):
        """ Event to refresh the menu tree control that contains the pipeline
        modules.
        """
        # Clear the current tree control
        self.ui.menu_treectrl.clear()

        # Build the new filtered tree control
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu,
                      self.ui.search.text().lower())

    def onTreeSelectionChanged(self):
        """ Event to refresh the pipeline load button status.
        """
        # Get the current item
        item = self.ui.menu_treectrl.currentItem()
        if item is None:
            return

        # Check if we have selected a pipeline in the tree and enable / disable
        # the load button
        url = item.text(2)
        if url == "None":
            self.ui.actionLoad.setEnabled(False)
        else:
            self.ui.actionLoad.setEnabled(True)

    def onRunStatus(self):
        """ Event to refresh the run button status.

        When all the controller widget controls are correctly filled, enable
        the user to execute the pipeline.
        """
        # Get the controller widget
        controller_widget = self.ui.dockWidgetParameters.widget().controller_widget

        # Get the controller widget status
        is_valid = controller_widget.is_valid()

        # Depending on the controller widget status enable / disable
        # the run button
        self.ui.actionRun.setEnabled(is_valid)

    def onLoadClicked(self):
        """ Event to load and display a pipeline.
        """
        # Get the pipeline instance from its string description
        item = self.ui.menu_treectrl.currentItem()
        description_list = [str(x) for x in [item.text(1), item.text(0)]
                            if x != ""]
        process_description = ".".join(description_list)
        self.pipeline = get_process_instance(process_description)

        # Create the controller widget associated to the pipeline
        # controller
        pipeline_widget = ScrollControllerWidget(
            self.pipeline, live=True, select_controls="inputs")
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Add observer to refresh the run button
        controller_widget = pipeline_widget.controller_widget
        for control_name, control in controller_widget._controls.iteritems():

            # Unpack the control item
            trait, control_class, control_instance, control_label = control

            # Add the new callback
            control_class.add_callback(self.onRunStatus, control_instance)

        # Refresh manually the run button status the first time
        self.onRunStatus()

        # Store the pipeline documentation root path
        self.path_to_pipeline_doc[self.pipeline.id] = item.text(2)

        # Store the pipeline instance
        self.pipelines[self.pipeline.name] = (
            self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)

    def onLoadSubPipelineClicked(self, name, sub_pipeline, modifiers):
        """ Event to load and display a sub pipeline.
        """
        # Store the pipeline instance in class parameters
        self.pipeline = self.pipeline.nodes[name].process

        # Create the controller widget associated to the sub pipeline
        # controller: if the sub pipeline is an IterativePipeline, disable
        # the corresponding controller widget since this pipeline is generated
        # on the fly and is not directly synchronized with the rest of the
        # pipeline.
        is_iterative_pipeline = False
        if isinstance(self.pipeline, IterativePipeline):
            is_iterative_pipeline = True
        pipeline_widget = ScrollControllerWidget(
            self.pipeline, live=True, select_controls="inputs",
            disable_controller_widget=is_iterative_pipeline)
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Store the sub pipeline instance
        self.pipelines[self.pipeline.name] = (
            self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)

    def onCloseTabClicked(self, index):
        """ Event to close a pipeline view.
        """
        # Remove the pipeline from the intern pipeline list
        pipeline, pipeline_widget = self.pipelines[
            self.ui.display.tabText(index)]
        pipeline_widget.close()
        pipeline_widget.deleteLater()
        del self.pipelines[self.ui.display.tabText(index)]

        # Remove the table that contains the pipeline
        self.ui.display.removeTab(index)

    def onCurrentTabChanged(self, index):
        """ Event to refresh the controller controller widget when a new
        tab is selected
        """
        # If no valid tab index has been passed
        if index < 0:
            self.ui.actionRun.setEnabled(False)

        # A new valid tab is selected
        else:
            # Get the selected pipeline widget
            self.pipeline, pipeline_widget = self.pipelines[
                self.ui.display.tabText(index)]

            # Set the controller widget associated to the pipeline
            # controller
            self.ui.dockWidgetParameters.setWidget(pipeline_widget)

            # Refresh manually the run button status the first time
            self.onRunStatus()

    def onHelpClicked(self):
        """ Event to display the documentation of the active pipeline.
        """
        # Create a dialog box to display the html documentation
        win = QtGui.QDialog()
        win.setWindowTitle("Pipeline Help")

        # Build the pipeline documentation location
        # Possible since common tools generate the sphinx documentation
        if self.pipeline:

            # Generate the url to the active pipeline documentation
            path_to_active_pipeline_doc = os.path.join(
                self.path_to_pipeline_doc[self.pipeline.id], "generated",
                self.pipeline.id.split(".")[1], "pipeline",
                self.pipeline.id + ".html")

            # Create and fill a QWebView
            help = QtWebKit.QWebView()
            help.load(QtCore.QUrl(path_to_active_pipeline_doc))
            help.show()

            # Create and set a layout with the web view
            layout = QtGui.QHBoxLayout()
            layout.addWidget(help)
            win.setLayout(layout)

            # Display the window
            win.exec_()

        # No pipeline loaded, can't show the documentation
        # Display a message box
        else:
            QtGui.QMessageBox.information(
                self.ui, "Information", "First load a pipeline!")

    def onChangeViewClicked(self):
        """ Event to switch between simple and full pipeline views.
        """
        # Check if a pipeline has been loaded
        if self._is_active_pipeline_valid():

            # Check the current display mode
            # Case PipelineDevelopperView
            if isinstance(self.ui.display.currentWidget(),
                          PipelineDevelopperView):

                # Switch to PipelineUserView display mode
                widget = PipelineUserView(self.pipeline)
                self._insert_widget_in_tab(widget)

            # Case PipelineUserView
            else:

                # Switch to PipelineDevelopperView display mode
                widget = PipelineDevelopperView(self.pipeline)
                self._insert_widget_in_tab(widget)

        # No pipeline loaded error
        else:
            logger.error("No active pipeline selected. "
                          "Have you forgotten to click the load pipeline "
                          "button?")

    #####################
    # Private interface #
    #####################

    def _insert_widget_in_tab(self, widget):
        """ Insert a new widget or replace an existing widget.

        Parameters
        ----------
        widget: a widget (mandatory)
            the widget we want to draw
        """
        # Search if the tab corresponding to the widget has already been created
        already_created = False
        index = 0

        # Go through all the tabs
        for index in range(self.ui.display.count()):

            # Check if we have a match: the tab name is equal to the current
            # pipeline name
            if (self.ui.display.tabText(index) == self.pipeline.name):
                already_created = True
                break

        # If no match found, add a new tab with the widget
        if not already_created:
            self.ui.display.addTab(
                widget, unicode(self.pipeline.name))
            self.ui.display.setCurrentIndex(
                self.ui.display.count() - 1)

        # Otherwise, replace the widget from the match tab
        else:
            # Delete the tab
            self.ui.display.removeTab(index)

            # Insert the new tab
            self.ui.display.insertTab(
                index, widget, unicode(self.pipeline.name))

            # Set the corresponding index
            self.ui.display.setCurrentIndex(index)



    def _is_active_pipeline_valid(self):
        """ Method to ceack that the active pipeline is valid

        Returns
        -------
        is_valid: bool
            True if the active pipeline is valid
        """
        return self.pipeline is not None
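
# A minimal sketch of how this window might be instantiated. The menu
# content and the .ui file path below are hypothetical, and QtGui is
# assumed to come from the same PySide import used by the class above.
if __name__ == "__main__":
    import sys

    pipeline_menu = {"clinfmri": {"preproc": ["http://example.org/doc"]}}
    app = QtGui.QApplication(sys.argv)
    window = CapsulMainWindow(pipeline_menu, "capsul_main_window.ui")
    window.show()
    app.exec_()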
Example #8
    def setUp(self):
        """ Initialize the TestQCNodes class
        """
        self.pipeline = MyPipeline()
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(output_directory=self.output_directory)
Example #9
def pilot_bet(enable_display=False):
    """
    Brain Extraction Tool
    =====================
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=False,
        spm_directory="/i2bm/local/spm8",
        use_spm=False,
        output_directory=working_dir)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset to the local file system (here the MNI template):
    """
    template_dataset = get_sample_data("mni_1mm")

    """
    Processing definition
    ---------------------
    """
    pipeline = get_process_instance("clinfmri.utils.converted_fsl_bet")
    print pipeline.get_input_spec()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = template_dataset.brain
    pipeline.generate_binary_mask = True

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example #10
def pilot_new_segment(enable_display=False):
    """ 
    New Segment
    ===========

    Unified SPM segmentation: segments, bias corrects and spatially normalises.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/spm_newsegment"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset to the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * fmri: the functional volume.
        * anat: the structural volume.
        * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`new segment pipeline <clinfmri.utils.SpmNewSegment>`
    that defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.utils.spm_new_segment.xml")
    print pipeline.get_input_spec()

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.channel_files = [toy_dataset.mean]
    pipeline.reference_volume = template_dataset.brain

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example #11
# (The original snippet starts mid-statement; the opening condition below is
# reconstructed from the branch that follows.)
if not os.path.isdir(logdir):
    os.mkdir(logdir)
elif args.erase:
    shutil.rmtree(logdir)
    os.mkdir(logdir)

"""
First create a study configuration.
"""
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
    use_smart_caching=False,
    fsl_config=args.fslconfig,
    use_fsl=True,
    use_matlab=False,
    use_spm=False,
    spm_exec=args.spmbin,
    spm_standalone=True,
    use_nipype=True,
    output_directory=tmp_capsul,
    number_of_cpus=1,
    generate_logging=True,
    use_scheduler=True,
)

"""
Processing definition: create the <clinfmri.preproc.FmriPreproc> pipeline
that defines the different steps of the processing.
"""
pipeline = get_process_instance("clinfmri.preproc.fmri_preproc.xml")

"""
Example #12
# Create the subject output directory
soutdir = os.path.join(args.outdir, args.sid)
capsulwd = os.path.join(soutdir, "capsul")
if args.erase and os.path.isdir(soutdir):
    shutil.rmtree(soutdir)
if not os.path.isdir(capsulwd):
    os.makedirs(capsulwd)

# Create the study configuration
print "Study_config init..."
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "NipypeConfig"],
    use_smart_caching=False,
    use_matlab=False,
    use_spm=True,
    spm_exec=args.spmbin,
    spm_standalone=True,
    use_nipype=True,
    output_directory=capsulwd)
print "    ... done."

# Get the pipeline
pipeline = get_process_instance("clinfmri.utils.spm_new_segment_only.xml")

# Configure the pipeline
pipeline.channel_files = [args.t1file]
# To find the template TPM.nii from the standalone distribution
pipeline.spm_dir = args.spmdir

# Execute the pipeline
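# The snippet stops here; following the pattern of the other examples, the
# execution call would presumably be:
study_config.run(pipeline, verbose=1)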
Example #13
def pilot_preproc():
    """
    FMRI preprocessings
    ===================
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_preproc"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Now we get the pipeline from its definition (xml file)
    """
    pipeline = get_process_instance(
        "clinfmri.preproc.pipeline.fmri_preproc.xml")

    """
    And then define the study configuration (here we activate the smart
    caching module, which remembers which processes have already been
    executed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig",
                 "FSLConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset to the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * **??**: ??.

    Processing definition
    ---------------------

    First create the
    :ref:`slice timing pipeline <pclinfmri.preproc.pipeline.SliceTiming>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("pclinfmri.preproc.fmri_preproc.xml")
    print pipeline.get_input_spec()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.structural_file = toy_dataset.anat
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = "none"
    pipeline.select_registration = "template"
    pipeline.template_file = template_dataset.brain
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]  # ascending order, 40 slices

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example #14
def pilot(working_dir="/volatile/nsap/caps", **kwargs):
    """
    ===============================
    Diffusion Brain Extraction Tool
    ===============================
    .. topic:: Objective

        We propose to extract the brain mask from a diffusion sequence.

    Import
    ------

    First we load the function that enables us to access the toy datasets
    """
    from caps.toy_datasets import get_sample_data
    """
    From capsul we then load the class to configure the study we want to
    perform
    """
    from capsul.study_config import StudyConfig
    """
    Here two utility tools are loaded. The first enables the creation of
    ordered dictionaries and the second ensures that a directory exists.
    Note that the directory will be created if necessary.
    """
    from capsul.utils.sorted_dictionary import SortedDictionary
    from nsap.lib.base import ensure_is_dir
    """
    Load the toy dataset
    --------------------

    We want to perform BET on a diffusion sequence.
    To do so, we use the *get_sample_data* function to load this
    dataset.

    .. seealso::

        For a complete description of the *get_sample_data* function, see the
        :ref:`Toy Datasets documentation <toy_datasets_guide>`
    """
    toy_dataset = get_sample_data("dwi")
    """
    The *toy_dataset* is an Enum structure with two specific elements of
    interest, *dwi* and *bvals*, that contain the NIfTI diffusion image
    and the b-values respectively.
    """
    print(toy_dataset.dwi, toy_dataset.bvals)
    """
    Will return:

    .. code-block:: python

        /home/ag239446/git/nsap-src/nsap/data/DTI30s010.nii
        /home/ag239446/git/nsap-src/nsap/data/DTI30s010.bval

    We can see that the image has been found in a local directory

    Processing definition
    ---------------------

    Now we need to define the processing step that will perform BET on
    diffusion sequence.
    """
    bet_pipeline = dBET()
    """
    It is possible to access the pipeline input specification.
    """
    print(bet_pipeline.get_input_spec())
    """
    Will return the input parameters the user can set:

    .. code-block:: python

        INPUT SPECIFICATIONS

        dw_image: ['File']
        bvals: ['File']
        specified_index_of_ref_image: ['Int']
        terminal_output: ['Enum']
        generate_binary_mask: ['Bool']
        use_4d_input: ['Bool']
        generate_mesh: ['Bool']
        generate_skull: ['Bool']
        bet_threshold: ['Float']

    We can now tune the pipeline parameters.
    We first set the input dwi file:
    """
    bet_pipeline.dw_image = toy_dataset.dwi
    """
    And set the b-values file
    """
    bet_pipeline.bvals = toy_dataset.bvals
    """
    Study Configuration
    -------------------

    The pipeline is now set up and ready to be executed.
    For a complete description of a study execution, see the
    :ref:`Study Configuration description <study_configuration_guide>`
    """
    bet_working_dir = os.path.join(working_dir, "diffusion_bet")
    ensure_is_dir(bet_working_dir)
    default_config = SortedDictionary(
        ("output_directory", bet_working_dir),
        ("fsl_config", "/etc/fsl/4.1/fsl.sh"), ("use_fsl", True),
        ("use_smart_caching", True), ("generate_logging", True))
    study = StudyConfig(default_config)
    study.run(bet_pipeline)
    """
    Results
    -------

    Finally, we print the pipeline outputs
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in bet_pipeline.get_outputs().iteritems():
        print("{0}: {1}".format(trait_name, trait_value))
    """
Example #15
def pilot_preproc_spm_fmri(enable_display=False):
    """
    FMRI preprocessings
    ===================

    Preprocessing with the SPM slice timing and a normalization to a given
    template.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_preproc_spm_fmri"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset to the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * fmri: the functional volume.
        * anat: the structural volume.
        * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`fmri preprocessing pipeline <clinfmri.preproc.FmriPreproc>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.preproc.converted_fmri_preproc")
    print pipeline.get_input_spec()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.structural_file = toy_dataset.anat
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = "spm"
    pipeline.select_normalization = "fmri"
    pipeline.template_file = template_dataset.brain
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]  # ascending order, 40 slices

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example #16
class TestSomaWorkflow(unittest.TestCase):

    def setUp(self):
        default_config = SortedDictionary(
            ("use_soma_workflow", True)
        )
        # use a custom temporary soma-workflow dir to avoid concurrent
        # access problems
        tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
        os.close(tmpdb[0])
        os.unlink(tmpdb[1])
        self.soma_workflow_temp_dir = tmpdb[1]
        os.mkdir(self.soma_workflow_temp_dir)
        swf_conf = StringIO.StringIO('[%s]\nSOMA_WORKFLOW_DIR = %s\n' \
            % (socket.gethostname(), tmpdb[1]))
        swconfig.Configuration.search_config_path \
            = staticmethod(lambda : swf_conf)
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()

    def tearDown(self):
        shutil.rmtree(self.soma_workflow_temp_dir)

    def test_atomic_dependencies(self):
        workflow = workflow_from_pipeline(self.atomic_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 4)
        self.assertTrue(("node1", "node2") in dependencies)
        self.assertTrue(("node1", "node3") in dependencies)
        self.assertTrue(("node2", "node4") in dependencies)
        self.assertTrue(("node3", "node4") in dependencies)
        self.assertEqual(workflow.groups, [])

    def test_atomic_execution(self):
        self.atomic_pipeline.workflow_ordered_nodes()
        if sys.version_info >= (2, 7):
            self.assertIn(self.atomic_pipeline.workflow_repr,
                          ('node1->node3->node2->node4',
                           'node1->node2->node3->node4'))
        else: # python 2.6 unittest does not have assertIn()
            self.assertTrue(self.atomic_pipeline.workflow_repr in \
                ('node1->node3->node2->node4',
                'node1->node2->node3->node4'))
        self.study_config.run(self.atomic_pipeline)

    def test_composite_dependencies(self):
        workflow = workflow_from_pipeline(self.composite_pipeline)
        dependencies = [(x.name, y.name) for x, y in workflow.dependencies]
        self.assertTrue(len(dependencies) == 16)
        self.assertEqual(dependencies.count(("node1", "node2")), 1)
        self.assertEqual(dependencies.count(("node1", "node3")), 2)
        self.assertEqual(dependencies.count(("node2", "node4")), 1)
        self.assertEqual(dependencies.count(("node3", "node4")), 2)
        self.assertEqual(dependencies.count(("node1", "node2_input")), 1)
        self.assertEqual(dependencies.count(("node2_output", "node4")), 1)
        self.assertTrue(len(workflow.groups) == 1)

    def test_composite_execution(self):
        self.composite_pipeline.workflow_ordered_nodes()
        self.assertEqual(self.composite_pipeline.workflow_repr,
                         "node1->node3->node2->node4")
        self.study_config.run(self.composite_pipeline)
Example #17
def pilot_bet(enable_display=False):
    """ 
    BET
    ===

    Brain extraction with FSL. 

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fsl_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset to the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * fmri: the functional volume.
        * anat: the structural volume.
        * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`bet pipeline <clinfmri.preproc.FslBet>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.utils.fsl_bet.xml")
    print pipeline.get_input_spec()

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = toy_dataset.anat
    pipeline.generate_binary_mask = True
    pipeline.bet_threshold = 0.5

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example #18
def pilot_newsegment():
    """ 
    New Segment
    ===========
    """
    # Pilot imports
    import os
    from caps.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from pclinfmri.utils.pipeline import SpmNewSegment

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/pclinfmri/spmnewsegment"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module, which remembers which processes have already been
    executed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "MatlabConfig", "SPMConfig",
                 "NipypeConfig"],
        use_smart_caching=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset to the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * **??**: ??.

    Processing definition
    ---------------------

    First create the
    :ref:`new segment pipeline <pclinfmri.utils.pipeline.SpmNewSegment>` that
    defines the different steps of the processing:
    """
    pipeline = SpmNewSegment()
    print pipeline.get_input_spec()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.coregistered_struct_file = toy_dataset.mean

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=True, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))