def test_simple_run(self):
    """ Method to test a simple 1 cpu call with the scheduler.
    """
    # Build the study configuration: single CPU, scheduler enabled,
    # smart caching and logging turned on.
    config = StudyConfig(
        modules=[],
        use_smart_caching=True,
        output_directory=self.outdir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    # Instantiate the pipeline under test and ask for dated file names.
    pipeline = get_process_instance(self.pipeline_name)
    pipeline.date_in_filename = True

    # Stage the sample DICOM file inside a 'dicom' sub-folder of the test
    # output directory and point the pipeline at that folder.
    sample = get_sample_data("dicom")
    dicom_dir = os.path.join(self.outdir, "dicom")
    if not os.path.isdir(dicom_dir):
        os.makedirs(dicom_dir)
    shutil.copy(sample.barre, os.path.join(dicom_dir, "heart.dcm"))
    pipeline.source_dir = dicom_dir

    # Optional interactive inspection of the pipeline (disabled dead code,
    # kept as in the original).
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    # Run the pipeline within the configured study.
    config.run(pipeline)
def pilot_gdti_estimation():
    """ Generalized diffusion tensor estimation
    =======================================

    Run the 'clindmri.estimation.gdti' CAPSUL pipeline on a hard-coded
    diffusion dataset (dwi volume + bval/bvec files) with a smart-caching
    study configuration.
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/gdti"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already
    been processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig"],
        use_smart_caching=True,
        output_directory=working_dir)

    # Create pipeline.
    # NOTE: prints use the function form with a single pre-formatted
    # argument so the statements behave identically under Python 2
    # (parenthesized expression) and Python 3 (print function).
    start_time = datetime.datetime.now()
    print("Start Pipeline Creation {0}".format(start_time))
    pipeline = get_process_instance("clindmri.estimation.gdti.xml")
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # View pipeline (disabled dead code kept from the original)
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters (hard-coded test dataset)
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    pipeline.order = 2
    pipeline.odf = False
    print("Done in {0} seconds.".format(datetime.datetime.now() - start_time))

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
class TestRunProcess(unittest.TestCase):
    """ Execute a process.
    """

    def _run_in_fresh_dir(self, use_smart_caching):
        # Build a study configuration in a throw-away directory, run the
        # dummy-process checks, then remove the directory.
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(
            modules=["SmartCachingConfig"],
            use_smart_caching=use_smart_caching,
            output_directory=self.output_directory)
        self.execution_dummy()
        shutil.rmtree(self.output_directory)

    def test_execution_with_cache(self):
        """ Execute a process with cache.
        """
        self._run_in_fresh_dir(True)

    def test_execution_without_cache(self):
        """ Execute a process without cache.
        """
        self._run_in_fresh_dir(False)

    def execution_dummy(self):
        """ Test to execute DummyProcess.
        """
        process = DummyProcess()
        # The pair (1., 2.3) occurs twice so the second occurrence exercises
        # the caching path when smart caching is enabled.
        for f1, f2 in [(1., 2.3), (2., 2.), (1., 2.3)]:
            self.study_config.run(process, executer_qc_nodes=False, verbose=1,
                                  f1=f1, f2=f2)
            self.assertEqual(process.res, f1 * f2)
            self.assertEqual(
                process.output_directory,
                os.path.join(
                    self.output_directory,
                    "{0}-{1}".format(self.study_config.process_counter - 1,
                                     process.name)))
class TestRunProcess(unittest.TestCase):
    """ Execute a process.
    """

    def test_execution_with_cache(self):
        """ Execute a process with cache.
        """
        # Fresh study configuration in a temporary directory, with the
        # smart-caching module active.
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(
            modules=["SmartCachingConfig"],
            use_smart_caching=True,
            output_directory=self.output_directory)
        self.execution_dummy()
        shutil.rmtree(self.output_directory)

    def test_execution_without_cache(self):
        """ Execute a process without cache.
        """
        # Same setup as above but with smart caching disabled.
        self.output_directory = tempfile.mkdtemp()
        self.study_config = StudyConfig(
            modules=["SmartCachingConfig"],
            use_smart_caching=False,
            output_directory=self.output_directory)
        self.execution_dummy()
        shutil.rmtree(self.output_directory)

    def execution_dummy(self):
        """ Test to execute DummyProcess.
        """
        # Instantiate the process through the loader so its output directory
        # is bound to the study's temporary directory.
        process = get_process_instance(
            DummyProcess, output_directory=self.output_directory)

        # The pair (1., 2.3) appears twice: the repeat exercises the cache.
        for f1, f2 in ((1., 2.3), (2., 2.), (1., 2.3)):
            self.study_config.run(process, executer_qc_nodes=False, verbose=1,
                                  f1=f1, f2=f2)
            self.assertEqual(process.res, f1 * f2)
            self.assertEqual(process.output_directory, self.output_directory)
# NOTE(review): fragment — this chunk is the tail of a contrast-definition
# list whose opening bracket, and the surrounding pipeline setup
# ('study_config', 'pipeline', 'start_time', the GUI imports), begin outside
# this chunk. Kept byte-identical; also note it uses Python-2-only 'print'
# statements. Each contrast tuple appears to be
# (name, stat_type, condition_names, weights) — TODO confirm against the
# pipeline that consumes it.
("Cognitive - Motor","T", ['clicDaudio','clicGaudio','clicDvideo','clicGvideo','calculaudio','calculvideo', 'phrasevideo','phraseaudio'],[-0.25,-0.25,-0.25,-0.25,0.25,0.25,0.25,0.25]), ("Audio Computation - Audio Sentences","T",['calculaudio','phraseaudio'],[1,-1]), ("Video Computation - Video Sentences","T",['calculvideo','phrasevideo'],[1,-1]), ("Computation - Sentences","T", ['calculaudio','calculvideo','phraseaudio','phrasevideo'],[0.5,0.5,-0.5,-0.5]), ("Video - Checkerboard","T", ['clicDvideo','clicGvideo','calculvideo','phrasevideo','damier_H','damier_V'], [0.25,0.25,0.25,0.25,-0.5,-0.5]), ("Video Sentences - Checkerboard","T", ['phrasevideo','damier_H','damier_V'],[1,-0.5,-0.5]), ("Audio Click - Audio Sentences","T", ['clicDaudio','clicGaudio','phraseaudio'],[0.5,0.5,-1]), ("Video Click - Video Sentences","T", ['clicDvideo','clicGvideo','phrasevideo'],[0.5,0.5,-1]), ] print "Done in {0} seconds.".format(datetime.datetime.now() - start_time) # View pipeline if 0: app = QtGui.QApplication(sys.argv) view1 = PipelineDevelopperView(pipeline) view1.show() app.exec_() del view1 # Execute the pipeline in the configured study study_config.run(pipeline, verbose=1)
def pilot_qa_fmri():
    """
    Imports
    -------

    This code needs 'capsul' and 'mmutils' package in order to instantiate
    and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or in pypi.
    """
    # Capsul import
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    # Mmutils import
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and the 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "mmqa.fmri.fmri_quality_assurance_bbox.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attributes, and to specify that we want a log of the
    processing step through the 'generate_logging'. The 'use_scheduler'
    must be set to True if more than 1 CPU is used.
    """
    study_config = StudyConfig(
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,
        output_directory=outdir)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a functional image that is downloaded
    if it is necessary through the 'get_sample_data' function and exported
    locally.
    """
    localizer_dataset = get_sample_data("localizer_extra")

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the converted
    file name and we set two DICOM directories to be converted in Nifti
    format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.image_file = localizer_dataset.fmri
    pipeline.repetition_time = 2.0
    pipeline.exclude_volume = []
    pipeline.roi_size = 21
    pipeline.score_file = os.path.join(outdir, "scores.json")

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    Display the computed scores
    """
    # NOTE(review): the parametrization above writes 'pipeline.score_file'
    # (singular) while the result is read back from 'pipeline.scores_file'
    # (plural) — confirm against the pipeline XML that both traits exist,
    # otherwise one of the two attribute names is a typo.
    scores_file = pipeline.scores_file
    with open(scores_file, "r") as _file:
        scores = json.load(_file)
    # dict.items() (not Python-2-only iteritems()) and the print function
    # form keep this loop valid under both Python 2 and Python 3.
    for key, value in scores.items():
        print("{0} = {1}".format(key, value))
def pilot_qa_fmri():
    """
    Imports
    -------

    This code needs 'capsul' and 'mmutils' package in order to instantiate
    and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or in pypi.
    """
    # Capsul import
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    # Mmutils import
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and the 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "mmqa.fmri.fmri_quality_assurance_bbox.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attributes, and to specify that we want a log of the
    processing step through the 'generate_logging'. The 'use_scheduler'
    must be set to True if more than 1 CPU is used.
    """
    study_config = StudyConfig(
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,
        output_directory=outdir)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a functional image that is downloaded
    if it is necessary through the 'get_sample_data' function and exported
    locally.
    """
    localizer_dataset = get_sample_data("localizer_extra")

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the converted
    file name and we set two DICOM directories to be converted in Nifti
    format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.image_file = localizer_dataset.fmri
    pipeline.repetition_time = 2.0
    pipeline.exclude_volume = []
    pipeline.roi_size = 21
    pipeline.score_file = os.path.join(outdir, "scores.json")

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    Display the computed scores
    """
    # NOTE(review): 'pipeline.score_file' is set above but the result is
    # read from 'pipeline.scores_file' — verify both traits exist on the
    # pipeline, otherwise one of the attribute names is a typo.
    scores_file = pipeline.scores_file
    with open(scores_file, "r") as _file:
        scores = json.load(_file)
    # dict.items() (not Python-2-only iteritems()) and the print function
    # form keep this loop valid under both Python 2 and Python 3.
    for key, value in scores.items():
        print("{0} = {1}".format(key, value))
class CapsulMainWindow(MyQUiLoader):
    """ Capsul main window.

    Top-level Qt window that lets the user browse the pipeline menu, load a
    pipeline, edit its parameters, configure the study and run it.
    """
    def __init__(self, pipeline_menu, ui_file, default_study_config=None):
        """ Method to initialize the Capsul main window class.

        Parameters
        ----------
        pipeline_menu: hierarchic dict
            each key is a sub module of the module. Leafs contain a list
            with the url to the documentation.
        ui_file: str (mandatory)
            a filename containing the user interface description
        default_study_config: ordered dict (mandatory)
            some parameters for the study configuration
        """
        # Inheritance: load user interface window
        MyQUiLoader.__init__(self, ui_file)

        # Class parameters
        self.pipeline_menu = pipeline_menu
        self.pipelines = {}
        self.pipeline = None
        self.path_to_pipeline_doc = {}

        # Define dynamic controls: widget type -> object names to resolve
        # from the loaded ui file.
        self.controls = {
            QtGui.QAction: [
                "actionHelp", "actionQuit", "actionBrowse", "actionLoad",
                "actionChangeView", "actionParameters", "actionRun",
                "actionStudyConfig", "actionQualityControl"],
            QtGui.QTabWidget: ["display", ],
            QtGui.QDockWidget: ["dockWidgetBrowse", "dockWidgetParameters",
                                "dockWidgetStudyConfig", "dockWidgetBoard"],
            QtGui.QWidget: ["dock_browse", "dock_parameters",
                            "dock_study_config", "dock_board"],
            QtGui.QTreeWidget: ["menu_treectrl", ],
            QtGui.QLineEdit: ["search", ],
        }

        # Add ui class parameter with the dynamic controls and initialize
        # default values
        self.add_controls_to_ui()
        self.ui.display.setTabsClosable(True)

        # Create the study configuration
        self.study_config = StudyConfig(default_study_config)

        # Create the controller widget associated to the study
        # configuration controller
        self.study_config_widget = ScrollControllerWidget(
            self.study_config, live=True)
        self.ui.dockWidgetStudyConfig.setWidget(self.study_config_widget)

        # Create the pipeline menu
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu)

        # Signal for window interface
        self.ui.actionHelp.triggered.connect(self.onHelpClicked)
        self.ui.actionChangeView.triggered.connect(self.onChangeViewClicked)

        # Signal for tab widget
        self.ui.display.currentChanged.connect(self.onCurrentTabChanged)
        self.ui.display.tabCloseRequested.connect(self.onCloseTabClicked)

        # Signal for dock widget
        self.ui.actionBrowse.triggered.connect(self.onBrowseClicked)
        self.ui.actionParameters.triggered.connect(self.onParametersClicked)
        self.ui.actionStudyConfig.triggered.connect(self.onStudyConfigClicked)
        self.ui.actionQualityControl.triggered.connect(
            self.onQualityControlClicked)

        # Initialize properly the visibility of each dock widget
        self.onBrowseClicked()
        self.onParametersClicked()
        self.onStudyConfigClicked()
        self.onQualityControlClicked()

        # Signal for the pipeline creation
        self.ui.search.textChanged.connect(self.onSearchClicked)
        self.ui.menu_treectrl.currentItemChanged.connect(
            self.onTreeSelectionChanged)
        self.ui.actionLoad.triggered.connect(self.onLoadClicked)

        # Signal for the execution
        self.ui.actionRun.triggered.connect(self.onRunClicked)

    def show(self):
        """ Shows the widget and its child widgets.
        """
        self.ui.show()

    def add_controls_to_ui(self):
        """ Method to find dynamic controls and attach them to the ui.
        """
        # Error message template
        error_message = "{0} has no attribute '{1}'"

        # Go through the class dynamic controls
        for control_type, control_item in six.iteritems(self.controls):

            # Get the dynamic control name
            for control_name in control_item:

                # Try to set the control value to the ui class parameter
                try:
                    value = self.ui.findChild(control_type, control_name)
                    if value is None:
                        logger.error(error_message.format(
                            type(self.ui), control_name))
                    setattr(self.ui, control_name, value)
                # 'except Exception' (not a bare 'except:') so that
                # KeyboardInterrupt / SystemExit are not swallowed.
                except Exception:
                    logger.error(error_message.format(
                        type(self.ui), control_name))

    ###########################################################################
    # Slots
    ###########################################################################

    def onRunClicked(self):
        """ Event to execute the process/pipeline.
        """
        self.study_config.run(self.pipeline, executer_qc_nodes=True,
                              verbose=1)

    def onBrowseClicked(self):
        """ Event to show / hide the browse dock widget.
        """
        # Show browse dock widget
        if self.ui.actionBrowse.isChecked():
            self.ui.dockWidgetBrowse.show()
        # Hide browse dock widget
        else:
            self.ui.dockWidgetBrowse.hide()

    def onParametersClicked(self):
        """ Event to show / hide the parameters dock widget.
        """
        # Show parameters dock widget
        if self.ui.actionParameters.isChecked():
            self.ui.dockWidgetParameters.show()
        # Hide parameters dock widget
        else:
            self.ui.dockWidgetParameters.hide()

    def onStudyConfigClicked(self):
        """ Event to show / hide the study config dock widget.
        """
        # Show study configuration dock widget
        if self.ui.actionStudyConfig.isChecked():
            self.ui.dockWidgetStudyConfig.show()
        # Hide study configuration dock widget
        else:
            self.ui.dockWidgetStudyConfig.hide()

    def onQualityControlClicked(self):
        """ Event to show / hide the board dock widget.
        """
        # Create and show board dock widget
        if self.ui.actionQualityControl.isChecked():

            # Create the board widget associated to the pipeline controller.
            # Created on the fly in order to get the last status.
            # ToDo: add callbacks
            if self.pipeline is not None:
                board_widget = ScrollControllerWidget(
                    self.pipeline, name="outputs", live=True,
                    hide_labels=False, select_controls="outputs",
                    disable_controller_widget=True)
                self.ui.dockWidgetBoard.setWidget(board_widget)

            # Show the board widget
            # NOTE(review): in the original (whitespace-mangled) source the
            # dock appears to be shown even when no pipeline is loaded —
            # confirm this is the intended behavior.
            self.ui.dockWidgetBoard.show()

        # Hide board dock widget
        else:
            self.ui.dockWidgetBoard.hide()

    def onSearchClicked(self):
        """ Event to refresh the menu tree control that contains the
        pipeline modules.
        """
        # Clear the current tree control
        self.ui.menu_treectrl.clear()

        # Build the new filtered tree control
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu,
                      self.ui.search.text().lower())

    def onTreeSelectionChanged(self):
        """ Event to refresh the pipeline load button status.
        """
        # Get the current item
        item = self.ui.menu_treectrl.currentItem()
        if item is None:
            return

        # Check if we have selected a pipeline in the tree and enable /
        # disable the load button
        url = item.text(2)
        if url == "None":
            self.ui.actionLoad.setEnabled(False)
        else:
            self.ui.actionLoad.setEnabled(True)

    def onRunStatus(self):
        """ Event to refresh the run button status.

        When all the controller widget controls are correctly filled,
        enable the user to execute the pipeline.
        """
        # Get the controller widget
        controller_widget = \
            self.ui.dockWidgetParameters.widget().controller_widget

        # Get the controller widget status
        is_valid = controller_widget.is_valid()

        # Depending on the controller widget status enable / disable
        # the run button
        self.ui.actionRun.setEnabled(is_valid)

    def onLoadClicked(self):
        """ Event to load and display a pipeline.
        """
        # Get the pipeline instance from its string description
        item = self.ui.menu_treectrl.currentItem()
        description_list = [str(x) for x in [item.text(1), item.text(0)]
                            if x != ""]
        process_description = ".".join(description_list)
        self.pipeline = get_process_instance(process_description)

        # Create the controller widget associated to the pipeline
        # controller
        pipeline_widget = ScrollControllerWidget(
            self.pipeline, live=True, select_controls="inputs")
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Add observer to refresh the run button
        controller_widget = pipeline_widget.controller_widget
        for control_name, control \
                in six.iteritems(controller_widget._controls):

            # Unpack the control item
            trait, control_class, control_instance, control_label = control

            # Add the new callback
            control_class.add_callback(self.onRunStatus, control_instance)

        # Refresh manually the run button status the first time
        self.onRunStatus()

        # Store the pipeline documentation root path
        self.path_to_pipeline_doc[self.pipeline.id] = item.text(2)

        # Store the pipeline instance
        self.pipelines[self.pipeline.name] = (self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)

    def onLoadSubPipelineClicked(self, name, sub_pipeline, modifiers):
        """ Event to load and display a sub pipeline.
        """
        # Store the pipeline instance in class parameters
        self.pipeline = self.pipeline.nodes[name].process

        # Create the controller widget associated to the sub pipeline
        # controller: if the sub pipeline is a ProcessIteration, disable
        # the corresponding controller widget since this pipeline is
        # generated on the fly and is not directly synchronized with the
        # rest of the pipeline.
        is_iterative_pipeline = False
        if isinstance(self.pipeline, ProcessIteration):
            is_iterative_pipeline = True
        pipeline_widget = ScrollControllerWidget(
            self.pipeline, live=True, select_controls="inputs",
            disable_controller_widget=is_iterative_pipeline)
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Store the sub pipeline instance
        self.pipelines[self.pipeline.name] = (self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)

    def onCloseTabClicked(self, index):
        """ Event to close a pipeline view.
        """
        # Remove the pipeline from the intern pipeline list
        pipeline, pipeline_widget = self.pipelines[
            self.ui.display.tabText(index)]
        pipeline_widget.close()
        pipeline_widget.deleteLater()
        del self.pipelines[self.ui.display.tabText(index)]

        # Remove the table that contains the pipeline
        self.ui.display.removeTab(index)

    def onCurrentTabChanged(self, index):
        """ Event to refresh the controller widget when a new tab is
        selected.
        """
        # If no valid tab index has been passed
        if index < 0:
            self.ui.actionRun.setEnabled(False)

        # A new valid tab is selected
        else:
            # Get the selected pipeline widget
            self.pipeline, pipeline_widget = self.pipelines[
                self.ui.display.tabText(index)]

            # Set the controller widget associated to the pipeline
            # controller
            self.ui.dockWidgetParameters.setWidget(pipeline_widget)

            # Refresh manually the run button status the first time
            self.onRunStatus()

    def onHelpClicked(self):
        """ Event to display the documentation of the active pipeline.
        """
        # Create a dialog box to display the html documentation
        win = QtGui.QDialog()
        win.setWindowTitle("Pipeline Help")

        # Build the pipeline documentation location
        # Possible since common tools generate the sphinx documentation
        if self.pipeline:

            # Generate the url to the active pipeline documentation
            path_to_active_pipeline_doc = os.path.join(
                self.path_to_pipeline_doc[self.pipeline.id], "generated",
                self.pipeline.id.split(".")[1], "pipeline",
                self.pipeline.id + ".html")

            # Create and fill a QWebView ('help_view' rather than 'help' so
            # the builtin is not shadowed)
            help_view = QtWebKit.QWebView()
            help_view.load(QtCore.QUrl(path_to_active_pipeline_doc))
            help_view.show()

            # Create and set a layout with the web view
            layout = QtGui.QHBoxLayout()
            layout.addWidget(help_view)
            win.setLayout(layout)

            # Display the window
            win.exec_()

        # No Pipeline loaded, can't show the documentation message
        # Display a message box
        else:
            QtGui.QMessageBox.information(
                self.ui, "Information", "First load a pipeline!")

    def onChangeViewClicked(self):
        """ Event to switch between simple and full pipeline views.
        """
        # Check if a pipeline has been loaded
        if self._is_active_pipeline_valid():

            # Check the current display mode
            # Case PipelineDevelopperView
            if isinstance(self.ui.display.currentWidget(),
                          PipelineDevelopperView):

                # Switch to PipelineUserView display mode
                widget = PipelineUserView(self.pipeline)
                self._insert_widget_in_tab(widget)

            # Case PipelineUserView
            else:

                # Switch to PipelineDevelopperView display mode
                widget = PipelineDevelopperView(self.pipeline)
                self._insert_widget_in_tab(widget)

        # No pipeline loaded error
        else:
            logger.error("No active pipeline selected. "
                         "Have you forgotten to click the load pipeline "
                         "button?")

    #####################
    # Private interface #
    #####################

    def _insert_widget_in_tab(self, widget):
        """ Insert a new widget or replace an existing widget.

        Parameters
        ----------
        widget: a widget (mandatory)
            the widget we want to draw
        """
        # Search if the tab corresponding to the widget has already been
        # created
        already_created = False
        index = 0

        # Go through all the tabs
        for index in range(self.ui.display.count()):

            # Check if we have a match: the tab name is equal to the
            # current pipeline name
            if self.ui.display.tabText(index) == self.pipeline.name:
                already_created = True
                break

        # If no match found, add a new tab with the widget
        if not already_created:
            # six.text_type replaces the Python-2-only 'unicode' builtin
            # ('six' is already imported by this module).
            self.ui.display.addTab(
                widget, six.text_type(self.pipeline.name))
            self.ui.display.setCurrentIndex(
                self.ui.display.count() - 1)

        # Otherwise, replace the widget from the match tab
        else:
            # Delete the tab
            self.ui.display.removeTab(index)

            # Insert the new tab
            self.ui.display.insertTab(
                index, widget, six.text_type(self.pipeline.name))

            # Set the corresponding index
            self.ui.display.setCurrentIndex(index)

    def _is_active_pipeline_valid(self):
        """ Method to check that the active pipeline is valid.

        Returns
        -------
        is_valid: bool
            True if the active pipeline is valid
        """
        return self.pipeline is not None
# NOTE(review): fragment — this chunk is the continuation of a
# 'StudyConfig(' call whose opening (and the 'start_time' / GUI imports it
# relies on) begins outside this chunk; kept byte-identical. It also uses
# Python-2-only 'print' statements.
modules=["SmartCachingConfig"], use_smart_caching=True, output_directory="/volatile/nsap/catalogue/quality_assurance/") print "Done in {0} seconds".format(datetime.datetime.now() - start_time) # Create pipeline start_time = datetime.datetime.now() print "Start Pipeline Creation", start_time pipeline = get_process_instance("mmqa.fmri.fmri_quality_assurance.xml") print "Done in {0} seconds.".format(datetime.datetime.now() - start_time) # Set pipeline input parameters start_time = datetime.datetime.now() print "Start Parametrization", start_time localizer_dataset = get_sample_data("localizer") pipeline.image_file = localizer_dataset.fmri pipeline.repetition_time = localizer_dataset.TR print "Done in {0} seconds.".format(datetime.datetime.now() - start_time) # View pipeline app = QtGui.QApplication(sys.argv) view1 = PipelineDevelopperView(pipeline) view1.show() app.exec_() # Execute the pipeline in the configured study study_config.run(pipeline, executer_qc_nodes=True, verbose=1)
class CapsulMainWindow(MyQUiLoader):
    """ Capsul main window.

    Hosts a pipeline browser, a parameters editor, a study-configuration
    editor and a quality-control board around a central tab widget that
    displays one pipeline view per tab.
    """
    def __init__(self, pipeline_menu, ui_file, default_study_config=None):
        """ Method to initialize the Capsul main window class.

        Parameters
        ----------
        pipeline_menu: hierarchic dict
            each key is a sub module of the module. Leafs contain a list with
            the url to the documentation.
        ui_file: str (mandatory)
            a filename containing the user interface description
        default_study_config: ordered dict (mandatory)
            some parameters for the study configuration
        """
        # Inheritance: load user interface window
        MyQUiLoader.__init__(self, ui_file)

        # Class parameters
        self.pipeline_menu = pipeline_menu
        # Maps tab title -> (pipeline instance, its controller widget)
        self.pipelines = {}
        # The currently active pipeline (None until one is loaded)
        self.pipeline = None
        # Maps pipeline id -> root path of its generated documentation
        self.path_to_pipeline_doc = {}

        # Define dynamic controls: Qt control type -> object names that must
        # exist in the .ui file and are promoted to attributes of self.ui
        self.controls = {
            QtGui.QAction: [
                "actionHelp", "actionQuit", "actionBrowse", "actionLoad",
                "actionChangeView", "actionParameters", "actionRun",
                "actionStudyConfig", "actionQualityControl"],
            QtGui.QTabWidget: [
                "display", ],
            QtGui.QDockWidget: [
                "dockWidgetBrowse", "dockWidgetParameters",
                "dockWidgetStudyConfig", "dockWidgetBoard"],
            QtGui.QWidget: [
                "dock_browse", "dock_parameters", "dock_study_config",
                "dock_board"],
            QtGui.QTreeWidget: [
                "menu_treectrl", ],
            QtGui.QLineEdit: [
                "search", ],
        }

        # Add ui class parameter with the dynamic controls and initialize
        # default values
        self.add_controls_to_ui()
        self.ui.display.setTabsClosable(True)

        # Create the study configuration
        self.study_config = StudyConfig(default_study_config)

        # Create the controller widget associated to the study
        # configuration controller
        self.study_config_widget = ScrollControllerWidget(
            self.study_config, live=True)
        self.ui.dockWidgetStudyConfig.setWidget(self.study_config_widget)

        # Create the pipeline menu
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu)

        # Signal for window interface
        self.ui.actionHelp.triggered.connect(self.onHelpClicked)
        self.ui.actionChangeView.triggered.connect(self.onChangeViewClicked)

        # Signal for tab widget
        self.ui.display.currentChanged.connect(self.onCurrentTabChanged)
        self.ui.display.tabCloseRequested.connect(self.onCloseTabClicked)

        # Signal for dock widget
        self.ui.actionBrowse.triggered.connect(self.onBrowseClicked)
        self.ui.actionParameters.triggered.connect(self.onParametersClicked)
        self.ui.actionStudyConfig.triggered.connect(self.onStudyConfigClicked)
        self.ui.actionQualityControl.triggered.connect(
            self.onQualityControlClicked)

        # Initialize properly the visibility of each dock widget: calling the
        # slots once synchronizes the widgets with the action check states
        self.onBrowseClicked()
        self.onParametersClicked()
        self.onStudyConfigClicked()
        self.onQualityControlClicked()

        # Signal for the pipeline creation
        self.ui.search.textChanged.connect(self.onSearchClicked)
        self.ui.menu_treectrl.currentItemChanged.connect(
            self.onTreeSelectionChanged)
        self.ui.actionLoad.triggered.connect(self.onLoadClicked)

        # Signal for the execution
        self.ui.actionRun.triggered.connect(self.onRunClicked)

        # Set default values

        # Set some tooltips

    def show(self):
        """ Shows the widget and its child widgets.
        """
        self.ui.show()

    def add_controls_to_ui(self):
        """ Method to find dynamic controls.

        Looks up each declared control in the loaded ui and promotes it to an
        attribute of 'self.ui'; a missing control is logged, not fatal.
        """
        # Error message template
        error_message = "{0} has no attribute '{1}'"

        # Go through the class dynamic controls
        for control_type, control_item in six.iteritems(self.controls):

            # Get the dynamic control name
            for control_name in control_item:

                # Try to set the control value to the ui class parameter
                try:
                    value = self.ui.findChild(control_type, control_name)
                    if value is None:
                        logger.error(
                            error_message.format(type(self.ui), control_name))
                    setattr(self.ui, control_name, value)
                # Narrowed from a bare 'except:' which also swallowed
                # KeyboardInterrupt/SystemExit; a GUI lookup failure is still
                # only logged so the window keeps building.
                except Exception:
                    logger.error(
                        error_message.format(type(self.ui), control_name))

    ###########################################################################
    # Slots
    ###########################################################################

    def onRunClicked(self):
        """ Event to execute the process/pipeline.
        """
        self.study_config.run(self.pipeline, executer_qc_nodes=True,
                              verbose=1)

    def onBrowseClicked(self):
        """ Event to show / hide the browse dock widget.
        """
        # Show browse dock widget
        if self.ui.actionBrowse.isChecked():
            self.ui.dockWidgetBrowse.show()
        # Hide browse dock widget
        else:
            self.ui.dockWidgetBrowse.hide()

    def onParametersClicked(self):
        """ Event to show / hide the parameters dock widget.
        """
        # Show parameters dock widget
        if self.ui.actionParameters.isChecked():
            self.ui.dockWidgetParameters.show()
        # Hide parameters dock widget
        else:
            self.ui.dockWidgetParameters.hide()

    def onStudyConfigClicked(self):
        """ Event to show / hide the study config dock widget.
        """
        # Show study configuration dock widget
        if self.ui.actionStudyConfig.isChecked():
            self.ui.dockWidgetStudyConfig.show()
        # Hide study configuration dock widget
        else:
            self.ui.dockWidgetStudyConfig.hide()

    def onQualityControlClicked(self):
        """ Event to show / hide the board dock widget.
        """
        # Create and show board dock widget
        if self.ui.actionQualityControl.isChecked():

            # Create the board widget associated to the pipeline controller
            # Create on the fly in order to get the last status
            # ToDo: add callbacks
            if self.pipeline is not None:
                # board_widget = BoardWidget(
                #     self.pipeline, parent=self.ui.dockWidgetParameters,
                #     name="board")
                board_widget = ScrollControllerWidget(
                    self.pipeline, name="outputs", live=True,
                    hide_labels=False, select_controls="outputs",
                    disable_controller_widget=True)
                # board_widget.setEnabled(False)
                self.ui.dockWidgetBoard.setWidget(board_widget)

            # Show the board widget
            self.ui.dockWidgetBoard.show()

        # Hide board dock widget
        else:
            self.ui.dockWidgetBoard.hide()

    def onSearchClicked(self):
        """ Event to refresh the menu tree control that contains the pipeline
        modules.
        """
        # Clear the current tree control
        self.ui.menu_treectrl.clear()

        # Build the new filtered tree control
        fill_treectrl(self.ui.menu_treectrl, self.pipeline_menu,
                      self.ui.search.text().lower())

    def onTreeSelectionChanged(self):
        """ Event to refresh the pipeline load button status.
        """
        # Get the current item
        item = self.ui.menu_treectrl.currentItem()
        if item is None:
            return

        # Check if we have selected a pipeline in the tree and enable /
        # disable the load button: column 2 holds the documentation url,
        # which is the string "None" for non-leaf entries
        url = item.text(2)
        if url == "None":
            self.ui.actionLoad.setEnabled(False)
        else:
            self.ui.actionLoad.setEnabled(True)

    def onRunStatus(self):
        """ Event to refresh the run button status.

        When all the controller widget controls are correctly filled, enable
        the user to execute the pipeline.
        """
        # Get the controller widget
        controller_widget = self.ui.dockWidgetParameters.widget(
            ).controller_widget

        # Get the controller widget status
        is_valid = controller_widget.is_valid()

        # Depending on the controller widget status enable / disable
        # the run button
        self.ui.actionRun.setEnabled(is_valid)

    def onLoadClicked(self):
        """ Event to load and display a pipeline.
        """
        # Get the pipeline instance from its string description: join the
        # module (column 1) and pipeline (column 0) names, skipping empties
        item = self.ui.menu_treectrl.currentItem()
        description_list = [str(x) for x in [item.text(1), item.text(0)]
                            if x != ""]
        process_description = ".".join(description_list)
        self.pipeline = get_process_instance(process_description)

        # Create the controller widget associated to the pipeline
        # controller
        pipeline_widget = ScrollControllerWidget(
            self.pipeline, live=True, select_controls="inputs")
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Add observer to refresh the run button
        controller_widget = pipeline_widget.controller_widget
        for control_name, control \
                in six.iteritems(controller_widget._controls):

            # Unpack the control item
            trait, control_class, control_instance, control_label = control

            # Add the new callback
            control_class.add_callback(self.onRunStatus, control_instance)

        # Refresh manually the run button status the first time
        self.onRunStatus()

        # Store the pipeline documentation root path
        self.path_to_pipeline_doc[self.pipeline.id] = item.text(2)

        # Store the pipeline instance
        self.pipelines[self.pipeline.name] = (self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)

    def onLoadSubPipelineClicked(self, name, sub_pipeline, modifiers):
        """ Event to load and display a sub pipeline.
        """
        # Store the pipeline instance in class parameters
        self.pipeline = self.pipeline.nodes[name].process

        # Create the controller widget associated to the sub pipeline
        # controller: if the sub pipeline is a ProcessIteration, disable
        # the corresponding controller widget since this pipeline is
        # generated on the fly and is not directly synchronized with the
        # rest of the pipeline.
        is_iterative_pipeline = isinstance(self.pipeline, ProcessIteration)
        pipeline_widget = ScrollControllerWidget(
            self.pipeline, live=True, select_controls="inputs",
            disable_controller_widget=is_iterative_pipeline)
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Store the sub pipeline instance
        self.pipelines[self.pipeline.name] = (self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)

    def onCloseTabClicked(self, index):
        """ Event to close a pipeline view.
        """
        # Remove the pipeline from the intern pipeline list
        pipeline, pipeline_widget = self.pipelines[
            self.ui.display.tabText(index)]
        pipeline_widget.close()
        pipeline_widget.deleteLater()
        del self.pipelines[self.ui.display.tabText(index)]

        # Remove the tab that contains the pipeline
        self.ui.display.removeTab(index)

    def onCurrentTabChanged(self, index):
        """ Event to refresh the controller widget when a new tab is
        selected.
        """
        # If no valid tab index has been passed
        if index < 0:
            self.ui.actionRun.setEnabled(False)

        # A new valid tab is selected
        else:
            # Get the selected pipeline widget
            self.pipeline, pipeline_widget = self.pipelines[
                self.ui.display.tabText(index)]

            # Set the controller widget associated to the pipeline
            # controller
            self.ui.dockWidgetParameters.setWidget(pipeline_widget)

            # Refresh manually the run button status the first time
            self.onRunStatus()

    def onHelpClicked(self):
        """ Event to display the documentation of the active pipeline.
        """
        # Create a dialog box to display the html documentation
        win = QtGui.QDialog()
        win.setWindowTitle("Pipeline Help")

        # Build the pipeline documentation location
        # Possible since common tools generate the sphinx documentation
        if self.pipeline:

            # Generate the url to the active pipeline documentation
            path_to_active_pipeline_doc = os.path.join(
                self.path_to_pipeline_doc[self.pipeline.id], "generated",
                self.pipeline.id.split(".")[1], "pipeline",
                self.pipeline.id + ".html")

            # Create and fill a QWebView ('help_view' rather than 'help' to
            # avoid shadowing the builtin)
            help_view = QtWebKit.QWebView()
            help_view.load(QtCore.QUrl(path_to_active_pipeline_doc))
            help_view.show()

            # Create and set a layout with the web view
            layout = QtGui.QHBoxLayout()
            layout.addWidget(help_view)
            win.setLayout(layout)

            # Display the window
            win.exec_()

        # No Pipeline loaded, can't show the documentation message
        # Display a message box
        else:
            QtGui.QMessageBox.information(
                self.ui, "Information", "First load a pipeline!")

    def onChangeViewClicked(self):
        """ Event to switch between simple and full pipeline views.
        """
        # Check if a pipeline has been loaded
        if self._is_active_pipeline_valid():

            # Check the current display mode
            # Case PipelineDevelopperView
            if isinstance(self.ui.display.currentWidget(),
                          PipelineDevelopperView):

                # Switch to PipelineUserView display mode
                widget = PipelineUserView(self.pipeline)
                self._insert_widget_in_tab(widget)

            # Case PipelineUserView
            else:

                # Switch to PipelineDevelopperView display mode
                widget = PipelineDevelopperView(self.pipeline)
                self._insert_widget_in_tab(widget)

        # No pipeline loaded error
        else:
            logger.error("No active pipeline selected. "
                         "Have you forgotten to click the load pipeline "
                         "button?")

    #####################
    # Private interface #
    #####################

    def _insert_widget_in_tab(self, widget):
        """ Insert a new widget or replace an existing widget.

        Parameters
        ----------
        widget: a widget (mandatory)
            the widget we want to draw
        """
        # Search if the tab corresponding to the widget has already been
        # created
        already_created = False
        index = 0

        # Go through all the tabs
        for index in range(self.ui.display.count()):

            # Check if we have a match: the tab name is equal to the current
            # pipeline name
            if (self.ui.display.tabText(index) == self.pipeline.name):
                already_created = True
                break

        # If no match found, add a new tab with the widget
        if not already_created:
            self.ui.display.addTab(widget, unicode(self.pipeline.name))
            self.ui.display.setCurrentIndex(self.ui.display.count() - 1)

        # Otherwise, replace the widget from the match tab
        else:

            # Delete the tab
            self.ui.display.removeTab(index)

            # Insert the new tab
            self.ui.display.insertTab(index, widget,
                                      unicode(self.pipeline.name))

            # Set the corresponding index
            self.ui.display.setCurrentIndex(index)

    def _is_active_pipeline_valid(self):
        """ Method to check that the active pipeline is valid.

        Returns
        -------
        is_valid: bool
            True if the active pipeline is valid
        """
        return self.pipeline is not None
def pilot_dcm2nii():
    """
    Imports
    -------

    This code needs 'capsul' and 'mmutils' package in order to instantiate
    and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or in pypi.
    """
    # Function-scope imports keep this gallery pilot self-contained
    import os
    import sys
    import shutil
    import tempfile
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and the 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "dcmio.dcmconverter.dcm_to_nii.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attributes, and to specify that we want a log of the
    processing step through the 'generate_logging'. The 'use_scheduler'
    must be set to True if more than 1 CPU is used.
    """
    study_config = StudyConfig(
        modules=[],
        output_directory=outdir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a 3D heart dicom image that is downloaded
    if it is necessary through the 'get_sample_data' function and exported
    locally in a 'heart.dcm' file.
    """
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the converted
    file name and we set two DICOM directories to be converted in Nifti
    format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.date_in_filename = True
    pipeline.dicom_directories = [dcmfolder, dcmfolder]
    pipeline.additional_informations = [[("Provided by", "Neurospin@2015")],
                                        [("Provided by", "Neurospin@2015"),
                                         ("TR", "1500")]]
    pipeline.dcm_tags = [("TR", [("0x0018", "0x0080")]),
                         ("TE", [("0x0018", "0x0081")])]

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    # Flip to 1 to visualize the pipeline graph (requires a display)
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    The 'nibabel' package is used to load the generated images. We display
    the numpy array shape and the stored repetition and echo times: in order
    to load the 'descrip' image field we use the 'json' package.
    """
    import json
    import copy
    import nibabel

    generated_images = pipeline.filled_converted_files

    for fnames in generated_images:
        print(">>>", fnames, "...")
        im = nibabel.load(fnames[0])
        print("shape=", im.get_data().shape)
        header = im.get_header()
        a = str(header["descrip"])
        a = a.strip()
        description = json.loads(copy.deepcopy(a))
        print("TE=", description["TE"])
        print("TR=", description["TR"])
        print("Provided by=", description["Provided by"])
def pilot_dcm2nii():
    """
    Imports
    -------

    This code needs 'capsul' and 'mmutils' package in order to instantiate
    and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or in pypi.
    """
    # NOTE(review): this function is a byte-for-byte duplicate of the
    # pilot_dcm2nii defined earlier in this file; at import time this later
    # definition silently shadows the earlier one. Consider removing one.
    import os
    import sys
    import shutil
    import tempfile
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and the 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "dcmio.dcmconverter.dcm_to_nii.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attributes, and to specify that we want a log of the
    processing step through the 'generate_logging'. The 'use_scheduler'
    must be set to True if more than 1 CPU is used.
    """
    study_config = StudyConfig(modules=[],
                               output_directory=outdir,
                               number_of_cpus=1,
                               generate_logging=True,
                               use_scheduler=True)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a 3D heart dicom image that is downloaded
    if it is necessary through the 'get_sample_data' function and exported
    locally in a 'heart.dcm' file.
    """
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the converted
    file name and we set two DICOM directories to be converted in Nifti
    format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.date_in_filename = True
    pipeline.dicom_directories = [dcmfolder, dcmfolder]
    pipeline.additional_informations = [[("Provided by", "Neurospin@2015")],
                                        [("Provided by", "Neurospin@2015"),
                                         ("TR", "1500")]]
    pipeline.dcm_tags = [("TR", [("0x0018", "0x0080")]),
                         ("TE", [("0x0018", "0x0081")])]

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    # Flip to 1 to visualize the pipeline graph (requires a display)
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    The 'nibabel' package is used to load the generated images. We display
    the numpy array shape and the stored repetition and echo times: in order
    to load the 'descrip' image field we use the 'json' package.
    """
    import json
    import copy
    import nibabel

    generated_images = pipeline.filled_converted_files

    for fnames in generated_images:
        print(">>>", fnames, "...")
        im = nibabel.load(fnames[0])
        print("shape=", im.get_data().shape)
        header = im.get_header()
        a = str(header["descrip"])
        a = a.strip()
        description = json.loads(copy.deepcopy(a))
        print("TE=", description["TE"])
        print("TR=", description["TR"])
        print("Provided by=", description["Provided by"])
def pilot_gdti_estimation(): """ Generalized diffusion tensor estimation ======================================= """ # System import import os import sys import datetime import PySide.QtGui as QtGui # CAPSUL import from capsul.qt_gui.widgets import PipelineDevelopperView from capsul.study_config.study_config import StudyConfig from capsul.process.loader import get_process_instance """ Study configuration ------------------- We first define the working directory and guarantee this folder exists on the file system: """ working_dir = "/volatile/nsap/clindmri/gdti" if not os.path.isdir(working_dir): os.makedirs(working_dir) """ And then define the study configuration (here we activate the smart caching module that will be able to remember which process has already been processed): """ study_config = StudyConfig( modules=["SmartCachingConfig"], use_smart_caching=True, output_directory=working_dir) # Create pipeline start_time = datetime.datetime.now() print "Start Pipeline Creation", start_time pipeline = get_process_instance("clindmri.estimation.gdti.xml") print "Done in {0} seconds.".format(datetime.datetime.now() - start_time) # View pipeline if 0: app = QtGui.QApplication(sys.argv) view1 = PipelineDevelopperView(pipeline) view1.show() app.exec_() del view1 # Set pipeline input parameters pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz" pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval" pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec" pipeline.order = 2 pipeline.odf = False print "Done in {0} seconds.".format(datetime.datetime.now() - start_time) # Execute the pipeline in the configured study study_config.run(pipeline, verbose=1)
def pilot_fsl_preproc(): """ FSL preprocessings ================== """ # System import import os import sys import datetime import PySide.QtGui as QtGui # CAPSUL import from capsul.qt_gui.widgets import PipelineDevelopperView from capsul.study_config.study_config import StudyConfig from capsul.process.loader import get_process_instance """ Study configuration ------------------- We first define the working directory and guarantee this folder exists on the file system: """ working_dir = "/volatile/nsap/clindmri/fslpreproc" if not os.path.isdir(working_dir): os.makedirs(working_dir) """ And then define the study configuration (here we activate the smart caching module that will be able to remember which process has already been processed): """ study_config = StudyConfig( modules=["SmartCachingConfig", "FSLConfig", "MatlabConfig", "SPMConfig", "NipypeConfig"], use_smart_caching=True, fsl_config="/etc/fsl/4.1/fsl.sh", use_fsl=True, output_directory=working_dir) # Create pipeline start_time = datetime.datetime.now() print "Start Pipeline Creation", start_time pipeline = get_process_instance("clindmri.preproc.fsl_preproc.xml") print "Done in {0} seconds.".format(datetime.datetime.now() - start_time) # View pipeline if 0: app = QtGui.QApplication(sys.argv) view1 = PipelineDevelopperView(pipeline) view1.show() app.exec_() del view1 # Set pipeline input parameters pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz" pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval" pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec" print "Done in {0} seconds.".format(datetime.datetime.now() - start_time) #print pipeline.nodes["eddy"].process._nipype_interface.inputs print pipeline.nodes["eddy"].process._nipype_interface.cmdline # Execute the pipeline in the configured study study_config.run(pipeline, verbose=1)