Example 1
    def init_window(self):
        """ Method to initialize the main window.
        """
        # First set some meta information
        self.setApplicationName(self._application_name)
        self.setOrganizationName(self._organisation_name)
        self.setApplicationVersion(self._version)

        # Get the user interface description from capsul resources
        ui_file = os.path.join(os.path.dirname(__file__),
                               "activation_inspector.ui")
        #ui_file = os.path.join(resources.__path__[0], "activation_inspector.ui")

        # Create and show the activation/pipeline/controller windows
        self.pipeline_window = PipelineDevelopperView(self.pipeline,
                                                      show_sub_pipelines=True)
        self.controller_window = ScrollControllerWidget(self.pipeline,
                                                        live=True)
        self.activation_window = ActivationInspector(
            self.pipeline,
            ui_file,
            self.record_file,
            developper_view=self.pipeline_window)
        self.pipeline_window.show()
        self.activation_window.show()
        self.controller_window.show()

        return True
Example 2
    def test_simple_run(self):
        """ Method to test a simple 1 cpu call with the scheduler.
        """
        # Configure the environment
        study_config = StudyConfig(
            modules=[],
            use_smart_caching=True,
            output_directory=self.outdir,
            number_of_cpus=1,
            generate_logging=True,
            use_scheduler=True)

        # Create pipeline
        pipeline = get_process_instance(self.pipeline_name)
        pipeline.date_in_filename = True

        # Set pipeline input parameters
        dicom_dataset = get_sample_data("dicom")
        dcmfolder = os.path.join(self.outdir, "dicom")
        if not os.path.isdir(dcmfolder):
            os.makedirs(dcmfolder)
        shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))
        pipeline.source_dir = dcmfolder

        # View pipeline
        if 0:
            from capsul.qt_gui.widgets import PipelineDevelopperView
            from PySide import QtGui
            app = QtGui.QApplication(sys.argv)
            view1 = PipelineDevelopperView(pipeline)
            view1.show()
            app.exec_()

        # Execute the pipeline in the configured study
        study_config.run(pipeline)
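A hedged sketch of how such a test method is typically driven; "TestPipelineRun" is a hypothetical TestCase name, and the pattern follows the test() helpers shown in later examples on this page:

import unittest

# "TestPipelineRun" is a hypothetical TestCase containing test_simple_run.
suite = unittest.TestLoader().loadTestsFromTestCase(TestPipelineRun)
unittest.TextTestRunner(verbosity=2).run(suite)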
Example 3
def run_spm_preprocessing(funcfile,
                          outdir,
                          repetition_time,
                          ref_slice,
                          slice_order,
                          template,
                          timings_corr_algo,
                          normalization,
                          spm_bin,
                          fsl_config,
                          enable_display=False):
    """
    """
    print "Study_config init..."
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config=fsl_config,
        use_fsl=True,
        use_matlab=False,
        use_spm=True,
        spm_exec=spm_bin,
        spm_standalone=True,
        use_nipype=True,
        output_directory=outdir,
    )
    print "    ... done."

    # Processing definition: create the <clinfmri.preproc.FmriPreproc>
    # pipeline that defines the different steps of the processing.
    pipeline = get_process_instance(
        "clinfmri.preproc.converted_fmri_preproc.xml")

    # It is possible to display the pipeline.
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    # Now parametrize the pipeline.
    pipeline.fmri_file = funcfile
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = timings_corr_algo
    pipeline.select_normalization = normalization
    pipeline.force_repetition_time = repetition_time
    pipeline.force_slice_orders = slice_order
    pipeline.realign_wrap = [0, 1, 0]
    pipeline.realign_write_wrap = [0, 1, 0]
    pipeline.ref_slice = ref_slice
    if template is not None:
        pipeline.template_file = template

    # The pipeline is now ready to be executed.
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)
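A hedged invocation sketch for the helper above; every path is a hypothetical placeholder, and the selector values ("spm", "fmri") and acquisition parameters mirror those used in the other examples on this page:

run_spm_preprocessing(
    funcfile="/path/to/func.nii",         # functional volume (placeholder)
    outdir="/path/to/outdir",             # results directory (placeholder)
    repetition_time=2.0,                  # TR in seconds
    ref_slice=1,                          # reference slice index
    slice_order=list(range(1, 41)),       # ascending acquisition order
    template=None,                        # keep the pipeline default
    timings_corr_algo="spm",              # slice-timing selector
    normalization="fmri",                 # normalization selector
    spm_bin="/path/to/spm_standalone",    # SPM standalone binary (placeholder)
    fsl_config="/etc/fsl/4.1/fsl.sh")     # FSL configuration script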
Example 4
def pilot_gdti_estimation():
    """
    Generalized diffusion tensor estimation
    =======================================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/gdti"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(modules=["SmartCachingConfig"],
                               use_smart_caching=True,
                               output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print "Start Pipeline Creation", start_time
    pipeline = get_process_instance("clindmri.estimation.gdti.xml")
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    pipeline.order = 2
    pipeline.odf = False
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
Example 5
class ActivationInspectorApp(Application):
    """ ActiovationInspector Application.
    """
    # Load some meta information
    from capsul.info import __version__ as _version
    from capsul.info import NAME as _application_name
    from capsul.info import ORGANISATION as _organisation_name

    def __init__(self, pipeline_path, record_file=None, *args, **kwargs):
        """ Method to initialize the ActivationInspectorApp class.

        Parameters
        ----------
        pipeline_path: str (mandatory)
            the name of the pipeline we want to load.
        record_file: str (optional)
            a file where the pipeline activation steps are stored.
        """
        # Inheritance
        super(ActivationInspectorApp, self).__init__(*args, **kwargs)

        # Load the pipeline
        self.pipeline = get_process_instance(pipeline_path)

        # Initialize the application
        self.record_file = record_file
        self.window = None
        self.init_window()

    def init_window(self):
        """ Method to initialize the main window.
        """
        # First set some meta information
        self.setApplicationName(self._application_name)
        self.setOrganizationName(self._organisation_name)
        self.setApplicationVersion(self._version)

        # Get the user interface description from capsul resources
        ui_file = os.path.join(os.path.dirname(__file__),
                               "activation_inspector.ui")
        #ui_file = os.path.join(resources.__path__[0], "activation_inspector.ui")

        # Create and show the activation/pipeline/controller windows
        self.pipeline_window = PipelineDevelopperView(self.pipeline,
                                                      show_sub_pipelines=True)
        self.controller_window = ScrollControllerWidget(self.pipeline,
                                                        live=True)
        self.activation_window = ActivationInspector(
            self.pipeline,
            ui_file,
            self.record_file,
            developper_view=self.pipeline_window)
        self.pipeline_window.show()
        self.activation_window.show()
        self.controller_window.show()

        return True
Example 6
class ActivationInspectorApp(Application):
    """ ActivationInspector Application.
    While developing a pipeline, nodes are connected through links. Nodes will be automatically activated or disabled depending on their connections (a mandatory link to a disabled node will disable the current one).
    You will often wonder why a node will not be activated. This tool helps to determine when and why by "playing" activation rules sequences step-by-step and displaying nodes which activate or deactivate at each step.
    """
    # Load some meta information
    from capsul.info import __version__ as _version
    from capsul.info import NAME as _application_name
    from capsul.info import ORGANISATION as _organisation_name

    def __init__(self, pipeline_path, record_file=None, *args, **kwargs):
        """ Method to initialize the ActivationInspectorApp class.

        Parameters
        ----------
        pipeline_path: str (mandatory)
            the name of the pipeline we want to load.
        record_file: str (optional)
            a file where the pipeline activation steps are stored.
        """
        # Inheritance
        super(ActivationInspectorApp, self).__init__(*args, **kwargs)

        # Load the pipeline
        self.pipeline = get_process_instance(pipeline_path)

        # Initialize the application
        self.record_file = record_file
        self.window = None
        self.init_window()

    def init_window(self):
        """ Method to initialize the main window.
        """
        # First set some meta information
        self.setApplicationName(self._application_name)
        self.setOrganizationName(self._organisation_name)
        self.setApplicationVersion(self._version)

        # Get the user interface description from capsul resources
        ui_file = os.path.join(
            os.path.dirname(__file__), "activation_inspector.ui")
        #ui_file = os.path.join(resources.__path__[0], "activation_inspector.ui")

        # Create and show the activation/pipeline/controller windows
        self.pipeline_window = PipelineDevelopperView(
            self.pipeline, show_sub_pipelines=True)
        self.controller_window = ScrollControllerWidget(
            self.pipeline, live=True)
        self.activation_window = ActivationInspector(
            self.pipeline, ui_file, self.record_file,
            developper_view=self.pipeline_window)
        self.pipeline_window.show()
        self.activation_window.show()
        self.controller_window.show()

        return True
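A minimal launch sketch for the application above; it assumes the Application base class behaves like a Qt QApplication (it exposes setApplicationName above), and the pipeline description path is a hypothetical placeholder:

import sys

if __name__ == "__main__":
    # "mypackage.mypipeline.xml" is a hypothetical pipeline description.
    app = ActivationInspectorApp("mypackage.mypipeline.xml")
    sys.exit(app.exec_())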
Example 7
class ActivationInspectorApp(Application):
    """ ActiovationInspector Application.
    """
    # Load some meta information
    from capsul.info import __version__ as _version
    from capsul.info import NAME as _application_name
    from capsul.info import ORGANISATION as _organisation_name

    def __init__(self, pipeline_path, record_file=None, *args, **kwargs):
        """ Method to initialize the ActivationInspectorApp class.

        Parameters
        ----------
        pipeline_path: str (mandatory)
            the name of the pipeline we want to load.
        record_file: str (optional)
            a file where the pipeline activation steps are stored.
        """
        # Inheritance
        super(ActivationInspectorApp, self).__init__(*args, **kwargs)

        # Load the pipeline
        self.pipeline = get_process_instance(pipeline_path)

        # Initialize the application
        self.record_file = record_file
        self.window = None
        self.init_window()

    def init_window(self):
        """ Method to initialize the main window.
        """
        # First set some meta information
        self.setApplicationName(self._application_name)
        self.setOrganizationName(self._organisation_name)
        self.setApplicationVersion(self._version)

        # Get the user interface description from capsul resources
        ui_file = os.path.join(
            os.path.dirname(__file__), "activation_inspector.ui")
        #ui_file = os.path.join(resources.__path__[0], "activation_inspector.ui")

        # Create and show the activation/pipeline/controller windows
        self.pipeline_window = PipelineDevelopperView(self.pipeline)
        self.controller_window = ScrollControllerWidget(
            self.pipeline, live=True)
        self.activation_window = ActivationInspector(
            self.pipeline, ui_file, self.record_file,
            developper_view=self.pipeline_window)
        self.pipeline_window.show()
        self.activation_window.show()
        self.controller_window.show()

        return True
Example 8
    def run_pipeline_io(self, filename):
        pipeline = MyPipeline()
        from capsul.pipeline import pipeline_tools
        pipeline_tools.save_pipeline(pipeline, filename)
        pipeline2 = get_process_instance(filename)
        pipeline2.workflow_ordered_nodes()

        if self.debug:
            from soma.qt_gui.qt_backend import QtGui
            from capsul.qt_gui.widgets import PipelineDevelopperView
            import sys
            app = QtGui.QApplication.instance()
            if not app:
                app = QtGui.QApplication(sys.argv)
            view1 = PipelineDevelopperView(pipeline,
                                           allow_open_controller=True,
                                           enable_edition=True,
                                           show_sub_pipelines=True)

            view2 = PipelineDevelopperView(pipeline2,
                                           allow_open_controller=True,
                                           enable_edition=True,
                                           show_sub_pipelines=True)
            view1.show()
            view2.show()
            app.exec_()

        self.assertTrue(
            pipeline2.workflow_repr
            in ("constant->node1->node2", "node1->constant->node2"),
            '%s not in ("constant->node1->node2", "node1->constant->node2")' %
            pipeline2.workflow_repr)
        d1 = pipeline_tools.dump_pipeline_state_as_dict(pipeline)
        d2 = pipeline_tools.dump_pipeline_state_as_dict(pipeline2)
        self.assertEqual(d1, d2)
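A hedged sketch of the save/reload round-trip the test exercises, outside the unittest harness; MyPipeline comes from the test above, the XML path is a placeholder, and the get_process_instance import location follows the capsul.api import used in later examples:

from capsul.pipeline import pipeline_tools
from capsul.api import get_process_instance  # assumed import location

pipeline = MyPipeline()                        # pipeline class from the test above
pipeline_tools.save_pipeline(pipeline, "/tmp/my_pipeline.xml")
reloaded = get_process_instance("/tmp/my_pipeline.xml")
reloaded.workflow_ordered_nodes()              # recompute the execution order
print(reloaded.workflow_repr)                  # e.g. "constant->node1->node2"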
Example 9
    def onChangeViewClicked(self):
        """ Event to switch between simple and full pipeline views.
        """
        # Check if a pipeline has been loaded
        if self._is_active_pipeline_valid():

            # Check the current display mode
            # Case PipelineDevelopperView
            if isinstance(self.ui.display.currentWidget(),
                          PipelineDevelopperView):

                # Switch to PipelineUserView display mode
                widget = PipelineUserView(self.pipeline)
                self._insert_widget_in_tab(widget)

            # Case PipelineUserView
            else:

                # Switch to PipelineDevelopperView display mode
                widget = PipelineDevelopperView(self.pipeline)
                self._insert_widget_in_tab(widget)

        # No pipeline loaded error
        else:
            logger.error("No active pipeline selected. "
                         "Have you forgotten to click the load pipeline "
                         "button?")
Example 10
    def onLoadSubPipelineClicked(self, name, sub_pipeline, modifiers):
        """ Event to load and display a sub pipeline.
        """
        # Store the pipeline instance in class parameters
        self.pipeline = self.pipeline.nodes[name].process

        # Create the controller widget associated to the sub pipeline
        # controller: if the sub pipeline is a ProcessIteration, disable
        # the corresponding controller widget since this pipeline is
        # generated on the fly and is not directly synchronized with the
        # rest of the pipeline.
        is_iterative_pipeline = False
        if isinstance(self.pipeline, ProcessIteration):
            is_iterative_pipeline = True
        pipeline_widget = ScrollControllerWidget(
            self.pipeline,
            live=True,
            select_controls="inputs",
            disable_controller_widget=is_iterative_pipeline)
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Store the sub pipeline instance
        self.pipelines[self.pipeline.name] = (self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)
Example 11
    def onLoadClicked(self):
        """ Event to load and display a pipeline.
        """
        # Get the pipeline instance from its string description
        item = self.ui.menu_treectrl.currentItem()
        description_list = [
            str(x) for x in [item.text(1), item.text(0)] if x != ""
        ]
        process_description = ".".join(description_list)
        self.pipeline = get_process_instance(process_description)

        # Create the controller widget associated to the pipeline
        # controller
        pipeline_widget = ScrollControllerWidget(self.pipeline,
                                                 live=True,
                                                 select_controls="inputs")
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Add observer to refresh the run button
        controller_widget = pipeline_widget.controller_widget
        for control_name, control \
                in six.iteritems(controller_widget._controls):

            # Unpack the control item
            trait, control_class, control_instance, control_label = control

            # Add the new callback
            control_class.add_callback(self.onRunStatus, control_instance)

        # Manually refresh the run button status the first time
        self.onRunStatus()

        # Store the pipeline documentation root path
        self.path_to_pipeline_doc[self.pipeline.id] = item.text(2)

        # Store the pipeline instance
        self.pipelines[self.pipeline.name] = (self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)
Example 12
    def init_window(self):
        """ Method to initialize the main window.
        """
        # First set some meta information
        self.setApplicationName(self._application_name)
        self.setOrganizationName(self._organisation_name)
        self.setApplicationVersion(self._version)

        # Get the user interface description from capsul resources
        ui_file = os.path.join(
            os.path.dirname(__file__), "activation_inspector.ui")
        #ui_file = os.path.join(resources.__path__[0], "activation_inspector.ui")

        # Create and show the activation/pipeline/controller windows
        self.pipeline_window = PipelineDevelopperView(self.pipeline)
        self.controller_window = ScrollControllerWidget(
            self.pipeline, live=True)
        self.activation_window = ActivationInspector(
            self.pipeline, ui_file, self.record_file,
            developper_view=self.pipeline_window)
        self.pipeline_window.show()
        self.activation_window.show()
        self.controller_window.show()

        return True
Example 13
def pilot_bet(enable_display=False):
    """ 
    BET
    ===

    Brain extraction with FSL. 

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fsl_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * fmri: the functional volume.
        * anat: the structural volume.
        * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`BET pipeline <clinfmri.preproc.FslBet>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.utils.fsl_bet.xml")
    print(pipeline.get_input_spec())

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = toy_dataset.anat
    pipeline.generate_binary_mask = True
    pipeline.bet_threshold = 0.5

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example 14
def pilot_new_segment(enable_display=False):
    """ 
    New Segment
    ===========

    Unified SPM segmentation: segments, bias corrects and spatially normalises.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.process import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/spm_newsegment"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * fmri: the functional volume.
        * anat: the structural volume.
        * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`New Segment pipeline <clinfmri.utils.SpmNewSegment>`
    that defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.utils.spm_new_segment.xml")
    print(pipeline.get_input_spec())

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.channel_files = [toy_dataset.mean]
    pipeline.reference_volume = template_dataset.brain

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example 15
def pilot_qa_fmri():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on pypi.
    """
    # Capsul import
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    # Mmutils import
    from mmutils.toy_datasets import get_sample_data
    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the quality assurance processing, and
    'outdir' the location of the pipeline's results: in this case a
    temporary directory.
    """

    pipeline_name = "mmqa.fmri.fmri_quality_assurance_bbox.xml"
    outdir = tempfile.mkdtemp()
    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to request a log of the processing
    steps through 'generate_logging'. The 'use_scheduler' option must be
    set to True if more than one CPU is used.
    """
    study_config = StudyConfig(number_of_cpus=1,
                               generate_logging=True,
                               use_scheduler=True,
                               output_directory=outdir)
    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a functional image that is downloaded,
    if necessary, through the 'get_sample_data' function and exported
    locally.
    """

    localizer_dataset = get_sample_data("localizer_extra")
    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we set the functional image, the
    repetition time, the ROI size and the output score file.
    """

    pipeline = get_process_instance(pipeline_name)
    pipeline.image_file = localizer_dataset.fmri
    pipeline.repetition_time = 2.0
    pipeline.exclude_volume = []
    pipeline.roi_size = 21
    pipeline.score_file = os.path.join(outdir, "scores.json")
    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)
    """
    Access the result
    -----------------

    Display the computed scores
    """

    scores_file = pipeline.scores_file

    with open(scores_file, "r") as _file:
        scores = json.load(_file)

    for key, value in scores.items():
        print("{0} = {1}".format(key, value))
Example 16
    if 0:
        import json

        def write_state():
            state_file_name = '/tmp/state.json'
            with open(state_file_name, 'w') as state_file:
                json.dump(pipeline.pipeline_state(), state_file)
            print('Wrote', state_file_name)

        import sys
        #from PySide import QtGui
        from soma.qt_gui import qt_backend
        qt_backend.set_qt_backend('PyQt4')
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView
        #from capsul.qt_gui.widgets import PipelineUserView
        from capsul.process import get_process_instance

        app = QtGui.QApplication(sys.argv)
        pipeline = get_process_instance(MainTestPipeline)
        pipeline.on_trait_change(write_state, 'selection_changed')
        view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.add_embedded_subpipeline('switch_pipeline', scale=0.7)
        view1.add_embedded_subpipeline('way1_1', scale=0.4)
        view1.add_embedded_subpipeline('way2_1', scale=0.4)
        view1.show()
        #view2 = PipelineUserView(pipeline)
        #view2.show()
        app.exec_()
        del view1
        #del view2

Example 17
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPipeline)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    test()

    if 1:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        pipeline = MyPipeline()
        pipeline.input_image = ["toto", "tutu", "titi"]
        pipeline.dynamic_parameter = [3, 1, 4]
        pipeline.other_input = 0
        pipeline2 = pipeline.nodes["iterative"].process
        pipeline2.scene_scale_factor = 0.5
        pipeline.node_position = {'inputs': (50.0, 50.0),
                                  'iterative': (267.0, 56.0),
                                  'outputs': (1124.0, 96.0)}

        view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.add_embedded_subpipeline('iterative')

        view1.show()
        app.exec_()
        del view1
Example 18
def pilot_preproc_spm_fmri(enable_display=False):
    """
    FMRI preprocessings
    ===================

    Preprocessing with the SPM slice timing and a normalization to a given
    template.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_preproc_spm_fmri"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * fmri: the functional volume.
        * anat: the structural volume.
        * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`preprocessing pipeline <clinfmri.preproc.FmriPreproc>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.preproc.converted_fmri_preproc")
    print(pipeline.get_input_spec())

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.structural_file = toy_dataset.anat
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = "spm"
    pipeline.select_normalization = "fmri"
    pipeline.template_file = template_dataset.brain
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example 19
if __name__ == "__main__":
    if '-d' in sys.argv[1:] or '--debug' in sys.argv[1:]:
        debug = True

    test()

    if '-v' in sys.argv[1:] or '--verbose' in sys.argv[1:]:
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication.instance()
        if not app:
            app = QtGui.QApplication(sys.argv)
        pipeline = MySmallPipeline()
        pipeline.files_to_create = ["toto", "tutu", "titi"]
        pipeline.output_image = ['toto_out', 'tutu_out', 'tata_out']
        pipeline.dynamic_parameter = [3, 1, 4]
        pipeline.other_output = [0, 0, 0]
        pipeline.other_input = 0
        pipeline2 = pipeline.nodes["iterative"].process
        pipeline2.scene_scale_factor = 0.5

        view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.add_embedded_subpipeline('iterative')
        view1.auto_dot_node_positions()

        view1.show()
        app.exec_()
        del view1
Example 20
def pilot_fsl_preproc():
    """
    FSL preprocessings
    ==================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/fslpreproc"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig", "FSLConfig", "MatlabConfig",
                 "SPMConfig", "NipypeConfig"],
        use_smart_caching=True,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,        
        output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print "Start Pipeline Creation", start_time
    pipeline = get_process_instance("clindmri.preproc.fsl_preproc.xml")
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    #print pipeline.nodes["eddy"].process._nipype_interface.inputs
    print pipeline.nodes["eddy"].process._nipype_interface.cmdline

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
Example 21
        self.assertFalse(self.pipeline.nodes["way22"].activated)
        self.pipeline.workflow_ordered_nodes()
        self.assertEqual(self.pipeline.workflow_repr, "way11->way12")


def test():
    """ Function to execute unitest
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPipeline)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    print("RETURNCODE: ", test())

    if 1:
        import sys
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication.instance()
        if not app:
            app = QtGui.QApplication(sys.argv)
        pipeline = MyPipeline()
        setattr(pipeline.nodes_activation, "way11", False)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1
Example 22
    if len(sys.argv) > 1 and ('-v' in sys.argv[1:]
                              or '--verbose' in sys.argv[1:]):
        import sys
        from soma.qt_gui import qt_backend
        qt_backend.set_qt_backend('PyQt4')
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        #app = QtGui.QApplication(sys.argv)
        #pipeline = MyCompositePipeline()
        #view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True)
        #view1.show()
        #app.exec_()
        #del view1

        from capsul.qt_gui.widgets import PipelineUserView
        if QtGui.QApplication.instance() is not None:
            has_qapp = True
            app = QtGui.QApplication.instance()
        else:
            has_qapp = False
            app = QtGui.QApplication(sys.argv)
        pipeline = MyCompositePipeline()
        view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True)
        view1.show()
        view2 = PipelineUserView(pipeline)
        view2.show()
        if not has_qapp:
            app.exec_()
            del view1
Example 23
def pilot_bet(enable_display=False):
    """
    Brain Extraction Tool
    =====================
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=False,
        spm_directory="/i2bm/local/spm8",
        use_spm=False,
        output_directory=working_dir)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    template_dataset = get_sample_data("mni_1mm")

    """
    Processing definition
    ---------------------
    """
    pipeline = get_process_instance("clinfmri.utils.converted_fsl_bet")
    print(pipeline.get_input_spec())

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = template_dataset.brain
    pipeline.generate_binary_mask = True

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example 24
def pilot_dcm2nii():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on pypi.
    """
    import os
    import sys
    import shutil
    import tempfile
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    from mmutils.toy_datasets import get_sample_data
    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and the 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "dcmio.dcmconverter.dcm_to_nii.xml"
    outdir = tempfile.mkdtemp()
    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to request a log of the processing
    steps through 'generate_logging'. The 'use_scheduler' option must be
    set to True if more than one CPU is used.
    """
    study_config = StudyConfig(modules=[],
                               output_directory=outdir,
                               number_of_cpus=1,
                               generate_logging=True,
                               use_scheduler=True)
    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a 3D heart DICOM image that is
    downloaded, if necessary, through the 'get_sample_data' function and
    exported locally in a 'heart.dcm' file.
    """
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))
    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the
    converted file name and we set two DICOM directories to be converted
    to Nifti format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.date_in_filename = True
    pipeline.dicom_directories = [dcmfolder, dcmfolder]
    pipeline.additional_informations = [[("Provided by", "Neurospin@2015")],
                                        [("Provided by", "Neurospin@2015"),
                                         ("TR", "1500")]]

    pipeline.dcm_tags = [("TR", [("0x0018", "0x0080")]),
                         ("TE", [("0x0018", "0x0081")])]
    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)
    """
    Access the result
    -----------------

    The 'nibabel' package is used to load the generated images. We display
    the numpy array shape and the stored repetition and echo times: in
    order to load the 'descrip' image field we use the 'json' package.
    """
    import json
    import copy
    import nibabel

    generated_images = pipeline.filled_converted_files

    for fnames in generated_images:
        print(">>>", fnames, "...")
        im = nibabel.load(fnames[0])
        print("shape=", im.get_data().shape)
        header = im.get_header()
        a = str(header["descrip"])
        a = a.strip()
        description = json.loads(copy.deepcopy(a))
        print("TE=", description["TE"])
        print("TR=", description["TR"])
        print("Provided by=", description["Provided by"])
Example 25
def pilot_gdti_estimation():
    """
    Generalized diffusion tensor estimation
    =======================================
    """
    # System import
    import os
    import sys
    import datetime
    import PySide.QtGui as QtGui

    # CAPSUL import
    from capsul.qt_gui.widgets import PipelineDevelopperView
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/clindmri/gdti"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    And then define the study configuration (here we activate the smart
    caching module that will be able to remember which process has already been
    processed):
    """
    study_config = StudyConfig(
        modules=["SmartCachingConfig"],
        use_smart_caching=True,   
        output_directory=working_dir)

    # Create pipeline
    start_time = datetime.datetime.now()
    print "Start Pipeline Creation", start_time
    pipeline = get_process_instance("clindmri.estimation.gdti.xml")
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # View pipeline
    if 0:
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1

    # Set pipeline input parameters
    pipeline.dfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.nii.gz"
    pipeline.bvalfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bval"
    pipeline.bvecfile = "/volatile/imagen/dmritest/000000022453/DTI/000000022453s011a1001.bvec"
    pipeline.order = 2
    pipeline.odf = False
    print "Done in {0} seconds.".format(datetime.datetime.now() - start_time)

    # Execute the pipeline in the configured study
    study_config.run(pipeline, verbose=1)
Example 26
def pilot_qa_fmri():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on pypi.
    """
    # Capsul import
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance

    # Mmutils import
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the quality assurance processing, and
    'outdir' the location of the pipeline's results: in this case a
    temporary directory.
    """

    pipeline_name = "mmqa.fmri.fmri_quality_assurance_bbox.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to request a log of the processing
    steps through 'generate_logging'. The 'use_scheduler' option must be
    set to True if more than one CPU is used.
    """
    study_config = StudyConfig(
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,
        output_directory=outdir)
    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a functional image that is downloaded,
    if necessary, through the 'get_sample_data' function and exported
    locally.
    """

    localizer_dataset = get_sample_data("localizer_extra")

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we set the functional image, the
    repetition time, the ROI size and the output score file.
    """

    pipeline = get_process_instance(pipeline_name)
    pipeline.image_file = localizer_dataset.fmri
    pipeline.repetition_time = 2.0
    pipeline.exclude_volume = []
    pipeline.roi_size = 21
    pipeline.score_file = os.path.join(outdir, "scores.json")

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    Display the computed scores
    """

    scores_file = pipeline.scores_file

    with open(scores_file, "r") as _file:
        scores = json.load(_file)

    for key, value in scores.items():
        print("{0} = {1}".format(key, value))
Example 27
        app = QtGui.QApplication.instance()
        if not app:
            app = QtGui.QApplication(sys.argv)
        #pipeline = Pipeline1()
        #pipeline.main_inputs = ['/dir/file%d' % i for i in range(4)]
        #pipeline.left_out = pipeline.main_inputs[2]
        #pipeline.subject = 'subject2'
        #pipeline.output_directory = '/dir/out_dir'
        #view1 = PipelineDevelopperView(pipeline, allow_open_controller=True,
        #show_sub_pipelines=True,
        #enable_edition=True)
        #view1.show()

        pipeline2 = PipelineLOO()
        pipeline2.main_inputs = ['/dir/file%d' % i for i in range(4)]
        pipeline2.left_out = pipeline2.main_inputs[2]
        pipeline2.subjects = ['subject%d' % i for i in range(4)]
        pipeline2.output_directory = '/dir/out_dir'
        wf = pipeline_workflow.workflow_from_pipeline(pipeline2,
                                                      create_directories=False)
        view2 = PipelineDevelopperView(pipeline2,
                                       allow_open_controller=True,
                                       show_sub_pipelines=True,
                                       enable_edition=True)
        view2.show()

        app.exec_()
        #del view1
        del view2
Example 28
def pilot_dcm2nii():
    """
    Imports
    -------

    This code needs the 'capsul' and 'mmutils' packages in order to
    instantiate and execute the pipeline and to get a toy dataset.
    These packages are available in the 'neurospin' source list or on pypi.
    """
    import os
    import sys
    import shutil
    import tempfile
    from capsul.study_config.study_config import StudyConfig
    from capsul.process.loader import get_process_instance
    from mmutils.toy_datasets import get_sample_data

    """
    Parameters
    ----------

    The 'pipeline_name' parameter contains the location of the pipeline XML
    description that will perform the DICOMs conversion, and the 'outdir' the
    location of the pipeline's results: in this case a temporary directory.
    """
    pipeline_name = "dcmio.dcmconverter.dcm_to_nii.xml"
    outdir = tempfile.mkdtemp()

    """
    Capsul configuration
    --------------------

    A 'StudyConfig' has to be instantiated in order to execute the pipeline
    properly. It enables us to define the results directory through the
    'output_directory' attribute, the number of CPUs to be used through the
    'number_of_cpus' attribute, and to request a log of the processing
    steps through 'generate_logging'. The 'use_scheduler' option must be
    set to True if more than one CPU is used.
    """
    study_config = StudyConfig(
        modules=[],
        output_directory=outdir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True)

    """
    Get the toy dataset
    -------------------

    The toy dataset is composed of a 3D heart DICOM image that is
    downloaded, if necessary, through the 'get_sample_data' function and
    exported locally in a 'heart.dcm' file.
    """
    dicom_dataset = get_sample_data("dicom")
    dcmfolder = os.path.join(outdir, "dicom")
    if not os.path.isdir(dcmfolder):
        os.makedirs(dcmfolder)
    shutil.copy(dicom_dataset.barre, os.path.join(dcmfolder, "heart.dcm"))

    """
    Pipeline definition
    -------------------

    The pipeline XML description is first imported through the
    'get_process_instance' method, and the resulting pipeline instance is
    parametrized: in this example we decided to set the date in the
    converted file name and we set two DICOM directories to be converted
    to Nifti format.
    """
    pipeline = get_process_instance(pipeline_name)
    pipeline.date_in_filename = True
    pipeline.dicom_directories = [dcmfolder, dcmfolder]
    pipeline.additional_informations = [[("Provided by", "Neurospin@2015")],
                                        [("Provided by", "Neurospin@2015"),
                                         ("TR", "1500")]]

    pipeline.dcm_tags = [("TR", [("0x0018", "0x0080")]),
                         ("TE", [("0x0018", "0x0081")])]

    """
    Pipeline representation
    -----------------------

    By executing this block of code, a pipeline representation can be
    displayed. This representation is composed of boxes connected to each
    other.
    """
    if 0:
        from capsul.qt_gui.widgets import PipelineDevelopperView
        from PySide import QtGui
        app = QtGui.QApplication(sys.argv)
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()

    """
    Pipeline execution
    ------------------

    Finally the pipeline is executed in the defined 'study_config'.
    """
    study_config.run(pipeline)

    """
    Access the result
    -----------------

    The 'nibabel' package is used to load the generated images. We display
    the numpy array shape and the stored repetition and echo times: in
    order to load the 'descrip' image field we use the 'json' package.
    """
    import json
    import copy
    import nibabel

    generated_images = pipeline.filled_converted_files

    for fnames in generated_images:
        print(">>>", fnames, "...")
        im = nibabel.load(fnames[0])
        print("shape=", im.get_data().shape)
        header = im.get_header()
        a = str(header["descrip"])
        a = a.strip()
        description = json.loads(copy.deepcopy(a))
        print("TE=", description["TE"])
        print("TR=", description["TR"])
        print("Provided by=", description["Provided by"])
Example 29
    suite = unittest.TestLoader().loadTestsFromTestCase(TestTemporary)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    verbose = False
    if len(sys.argv) >= 2 and sys.argv[1] in ('-v', '--verbose'):
        verbose = True

    print("RETURNCODE: ", test())

    if verbose:
        import sys
        from soma.qt_gui import qt_backend
        qt_backend.set_qt_backend('PyQt4')
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        pipeline = DummyPipeline()
        pipeline.input = '/tmp/file_in.nii'
        pipeline.output = '/tmp/file_out3.nii'
        pipeline.nb_outputs = 3
        view1 = PipelineDevelopperView(pipeline,
                                       show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.show()
        app.exec_()
        del view1
Example 30
"""
Processing definition: create the <clinfmri.preproc.FmriPreproc> that
define the different step of the processings.
"""
pipeline = get_process_instance("clinfmri.preproc.fmri_preproc.xml")

"""
It is possible to display the pipeline.
"""
if args.display:
    import sys
    from PySide import QtGui
    from capsul.qt_gui.widgets import PipelineDevelopperView

    app = QtGui.QApplication(sys.argv)
    view = PipelineDevelopperView(pipeline)
    view.show()
    app.exec_()

"""
Now to parametrize the pipeline pipeline.
"""
pipeline.fmri_file = funcfile
pipeline.realign_register_to_mean = True
pipeline.select_slicer = args.timings
pipeline.select_normalization = args.normalization
pipeline.force_repetition_time = args.repetition_time
pipeline.force_slice_orders = args.slice_order
pipeline.realign_wrap = [0, 1, 0]
pipeline.realign_write_wrap = [0, 1, 0]
pipeline.ref_slice = args.ref_slice
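This fragment assumes an argparse namespace named args defined earlier in the script; a hedged reconstruction of what that parser might look like (the option names are inferred from the attribute accesses above and are hypothetical):

import argparse

# Hypothetical reconstruction of the elided command-line parser.
parser = argparse.ArgumentParser()
parser.add_argument("--display", action="store_true")
parser.add_argument("--timings")
parser.add_argument("--normalization")
parser.add_argument("--repetition-time", dest="repetition_time", type=float)
parser.add_argument("--slice-order", dest="slice_order", type=int, nargs="+")
parser.add_argument("--ref-slice", dest="ref_slice", type=int)
args = parser.parse_args()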
Example 31
            except OSError:
                pass
            try:
                os.unlink(output_name)
            except OSError:
                pass


def test():
    """ Function to execute unitest
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPipelineWithTemp)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == "__main__":
    print "RETURNCODE: ", test()

    if 1:
        import sys
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        pipeline = MyPipeline()
        pipeline.input_image = '/data/file.txt'
        pipeline.output_image = '/data/output_file.txt'
        view1 = PipelineDevelopperView(pipeline)
        view1.show()
        app.exec_()
        del view1
Example 32
    if 0:
        import json

        def write_state():
            state_file_name = '/tmp/state.json'
            with open(state_file_name, 'w') as state_file:
                json.dump(pipeline.pipeline_state(), state_file)
            print('Wrote', state_file_name)

        import sys
        #from PySide import QtGui
        from soma.qt_gui import qt_backend
        qt_backend.set_qt_backend('PyQt4')
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView
        #from capsul.qt_gui.widgets import PipelineUserView
        from capsul.process import get_process_instance

        app = QtGui.QApplication(sys.argv)
        pipeline = get_process_instance(MainTestPipeline)
        pipeline.on_trait_change(write_state, 'selection_changed')
        view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.add_embedded_subpipeline('switch_pipeline', scale=0.7)
        view1.add_embedded_subpipeline('way1_1', scale=0.4)
        view1.add_embedded_subpipeline('way2_1', scale=0.4)
        view1.show()
        #view2 = PipelineUserView(pipeline)
        #view2.show()
        app.exec_()
        del view1
        #del view2