def test_process_adhoc_completion(self):
     study_config = self.study_config
     threshold = get_process_instance(
         'bv_capsul_ex.ex_processes.ThresholdProcess', study_config)
     athreshold = ProcessCompletionEngine.get_completion_engine(
         threshold, 'threshold')
     self.assertTrue(athreshold is not None)
     attrib = {
         'center': 'alpha_centauri',
         'subject': 'r2d2',
         'analysis': 'M0',
     }
     pinputs = {
         'capsul_attributes': attrib,
         'threshold': 0.43,
     }
     athreshold.complete_parameters(process_inputs=pinputs)
     self.assertEqual(athreshold.__class__.__name__,
                      'ThresholdProcessAdhocCompletion')
     self.assertEqual(threshold.array_file,
                      os.path.join(study_config.input_directory,
                                   'alpha_centauri_r2d2.npy'))
     self.assertEqual(threshold.threshold, 0.43)
     self.assertEqual(
         threshold.mask_inf,
         os.path.join(study_config.output_directory,
                      'alpha_centauri_r2d2_M0_thresholded_inf.npy'))
     self.assertEqual(
         threshold.mask_sup,
         os.path.join(study_config.output_directory,
                      'alpha_centauri_r2d2_M0_thresholded_sup.npy'))
     mask = get_process_instance('bv_capsul_ex.ex_processes.Mask',
                                 study_config)
     amask = ProcessCompletionEngine.get_completion_engine(mask, 'mask')
     self.assertTrue(amask is not None)
     attrib = {
         'center': 'alpha_centauri',
         'subject': 'r2d2',
         'analysis': 'M0',
     }
     pinputs = {
         'capsul_attributes': attrib,
         'input': os.path.join(study_config.output_directory,
                               'input_data_thresholded_inf.npy'),
     }
     amask.complete_parameters(process_inputs=pinputs)
     self.assertEqual(mask.input, pinputs['input'])
     self.assertEqual(mask.mask,
                      os.path.join(study_config.shared_directory,
                                   'template_masks/mask.npy'))
     self.assertEqual(mask.output,
                      os.path.join(study_config.output_directory,
                                   'input_data_thresholded_inf_masked.npy'))
 def test_pipeline_wrapping(self):
     """ Method to test on-the-fly wrapping of an XML description into a
     pipeline.
     """
     pipeline = get_process_instance("capsul.process.test.xml_pipeline")
     self.assertTrue(isinstance(pipeline, Pipeline))
     for node_name in ["", "p1", "p2"]:
         self.assertTrue(node_name in pipeline.nodes)
Example #3
 def test_return_list(self):
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.join")
     process(value1="a", value2="b", value3="c")
     self.assertEqual(process.values, ["a", "b", "c"])
     process(value1="", value2="v", value3="")
     self.assertEqual(process.values, ["v"])
Example #4
 def test_return_string(self):
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.cat")
     process(value1="a", value2="b", value3="c")
     self.assertEqual(process.values, "a_b_c")
     process(value1="", value2="v", value3="")
     self.assertEqual(process.values, "v")
Example #5
 def build_pipeline(self):
     pipeline = get_process_instance(
         'capsul.pipeline.test.test_pipeline.MyPipeline', self.study)
     pipeline.add_pipeline_step('step1', ['constant'])
     pipeline.add_pipeline_step('step2', ['node1'])
     pipeline.add_pipeline_step('step3', ['node2'])
     return pipeline
Example #6
    def set_analysis_parameters(self):
        subjectname = "sujet02"
        groupname = "group1"
        
        tests_dir = os.environ.get('BRAINVISA_TEST_RUN_DATA_DIR')
        if not tests_dir:
            raise RuntimeError('BRAINVISA_TEST_RUN_DATA_DIR is not set')
        tests_dir = os.path.join(tests_dir, 'tmp_tests_brainvisa')

        filename = os.path.join(tests_dir, 'data_unprocessed', subjectname,
                                'anatomy', subjectname + ".ima")
         
        subject = Subject(subjectname, groupname, filename)
        self.analysis.set_parameters(subject=subject) 

        from capsul.api import get_process_instance
        import_step = get_process_instance(
            'morphologist.capsul.import_t1_mri.ImportT1Mri')

        import_step.input = subject.filename
        import_step.output \
            = self.analysis.pipeline.t1mri
        import_step.referential = self.analysis.pipeline.t1mri_referential
        pipeline_tools.create_output_directories(import_step)

        self.analysis.clear_results() 
    def setup_pipeline(self):
        input_dirs = glob.glob(os.path.join(
            self.input, 'database/random_matrix/lasagna/*'))
        self.groups = [os.path.basename(os.path.dirname(x))
                       for x in input_dirs]
        self.subjects = [os.path.basename(x) for x in input_dirs]
        self.input_files = [os.path.join(p, x + '.npy')
                            for p, x in zip(input_dirs, self.subjects)]
        self.pipeline = ex_processes.AveragePipeline()
        self.pipeline.set_study_config(self.study_config)
        self.pipeline2 = get_process_instance(
            'bv_capsul_ex.ex_processes.GroupAveragePipeline',
            self.study_config)
        self.pipeline.array_file = self.input_files[0]
        self.pipeline.template_mask = os.path.join(
            self.input, 'share/template_masks/amyelencephalic.npy')
        self.pipeline.threshold = 0.56
        self.pipeline.average_sup = os.path.join(
            self.output, 'oneshot_sup.npy')
        self.pipeline.average_inf = os.path.join(
            self.output, 'oneshot_inf.npy')

        self.pipeline2.input_files = [
            self.input_files[0],
            self.input_files[1],
        ]
 def test_pipeline_writing(self):
     """ Method to test saving and reloading the XML description.
     """
     # get a pipeline
     pipeline1 = get_process_instance("capsul.process.test.xml_pipeline")
     # save it in a temp directory
     tmpdir = tempfile.mkdtemp()
     pdir = os.path.join(tmpdir, "pipeline_mod")
     os.mkdir(pdir)
     save_xml_pipeline(pipeline1, os.path.join(pdir, "test_pipeline.xml"))
     # make this dir become a python module
     open(os.path.join(pdir, "__init__.py"), "w").close()
     # point the path to it
     sys.path.append(tmpdir)
     # reload the saved pipeline
     pipeline2 = get_process_instance("pipeline_mod.test_pipeline")
     self.assertEqual(sorted(pipeline1.nodes.keys()),
                      sorted(pipeline2.nodes.keys()))
     for node_name, node1 in pipeline1.nodes.items():
         node2 = pipeline2.nodes[node_name]
         self.assertEqual(node1.enabled, node2.enabled)
         self.assertEqual(node1.activated, node2.activated)
         self.assertEqual(sorted(node1.plugs.keys()),
                          sorted(node2.plugs.keys()))
         for plug_name, plug1 in node1.plugs.items():
             plug2 = node2.plugs[plug_name]
             self.assertEqual(len(plug1.links_from),
                              len(plug2.links_from))
             self.assertEqual(len(plug1.links_to),
                              len(plug2.links_to))
             links1 = [l[:2] + (l[4],)
                       for l in sorted(plug1.links_from)
                           + sorted(plug1.links_to)]
             links2 = [l[:2] + (l[4],)
                       for l in sorted(plug2.links_from)
                           + sorted(plug2.links_to)]
             self.assertEqual(links1, links2)
     sys.path.pop(-1)
     shutil.rmtree(tmpdir)
 def test_process_wrapping(self):
     """ Method to test on-the-fly wrapping of a function into a process.
     """
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.to_warp_func")
     self.assertTrue(isinstance(process, Process))
     for input_name in ["parameter1", "parameter2", "parameter3"]:
         self.assertTrue(input_name in process.traits(output=False))
     for output_name in ["output1", "output2"]:
         self.assertTrue(output_name in process.traits(output=True))
     process()
     self.assertEqual(process.output1, 1)
     self.assertEqual(process.output2, "done")
Example #10
    def execution_dummy(self):
        """ Test to execute DummyProcess.
        """
        # Create a process instance
        process = get_process_instance(DummyProcess, output_directory=self.output_dir)

        # Test the cache mechanism
        for param in [(1., 2.3), (2., 2.), (1., 2.3)]:
            run_process(self.output_dir, process, cachedir=self.cachedir,
                        generate_logging=False, verbose=1,
                        f1=param[0], f2=param[1])
            self.assertEqual(process.res, param[0] * param[1])
            self.assertEqual(process.output_directory, self.output_dir)
    def setUp(self):
        """ In the setup construct set some process input parameters.
        """
        # Get the wraped test process process
        self.process = get_process_instance(
            "capsul.process.test.test_load_from_description.a_function_to_wrap")

        # Set some input parameters
        self.process.fname = "fname"
        self.process.directory = "directory"
        self.process.value = 1.2
        self.process.enum = "choice1"
        self.process.list_of_str = ["a_string"]
 def test_xml(self):
     from capsul.pipeline import xml
     temp = tempfile.mkstemp(suffix='.xml')
     try:
         os.close(temp[0])
         xml.save_xml_pipeline(self.pipeline, temp[1])
         pipeline = get_process_instance(temp[1])
         self.assertEqual(len(pipeline.nodes), len(self.pipeline.nodes))
         pipeline.workflow_ordered_nodes()
         self.assertEqual(
             isinstance(pipeline.nodes['intermediate_out'],
                        OptionalOutputSwitch), True)
         self.assertEqual(pipeline.workflow_repr, "node1->node2")
     finally:
         os.unlink(temp[1])
Example #13
    def execution_dummy(self):
        """ Test to execute DummyProcess.
        """
        # Create a process instance
        process = get_process_instance(DummyProcess,
                                       output_directory=self.output_directory)

        # Test the cache mechanism
        for param in [(1., 2.3), (2., 2.), (1., 2.3)]:
            self.study_config.run(process,
                                  executer_qc_nodes=False,
                                  verbose=1,
                                  f1=param[0],
                                  f2=param[1])
            self.assertEqual(process.res, param[0] * param[1])
            self.assertEqual(process.output_directory, self.output_directory)
Example #14
    def onLoadClicked(self):
        """ Event to load and display a pipeline.
        """
        # Get the pipeline instance from its string description
        item = self.ui.menu_treectrl.currentItem()
        description_list = [
            str(x) for x in [item.text(1), item.text(0)] if x != ""
        ]
        process_description = ".".join(description_list)
        self.pipeline = get_process_instance(process_description)

        # Create the controller widget associated to the pipeline
        # controller
        pipeline_widget = ScrollControllerWidget(self.pipeline,
                                                 live=True,
                                                 select_controls="inputs")
        self.ui.dockWidgetParameters.setWidget(pipeline_widget)

        # Add observer to refresh the run button
        controller_widget = pipeline_widget.controller_widget
        for control_name, control \
                in six.iteritems(controller_widget._controls):

            # Unpack the control item
            trait, control_class, control_instance, control_label = control

            # Add the new callback
            control_class.add_callback(self.onRunStatus, control_instance)

        # Refresh manually the run button status the first time
        self.onRunStatus()

        # Store the pipeline documentation root path
        self.path_to_pipeline_doc[self.pipeline.id] = item.text(2)

        # Store the pipeline instance
        self.pipelines[self.pipeline.name] = (self.pipeline, pipeline_widget)

        # Create the widget
        widget = PipelineDevelopperView(self.pipeline)
        self._insert_widget_in_tab(widget)

        # Connect the subpipeline clicked signal to the
        # onLoadSubPipelineClicked slot
        widget.subpipeline_clicked.connect(self.onLoadSubPipelineClicked)
Example #17
    def __init__(self, pipeline_path, record_file=None, *args, **kwargs):
        """ Method to initialize the ActivationInspectorApp class.

        Parameters
        ----------
        pipeline_path: str (mandatory)
            the name of the pipeline we want to load.
        record_file: str (optional)
            a file where the pipeline activation steps are stored.
        """
        # Inheritance
        super(ActivationInspectorApp, self).__init__(*args, **kwargs)

        # Load the pipeline
        self.pipeline = get_process_instance(pipeline_path)

        # Initialize the application
        self.record_file = record_file
        self.window = None
        self.init_window()
Example #20
 def test_named_outputs(self):
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.divide_dict")
     process(a=42, b=3)
     self.assertEqual(process.quotient, 14)
     self.assertEqual(process.remainder, 0)
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.divide_list")
     process(a=42, b=3)
     self.assertEqual(process.quotient, 14)
     self.assertEqual(process.remainder, 0)
     
     a = list(range(40, 50))
     b = list(range(10, 21))
     quotients = [int(i / j) for i, j in zip(range(40, 50), range(10, 21))]
     remainders = [i % j for i, j in zip(range(40, 50), range(10, 21))]
     
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.divides_dict")
     process(a=a, b=b)
     self.assertEqual(process.quotients, quotients)
     self.assertEqual(process.remainders, remainders)
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.divides_list")
     process(a=a, b=b)
     self.assertEqual(process.quotients, quotients)
     self.assertEqual(process.remainders, remainders)
     
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.divides_single_dict")
     process(a=a, b=b)
     self.assertEqual(process.quotients, quotients)
     process = get_process_instance(
         "capsul.process.test.test_load_from_description.divides_single_list")
     process(a=a, b=b)
     self.assertEqual(process.quotients, quotients)
Example #21
def pilot_bet(enable_display=False):
    """
    Brain Extraction Tool
    =====================
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_bet"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=False,
        spm_directory="/i2bm/local/spm8",
        use_spm=False,
        output_directory=working_dir)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the test
    data on the local file system (here the MNI 1mm template):
    """
    template_dataset = get_sample_data("mni_1mm")

    """
    Processing definition
    ---------------------
    """
    pipeline = get_process_instance("clinfmri.utils.converted_fsl_bet")
    print(pipeline.get_input_spec())

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.input_image_file = template_dataset.brain
    pipeline.generate_binary_mask = True

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
print "Study_config init..."
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
    use_smart_caching=False,
    use_fsl=True,
    fsl_config=args.fslconfig,
    use_matlab=True,
    matlab_exec=args.matlabexec,
    use_spm=True,
    spm_directory=args.spmdir,
    use_nipype=True,
    output_directory=capsulwd)
print "    ... done."

# Get the pipeline
pipeline = get_process_instance(
    "clinfmri.statistics.spm_first_level_pipeline.xml")

# Unzip the NIfTI file (to be deleted afterwards)
fmri_session_unzip = os.path.join(
    capsulwd,
    os.path.basename(args.inputvolume).replace(".gz", ""))

with gzip.open(args.inputvolume, 'rb') as f:
    file_content = f.read()
    with open(fmri_session_unzip, "wb") as _file:
        _file.write(file_content)

# generate onset
log_onset, onset_file, missing_names = generate_onsets(args.behavfile,
                                                       args.timepoint, soutdir)
if not onset_file:
Example #23
        self.process()
        self.assertEqual(
            getattr(self.process, "string"),
            "ALL FUNCTION PARAMETERS::\n\nfnamedirectory1.2choice1['a_string']")

def test():
    """ Function to execute unitest
    """
    suite1 = unittest.TestLoader().loadTestsFromTestCase(
        TestLoadFromDescription)
    runtime1 = unittest.TextTestRunner(verbosity=2).run(suite1)
    suite2 = unittest.TestLoader().loadTestsFromTestCase(TestProcessWrap)
    runtime2 = unittest.TextTestRunner(verbosity=2).run(suite2)
    return runtime1.wasSuccessful() and runtime2.wasSuccessful()


if __name__ == "__main__":
    print("RETURNCODE: ", test())

    if True:
        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDeveloperView

        app = QtGui.QApplication(sys.argv)
        pipeline = get_process_instance('capsul.process.test.test_pipeline')
        view1 = PipelineDeveloperView(pipeline, show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.show()
        app.exec_()
        del view1
Example #24
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################

# System import
import sys
import unittest
import os
from PySide import QtGui
import logging
import datetime

logging.basicConfig(level=logging.INFO)

# CAPSUL import
from capsul.qt_gui.widgets import PipelineDevelopperView
from capsul.study_config.study_config import StudyConfig
from capsul.api import get_process_instance


pipeline = get_process_instance("clinfmri.statistics.spm_first_level_pipeline")
pipeline.smoother_switch = "no_smoothing"
pipeline.complete_regressors = "yes"
app = QtGui.QApplication(sys.argv)
view1 = PipelineDevelopperView(pipeline, allow_open_controller=True, show_sub_pipelines=True)
view1.show()
app.exec_()
del view1

Example #25
    def generate_api_doc(self, pipeline, schema):
        """ Make autodoc documentation for a pipeline python module

        Parameters
        ----------
        pipeline : string
            python location of pipeline - e.g. 'caps.fmri.PIPELINE'
        schema : string
            path to the pipeline representation image

        Returns
        -------
        ad : string
            contents of API doc
        title : string
            the first line of the docstring
        """
        # First get the pipeline instance from its string description
        pipeline_instance = get_process_instance(pipeline)

        # Get the header, i.e. the first line of the docstring
        # Default title is ''
        header = pipeline_instance.__doc__
        title = ""
        if header:
            title = pipeline_instance.__doc__.splitlines()[0]

        # Add header to tell us that this documentation must not be edited
        ad = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n"

        # Generate the page title: name of the pipeline
        ad += ":orphan:\n\n"
        chap_title = pipeline
        ad += (chap_title + "\n" +
               self.rst_section_levels[1] * len(chap_title) + "\n\n")

        # Generate a bookmark (for cross references)
        pipeline_name = pipeline_instance.__class__.__name__
        label = pipeline + ":"
        ad += "\n.. _{0}\n\n".format(label)
        # ad += "\n.. index:: {0}\n\n".format(pipeline_name)

        # Add a subtitle
        ad += (pipeline_name + "\n" +
               self.rst_section_levels[2] * len(pipeline_name) + "\n\n")

        # Set the current module
        currentmodule = ".".join(pipeline_instance.id.split(".")[:-1])
        ad += ".. currentmodule:: {0}\n\n".format(currentmodule)

        # Then add the trait description
        # It will generate two sections: input and output
        ad += pipeline_instance.get_help(returnhelp=True)

        # Add schema if generated
        if schema:
            schama_title = "Pipeline schema"
            ad += ("\n" + schama_title + "\n" + "~" * len(schama_title) +
                   "\n\n")
            ad += ".. image:: {0}\n".format(schema)
            ad += "    :height: 400px\n"
            ad += "    :align: center\n\n"

        return ad, title
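For reference, the string building above emits an RST skeleton along these
lines for pipeline='caps.fmri.PIPELINE' (a reconstruction, not captured
output; the actual underline characters come from rst_section_levels, and
the trait sections produced by get_help() are omitted):

.. AUTO-GENERATED FILE -- DO NOT EDIT!

:orphan:

caps.fmri.PIPELINE
==================

.. _caps.fmri.PIPELINE:

PIPELINE
--------

.. currentmodule:: caps.fmri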
 def test_pipeline_adhoc_completion(self):
     study_config = self.study_config
     pipeline = get_process_instance(
         'bv_capsul_ex.ex_processes.AveragePipeline', study_config)
     apipeline = ProcessCompletionEngine.get_completion_engine(
         pipeline, 'average_pipeline')
     self.assertTrue(apipeline is not None)
     attrib = {
         'center': 'alpha_centauri',
         'subject': 'r2d2',
         'analysis': 'M0',
         'mask_type': 'amyelencephalic',
     }
     pinputs = {
         'capsul_attributes': attrib,
         'threshold': 0.75,
     }
     self.assertEqual(
         sorted(apipeline.get_attribute_values().user_traits().keys()),
         ['analysis', 'center', 'mask_type', 'subject'])
     apipeline.complete_parameters(process_inputs=pinputs)
     self.assertEqual(pipeline.threshold, 0.75)
     self.assertEqual(
         pipeline.template_mask,
         os.path.join(study_config.shared_directory,
                      'template_masks/amyelencephalic.npy'))
     #self.assertEqual(
         #pipeline.nodes['threshold'].process.mask_inf,
         #os.path.join(study_config.output_directory,
                      #'input_data_thresholded_inf.npy'))
     #self.assertEqual(
         #pipeline.nodes['threshold'].process.mask_sup,
         #os.path.join(study_config.output_directory,
                      #'input_data_thresholded_sup.npy'))
     #self.assertEqual(
         #pipeline.nodes['template_mask_inf'].process.output,
         #os.path.join(study_config.output_directory,
                      #'input_data_thresholded_inf_masked.npy'))
     #self.assertEqual(
         #pipeline.nodes['template_mask_sup'].process.output,
         #os.path.join(study_config.output_directory,
                      #'input_data_thresholded_sup_masked.npy'))
     self.assertEqual(
         pipeline.nodes['average_inf'].process.average,
         os.path.join(
             study_config.output_directory,
             'alpha_centauri_r2d2_M0_average_inf.npy'))
     self.assertEqual(
         pipeline.nodes['average_sup'].process.average,
         os.path.join(
             study_config.output_directory,
             'alpha_centauri_r2d2_M0_average_sup.npy'))
     self.assertEqual(
         pipeline.average_inf,
         os.path.join(
             study_config.output_directory,
             'alpha_centauri_r2d2_M0_average_inf.npy'))
     self.assertEqual(
         pipeline.average_sup,
         os.path.join(
             study_config.output_directory,
             'alpha_centauri_r2d2_M0_average_sup.npy'))
Example #27
def test():
    """ Function to execute unitest
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestComplexPipeline)
    runtime = unittest.TextTestRunner(verbosity=2).run(suite)
    return runtime.wasSuccessful()


if __name__ == '__main__':
    print('Test return code:', test())

    if '-v' in sys.argv[1:]:
        from pprint import pprint

        pipeline = get_process_instance(ComplexPipeline)

        from soma.qt_gui.qt_backend import QtGui
        from capsul.qt_gui.widgets import PipelineDeveloperView
        #from capsul.qt_gui.widgets.activation_inspector import ActivationInspectorApp

        #app = ActivationInspectorApp(ComplexPipeline)
        app = QtGui.QApplication(sys.argv)

        view = PipelineDeveloperView(pipeline,
                                     allow_open_controller=True,
                                     show_sub_pipelines=True)
        view.show()

        app.exec_()
        del view
Example #29
        # Test the cache mechanism
        proxy_process(f=2.5, i=__file__, l=[__file__])
        copied_file = os.path.join(self.workspace_dir,
                                   os.path.basename(__file__))
        self.assertEqual(
            proxy_process.s,
            "{{'i': {0}, 'l': [{0}], 'f': 2.5}}".format(repr(copied_file)))

if 0:
    # Configure the environment
    study_config = StudyConfig(modules=["FSLConfig"],
                               fsl_config="/etc/fsl/4.1/fsl.sh")

    # Create a process instance
    ifname = "/home/ag239446/.local/share/nsap/t1_localizer.nii.gz"
    instance = get_process_instance("nipype.interfaces.fsl.Merge")

    # Create a decorated instance
    dec_instance = mem.cache(instance)
    print(dec_instance)
    print(dec_instance.__doc__)

    # Set parameters
    dec_instance.in_files = [ifname, ifname]
    dec_instance.dimension = "t"
    dec_instance.output_type = "NIFTI_GZ"
    dec_instance.set_output_directory("/home/ag239446/tmp/")

    # Test the cache mechanism
    result = dec_instance()
    print(dec_instance._merged_file)
Example #30
    def add_package(self, module_name, class_name=None):
        """Provide recursive representation of a package and its
subpackages/modules, to construct the mia's pipeline library.

        :Parameters:
            - :module_name: name of the module to add in the pipeline
               library
            - :class_name: only this pipeline will be added to the pipeline
               library (optional)

        :returns: dictionary of dictionaries containing
           package/subpackages/pipelines status.
           ex: {package: {subpackage: {pipeline: 'process_enabled'}}}

        """

        if module_name:

            # reloading the package
            if module_name in sys.modules.keys():
                del sys.modules[module_name]

            try:
                __import__(module_name)
                pkg = sys.modules[module_name]

                # check if there are subpackages, in this case explore them
                for _, modname, ispkg in pkgutil.iter_modules(pkg.__path__):

                    if ispkg:
                        print('\nExploring subpackages of {0} ...'
                              .format(module_name))
                        print('- ', str(module_name + '.' + modname))
                        self.add_package(str(module_name + '.' + modname),
                                         class_name)

                for k, v in sorted(list(pkg.__dict__.items())):

                    if class_name and k != class_name:
                        continue

                    # checking each class in the package
                    if inspect.isclass(v):
                        try:
                            get_process_instance(
                                '%s.%s' % (module_name, v.__name__))

                            # updating the tree's dictionary
                            path_list = module_name.split('.')
                            path_list.append(k)
                            pkg_iter = self.packages

                            for element in path_list:

                                if element in pkg_iter.keys():
                                    pkg_iter = pkg_iter[element]

                                else:

                                    if element == path_list[-1]:
                                        pkg_iter[element] = 'process_enabled'

                                    else:
                                        pkg_iter[element] = {}
                                        pkg_iter = pkg_iter[element]
                        except Exception:
                            pass

            except ImportError as e:
                print('\nWhen attempting to add a package and its modules to '
                      'the package tree, the following exception was caught:')
                print('{0}'.format(e))

            return self.packages
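A hypothetical usage sketch (add_package belongs to a pipeline-library
manager class that is not part of this snippet; the package and class names
are illustrative):

# Scan a whole package, or restrict the scan to a single pipeline class.
tree = self.add_package('bv_capsul_ex')
tree = self.add_package('bv_capsul_ex.ex_processes',
                        class_name='ThresholdProcess')
# tree now holds the nested status dictionaries described in the docstring,
# e.g. {'bv_capsul_ex': {'ex_processes':
#           {'ThresholdProcess': 'process_enabled'}}}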
Example #31
              "{{'i': {0}, 'l': [{0}], 'f': 2.5}}".format(repr(copied_file)))
        self.assertEqual(eval(proxy_process.s), {
            'i': copied_file,
            'l': [copied_file],
            'f': 2.5
        })


if 0:
    # Configure the environment
    study_config = StudyConfig(modules=["FSLConfig"],
                               fsl_config="/etc/fsl/4.1/fsl.sh")

    # Create a process instance
    ifname = "/home/ag239446/.local/share/nsap/t1_localizer.nii.gz"
    instance = get_process_instance("nipype.interfaces.fsl.Merge")

    # Create a decorated instance
    dec_instance = mem.cache(instance)
    print(dec_instance)
    print(dec_instance.__doc__)

    # Set parameters
    dec_instance.in_files = [ifname, ifname]
    dec_instance.dimension = "t"
    dec_instance.output_type = "NIFTI_GZ"
    dec_instance.set_output_directory("/home/ag239446/tmp/")

    # Test the cache mechanism
    result = dec_instance()
    print(dec_instance._merged_file)
                                      dict(type='array'))
        self.set_parameter_attributes('mask', 'shared', 'Mask',
                                      dict(type='array', mask_type='mask'))
        self.set_parameter_attributes('output', 'output',
                                      ['Acquisition', 'Processing'],
                                      dict(type='array'))



if __name__ == '__main__':
    import six
    from capsul.api import get_process_instance
    from capsul.attributes_factory import AttributesFactory
    from pprint import pprint
    
    process = get_process_instance('bv_capsul_ex.ex_processes.AveragePipeline')
    
    factory = AttributesFactory()
    factory.module_path.append('bv_capsul_ex.schema')
    
    schema = factory.get('schema', 'bv_capsul_ex')
    schema_shared = factory.get('schema', 'bv_capsul_ex')
    
    process_attributes = AveragePipelineAttributes(process, dict(input=schema, output=schema, shared=schema_shared))
    process_attributes.center = 'the_center'
    process_attributes.subject = 'the_subject'
    process_attributes.analysis = 'the_analysis'
    for name, trait in six.iteritems(process_attributes.user_traits()):
        print(name, trait)
    pprint(process_attributes.get_parameters_attributes())
Example #33
def pilot_preproc_spm_fmri(enable_display=False):
    """
    fMRI preprocessing
    ==================

    Preprocessing with the SPM slice timing and a normalization to a given
    template.

    Start to import required modules:
    """
    import os
    from mmutils.toy_datasets import get_sample_data
    from capsul.study_config import StudyConfig
    from capsul.api import get_process_instance

    """
    Study configuration
    -------------------

    We first define the working directory and guarantee this folder exists on
    the file system:
    """
    working_dir = "/volatile/nsap/catalogue/pclinfmri/fmri_preproc_spm_fmri"
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)

    """
    Then define the study configuration:
    """
    study_config = StudyConfig(
        modules=["MatlabConfig", "SPMConfig", "FSLConfig", "NipypeConfig"],
        use_smart_caching=False,
        fsl_config="/etc/fsl/4.1/fsl.sh",
        use_fsl=True,
        matlab_exec="/neurospin/local/bin/matlab",
        use_matlab=True,
        spm_directory="/i2bm/local/spm8",
        use_spm=True,
        output_directory=working_dir,
        number_of_cpus=1,
        generate_logging=True,
        use_scheduler=True,)

    """
    Load the toy dataset
    --------------------

    To do so, we use the get_sample_data function to download the toy
    dataset on the local file system (here localizer data):
    """
    toy_dataset = get_sample_data("localizer")
    template_dataset = get_sample_data("mni_1mm")

    """
    The toy_dataset is an Enum structure with some specific elements of
    interest:

        * fmri: the functional volume.
        * anat: the structural volume.
        * TR: the repetition time.

    Processing definition
    ---------------------

    First create the
    :ref:`slice timing pipeline <clinfmri.preproc.FmriPreproc>` that
    defines the different steps of the processing:
    """
    pipeline = get_process_instance("clinfmri.preproc.converted_fmri_preproc")
    print(pipeline.get_input_spec())

    """
    Now we need to parametrize this pipeline:
    """
    pipeline.fmri_file = toy_dataset.fmri
    pipeline.structural_file = toy_dataset.anat
    pipeline.realign_register_to_mean = True
    pipeline.select_slicer = "spm"
    pipeline.select_normalization = "fmri"
    pipeline.template_file = template_dataset.brain
    pipeline.force_repetition_time = toy_dataset.TR
    pipeline.force_slice_orders = [index + 1 for index in range(40)]

    """
    It is possible to display the pipeline.
    """
    if enable_display:
        import sys
        from PySide import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        view = PipelineDevelopperView(pipeline)
        view.show()
        app.exec_()

    """
    The pipeline is now ready to be run:
    """
    study_config.run(pipeline, executer_qc_nodes=False, verbose=1)

    """
    Results
    -------

    Finally, we print the pipeline outputs:
    """
    print("\nOUTPUTS\n")
    for trait_name, trait_value in pipeline.get_outputs().items():
        print("{0}: {1}".format(trait_name, trait_value))
Example #34
# Create the study configuration
print "Study_config init..."
study_config = StudyConfig(
    modules=["MatlabConfig", "SPMConfig", "NipypeConfig"],
    use_smart_caching=False,
    use_matlab=False,
    use_spm=True,
    spm_exec=args.spmbin,
    spm_standalone=True,
    use_nipype=True,
    output_directory=capsulwd)
print "    ... done."

# Get the pipeline
pipeline = get_process_instance("clinfmri.utils.spm_new_segment_only.xml")

# Configure the pipeline
pipeline.channel_files = [args.t1file]
# to find the template TPM.nii from the standalone distrib
pipeline.spm_dir = args.spmdir

# Execute the pipeline
study_config.run(pipeline, verbose=1)

# Keep only data of interest
batch = os.path.join(capsulwd, "3-NewSegment", "pyscript_newsegment.m")
images = glob.glob(os.path.join(capsulwd, "1-ungzipfnames", "*.nii"))
images = [item for item in images
          if not os.path.basename(item).startswith(("u", "iy_"))]
mat = glob.glob(os.path.join(capsulwd, "1-ungzipfnames", "*.mat"))[0]
        self.assertEqual(
            getattr(self.process, "string"),
            "ALL FUNCTION PARAMETERS::\n\nfnamedirectory1.2choice1['a_string']")

def test():
    """ Function to execute unitest
    """
    suite1 = unittest.TestLoader().loadTestsFromTestCase(
        TestLoadFromDescription)
    runtime1 = unittest.TextTestRunner(verbosity=2).run(suite1)
    suite2 = unittest.TestLoader().loadTestsFromTestCase(TestProcessWrap)
    runtime2 = unittest.TextTestRunner(verbosity=2).run(suite2)
    return runtime1.wasSuccessful() and runtime2.wasSuccessful()


if __name__ == "__main__":
    print "RETURNCODE: ", test()

    if True:
        import sys
        from PyQt4 import QtGui
        from capsul.qt_gui.widgets import PipelineDevelopperView

        app = QtGui.QApplication(sys.argv)
        pipeline = get_process_instance('capsul.process.test.test_pipeline')
        view1 = PipelineDevelopperView(pipeline, show_sub_pipelines=True,
                                       allow_open_controller=True)
        view1.show()
        app.exec_()
        del view1