Code example #1
File: controller.py  Project: populse/soma-base
    def __init__(self, *args, **kwargs):
        """ Initilaize the Controller class.

        During the class initialization create a class attribute
        '_user_traits' that contains all the class traits and instance traits
        defined by user (i.e.  the traits that are not automatically
        defined by HasTraits or Controller). We can access this class
        parameter with the 'user_traits' method.

        If user trait parameters are defined directly on derived class, this
        procedure call the 'add_trait' method in order to not share
        user traits between instances.
        """
        # Inheritance
        super(Controller, self).__init__(*args, **kwargs)

        # Create a sorted dictionary with the user parameters.
        # The dictionary order corresponds to the definition order.
        self._user_traits = SortedDictionary()

        # Get all the class traits
        class_traits = self.class_traits()

        # If some traits are defined on the controller, build a list of
        # trait names in definition order. These names will become the keys
        # of the user trait sorted dictionary.
        if class_traits:
            
            sorted_names = []
            for name, trait in six.iteritems(class_traits):
                if self.is_user_trait(trait):
                    if getattr(trait, 'order', None):
                        # Only if trait.order exists and is not None
                        sorted_names.append((getattr(trait, 'order'), name))
                    else:
                        sorted_names.append((-1, name))
                    
            sorted_names = [sorted_name[1] for sorted_name in sorted(sorted_names)]

            # Go through all trait names that have been ordered
            for name in sorted_names:

                # If the trait is defined on the class, we need to clone
                # the class trait and add the cloned trait to the instance.
                # This step avoids sharing trait objects between
                # instances.
                if name in self.__base_traits__:
                    logger.debug("Add class parameter '{0}'.".format(name))
                    trait = class_traits[name]
                    self.add_trait(name, self._clone_trait(trait))

                # If the trait is defined on the instance, just
                # add the user parameter to the '_user_traits' instance
                # parameter
                else:
                    logger.debug("Add instance parameter '{0}'.".format(name))
                    self._user_traits[name] = class_traits[name]
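
A minimal usage sketch of the ordering behaviour implemented above, assuming soma.controller exposes Controller as in this project; the subclass and trait names are invented for illustration. Traits carrying an 'order' metadata value end up in user_traits() in that order.

from traits.api import Int, Str
from soma.controller import Controller

class MyController(Controller):
    # 'order' metadata drives the position in the user_traits() dictionary
    threshold = Int(0, order=2)
    subject = Str('', order=1)

c = MyController()
print(list(c.user_traits().keys()))   # expected: ['subject', 'threshold']
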
Code example #2
    def test_sorted_dictionary(self):
        d1 = SortedDictionary(('titi', {
            'bubu': '50',
            'turlute': 12
        }), ('toto', 'val"u\'e'), ('tutu', [0, 1, 2, [u'papa', 5]]))
        d2 = SortedDictionary(('tutu', [0, 1, 2, [u'papa', 5]]),
                              ('toto', 'val"u\'e'), ('titi', {
                                  'bubu': '50',
                                  'turlute': 12
                              }))

        self.assertEqual(dict(d1), dict(d2))
        self.assertNotEqual(d1.keys(), d2.keys())

        d1['titi'] = 'babar'
        d2['titi'] = 'bubur'
        self.assertNotEqual(dict(d1), dict(d2))
        d2['titi'] = 'babar'
        self.assertEqual(dict(d1), dict(d2))
        d1['ababo'] = 43.65
        self.assertEqual(d1.keys(), ['titi', 'toto', 'tutu', 'ababo'])

        del d1['titi']
        del d1['ababo']
        del d2['titi']
        self.assertEqual(dict(d1), dict(d2))
        self.assertEqual(d2.keys(), ['tutu', 'toto'])
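
For reference, a minimal sketch of the insertion-order behaviour this test relies on, assuming soma.sorted_dictionary provides SortedDictionary as used above; keys and values are invented.

from soma.sorted_dictionary import SortedDictionary

d = SortedDictionary(('b', 2), ('a', 1))
d['c'] = 3
print(d.keys())   # ['b', 'a', 'c'] -- definition order is preserved
del d['a']
print(d.keys())   # ['b', 'c']
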
Code example #3
    def initialize_module(self):
        '''Load configured FOMs and create FOM completion data in
        self.study_config.modules_data
        '''
        if self.study_config.use_fom is False:
            return

        soma_app = Application('capsul', plugin_modules=['soma.fom'])
        if 'soma.fom' not in soma_app.loaded_plugin_modules:
            # WARNING: this is unsafe, may erase configured things, and
            # probably not thread-safe.
            soma_app.initialize()
        self.study_config.modules_data.foms = {}
        self.study_config.modules_data.all_foms = SortedDictionary()
        self.study_config.modules_data.fom_atp = {'all': {}}
        self.study_config.modules_data.fom_pta = {'all': {}}

        foms = (('input', self.study_config.input_fom),
                ('output', self.study_config.output_fom),
                ('shared', self.study_config.shared_fom))
        for fom_type, fom_filename in foms:
            if fom_filename != "":
                fom, atp, pta = self.load_fom(fom_filename)
                self.study_config.modules_data.foms[fom_type] = fom
                self.study_config.modules_data.fom_atp[fom_type] = atp
                self.study_config.modules_data.fom_pta[fom_type] = pta

        self.study_config.use_fom = True
        self.update_module()
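
A hedged configuration sketch for the module above: initialize_module() only runs when use_fom is enabled and reads input_fom / output_fom / shared_fom from the StudyConfig. The StudyConfig import path and the FOM names below are assumptions used for illustration, not real FOM files.

from capsul.api import StudyConfig

study_config = StudyConfig(modules=['FomConfig'])  # load the FOM module
study_config.use_fom = True
study_config.input_fom = 'some_input_fom'     # placeholder FOM name
study_config.output_fom = 'some_output_fom'   # placeholder FOM name
study_config.shared_fom = ''                  # empty string: not loaded
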
Code example #4
    def setUp(self):
        default_config = SortedDictionary(
            ("use_soma_workflow", True)
        )
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()
Code example #5
File: pipeline_nodes.py  Project: nsouedet/capsul
    def __init__(self, pipeline, name, inputs, outputs):
        """ Generate a Node

        Parameters
        ----------
        pipeline: Pipeline (mandatory)
            the pipeline object where the node is added
        name: str (mandatory)
            the node name
        inputs: list of dict (mandatory)
            a list of dictionaries describing the input parameters and their
            default values (mandatory key: 'name')
        outputs: list of dict (mandatory)
            a list of dictionaries describing the output parameters and their
            default values (mandatory key: 'name')
        """
        super(Node, self).__init__()
        self.pipeline = pipeline
        self.name = name
        self.plugs = SortedDictionary()
        # _callbacks -> (src_plug_name, dest_node, dest_plug_name)
        self._callbacks = {}

        # generate a list with all the inputs and outputs
        # the second parameter (parameter_type) is False for an input,
        # True for an output
        parameters = zip(inputs, [False, ] * len(inputs))
        parameters.extend(zip(outputs, [True, ] * len(outputs)))
        for parameter, parameter_type in parameters:
            # check if parameter is a dictionary as specified in the
            # docstring
            if isinstance(parameter, dict):
                # check if parameter contains a name item
                # as specified in the docstring
                if "name" not in parameter:
                    raise Exception("Can't create parameter with unknown"
                                    "identifier and parameter {0}".format(
                                        parameter))
                parameter = parameter.copy()
                plug_name = parameter.pop("name")
                # force the parameter type
                parameter["output"] = parameter_type
                # generate plug with input parameter and identifier name
                plug = Plug(**parameter)
            else:
                raise Exception("Can't create Node. Expect a dict structure "
                                "to initialize the Node, "
                                "got {0}: {1}".format(type(parameter),
                                                      parameter))
            # update plugs list
            self.plugs[plug_name] = plug
            # add an event on plug to validate the pipeline
            plug.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                                 "enabled")

        # add an event on the Node instance traits to validate the pipeline
        self.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                             "enabled")
Code example #6
File: test_soma_workflow.py  Project: M40V/capsul
    def setUp(self):
        default_config = SortedDictionary(("use_soma_workflow", True))
        # use a custom temporary soma-workflow dir to avoid concurrent
        # access problems
        tmpdb = tempfile.mkstemp('', prefix='soma_workflow')
        os.close(tmpdb[0])
        os.unlink(tmpdb[1])
        self.soma_workflow_temp_dir = tmpdb[1]
        os.mkdir(self.soma_workflow_temp_dir)
        swf_conf = '[%s]\nSOMA_WORKFLOW_DIR = %s\n' \
            % (socket.gethostname(), tmpdb[1])
        swconfig.Configuration.search_config_path \
            = staticmethod(lambda: StringIO.StringIO(swf_conf))
        self.study_config = StudyConfig(init_config=default_config)
        self.atomic_pipeline = MyAtomicPipeline()
        self.composite_pipeline = MyCompositePipeline()
Code example #7
File: pipeline_nodes.py  Project: nsouedet/capsul
class Node(Controller):
    """ Basic Node structure of the pipeline that need to be tuned.

    Attributes
    ----------
    name : str
        the node name
    full_name : str
        a unique name among all nodes and sub-nodes of the top level pipeline
    enabled : bool
        user parameter to control the node activation
    activated : bool
        parameter describing the node status

    Methods
    -------
    connect
    set_callback_on_plug
    get_plug_value
    set_plug_value
    get_trait
    """
    name = Str()
    enabled = Bool(default_value=True)
    activated = Bool(default_value=False)
    node_type = Enum(("processing_node", "view_node"))

    def __init__(self, pipeline, name, inputs, outputs):
        """ Generate a Node

        Parameters
        ----------
        pipeline: Pipeline (mandatory)
            the pipeline object where the node is added
        name: str (mandatory)
            the node name
        inputs: list of dict (mandatory)
            a list of dictionaries describing the input parameters and their
            default values (mandatory key: 'name')
        outputs: list of dict (mandatory)
            a list of dictionaries describing the output parameters and their
            default values (mandatory key: 'name')
        """
        super(Node, self).__init__()
        self.pipeline = pipeline
        self.name = name
        self.plugs = SortedDictionary()
        # _callbacks -> (src_plug_name, dest_node, dest_plug_name)
        self._callbacks = {}

        # generate a list with all the inputs and outputs
        # the second parameter (parameter_type) is False for an input,
        # True for an output
        parameters = zip(inputs, [False, ] * len(inputs))
        parameters.extend(zip(outputs, [True, ] * len(outputs)))
        for parameter, parameter_type in parameters:
            # check if parameter is a dictionary as specified in the
            # docstring
            if isinstance(parameter, dict):
                # check if parameter contains a name item
                # as specified in the docstring
                if "name" not in parameter:
                    raise Exception("Can't create parameter with unknown"
                                    "identifier and parameter {0}".format(
                                        parameter))
                parameter = parameter.copy()
                plug_name = parameter.pop("name")
                # force the parameter type
                parameter["output"] = parameter_type
                # generate plug with input parameter and identifier name
                plug = Plug(**parameter)
            else:
                raise Exception("Can't create Node. Expect a dict structure "
                                "to initialize the Node, "
                                "got {0}: {1}".format(type(parameter),
                                                      parameter))
            # update plugs list
            self.plugs[plug_name] = plug
            # add an event on plug to validate the pipeline
            plug.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                                 "enabled")

        # add an event on the Node instance traits to validate the pipeline
        self.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                             "enabled")

    @property
    def full_name(self):
        if self.pipeline.parent_pipeline:
            return self.pipeline.pipeline_node.full_name + '.' + self.name
        else:
            return self.name

    def _value_callback(self, source_plug_name, dest_node, dest_plug_name,
                        value):
        """ Spread the source plug value to the destination plug.
        """
        dest_node.set_plug_value(dest_plug_name, value)

    def _value_callback_with_logging(
            self, log_stream, prefix, source_plug_name, dest_node,
            dest_plug_name, value):
        """ Spread the source plug value to the destination plug, and log it in
        a stream for debugging.
        """
        #print '(debug) value changed:', self, self.name, source_plug_name, dest_node, dest_plug_name, repr(value), ', stream:', log_stream, prefix

        plug = self.plugs.get(source_plug_name, None)
        if plug is None:
            return
        def _link_name(dest_node, plug, prefix, dest_plug_name,
                       source_node_or_process):
            external = True
            sibling = False
            # check if it is an external link: if source is not a parent of dest
            if hasattr(source_node_or_process, 'process') \
                    and hasattr(source_node_or_process.process, 'nodes'):
                source_process = source_node_or_process
                source_node = source_node_or_process.process.pipeline_node
                children = [x for k, x in source_node.process.nodes.items()
                            if x != '']
                if dest_node in children:
                    external = False
            # check if it is a sibling node:
            # if external and source is not in dest
            if external:
                sibling = True
                #print >> open('/tmp/linklog.txt', 'a'), 'check sibling, prefix:', prefix, 'source:', source_node_or_process, ', dest_plug_name:', dest_plug_name, 'dest_node:', dest_node, dest_node.name
                if hasattr(dest_node, 'process') \
                        and hasattr(dest_node.process, 'nodes'):
                    children = [x for k, x in dest_node.process.nodes.items()
                                if x != '']
                    if source_node_or_process in children:
                        sibling = False
                    else:
                        children = [
                            x.process for x in children \
                            if hasattr(x, 'process')]
                    if source_node_or_process in children:
                        sibling = False
                #print 'sibling:', sibling
            if external:
                if sibling:
                    name = '.'.join(prefix.split('.')[:-2] \
                        + [dest_node.name, dest_plug_name])
                else:
                    name = '.'.join(prefix.split('.')[:-2] + [dest_plug_name])
            else:
                # internal connection in a (sub) pipeline
                name = prefix + dest_node.name
                if name != '' and not name.endswith('.'):
                    name += '.'
                name += dest_plug_name
            return name
        dest_plug = dest_node.plugs[dest_plug_name]
        #print >> open('/tmp/linklog.txt', 'a'), 'link_name:',  self, repr(self.name), ', prefix:', repr(prefix), ', source_plug_name:', source_plug_name, 'dest:', dest_plug, repr(dest_plug_name), 'dest node:', dest_node, repr(dest_node.name)
        print >> log_stream, 'value link:', \
            'from:', prefix + source_plug_name, \
            'to:', _link_name(dest_node, dest_plug, prefix, dest_plug_name,
                              self), \
            ', value:', repr(value) #, 'self:', self, repr(self.name), ', prefix:',repr(prefix), ', source_plug_name:', source_plug_name, 'dest:', dest_plug, repr(dest_plug_name), 'dest node:', dest_node, repr(dest_node.name)
        log_stream.flush()

        # actually propagate
        dest_node.set_plug_value(dest_plug_name, value)

    def connect(self, source_plug_name, dest_node, dest_plug_name):
        """ Connect linked plugs of two nodes

        Parameters
        ----------
        source_plug_name: str (mandatory)
            the source plug name
        dest_node: Node (mandatory)
            the destination node
        dest_plug_name: str (mandatory)
            the destination plug name
        """
        # add a callback to spread the source plug value
        value_callback = SomaPartial(self._value_callback, source_plug_name,
                                     dest_node, dest_plug_name)
        self._callbacks[(source_plug_name, dest_node,
                         dest_plug_name)] = value_callback
        self.set_callback_on_plug(source_plug_name, value_callback)

    def disconnect(self, source_plug_name, dest_node, dest_plug_name):
        """ disconnect linked plugs of two nodes

        Parameters
        ----------
        source_plug_name: str (mandatory)
            the source plug name
        dest_node: Node (mandatory)
            the destination node
        dest_plug_name: str (mandatory)
            the destination plug name
        """
        # remove the callback to spread the source plug value
        callback = self._callbacks.pop((source_plug_name, dest_node,
                                        dest_plug_name))
        self.remove_callback_from_plug(source_plug_name, callback)

    def __getstate__(self):
        """ Remove the callbacks from the default __getstate__ result because
        they prevent Node instance from being used with pickle.
        """
        state = super(Node, self).__getstate__()
        state['_callbacks'] = state['_callbacks'].keys()
        return state

    def __setstate__(self, state):
        """ Restore the callbacks that have been removed by __getstate__.
        """
        state['_callbacks'] = dict((i, SomaPartial(self._value_callback, *i))
                                   for i in state['_callbacks'])
        super(Node, self).__setstate__(state)
        for callback_key, value_callback in self._callbacks.iteritems():
            self.set_callback_on_plug(callback_key[0], value_callback)

    def set_callback_on_plug(self, plug_name, callback):
        """ Add an event when a plug change

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name
        callback: @f (mandatory)
            a callback function
        """
        self.on_trait_change(callback, plug_name)

    def remove_callback_from_plug(self, plug_name, callback):
        """ Remove an event when a plug change

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name
        callback: @f (mandatory)
            a callback function
        """
        self.on_trait_change(callback, plug_name, remove=True)

    def get_plug_value(self, plug_name):
        """ Return the plug value

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name

        Returns
        -------
        output: object
            the plug value
        """
        return getattr(self, plug_name)

    def set_plug_value(self, plug_name, value):
        """ Set the plug value

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name
        value: object (mandatory)
            the plug value we want to set
        """
        setattr(self, plug_name, value)

    def get_trait(self, trait_name):
        """ Return the desired trait

        Parameters
        ----------
        trait_name: str (mandatory)
            a trait name

        Returns
        -------
        output: trait
            the trait named trait_name
        """
        return self.trait(trait_name)
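
This snapshot is Python-2 era code (print >> stream, dict.iteritems, list-returning zip); code example #8 below shows the later revision. A hedged usage sketch of the connect()/disconnect() API documented above: a value callback is registered on the source plug so that changes propagate to the destination. The node instances and plug names are assumptions; in practice these calls are driven by Pipeline.add_link.

# assumed: node_a has an output plug 'result', node_b an input plug 'image'
node_a.connect('result', node_b, 'image')      # register the value callback
node_a.set_plug_value('result', '/tmp/result.nii')
print(node_b.get_plug_value('image'))          # expected to follow node_a
node_a.disconnect('result', node_b, 'image')   # remove the callback
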
Code example #8
File: pipeline_nodes.py  Project: nsouedet/capsul
class Node(Controller):
    """ Basic Node structure of the pipeline that need to be tuned.

    Attributes
    ----------
    name : str
        the node name
    full_name : str
        a unique name among all nodes and sub-nodes of the top level pipeline
    enabled : bool
        user parameter to control the node activation
    activated : bool
        parameter describing the node status

    Methods
    -------
    connect
    set_callback_on_plug
    get_plug_value
    set_plug_value
    get_trait
    """
    name = Str()
    enabled = Bool(default_value=True)
    activated = Bool(default_value=False)
    node_type = Enum(("processing_node", "view_node"))

    def __init__(self, pipeline, name, inputs, outputs):
        """ Generate a Node

        Parameters
        ----------
        pipeline: Pipeline (mandatory)
            the pipeline object where the node is added
        name: str (mandatory)
            the node name
        inputs: list of dict (mandatory)
            a list of dictionaries describing the input parameters and their
            default values (mandatory key: 'name')
        outputs: list of dict (mandatory)
            a list of dictionaries describing the output parameters and their
            default values (mandatory key: 'name')
        """
        super(Node, self).__init__()
        self.pipeline = weak_proxy(pipeline)
        self.name = name
        self.plugs = SortedDictionary()
        # _callbacks -> (src_plug_name, dest_node, dest_plug_name)
        self._callbacks = {}

        # generate a list with all the inputs and outputs
        # the second parameter (parameter_type) is False for an input,
        # True for an output
        parameters = list(zip(inputs, [
            False,
        ] * len(inputs)))
        parameters.extend(list(zip(outputs, [
            True,
        ] * len(outputs))))
        for parameter, parameter_type in parameters:
            # check if parameter is a dictionary as specified in the
            # docstring
            if isinstance(parameter, dict):
                # check if parameter contains a name item
                # as specified in the docstring
                if "name" not in parameter:
                    raise Exception(
                        "Can't create parameter with unknown"
                        "identifier and parameter {0}".format(parameter))
                parameter = parameter.copy()
                plug_name = parameter.pop("name")
                # force the parameter type
                parameter["output"] = parameter_type
                # generate plug with input parameter and identifier name
                plug = Plug(**parameter)
            else:
                raise Exception("Can't create Node. Expect a dict structure "
                                "to initialize the Node, "
                                "got {0}: {1}".format(type(parameter),
                                                      parameter))
            # update plugs list
            self.plugs[plug_name] = plug
            # add an event on plug to validate the pipeline
            plug.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                                 "enabled")

        # add an event on the Node instance traits to validate the pipeline
        self.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                             "enabled")

    @property
    def full_name(self):
        if self.pipeline.parent_pipeline:
            return self.pipeline.pipeline_node.full_name + '.' + self.name
        else:
            return self.name

    @staticmethod
    def _value_callback(self, source_plug_name, dest_node, dest_plug_name,
                        value):
        """ Spread the source plug value to the destination plug.
        """
        dest_node.set_plug_value(dest_plug_name, value)

    def _value_callback_with_logging(self, log_stream, prefix,
                                     source_plug_name, dest_node,
                                     dest_plug_name, value):
        """ Spread the source plug value to the destination plug, and log it in
        a stream for debugging.
        """
        #print '(debug) value changed:', self, self.name, source_plug_name, dest_node, dest_plug_name, repr(value), ', stream:', log_stream, prefix

        plug = self.plugs.get(source_plug_name, None)
        if plug is None:
            return

        def _link_name(dest_node, plug, prefix, dest_plug_name,
                       source_node_or_process):
            external = True
            sibling = False
            # check if it is an external link: if source is not a parent of dest
            if hasattr(source_node_or_process, 'process') \
                    and hasattr(source_node_or_process.process, 'nodes'):
                source_process = source_node_or_process
                source_node = source_node_or_process.process.pipeline_node
                children = [
                    x for k, x in source_node.process.nodes.items() if x != ''
                ]
                if dest_node in children:
                    external = False
            # check if it is a sibling node:
            # if external and source is not in dest
            if external:
                sibling = True
                #print >> open('/tmp/linklog.txt', 'a'), 'check sibling, prefix:', prefix, 'source:', source_node_or_process, ', dest_plug_name:', dest_plug_name, 'dest_node:', dest_node, dest_node.name
                if hasattr(dest_node, 'process') \
                        and hasattr(dest_node.process, 'nodes'):
                    children = [
                        x for k, x in dest_node.process.nodes.items()
                        if x != ''
                    ]
                    if source_node_or_process in children:
                        sibling = False
                    else:
                        children = [
                            x.process for x in children \
                            if hasattr(x, 'process')]
                    if source_node_or_process in children:
                        sibling = False
                #print 'sibling:', sibling
            if external:
                if sibling:
                    name = '.'.join(prefix.split('.')[:-2] \
                        + [dest_node.name, dest_plug_name])
                else:
                    name = '.'.join(prefix.split('.')[:-2] + [dest_plug_name])
            else:
                # internal connection in a (sub) pipeline
                name = prefix + dest_node.name
                if name != '' and not name.endswith('.'):
                    name += '.'
                name += dest_plug_name
            return name

        dest_plug = dest_node.plugs[dest_plug_name]
        #print >> open('/tmp/linklog.txt', 'a'), 'link_name:',  self, repr(self.name), ', prefix:', repr(prefix), ', source_plug_name:', source_plug_name, 'dest:', dest_plug, repr(dest_plug_name), 'dest node:', dest_node, repr(dest_node.name)
        print('value link:', \
            'from:', prefix + source_plug_name, \
            'to:', _link_name(dest_node, dest_plug, prefix, dest_plug_name,
                              self), \
            ', value:', repr(value), file=log_stream) #, 'self:', self, repr(self.name), ', prefix:',repr(prefix), ', source_plug_name:', source_plug_name, 'dest:', dest_plug, repr(dest_plug_name), 'dest node:', dest_node, repr(dest_node.name)
        log_stream.flush()

        # actually propagate
        dest_node.set_plug_value(dest_plug_name, value)

    def connect(self, source_plug_name, dest_node, dest_plug_name):
        """ Connect linked plugs of two nodes

        Parameters
        ----------
        source_plug_name: str (mandatory)
            the source plug name
        dest_node: Node (mandatory)
            the destination node
        dest_plug_name: str (mandatory)
            the destination plug name
        """
        # add a callback to spread the source plug value
        value_callback = SomaPartial(self.__class__._value_callback,
                                     weak_proxy(self), source_plug_name,
                                     weak_proxy(dest_node), dest_plug_name)
        self._callbacks[(source_plug_name, dest_node,
                         dest_plug_name)] = value_callback
        self.set_callback_on_plug(source_plug_name, value_callback)

    def disconnect(self, source_plug_name, dest_node, dest_plug_name):
        """ disconnect linked plugs of two nodes

        Parameters
        ----------
        source_plug_name: str (mandatory)
            the source plug name
        dest_node: Node (mandatory)
            the destination node
        dest_plug_name: str (mandatory)
            the destination plug name
        """
        # remove the callback to spread the source plug value
        callback = self._callbacks.pop(
            (source_plug_name, dest_node, dest_plug_name))
        self.remove_callback_from_plug(source_plug_name, callback)

    def __getstate__(self):
        """ Remove the callbacks from the default __getstate__ result because
        they prevent Node instance from being used with pickle.
        """
        state = super(Node, self).__getstate__()
        state['_callbacks'] = state['_callbacks'].keys()
        return state

    def __setstate__(self, state):
        """ Restore the callbacks that have been removed by __getstate__.
        """
        state['_callbacks'] = dict((i, SomaPartial(self._value_callback, *i))
                                   for i in state['_callbacks'])
        super(Node, self).__setstate__(state)
        for callback_key, value_callback in six.iteritems(self._callbacks):
            self.set_callback_on_plug(callback_key[0], value_callback)

    def set_callback_on_plug(self, plug_name, callback):
        """ Add an event when a plug change

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name
        callback: @f (mandatory)
            a callback function
        """
        self.on_trait_change(callback, plug_name)

    def remove_callback_from_plug(self, plug_name, callback):
        """ Remove an event when a plug change

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name
        callback: @f (mandatory)
            a callback function
        """
        self.on_trait_change(callback, plug_name, remove=True)

    def get_plug_value(self, plug_name):
        """ Return the plug value

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name

        Returns
        -------
        output: object
            the plug value
        """
        return getattr(self, plug_name)

    def set_plug_value(self, plug_name, value):
        """ Set the plug value

        Parameters
        ----------
        plug_name: str (mandatory)
            a plug name
        value: object (mandatory)
            the plug value we want to set
        """
        setattr(self, plug_name, value)

    def get_trait(self, trait_name):
        """ Return the desired trait

        Parameters
        ----------
        trait_name: str (mandatory)
            a trait name

        Returns
        -------
        output: trait
            the trait named trait_name
        """
        return self.trait(trait_name)
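
Compared with code example #7, this revision wraps the pipeline and the connection callback arguments in weak proxies (and uses Python 3 compatible zip/print), so registering callbacks does not create reference cycles that keep nodes alive. A standalone plain-Python sketch of that pattern, with names invented for illustration:

import weakref
from functools import partial

class Dest(object):
    def set_plug_value(self, name, value):
        print('set %s = %r' % (name, value))

def value_callback(src, plug, dest, dest_plug, value):
    # 'src' and 'dest' are weak proxies: they do not keep the nodes alive
    dest.set_plug_value(dest_plug, value)

src, dest = Dest(), Dest()
cb = partial(value_callback, weakref.proxy(src), 'result',
             weakref.proxy(dest), 'image')
cb(42)   # -> set image = 42
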
Code example #9
File: pipeline_nodes.py  Project: nsouedet/capsul
    def __init__(self, pipeline, name, inputs, outputs):
        """ Generate a Node

        Parameters
        ----------
        pipeline: Pipeline (mandatory)
            the pipeline object where the node is added
        name: str (mandatory)
            the node name
        inputs: list of dict (mandatory)
            a list of dictionaries describing the input parameters and their
            default values (mandatory key: 'name')
        outputs: list of dict (mandatory)
            a list of dictionaries describing the output parameters and their
            default values (mandatory key: 'name')
        """
        super(Node, self).__init__()
        self.pipeline = weak_proxy(pipeline)
        self.name = name
        self.plugs = SortedDictionary()
        # _callbacks -> (src_plug_name, dest_node, dest_plug_name)
        self._callbacks = {}

        # generate a list with all the inputs and outputs
        # the second parameter (parameter_type) is False for an input,
        # True for an output
        parameters = list(zip(inputs, [
            False,
        ] * len(inputs)))
        parameters.extend(list(zip(outputs, [
            True,
        ] * len(outputs))))
        for parameter, parameter_type in parameters:
            # check if parameter is a dictionary as specified in the
            # docstring
            if isinstance(parameter, dict):
                # check if parameter contains a name item
                # as specified in the docstring
                if "name" not in parameter:
                    raise Exception(
                        "Can't create parameter with unknown"
                        "identifier and parameter {0}".format(parameter))
                parameter = parameter.copy()
                plug_name = parameter.pop("name")
                # force the parameter type
                parameter["output"] = parameter_type
                # generate plug with input parameter and identifier name
                plug = Plug(**parameter)
            else:
                raise Exception("Can't create Node. Expect a dict structure "
                                "to initialize the Node, "
                                "got {0}: {1}".format(type(parameter),
                                                      parameter))
            # update plugs list
            self.plugs[plug_name] = plug
            # add an event on plug to validate the pipeline
            plug.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                                 "enabled")

        # add an event on the Node instance traits to validate the pipeline
        self.on_trait_change(pipeline.update_nodes_and_plugs_activation,
                             "enabled")
Code example #10
File: controller.py  Project: M40V/soma-base
class Controller(HasTraits):

    """ A Controller contains some traits: attributes typing and observer
    (callback) pattern.

    The class provides some methods to add/remove/inspect user defined traits.

    Attributes
    ----------
    `user_traits_changed` : Event
        single event that can be sent when several trait changes occur. This
        event has to be triggered explicitly to take into account changes
        due to call(s) to add_trait or remove_trait.

    Methods
    -------
    user_traits
    is_user_trait
    add_trait
    remove_trait
    _clone_trait
    """

    # This event is necessary because there is no event when a trait is
    # removed with remove_trait and because it is sometimes better to send
    # a single event when several trait changes are done (especially
    # when the GUI is updated in real time). This event has to be triggered
    # explicitly to take into account changes due to call(s) to
    # add_trait or remove_trait.
    user_traits_changed = Event

    def __init__(self, *args, **kwargs):
        """ Initilaize the Controller class.

        During the class initialization create a class attribute
        '_user_traits' that contains all the class traits and instance traits
        defined by user (i.e.  the traits that are not automatically
        defined by HasTraits or Controller). We can access this class
        parameter with the 'user_traits' method.

        If user trait parameters are defined directly on derived class, this
        procedure call the 'add_trait' method in order to not share
        user traits between instances.
        """
        # Inheritance
        super(Controller, self).__init__(*args, **kwargs)

        # Create a sorted dictionary with the user parameters.
        # The dictionary order corresponds to the definition order.
        self._user_traits = SortedDictionary()

        # Get all the class traits
        class_traits = self.class_traits()

        # If some traits are defined on the controller, build a list of
        # trait names in definition order. These names will become the keys
        # of the user trait sorted dictionary.
        if class_traits:
            sorted_names = sorted(
                (getattr(trait, "order", ""), name)
                for name, trait in six.iteritems(class_traits)
                if self.is_user_trait(trait))
            sorted_names = [sorted_name[1] for sorted_name in sorted_names]

            # Go through all trait names that have been ordered
            for name in sorted_names:

                # If the trait is defined on the class, we need to clone
                # the class trait and add the cloned trait to the instance.
                # This step avoids sharing trait objects between
                # instances.
                if name in self.__base_traits__:
                    logger.debug("Add class parameter '{0}'.".format(name))
                    trait = class_traits[name]
                    self.add_trait(name, self._clone_trait(trait))

                # If the trait is defined on the instance, just
                # add the user parameter to the '_user_traits' instance
                # parameter
                else:
                    logger.debug("Add instance parameter '{0}'.".format(name))
                    self._user_traits[name] = class_traits[name]

    #
    # Private methods
    #

    def _clone_trait(self, clone, metadata=None):
        """ Creates a clone of a specific trait (ie. the same trait
        type but different ids).

        Parameters
        ----------
        clone: CTrait (mandatory)
            the input trait to clone.
        metadata: dict (optional, default None)
            some metadata that can be added to the trait __dict__.

        Returns
        -------
        trait: CTrait
            the cloned input trait.
        """
        # Create an empty trait
        trait = CTrait(0)

        # we need a CTrait, not a TraitType
        if isinstance(clone, TraitType):
            clone = clone.as_ctrait()

        # Clone the input trait in the empty trait structure
        trait.clone(clone)

        # Set the input trait __dict__ elements to the cloned trait
        # __dict__
        if clone.__dict__ is not None:
            trait.__dict__ = clone.__dict__.copy()

        # Update the cloned trait __dict__ if necessary
        if metadata is not None:
            trait.__dict__.update(metadata)

        return trait

    def _propagate_optional_parameter(self, trait, optional=None):
        """
        """
        # Get the trait class name
        if hasattr(trait, 'handler'):
            handler = trait.handler or trait
        else:
            handler = trait # hope it is already a handler
        main_id = handler.__class__.__name__
        if main_id == "TraitCoerceType":
            real_id = _type_to_trait_id.get(handler.aType)
            if real_id:
                main_id = real_id

        # Debug message
        logger.debug("Propagation optional parameter of trait with main id %s",
                     main_id)

        # Get the optional parameter and set the default value if necessary
        if optional is not None:
            trait.optional = optional
        else:
            optional = trait.optional
            if optional is None:
                optional = False
                trait.optional = optional

        # Either case
        if main_id in ["Either", "TraitCompound"]:

            # Debug message
            logger.debug("A coumpound trait has been found %s", repr(
                handler.handlers))

            # Update each trait compound optional parameter
            for sub_trait in handler.handlers:
                if not isinstance(sub_trait, (TraitInstance, TraitCoerceType)):
                    sub_trait = sub_trait()
                self._propagate_optional_parameter(sub_trait, optional)

        # Default case
        else:
            # FIXME may recurse indefinitely if the trait is recursive
            for inner_trait in handler.inner_traits():
                self._propagate_optional_parameter(inner_trait, optional)

    #
    # Public methods
    #

    def user_traits(self):
        """ Method to access the user parameters.

        Returns
        -------
        out: dict
            a dictionary containing class traits and instance traits
            defined by the user (i.e. the traits that are not automatically
            defined by HasTraits or Controller). Returned values are
            sorted according to the 'order' trait meta-attribute.
        """
        return self._user_traits

    def is_user_trait(self, trait):
        """ Method that evaluate if a trait is a user parameter
        (i.e. not an Event).

        Returns
        -------
        out: bool
            True if the trait is a user trait,
            False otherwise.
        """
        return not isinstance(trait.handler, Event)


    @staticmethod
    def checked_trait(trait):
        """ Check the trait and build a new one if needed.

        This function mainly checks the default value of the given trait,
        and tests in some ways whether it is valid or not. If not, a new
        trait is created to replace it.

        For now it just checks that lists with a non-null minlen will actually
        get a default value which is a list with this minimum size. Otherwise
        it causes exceptions in the traits notification system at some point.

        Parameters
        ----------
        trait: Trait instance to be checked

        Returns
        -------
        new_trait: Trait instance
            the returned trait may be the input one (trait), or a new one if
            it had to be modified.
        """
        ut = getattr(trait, 'trait_type', trait)
        if isinstance(ut, traits.List):
            if ut.minlen != 0 and (not isinstance(ut.default, list)
                                   or len(ut.default) < ut.minlen):
                # default value is not OK, we have to build another one
                if isinstance(ut.default, list):
                    default = list(ut.default)
                else:
                    default = []
                default += [ut.item_trait.default] * (ut.minlen - len(default))
                trait = traits.List(ut.item_trait, default, minlen = ut.minlen,
                                    maxlen=ut.maxlen)
        return trait


    def add_trait(self, name, *trait):
        """ Add a new trait.

        Parameters
        ----------
        name: str (mandatory)
            the trait name.
        trait: traits.api (mandatory)
            a valid trait.
        """
        # Debug message
        logger.debug("Adding trait '{0}'...".format(name))

        # check trait default value inconsistencies
        trait = (self.checked_trait(trait[0]), ) + trait[1:]

        # Inheritance: create the instance trait attribute
        super(Controller, self).add_trait(name, *trait)

        # Get the trait instance and, if it is a user trait, load the traits
        # to get it directly from the instance (as a property) and add it
        # to the class '_user_traits' attribute
        trait_instance = self.trait(name)
        if self.is_user_trait(trait_instance):
            #trait_instance.defaultvalue = trait_instance.default
            #try:
                #self.get(name)
            #except TraitError:
                ## default value is invalid
                #try:
                    #setattr(self, name, Undefined)
                #except TraitError:
                    ## Undefined is invalid, too...
                    #pass
            self._user_traits[name] = trait_instance

        # Update/set the optional trait parameter
        self._propagate_optional_parameter(trait_instance)
        self.user_traits_changed = True

    def remove_trait(self, name):
        """ Remove a trait from its name.

        Parameters
        ----------
        name: str (mandatory)
            the trait name to remove.
        """
        # Debug message
        logger.debug("Removing trait '{0}'...".format(name))

        # Call the Traits remove_trait method
        super(Controller, self).remove_trait(name)

        # Remove name from the '_user_traits' without error if it
        # is not present
        self._user_traits.pop(name, None)
        self.user_traits_changed = True

    def export_to_dict(self, exclude_undefined=False,
                       exclude_transient=False,
                       exclude_none=False,
                       exclude_empty=False,
                       dict_class=OrderedDict):
        """ return the controller state to a dictionary, replacing controller
        values in sub-trees to dicts also.

        Parameters
        ----------
        exclude_undefined: bool (optional)
            if set, do not export Undefined values
        exclude_transient: bool (optional)
            if set, do not export values whose trait is marked "transient"
        exclude_none: bool (optional)
            if set, do not export None values
        exclude_empty: bool (optional)
            if set, do not export empty lists/dicts values
        dict_class: class type (optional, default: soma.sorted_dictionary.OrderedDict)
            use this type of mapping type to represent controllers. It should
            follow the mapping protocol API.
        """
        return controller_to_dict(self, exclude_undefined=exclude_undefined,
                                  exclude_transient=exclude_transient,
                                  exclude_none=exclude_none,
                                  exclude_empty=exclude_empty,
                                  dict_class=dict_class)

    def import_from_dict(self, state_dict, clear=False):
        """ Set Controller variables from a dictionary. When setting values on
        Controller instances (in the Controller sub-tree), replace dictionaries
        by Controller instances appropriately.

        Parameters
        ----------
        state_dict: dict, sorted_dictionary or OrderedDict
            dict containing the variables to set
        clear: bool (optional, default: False)
            if True, older values (in keys not listed in state_dict) will be
            cleared, otherwise they are left in place.
        """
        if clear:
            for trait_name in self.user_traits():
                if trait_name not in state_dict:
                    delattr(self, trait_name)
        for trait_name, value in six.iteritems(state_dict):
            trait = self.trait(trait_name)
            if trait is None and not isinstance(self, OpenKeyController):
                raise KeyError(
                    "item %s is not a trait in the Controller" % trait_name)
            if isinstance(trait.trait_type, Instance) \
                    and issubclass(trait.trait_type.klass, Controller):
                controller = trait.trait_type.create_default_value(
                    trait.trait_type.klass)
                controller.import_from_dict(value)
            else:
                # check trait type for conversions
                tr = self.trait(trait_name)
                if tr and isinstance(tr.trait_type, Set):
                    setattr(self, trait_name, set(value))
                else:
                    setattr(self, trait_name, value)

    def copy(self, with_values=True):
        """ Copy traits definitions to a new Controller object

        Parameters
        ----------
        with_values: bool (optional, default: True)
            if True, trait values will be copied, otherwise the default trait
            value will be left in the copy.

        Returns
        -------
        copied: Controller instance
            the returned copy will have the same class as the copied object
            (which may be a derived class from Controller). Traits definitions
            will be copied. Traits values will only be copied if with_values is
            True.
        """
        import copy

        initargs = ()
        if hasattr(self, '__getinitargs__'):
            # if the Controller class is subclassed and needs init parameters
            initargs = self.__getinitargs__()
        copied = self.__class__(*initargs)
        for name, trait in six.iteritems(self.user_traits()):
            copied.add_trait(name, self._clone_trait(trait))
            if with_values:
                setattr(copied, name, getattr(self, name))
        return copied

    def reorder_traits(self, traits_list):
        """ Reorder traits in the controller according to a new ordered list.

        If the new list does not contain all user traits, the remaining ones
        will be appended at the end.

        Parameters
        ----------
        traits_list: list
            New list of trait names. This list order will be kept.
        """
        former_traits = set(self._user_traits.sortedKeys)
        for t in traits_list:
            if t not in former_traits:
                raise ValueError("parameter %s is not is Controller traits."
                                 % t)
        new_traits = list(traits_list)
        done = set(new_traits)
        for t in self._user_traits.sortedKeys:
            if t not in done:
                new_traits.append(t)
        self._user_traits.sortedKeys = new_traits
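
A hypothetical end-to-end sketch of the public API documented above (add_trait, user_traits, reorder_traits, export_to_dict, remove_trait); the trait names and values are invented, and the import path assumes soma.controller exposes Controller as in this project.

from traits.api import Int, Str
from soma.controller import Controller

c = Controller()
c.add_trait('subject', Str('s01'))
c.add_trait('runs', Int(2))
print(list(c.user_traits().keys()))   # ['subject', 'runs']
c.reorder_traits(['runs', 'subject'])
print(list(c.user_traits().keys()))   # ['runs', 'subject']
print(c.export_to_dict())             # expected: {'runs': 2, 'subject': 's01'}
c.remove_trait('runs')
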
Code example #11
    def create_attributes_with_fom(self):
        """To get useful attributes by the fom"""

        process = self.process
        study_config = process.study_config
        modules_data = study_config.modules_data

        # Get attributes from the input FOM
        names_search_list = (self.name, process.id, process.name,
                             getattr(process, 'context_name', ''))
        capsul_attributes = self.get_attribute_values()
        matching_fom = False
        input_found = False
        output_found = False

        foms = SortedDictionary()
        foms.update(modules_data.foms)
        if study_config.auto_fom:
            # in auto-fom mode, also search in additional and non-loaded FOMs
            for schema, fom in six.iteritems(modules_data.all_foms):
                if schema not in (study_config.input_fom,
                                  study_config.output_fom,
                                  study_config.shared_fom):
                    foms[schema] = fom

        def editable_attributes(attributes, fom):
            ea = EditableAttributes()
            for attribute in attributes:
                if attribute.startswith('fom_'):
                    continue  # skip FOM internals
                default_value = fom.attribute_definitions[attribute].get(
                    'default_value', '')
                ea.add_trait(attribute, Str(default_value))
            return ea

        for schema, fom in six.iteritems(foms):
            if fom is None:
                fom, atp, pta \
                    = study_config.modules['FomConfig'].load_fom(schema)
            else:
                atp = modules_data.fom_atp.get(schema) \
                    or modules_data.fom_atp['all'].get(schema)

            if atp is None:
                continue
            for name in names_search_list:
                fom_patterns = fom.patterns.get(name)
                if fom_patterns is not None:
                    break
            else:
                continue

            if not matching_fom:
                matching_fom = True
            if schema == 'input':
                input_found = True
            elif schema == 'output':
                output_found = True
            elif matching_fom in (False, True, None):
                matching_fom = schema, fom, atp, fom_patterns
            #print('completion using FOM:', schema, 'for', process)
            #break

            for parameter in fom_patterns:
                param_attributes = atp.find_discriminant_attributes(
                    fom_parameter=parameter, fom_process=name)
                if param_attributes:
                    #process_attributes[parameter] = param_attributes
                    ea = editable_attributes(param_attributes, fom)
                    try:
                        capsul_attributes.set_parameter_attributes(
                            parameter, schema, ea, {})
                    except KeyError:
                        # param already registered
                        pass

        if not matching_fom:
            raise KeyError('Process not found in FOMs')
        #print('matching_fom:', matching_fom)

        if not input_found and matching_fom is not True:
            fom_type, fom, atp, fom_patterns = matching_fom
            schema = 'input'
            for parameter in fom_patterns:
                param_attributes = atp.find_discriminant_attributes(
                    fom_parameter=parameter, fom_process=name)
                if param_attributes:
                    #process_attributes[parameter] = param_attributes
                    ea = editable_attributes(param_attributes, fom)
                    try:
                        capsul_attributes.set_parameter_attributes(
                            parameter, schema, ea, {})
                    except KeyError:
                        # param already registered
                        pass
            modules_data.foms[schema] = fom
            modules_data.fom_atp[schema] = atp
            study_config.input_fom = fom_type

        if not output_found and matching_fom is not True:
            fom_type, fom, atp, fom_patterns = matching_fom
            schema = 'output'
            for parameter in fom_patterns:
                param_attributes = atp.find_discriminant_attributes(
                    fom_parameter=parameter, fom_process=name)
                if param_attributes:
                    #process_attributes[parameter] = param_attributes
                    ea = editable_attributes(param_attributes, fom)
                    try:
                        capsul_attributes.set_parameter_attributes(
                            parameter, schema, ea, {})
                    except KeyError:
                        # param already registered
                        pass
            modules_data.foms[schema] = fom
            modules_data.fom_atp[schema] = atp
            study_config.output_fom = fom_type

        # in a pipeline, we still must iterate over nodes to find switches,
        # which have their own behaviour.
        if isinstance(self.process, Pipeline):
            attributes = self.capsul_attributes
            name = self.process.name

            for node_name, node in six.iteritems(self.process.nodes):
                if isinstance(node, Switch):
                    subprocess = node
                    if subprocess is None:
                        continue
                    pname = '.'.join([name, node_name])
                    subprocess_compl = \
                        ProcessCompletionEngine.get_completion_engine(
                            subprocess, pname)
                    try:
                        sub_attributes \
                            = subprocess_compl.get_attribute_values()
                    except:
                        try:
                            subprocess_compl = self.__class__(subprocess)
                            sub_attributes \
                                = subprocess_compl.get_attribute_values()
                        except:
                            continue
                    for attribute, trait \
                            in six.iteritems(sub_attributes.user_traits()):
                        if attributes.trait(attribute) is None:
                            attributes.add_trait(attribute, trait)
                            setattr(attributes, attribute,
                                    getattr(sub_attributes, attribute))

            self._get_linked_attributes()
Code example #12
def init_settings(capsul_engine):
    with capsul_engine.settings as session:
        session.ensure_module_fields(
            'fom',
            [
                dict(name='input_fom', type='string', description='input FOM'),
                dict(name='output_fom',
                     type='string',
                     description='output FOM'),
                dict(name='shared_fom',
                     type='string',
                     description='shared data FOM'),
                dict(name='volumes_format',
                     type='string',
                     description='Format used for volumes'),
                dict(name='meshes_format',
                     type='string',
                     description='Format used for meshes'),
                dict(
                    name='auto_fom',
                    type='boolean',
                    description='Look in all FOMs when a process is not found '
                    '(in addition to the standard share/foms). Note that '
                    'auto_fom looks for the first FOM matching the process to '
                    'get completion for, and does not handle ambiguities. '
                    'Moreover it brings an overhead (typically 6-7 seconds) the '
                    'first time it is used since it has to parse all available '
                    'FOMs.'),
                dict(
                    name='fom_path',
                    type='list_string',
                    description='list of additional directories where to look '
                    'for FOMs'),
                # FIXME: until directories are included in another config module
                dict(name='input_directory',
                     type='string',
                     description='input study data directory'),
                dict(name='output_directory',
                     type='string',
                     description='output study data directory'),
            ])

    capsul_engine.load_module('capsul.engine.module.axon')
    capsul_engine.load_module('capsul.engine.module.spm')
    capsul_engine.load_module('capsul.engine.module.attributes')

    with capsul_engine.settings as session:
        config = session.config('fom', 'global')
        if not config:
            values = {
                capsul_engine.settings.config_id_field: 'fom',
                'auto_fom': True,
                'fom_path': []
            }
            session.new_config('fom', 'global', values)

    if not hasattr(capsul_engine, '_modules_data'):
        capsul_engine._modules_data = {}
    store = capsul_engine._modules_data.setdefault('fom', {})
    store['foms'] = {}
    store['all_foms'] = SortedDictionary()
    store['fom_atp'] = {'all': {}}
    store['fom_pta'] = {'all': {}}

    capsul_engine.settings.module_notifiers['capsul.engine.module.fom'] \
        = [partial(fom_config_updated, weakref.proxy(capsul_engine), 'global')]
    capsul_engine.settings.module_notifiers.setdefault(
        'capsul.engine.module.axon', []).append(
            partial(config_updated, weakref.proxy(capsul_engine), 'global'))
    capsul_engine.settings.module_notifiers.setdefault(
        'capsul.engine.module.spm', []).append(
            partial(config_updated, weakref.proxy(capsul_engine), 'global'))

    # link with StudyConfig
    if hasattr(capsul_engine, 'study_config') \
            and 'FomConfig' not in capsul_engine.study_config.modules:
        scmod = capsul_engine.study_config.load_module('FomConfig', {})
        scmod.initialize_module()
        scmod.initialize_callbacks()

    update_fom(capsul_engine, 'global')
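
A hedged sketch of reading back the defaults installed above, using the same settings session API as init_settings; attribute access on the returned config object is an assumption based on common capsul settings usage, and `capsul_engine` is assumed to be an existing CapsulEngine.

with capsul_engine.settings as session:
    config = session.config('fom', 'global')
    if config:
        print(config.auto_fom, config.fom_path)   # expected: True []
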
Code example #13
    def test_sorted_dictionary(self):
        d0 = SortedDictionary()
        self.assertEqual(dict(d0), {})

        d1 = SortedDictionary(
            ('titi', {'bubu': '50', 'turlute': 12}),
            ('toto', 'val"u\'e'),
            ('tutu', [0, 1, 2, [u'papa', 5]]))
        d2 = SortedDictionary(
            ('tutu', [0, 1, 2, [u'papa', 5]]),
            ('toto', 'val"u\'e'),
            ('titi', {'bubu': '50', 'turlute': 12}))

        self.assertEqual(dict(d1), dict(d2))
        self.assertNotEqual(d1.keys(), d2.keys())
        self.assertEqual(d1, dict(d1))
        self.assertEqual(d1, SortedDictionary(dict(d1).items()))

        d1['titi'] = 'babar'
        d2['titi'] = 'bubur'
        self.assertNotEqual(dict(d1), dict(d2))
        d2['titi'] = 'babar'
        self.assertEqual(dict(d1), dict(d2))
        d1['ababo'] = 43.65
        self.assertEqual(d1.keys(), ['titi', 'toto', 'tutu', 'ababo'])

        del d1['titi']
        del d1['ababo']
        del d2['titi']
        self.assertEqual(dict(d1), dict(d2))
        self.assertEqual(d2.keys(), ['tutu', 'toto'])
        p = pickle.dumps(d1)
        p2 = pickle.loads(p)
        self.assertTrue(isinstance(p2, SortedDictionary))
        self.assertEqual(d1, p2)

        d1.insert(1, 'babar', 'new item')
        self.assertEqual(d1.keys(), ['toto', 'babar', 'tutu'])
        self.assertRaises(KeyError, d1.insert, 2, 'babar', 'other')
        self.assertEqual(d1.index('babar'), 1)
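
Finally, a minimal sketch of the positional API exercised at the end of this test: insert() places a new key at a given position and index() returns a key's position. The module path is assumed as above; keys and values are invented.

from soma.sorted_dictionary import SortedDictionary

d = SortedDictionary(('a', 1), ('c', 3))
d.insert(1, 'b', 2)      # insert key 'b' with value 2 at position 1
print(d.keys())          # ['a', 'b', 'c']
print(d.index('b'))      # 1
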