Example 1
    def test__ensure_node(self, gcimock):
        bep = BaseEndpoint()
        self.assertIsNone(bep.node)

        bep._ensure_node()

        self.assertEquals(bep.node, gcimock().node)
Example 2
    def _build_header(self, raw_msg, raw_headers):
        """
        Builds the header for this Process-level RPC conversation.
        """
        header = EndpointUnit._build_header(self, raw_msg, raw_headers)

        # Add our process identity to the headers (as sender)
        header.update({'sender-name': self._process.name or 'unnamed-process',     # @TODO
                       'sender': self._process.id})

        if hasattr(self._process, 'process_type'):
            header.update({'sender-type': self._process.process_type or 'unknown-process-type'})
            if self._process.process_type == 'service' and hasattr(self.channel, '_send_name'):
                header.update({'sender-service': "%s,%s" % (self.channel._send_name.exchange, self._process.name)})

        # Use received message headers context to set security attributes forward
        context = self.get_context()
        if isinstance(context, dict):
            new_header = self.build_security_headers(context)
            header.update(new_header)
        else:
            # no context? we're the originator of the message then
            container_id = BaseEndpoint._get_container_instance().id
            header['origin-container-id'] = container_id

            # This is the originating conversation
            if 'conv-id' in raw_headers:
                header['original-conv-id'] = raw_headers['conv-id']

        return header
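
A standalone sketch of the sender-identity block above (plain Python, not pyon code; the process attributes and exchange name are made up) shows the header fields this branch produces:

# Simplified illustration only -- mirrors the sender-* logic above, nothing else.
class FakeProcess(object):
    name = "my_process"
    id = "pid_1"
    process_type = "service"

def build_sender_headers(process, exchange="my_xchg"):
    header = {'sender-name': process.name or 'unnamed-process',
              'sender': process.id}
    if hasattr(process, 'process_type'):
        header['sender-type'] = process.process_type or 'unknown-process-type'
        if process.process_type == 'service':
            header['sender-service'] = "%s,%s" % (exchange, process.name)
    return header

print(build_sender_headers(FakeProcess()))
# -> sender-name, sender, sender-type and sender-service are all populated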
Example 3
    def __init__(self, event_type=None, xp=None, process=None, **kwargs):
        """
        Constructs a publisher of events for a specific type.

        @param  event_type  The name of the event type object
        @param  xp          Exchange point (AMQP) name; may be None, in which case the events default is used.
        """

        self.event_type = event_type
        self.process = process
        self._events_xp = CFG.get_safe("exchange.core.events",
                                       DEFAULT_EVENTS_XP)

        if bootstrap.container_instance and getattr(
                bootstrap.container_instance, 'event_repository', None):
            self.event_repo = bootstrap.container_instance.event_repository
        else:
            self.event_repo = None

        # generate an exchange name to publish events to
        container = (hasattr(self, '_process') and hasattr(
            self._process, 'container') and self._process.container
                     ) or BaseEndpoint._get_container_instance()
        if container and container.has_capability(
                container.CCAP.EXCHANGE_MANAGER
        ):  # might be too early in chain
            xp = xp or container.create_xp(self._events_xp)
            to_name = xp
        else:
            xp = xp or self.get_events_exchange_point()
            to_name = (xp, None)

        Publisher.__init__(self, to_name=to_name, **kwargs)
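
A hedged usage sketch for the constructor above, assuming the publish_event(**kwargs) helper and close() method that normally accompany this publisher in pyon (neither appears in the snippet):

from pyon.public import EventPublisher   # assumed import path

pub = EventPublisher(event_type="ResourceModifiedEvent")   # event type name is an example
pub.publish_event(origin="resource_123", description="state changed")
pub.close()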
Example 4
    def _build_header(self, raw_msg, raw_headers):
        """
        Builds the header for this Process-level RPC conversation.
        https://confluence.oceanobservatories.org/display/syseng/CIAD+COI+OV+Common+Message+Format
        """

        header = EndpointUnit._build_header(self, raw_msg, raw_headers)

        # add our process identity to the headers
        header.update({'sender-name': self._process.name or 'unnamed-process',     # @TODO
                       'sender': self._process.id})

        if hasattr(self._process, 'process_type'):
            header.update({'sender-type': self._process.process_type or 'unknown-process-type'})
            if self._process.process_type == 'service' and hasattr(self.channel, '_send_name'):
                header.update({'sender-service': "%s,%s" % (self.channel._send_name.exchange, self._process.name)})

        context = self.get_context()
        #log.debug('ProcessEndpointUnitMixin._build_header has context of: %s', context)


        # use context to set security attributes forward
        if isinstance(context, dict):
            new_header = self.build_security_headers(context)
            header.update(new_header)
        else:
            # no context? we're the originator of the message then
            container_id                    = BaseEndpoint._get_container_instance().id
            header['origin-container-id']   = container_id

            #This is the originating conversation
            if 'conv-id' in raw_headers:
                header['original-conv-id'] = raw_headers['conv-id']

        return header
Example 5
    def test__ensure_node_errors(self):
        bep = BaseEndpoint()
        gcimock = Mock()
        gcimock.return_value = None
        with patch('pyon.net.endpoint.BaseEndpoint._get_container_instance',
                   gcimock):
            self.assertRaises(EndpointError, bep._ensure_node)
Example 6
    def _build_header(self, raw_msg, raw_headers):
        """
        Builds the header for this Process-level RPC conversation.
        https://confluence.oceanobservatories.org/display/syseng/CIAD+COI+OV+Common+Message+Format
        """

        header = EndpointUnit._build_header(self, raw_msg, raw_headers)

        # add our process identity to the headers
        header.update({'sender-name': self._process.name or 'unnamed-process',     # @TODO
                       'sender': self._process.id})

        if hasattr(self._process, 'process_type'):
            header.update({'sender-type': self._process.process_type or 'unknown-process-type'})
            if self._process.process_type == 'service' and hasattr(self.channel, '_send_name'):
                header.update({'sender-service': "%s,%s" % (self.channel._send_name.exchange, self._process.name)})

        context = self.get_context()
        log.debug('ProcessEndpointUnitMixin._build_header has context of: %s', context)

        # use context to set security attributes forward
        if isinstance(context, dict):
            new_header = self.build_security_headers(context)
            header.update(new_header)
        else:
            # no context? we're the originator of the message then
            container_id                    = BaseEndpoint._get_container_instance().id
            header['origin-container-id']   = container_id

            #This is the originating conversation
            if 'conv-id' in raw_headers:
                header['original-conv-id'] = raw_headers['conv-id']

        return header
Example 7
    def _build_header(self, raw_msg):
        """
        Builds the header for this Process-level RPC conversation.
        https://confluence.oceanobservatories.org/display/syseng/CIAD+COI+OV+Common+Message+Format
        """

        header = EndpointUnit._build_header(self, raw_msg)

        # add our process identity to the headers
        header.update({
            'sender-name': self._process.name or 'unnamed-process',  # @TODO
            'sender': self._process.id
        })

        if hasattr(self._process, 'process_type'):
            header.update({
                'sender-type':
                self._process.process_type or 'unknown-process-type'
            })
            if self._process.process_type == 'service':
                header.update({
                    'sender-service':
                    "%s,%s" %
                    (self.channel._send_name.exchange, self._process.name)
                })

        context = self._process.get_context()
        log.debug('ProcessEndpointUnitMixin._build_header has context of: %s',
                  context)

        # use context to set security attributes forward
        if isinstance(context, dict):
            # fwd on actor specific information, according to common message format spec
            actor_id = context.get('ion-actor-id', None)
            actor_roles = context.get('ion-actor-roles', None)
            actor_tokens = context.get('ion-actor-tokens', None)
            expiry = context.get('expiry', None)
            container_id = context.get('origin-container-id', None)

            #If an actor-id is specified then there may be other associated data that needs to be passed on
            if actor_id:
                header['ion-actor-id'] = actor_id
                if actor_roles: header['ion-actor-roles'] = actor_roles
                if actor_tokens: header['ion-actor-tokens'] = actor_tokens

            if expiry: header['expiry'] = expiry
            if container_id: header['origin-container-id'] = container_id
        else:
            # no context? we're the originator of the message then
            container_id = BaseEndpoint._get_container_instance().id
            header['origin-container-id'] = container_id

        return header
Example 8
    def _build_header(self, raw_msg):
        """
        Builds the header for this Process-level RPC conversation.
        https://confluence.oceanobservatories.org/display/syseng/CIAD+COI+OV+Common+Message+Format
        """

        header = EndpointUnit._build_header(self, raw_msg)

        # add our process identity to the headers
        header.update({'sender-name'  : self._process.name or 'unnamed-process',     # @TODO
                       'sender'       : self._process.id })

        if hasattr(self._process,'process_type' ):
            header.update({'sender-type'  : self._process.process_type or 'unknown-process-type' })
            if self._process.process_type == 'service':
                header.update({ 'sender-service' : "%s,%s" % ( self.channel._send_name.exchange,self._process.name) })

        context = self._process.get_context()
        log.debug('ProcessEndpointUnitMixin._build_header has context of: %s', context)

        # use context to set security attributes forward
        if isinstance(context, dict):
            # fwd on actor specific information, according to common message format spec
            actor_id            = context.get('ion-actor-id', None)
            actor_roles         = context.get('ion-actor-roles', None)
            actor_tokens        = context.get('ion-actor-tokens', None)
            expiry              = context.get('expiry', None)
            container_id        = context.get('origin-container-id', None)

            #If an actor-id is specified then there may be other associated data that needs to be passed on
            if actor_id:
                header['ion-actor-id']  = actor_id
                if actor_roles:     header['ion-actor-roles']   = actor_roles
                if actor_tokens:    header['ion-actor-tokens']  = actor_tokens

            if expiry:          header['expiry']                = expiry
            if container_id:    header['origin-container-id']   = container_id
        else:
            # no context? we're the originator of the message then
            container_id                    = BaseEndpoint._get_container_instance().id
            header['origin-container-id']   = container_id

        return header
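
The two variants above forward actor and security attributes only when they are present in the received context. A standalone sketch of that branch (plain Python, not the pyon implementation) makes the resulting header explicit:

def forward_security_headers(context):
    header = {}
    if context.get('ion-actor-id'):
        header['ion-actor-id'] = context['ion-actor-id']
        if context.get('ion-actor-roles'):
            header['ion-actor-roles'] = context['ion-actor-roles']
        if context.get('ion-actor-tokens'):
            header['ion-actor-tokens'] = context['ion-actor-tokens']
    if context.get('expiry'):
        header['expiry'] = context['expiry']
    if context.get('origin-container-id'):
        header['origin-container-id'] = context['origin-container-id']
    return header

print(forward_security_headers({'ion-actor-id': 'actor_1', 'expiry': '300'}))
# {'ion-actor-id': 'actor_1', 'expiry': '300'}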
Example 9
    def test_close(self):
        bep = BaseEndpoint()
        bep.close()
Example 10
    def test__get_container_instance(self):
        c = Container()  # ensure we've got an instance in Container.instance
        self.assertEquals(BaseEndpoint._get_container_instance(), c)
Example 11
    def setUp(self):
        self._node = Mock(spec=NodeB)
        self._ef = BaseEndpoint(node=self._node)
        self._ch = Mock(spec=SendChannel)
        self._node.channel.return_value = self._ch
Example 12
class TestBaseEndpoint(PyonTestCase):
    def setUp(self):
        self._node = Mock(spec=NodeB)
        self._ef = BaseEndpoint(node=self._node, name="EFTest")
        self._ch = Mock(spec=SendChannel)
        self._node.channel.return_value = self._ch

    def test_create_endpoint(self):
        e = self._ef.create_endpoint()

        # check attrs
        self.assertTrue(hasattr(e, "channel"))
        self.assertEquals(self._ch.connect.call_count, 1)
        self.assertTrue(self._ef.name in self._ch.connect.call_args[0])

        # make sure we can shut it down
        e.close()
        self._ch.close.assert_any_call()

    def test_create_endpoint_new_name(self):
        e = self._ef.create_endpoint(to_name="reroute")
        self.assertEquals(self._ch.connect.call_count, 1)
        self.assertTrue("reroute" in self._ch.connect.call_args[0][0])  # @TODO: this is obtuse
        e.close()

    def test_create_endpoint_existing_channel(self):
        ch = Mock(spec=SendChannel)
        e = self._ef.create_endpoint(existing_channel=ch)
        self.assertEquals(e.channel, ch)
        self.assertEquals(ch.connect.call_count, 0)

        ch.connect("exist")
        ch.connect.assert_called_once_with("exist")

        e.close()

    def test_create_endpoint_kwarg(self):
        """
        Make sure our kwarg gets set.
        """

        class OptEndpointUnit(EndpointUnit):
            def __init__(self, opt=None, **kwargs):
                self._opt = opt
                EndpointUnit.__init__(self, **kwargs)

        self._ef.endpoint_unit_type = OptEndpointUnit

        e = self._ef.create_endpoint(opt="stringer")
        self.assertTrue(hasattr(e, "_opt"))
        self.assertEquals(e._opt, "stringer")

    def test__ensure_node_errors(self):
        bep = BaseEndpoint(name=sentinel.name)
        gcimock = Mock()
        gcimock.return_value = None
        with patch("pyon.net.endpoint.BaseEndpoint._get_container_instance", gcimock):
            self.assertRaises(EndpointError, bep._ensure_node)

    @patch("pyon.net.endpoint.BaseEndpoint._get_container_instance")
    def test__ensure_node_existing_node(self, gcimock):
        self._ef._ensure_node()
        self.assertFalse(gcimock.called)

    @patch("pyon.net.endpoint.BaseEndpoint._get_container_instance")
    def test__ensure_node(self, gcimock):
        bep = BaseEndpoint(name=sentinel.name)
        self.assertIsNone(bep.node)

        bep._ensure_node()

        self.assertEquals(bep.node, gcimock().node)
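
The tests above lean on mock's patch and sentinel helpers. A minimal self-contained illustration of the same pattern, with no pyon involved (on Python 3 the imports come from unittest.mock):

from mock import Mock, patch, sentinel

class Thing(object):
    @staticmethod
    def get_instance():
        return None   # normally returns something real

def use_instance():
    return Thing.get_instance()

# swap the lookup for a Mock during the block, then assert on the result
with patch.object(Thing, 'get_instance', Mock(return_value=sentinel.instance)):
    assert use_instance() is sentinel.instance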
Example 13
    def __init__(self, xp_name=None, event_type=None, origin=None, queue_name=None,
                 sub_type=None, origin_type=None, pattern=None, auto_delete=None):
        self._events_xp = CFG.get_safe("exchange.core.events", DEFAULT_EVENTS_XP)
        self.event_type = event_type
        self.sub_type = sub_type
        self.origin_type = origin_type
        self.origin = origin

        # Default for auto_delete is True for events, unless otherwise specified
        if auto_delete is None:
            auto_delete = True
        self._auto_delete = auto_delete

        xp_name = xp_name or self._events_xp
        if pattern:
            binding = pattern
        else:
            binding = self._topic(event_type, origin, sub_type, origin_type)

        # create queue_name if none passed in
        if queue_name is None:
            queue_name = "subsc_" + create_simple_unique_id()

        # prepend proc name to queue name if we have one
        if hasattr(self, "_process") and self._process:
            queue_name = "%s_%s" % (self._process._proc_name, queue_name)

        # do we have a container/ex_manager?
        container = (hasattr(self, '_process') and hasattr(self._process, 'container') and self._process.container) or BaseEndpoint._get_container_instance()
        if container:
            xp = container.create_xp(xp_name)
            xne = container.create_event_xn(queue_name,
                                            pattern=binding,
                                            xp=xp,
                                            auto_delete=auto_delete)

            self._ev_recv_name = xne
            self.binding = None

        else:
            # Remove this case. No container??
            self.binding = binding

            # prefix the queue_name, if specified, with the sysname
            queue_name = "%s.system.%s" % (bootstrap.get_sys_name(), queue_name)

            # set this name to be picked up by inherited folks
            self._ev_recv_name = (xp_name, queue_name)

        local_event_queues.append(queue_name)
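
A hedged usage sketch for the subscriber constructor above, assuming it belongs to pyon's EventSubscriber and that the usual callback keyword and start()/stop() lifecycle apply (none of which appear in the snippet itself):

from pyon.public import EventSubscriber   # assumed import path

def on_event(event, headers):
    print("received %s from %s" % (event.type_, event.origin))

sub = EventSubscriber(event_type="ResourceEvent", origin="resource_123",
                      callback=on_event)
sub.start()   # begins consuming from the generated subsc_* queue
# ...
sub.stop()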
Example 14
class TestBaseEndpoint(PyonTestCase):
    def setUp(self):
        self._node = Mock(spec=NodeB)
        self._ef = BaseEndpoint(node=self._node)
        self._ch = Mock(spec=SendChannel)
        self._node.channel.return_value = self._ch

    def test_create_endpoint(self):
        e = self._ef.create_endpoint()

        # check attrs
        self.assertTrue(hasattr(e, 'channel'))

        # make sure we can shut it down
        e.close()
        self._ch.close.assert_any_call()

    def test_create_endpoint_existing_channel(self):
        ch = Mock(spec=SendChannel)
        e = self._ef.create_endpoint(existing_channel=ch)
        self.assertEquals(e.channel, ch)
        self.assertEquals(ch.connect.call_count, 0)

        ch.connect("exist")
        ch.connect.assert_called_once_with('exist')

        e.close()

    def test_create_endpoint_kwarg(self):
        """
Make sure our kwarg gets set.
"""

        class OptEndpointUnit(EndpointUnit):
            def __init__(self, opt=None, **kwargs):
                self._opt = opt
                EndpointUnit.__init__(self, **kwargs)

        self._ef.endpoint_unit_type = OptEndpointUnit

        e = self._ef.create_endpoint(opt="stringer")
        self.assertTrue(hasattr(e, "_opt"))
        self.assertEquals(e._opt, "stringer")

    def test__ensure_node_errors(self):
        bep = BaseEndpoint()
        gcimock = Mock()
        gcimock.return_value = None
        with patch('pyon.net.endpoint.BaseEndpoint._get_container_instance', gcimock):
            self.assertRaises(EndpointError, bep._ensure_node)

    @patch('pyon.net.endpoint.BaseEndpoint._get_container_instance')
    def test__ensure_node_existing_node(self, gcimock):
        self._ef._ensure_node()
        self.assertFalse(gcimock.called)

    @patch('pyon.net.endpoint.BaseEndpoint._get_container_instance')
    def test__ensure_node(self, gcimock):
        bep = BaseEndpoint()
        self.assertIsNone(bep.node)

        bep._ensure_node()

        self.assertEquals(bep.node, gcimock().node)

    def test__get_container_instance(self):
        c = Container() # ensure we've got an instance in Container.instance
        self.assertEquals(BaseEndpoint._get_container_instance(), c)

    def test_close(self):
        bep = BaseEndpoint()
        bep.close()
Example 15
class ProcessEndpointUnitMixin(EndpointUnit):
    """
    Common-base mixin for Process related endpoints.

    This reduces code duplication on either side of the ProcessRPCRequest/ProcessRPCResponse.
    """
    def __init__(self, process=None, **kwargs):
        EndpointUnit.__init__(self, **kwargs)
        self._process = process

    def get_context(self):
        """
        Gets context used to build headers for the conversation.

        This method may be overridden for advanced purposes.
        """
        return self._process.get_context()

    def _build_invocation(self, **kwargs):
        newkwargs = kwargs.copy()
        newkwargs.update({'process': self._process})

        inv = EndpointUnit._build_invocation(self, **newkwargs)
        return inv

    def _intercept_msg_in(self, inv):
        """
        Override for incoming message interception.

        This is a request, so the order should be Message, Process
        """
        inv_one = EndpointUnit._intercept_msg_in(self, inv)
        inv_two = process_interceptors(
            self.interceptors["process_incoming"]
            if "process_incoming" in self.interceptors else [], inv_one)
        return inv_two

    def _intercept_msg_out(self, inv):
        """
        Override for outgoing message interception.

        This is request, so the order should be Process, Message
        """
        inv_one = process_interceptors(
            self.interceptors["process_outgoing"]
            if "process_outgoing" in self.interceptors else [], inv)
        inv_two = EndpointUnit._intercept_msg_out(self, inv_one)

        return inv_two

    def _build_header(self, raw_msg, raw_headers):
        """
        Builds the header for this Process-level RPC conversation.
        https://confluence.oceanobservatories.org/display/syseng/CIAD+COI+OV+Common+Message+Format
        """

        header = EndpointUnit._build_header(self, raw_msg, raw_headers)

        # add our process identity to the headers
        header.update({
            'sender-name': self._process.name or 'unnamed-process',  # @TODO
            'sender': self._process.id
        })

        if hasattr(self._process, 'process_type'):
            header.update({
                'sender-type':
                self._process.process_type or 'unknown-process-type'
            })
            if self._process.process_type == 'service' and hasattr(
                    self.channel, '_send_name'):
                header.update({
                    'sender-service':
                    "%s,%s" %
                    (self.channel._send_name.exchange, self._process.name)
                })

        context = self.get_context()
        log.debug('ProcessEndpointUnitMixin._build_header has context of: %s',
                  context)

        # Check for a field with the ResourceId decorator. If found, set resource-id in the
        # header to that field's value, or, if the decorator names a field within an object,
        # to that object's field value (e.g. _id).
        try:
            if isinstance(raw_msg, IonObjectBase):
                decorator = 'ResourceId'
                field = raw_msg.find_field_for_decorator(decorator)
                if field is not None and hasattr(raw_msg, field):
                    deco_value = raw_msg.get_decorator_value(field, decorator)
                    if deco_value:
                        #Assume that if there is a value, then it is specifying a field in the object
                        fld_value = getattr(raw_msg, field)
                        header['resource-id'] = getattr(fld_value, deco_value)
                    else:
                        header['resource-id'] = getattr(raw_msg, field)

        except Exception as ex:
            log.exception(ex)

        # use context to set security attributes forward
        if isinstance(context, dict):
            new_header = self.build_security_headers(context)
            header.update(new_header)
        else:
            # no context? we're the originator of the message then
            container_id = BaseEndpoint._get_container_instance().id
            header['origin-container-id'] = container_id

            #This is the originating conversation
            if 'conv-id' in raw_headers:
                header['original-conv-id'] = raw_headers['conv-id']

        return header
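
The ResourceId block above either takes the decorated field's value directly or, when the decorator carries a value, follows it one level into the field's object. A simplified standalone sketch with made-up classes (not the IonObject decorator machinery):

class FakeResource(object):
    _id = "res_42"

class FakeMsg(object):
    resource = FakeResource()   # stands in for the field carrying the ResourceId decorator

def resolve_resource_id(msg, field, deco_value=None):
    value = getattr(msg, field)
    # a non-empty decorator value names an attribute inside the field's object
    return getattr(value, deco_value) if deco_value else value

print(resolve_resource_id(FakeMsg(), "resource", "_id"))   # res_42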
Example 16
    def __init__(self, xp_name=None, event_type=None, origin=None, queue_name=None,
                 sub_type=None, origin_type=None, pattern=None):
        self._events_xp = CFG.get_safe("exchange.core.events", DEFAULT_EVENTS_XP)
        self.event_type = event_type
        self.sub_type = sub_type
        self.origin_type = origin_type
        self.origin = origin

        # establish names for xp, binding/pattern/topic, queue_name
        xp_name = xp_name or self._events_xp
        if pattern:
            binding = pattern
        else:
            binding = self._topic(event_type, origin, sub_type, origin_type)

        # create queue_name if none passed in
        if queue_name is None:
            queue_name = "subsc_" + create_simple_unique_id()

        # prepend proc name to queue name if we have one
        if hasattr(self, "_process") and self._process:
            queue_name = "%s_%s" % (self._process._proc_name, queue_name)

        # do we have a container/ex_manager?
        container = (hasattr(self, '_process') and hasattr(self._process, 'container') and self._process.container) or BaseEndpoint._get_container_instance()
        if container:
            xp = container.create_xp(xp_name)
            xne = container.create_xn_event(queue_name,
                                            pattern=binding,
                                            xp=xp)

            self._ev_recv_name = xne
            self.binding = None

        else:
            self.binding = binding

            # TODO: Provide a case where we can have multiple bindings (e.g. different event_types)

            # prefix the queue_name, if specified, with the sysname
            queue_name = "%s.%s" % (bootstrap.get_sys_name(), queue_name)

            # set this name to be picked up by inherited folks
            self._ev_recv_name = (xp_name, queue_name)

        local_event_queues.append(queue_name)
Example 17
    def __init__(self, event_type=None, xp=None, process=None, **kwargs):
        """
        Constructs a publisher of events for a specific type.

        @param  event_type  The name of the event type object
        @param  xp          Exchange point (AMQP) name; may be None, in which case the events default is used.
        """

        self.event_type = event_type
        self.process = process
        self._events_xp = CFG.get_safe("exchange.core.events", DEFAULT_EVENTS_XP)

        if bootstrap.container_instance and getattr(bootstrap.container_instance, 'event_repository', None):
            self.event_repo = bootstrap.container_instance.event_repository
        else:
            self.event_repo = None

        # generate an exchange name to publish events to
        container = (hasattr(self, '_process') and hasattr(self._process, 'container') and self._process.container) or BaseEndpoint._get_container_instance()
        if container and container.has_capability(container.CCAP.EXCHANGE_MANAGER):   # might be too early in chain
            xp = xp or container.create_xp(self._events_xp)
            to_name = xp
        else:
            xp = xp or self.get_events_exchange_point()
            to_name = (xp, None)

        Publisher.__init__(self, to_name=to_name, **kwargs)
Example 18
    def publish_event_object(self, event_object):
        """
        Publishes an event of given type for the given origin. Event_type defaults to an
        event_type set when initializing the EventPublisher. Other kwargs fill out the fields
        of the event. This operation will fail with an exception.
        @param event_object     the event object to be published
        @retval event_object    the event object which was published
        """
        if not event_object:
            raise BadRequest("Must provide event_object")

        event_object.base_types = event_object._get_extends()

        topic = self._topic(
            event_object
        )  # Routing key generated using type_, base_types, origin, origin_type, sub_type
        container = (hasattr(self, '_process') and hasattr(
            self._process, 'container') and self._process.container
                     ) or BaseEndpoint._get_container_instance()
        if container and container.has_capability(
                container.CCAP.EXCHANGE_MANAGER):
            # make sure we are an xp, if not, upgrade
            if not isinstance(self._send_name, XOTransport):

                default_nt = NameTrio(self.get_events_exchange_point())
                if isinstance(self._send_name, NameTrio) \
                   and self._send_name.exchange == default_nt.exchange \
                   and self._send_name.queue == default_nt.queue \
                   and self._send_name.binding == default_nt.binding:
                    self._send_name = container.create_xp(self._events_xp)
                else:
                    self._send_name = container.create_xp(self._send_name)

            xp = self._send_name
            to_name = xp.create_route(topic)
        else:
            to_name = (self._send_name.exchange, topic)

        current_time = get_ion_ts_millis()

        # Ensure valid created timestamp if supplied
        if event_object.ts_created:

            if not is_valid_ts(event_object.ts_created):
                raise BadRequest(
                    "The ts_created value is not a valid timestamp: '%s'" %
                    (event_object.ts_created))

            # Reject events that are too far in the future
            if int(event_object.ts_created) > (current_time +
                                               VALID_EVENT_TIME_PERIOD):
                raise BadRequest(
                    "This ts_created value is too far in the future:'%s'" %
                    (event_object.ts_created))

            # Reject events that are older than specified time
            if int(event_object.ts_created) < (current_time -
                                               VALID_EVENT_TIME_PERIOD):
                raise BadRequest("This ts_created value is too old:'%s'" %
                                 (event_object.ts_created))

        else:
            event_object.ts_created = str(current_time)

        # Set the actor id if not already set
        if not event_object.actor_id:
            event_object.actor_id = self._get_actor_id()

        #Validate this object - ideally the validator should pass on problems, but for now just log
        #any errors and keep going, since seeing invalid situations are better than skipping validation.
        try:
            event_object._validate()
        except Exception as e:
            log.exception(e)

        #Ensure the event object has a unique id
        if '_id' in event_object:
            raise BadRequest(
                "The event object cannot contain a _id field '%s'" %
                (event_object))

        #Generate a unique ID for this event
        event_object._id = create_unique_event_id()

        try:
            self.publish(event_object, to_name=to_name)
        except Exception as ex:
            log.exception("Failed to publish event (%s): '%s'" %
                          (ex.message, event_object))
            raise

        return event_object
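
A hedged usage sketch for the method above: the event object is built first and handed over whole. The IonObject factory and the import paths are assumptions about the surrounding framework, not part of the snippet:

from pyon.public import IonObject, EventPublisher   # assumed import path

event = IonObject("ResourceEvent", origin="resource_123", sub_type="update")
pub = EventPublisher()
pub.publish_event_object(event)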
Example 19
    def __init__(self,
                 xp_name=None,
                 event_type=None,
                 origin=None,
                 queue_name=None,
                 sub_type=None,
                 origin_type=None,
                 pattern=None,
                 auto_delete=None):
        self._events_xp = CFG.get_safe("exchange.core.events",
                                       DEFAULT_EVENTS_XP)
        self.event_type = event_type
        self.sub_type = sub_type
        self.origin_type = origin_type
        self.origin = origin

        # Default for auto_delete is True for events, unless otherwise specified
        if auto_delete is None:
            auto_delete = True
        self._auto_delete = auto_delete

        xp_name = xp_name or self._events_xp
        if pattern:
            binding = pattern
        else:
            binding = self._topic(event_type, origin, sub_type, origin_type)

        # create queue_name if none passed in
        if queue_name is None:
            queue_name = "subsc_" + create_simple_unique_id()

        # prepend proc name to queue name if we have one
        if hasattr(self, "_process") and self._process:
            queue_name = "%s_%s" % (self._process._proc_name, queue_name)

        # do we have a container/ex_manager?
        container = (hasattr(self, '_process') and hasattr(
            self._process, 'container') and self._process.container
                     ) or BaseEndpoint._get_container_instance()
        if container:
            xp = container.create_xp(xp_name)
            xne = container.create_event_xn(queue_name,
                                            pattern=binding,
                                            xp=xp,
                                            auto_delete=auto_delete)

            self._ev_recv_name = xne
            self.binding = None

        else:
            # Remove this case. No container??
            self.binding = binding

            # prefix the queue_name, if specified, with the sysname
            queue_name = "%s.system.%s" % (bootstrap.get_sys_name(),
                                           queue_name)

            # set this name to be picked up by inherited folks
            self._ev_recv_name = (xp_name, queue_name)

        local_event_queues.append(queue_name)
Example 20
    def publish_event_object(self, event_object):
        """
        Publishes an event of given type for the given origin. Event_type defaults to an
        event_type set when initializing the EventPublisher. Other kwargs fill out the fields
        of the event. This operation will fail with an exception.
        @param event_object     the event object to be published
        @retval event_object    the event object which was published
        """
        if not event_object:
            raise BadRequest("Must provide event_object")

        event_object.base_types = event_object._get_extends()

        topic = self._topic(event_object)  # Routing key generated using type_, base_types, origin, origin_type, sub_type
        container = (hasattr(self, '_process') and hasattr(self._process, 'container') and self._process.container) or BaseEndpoint._get_container_instance()
        if container and container.has_capability(container.CCAP.EXCHANGE_MANAGER):
            # make sure we are an xp, if not, upgrade
            if not isinstance(self._send_name, XOTransport):

                default_nt = NameTrio(self.get_events_exchange_point())
                if isinstance(self._send_name, NameTrio) \
                   and self._send_name.exchange == default_nt.exchange \
                   and self._send_name.queue == default_nt.queue \
                   and self._send_name.binding == default_nt.binding:
                    self._send_name = container.create_xp(self._events_xp)
                else:
                    self._send_name = container.create_xp(self._send_name)

            xp = self._send_name
            to_name = xp.create_route(topic)
        else:
            to_name = (self._send_name.exchange, topic)

        current_time = get_ion_ts_millis()

        # Ensure valid created timestamp if supplied
        if event_object.ts_created:

            if not is_valid_ts(event_object.ts_created):
                raise BadRequest("The ts_created value is not a valid timestamp: '%s'" % (event_object.ts_created))

            # Reject events that are too far in the future
            if int(event_object.ts_created) > ( current_time + VALID_EVENT_TIME_PERIOD ):
                raise BadRequest("This ts_created value is too far in the future:'%s'" % (event_object.ts_created))

            # Reject events that are older than specified time
            if int(event_object.ts_created) < (current_time - VALID_EVENT_TIME_PERIOD) :
                raise BadRequest("This ts_created value is too old:'%s'" % (event_object.ts_created))

        else:
            event_object.ts_created = str(current_time)

        # Set the actor id if not already set
        if not event_object.actor_id:
            event_object.actor_id = self._get_actor_id()

        #Validate this object - ideally the validator should pass on problems, but for now just log
        #any errors and keep going, since seeing invalid situations are better than skipping validation.
        try:
            event_object._validate()
        except Exception as e:
            log.exception(e)


        #Ensure the event object has a unique id
        if '_id' in event_object:
            raise BadRequest("The event object cannot contain a _id field '%s'" % (event_object))

        #Generate a unique ID for this event
        event_object._id = create_unique_event_id()

        try:
            self.publish(event_object, to_name=to_name)
        except Exception as ex:
            log.exception("Failed to publish event (%s): '%s'" % (ex.message, event_object))
            raise

        return event_object
Example 21
    def test__get_container_instance(self):
        c = Container()  # ensure we've got an instance in Container.instance
        self.assertEquals(BaseEndpoint._get_container_instance(), c)
Example 22
class TestBaseEndpoint(PyonTestCase):
    def setUp(self):
        self._node = Mock(spec=NodeB)
        self._ef = BaseEndpoint(node=self._node)
        self._ch = Mock(spec=SendChannel)
        self._node.channel.return_value = self._ch

    def test_create_endpoint(self):
        e = self._ef.create_endpoint()

        # check attrs
        self.assertTrue(hasattr(e, 'channel'))

        # make sure we can shut it down
        e.close()
        self._ch.close.assert_any_call()

    def test_create_endpoint_existing_channel(self):
        ch = Mock(spec=SendChannel)
        e = self._ef.create_endpoint(existing_channel=ch)
        self.assertEquals(e.channel, ch)
        self.assertEquals(ch.connect.call_count, 0)

        ch.connect("exist")
        ch.connect.assert_called_once_with('exist')

        e.close()

    def test_create_endpoint_kwarg(self):
        """
Make sure our kwarg gets set.
"""
        class OptEndpointUnit(EndpointUnit):
            def __init__(self, opt=None, **kwargs):
                self._opt = opt
                EndpointUnit.__init__(self, **kwargs)

        self._ef.endpoint_unit_type = OptEndpointUnit

        e = self._ef.create_endpoint(opt="stringer")
        self.assertTrue(hasattr(e, "_opt"))
        self.assertEquals(e._opt, "stringer")

    def test__ensure_node_errors(self):
        bep = BaseEndpoint()
        gcimock = Mock()
        gcimock.return_value = None
        with patch('pyon.net.endpoint.BaseEndpoint._get_container_instance',
                   gcimock):
            self.assertRaises(EndpointError, bep._ensure_node)

    @patch('pyon.net.endpoint.BaseEndpoint._get_container_instance')
    def test__ensure_node_existing_node(self, gcimock):
        self._ef._ensure_node()
        self.assertFalse(gcimock.called)

    @patch('pyon.net.endpoint.BaseEndpoint._get_container_instance')
    def test__ensure_node(self, gcimock):
        bep = BaseEndpoint()
        self.assertIsNone(bep.node)

        bep._ensure_node()

        self.assertEquals(bep.node, gcimock().node)

    def test__get_container_instance(self):
        c = Container()  # ensure we've got an instance in Container.instance
        self.assertEquals(BaseEndpoint._get_container_instance(), c)

    def test_close(self):
        bep = BaseEndpoint()
        bep.close()