def subscribe(self, handler, topic=None, options=None):
        """
        Implements :func:`autobahn.wamp.interfaces.ISubscriber.subscribe`
        """
        if six.PY2 and type(topic) == str:
            topic = six.u(topic)
        assert (topic is None or type(topic) == six.text_type)
        assert ((callable(handler) and topic is not None)
                or hasattr(handler, '__class__'))
        assert (options is None or isinstance(options, dict))

        if not self._transport:
            raise exception.TransportLost()

        if callable(handler):
            # subscribe a single handler
            return self._subscribe(None, handler, topic, options)
        else:
            # subscribe all methods on an object decorated with "wamp.subscribe"
            on_replies = []
            for k in inspect.getmembers(handler.__class__,
                                        is_method_or_function):
                proc = k[1]
                wampuris = [x for x in proc.__dict__.get("_wampuris", [])
                            if x.is_handler()]
                for pat in wampuris:
                    subopts = pat.options or options or types.SubscribeOptions(
                        match=u"wildcard" if pat.uri_type == uri.Pattern.URI_TYPE_WILDCARD
                        else u"exact").message_attr()
                    on_replies.append(
                        self._subscribe(handler, proc, pat.uri(), subopts))
            return txaio.gather(on_replies, consume_exceptions=True)
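For context, a hedged usage sketch of the decorated-object path handled above; the class, topic and the session wiring in the trailing comment are invented for illustration, not taken from this example.

from autobahn import wamp

class Monitor(object):

    @wamp.subscribe(u'com.example.on_update')
    def on_update(self, value):
        # invoked for every event published to the (hypothetical) topic
        print("update received:", value)

# inside an ApplicationSession (e.g. in onJoin), subscribing the object
# subscribes every decorated method; the returned future gathers all
# individual subscriptions:
#
#     subs = yield self.subscribe(Monitor())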
Example #2
def test_gather_no_consume(framework):
    '''
    consume_exceptions=False
    '''

    errors = []
    results = []
    calls = []

    f0 = txaio.create_future_error(error=RuntimeError("f0 failed"))
    f1 = txaio.create_future_error(error=RuntimeError("f1 failed"))

    f2 = txaio.gather([f0, f1], consume_exceptions=False)

    def done(arg):
        results.append(arg)

    def error(fail):
        errors.append(fail)
        # fail.printTraceback()
    txaio.add_callbacks(f0, done, error)
    txaio.add_callbacks(f1, done, error)
    txaio.add_callbacks(f2, done, error)

    # FIXME more testing annoyance; the propagated errors are raised
    # out of "run_until_complete()" as well; fix util.py?
    for f in [f0, f1, f2]:
        try:
            _await(f)
        except Exception:
            pass

    assert len(results) == 0
    assert len(errors) == 3
    assert len(calls) == 0
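For contrast, a minimal sketch of consume_exceptions=True; the Twisted flavor is assumed here so callbacks fire synchronously, and all names are invented.

import txaio
txaio.use_twisted()

f_ok = txaio.create_future_success(42)
f_err = txaio.create_future_error(error=RuntimeError("boom"))

# with consume_exceptions=True the gathered future itself does not errback;
# the failure shows up as an entry inside the result list instead
g = txaio.gather([f_ok, f_err], consume_exceptions=True)
txaio.add_callbacks(g, lambda results: print("gather fired:", results), None)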
Example #3
def run(entry_points):

    transports = [
        {
            "type": "websocket",
            "url": "ws://127.0.0.1:8080/ws"
        }
    ]

    done = []

    for main in entry_points:
        connection = Connection(main, realm=u'public',
            transports=transports, reactor=reactor)
        done.append(connection.connect())

    # deferred that fires when all connections are done
    done = txaio.gather(done)

    def finish(res):
        print("all connections done", res)
        reactor.stop()

    done.addBoth(finish)

    reactor.run()
Example #4
    async def stop(self):
        """
        Stop rotating/offering keys to the XBR market maker.
        """
        assert self._state in [SimpleSeller.STATE_STARTED], 'seller not running'

        self._state = SimpleSeller.STATE_STOPPING

        dl = []
        for key_series in self._keys.values():
            d = key_series.stop()
            dl.append(d)

        if self._session_regs:
            if self._session and self._session.is_attached():
                # voluntarily unregister interface
                for reg in self._session_regs:
                    d = reg.unregister()
                    dl.append(d)
            self._session_regs = None

        d = txaio.gather(dl)

        try:
            await d
        except:
            self.log.failure()
        finally:
            self._state = SimpleSeller.STATE_STOPPED
            self._session = None
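A minimal sketch (asyncio flavor assumed, all names invented) of awaiting a txaio.gather() future directly from a coroutine, as stop() does above:

import asyncio
import txaio
txaio.use_asyncio()

async def main():
    # wrap plain function calls as futures, then await the gathered result
    futures = [txaio.as_future(lambda n: n * n, i) for i in range(3)]
    results = await txaio.gather(futures)
    print(results)  # [0, 1, 4]

asyncio.run(main())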
Example #5
def test_gather_no_consume(framework):
    '''
    consume_exceptions=False
    '''

    errors = []
    results = []
    calls = []

    f0 = txaio.create_future_error(error=RuntimeError("f0 failed"))
    f1 = txaio.create_future_error(error=RuntimeError("f1 failed"))

    f2 = txaio.gather([f0, f1], consume_exceptions=False)

    def done(arg):
        results.append(arg)

    def error(fail):
        errors.append(fail)
        # fail.printTraceback()
    txaio.add_callbacks(f0, done, error)
    txaio.add_callbacks(f1, done, error)
    txaio.add_callbacks(f2, done, error)

    # FIXME more testing annoyance; the propagated errors are raised
    # out of "run_until_complete()" as well; fix util.py?
    for f in [f0, f1, f2]:
        try:
            _await(f)
        except Exception:
            pass

    assert len(results) == 0
    assert len(errors) == 3
    assert len(calls) == 0
Example #6
    def fire(self, event, *args, **kwargs):
        res = []
        if event in self._listeners:
            for handler in self._listeners[event]:
                value = txaio.as_future(handler, *args, **kwargs)
                res.append(value)
        if self._parent is not None:
            res.append(self._parent.fire(event, *args, **kwargs))
        return txaio.gather(res)
Example #7
    def fire(self, event, *args, **kwargs):
        res = []
        if event in self._listeners:
            for handler in self._listeners[event]:
                value = txaio.as_future(handler, *args, **kwargs)
                res.append(value)
        if self._parent is not None:
            res.append(self._parent.fire(event, *args, **kwargs))
        return txaio.gather(res)
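The same pattern as a standalone sketch (not Autobahn's class; Twisted flavor assumed, names invented): run every listener registered for an event, wrap each return value in a future, and gather them.

import txaio
txaio.use_twisted()

listeners = {'ready': [lambda x: x * 2, lambda x: x + 1]}

def fire(event, *args):
    futures = [txaio.as_future(handler, *args) for handler in listeners.get(event, [])]
    return txaio.gather(futures, consume_exceptions=False)

d = fire('ready', 10)
txaio.add_callbacks(d, lambda results: print(results), None)  # prints [20, 11]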
Example #8
    def subscribe(self, handler, topic=None, options=None):
        """
        Implements :func:`autobahn.wamp.interfaces.ISubscriber.subscribe`
        """
        assert ((callable(handler) and topic is not None)
                or hasattr(handler, '__class__'))
        if topic and six.PY2 and type(topic) == str:
            topic = six.u(topic)
        assert (topic is None or type(topic) == six.text_type)
        assert (options is None or isinstance(options, types.SubscribeOptions))

        if not self._transport:
            raise exception.TransportLost()

        def _subscribe(obj, fn, topic, options):
            request_id = util.id()
            on_reply = txaio.create_future()
            handler_obj = Handler(fn, obj,
                                  options.details_arg if options else None)
            self._subscribe_reqs[request_id] = SubscribeRequest(
                request_id, on_reply, handler_obj)

            if options:
                msg = message.Subscribe(request_id, topic,
                                        **options.message_attr())
            else:
                msg = message.Subscribe(request_id, topic)

            self._transport.send(msg)
            return on_reply

        if callable(handler):

            # subscribe a single handler
            return _subscribe(None, handler, topic, options)

        else:

            # subscribe all methods on an object decorated with "wamp.subscribe"
            on_replies = []
            for k in inspect.getmembers(handler.__class__,
                                        is_method_or_function):
                proc = k[1]
                if "_wampuris" in proc.__dict__:
                    pat = proc.__dict__["_wampuris"][0]
                    if pat.is_handler():
                        uri = pat.uri()
                        subopts = options or pat.subscribe_options()
                        on_replies.append(
                            _subscribe(handler, proc, uri, subopts))

            # XXX needs coverage
            return txaio.gather(on_replies, consume_exceptions=True)
Example #9
    def register(self, endpoint, procedure=None, options=None):
        """
        Implements :func:`autobahn.wamp.interfaces.ICallee.register`
        """
        assert ((callable(endpoint) and procedure is not None)
                or hasattr(endpoint, '__class__'))
        if procedure and six.PY2 and type(procedure) == str:
            procedure = six.u(procedure)
        assert (procedure is None or type(procedure) == six.text_type)
        assert (options is None or isinstance(options, types.RegisterOptions))

        if not self._transport:
            raise exception.TransportLost()

        def _register(obj, fn, procedure, options):
            request_id = self._request_id_gen.next()
            on_reply = txaio.create_future()
            endpoint_obj = Endpoint(fn, obj,
                                    options.details_arg if options else None)
            self._register_reqs[request_id] = RegisterRequest(
                request_id, on_reply, procedure, endpoint_obj)

            if options:
                msg = message.Register(request_id, procedure,
                                       **options.message_attr())
            else:
                msg = message.Register(request_id, procedure)

            self._transport.send(msg)
            return on_reply

        if callable(endpoint):

            # register a single callable
            return _register(None, endpoint, procedure, options)

        else:

            # register all methods on an object decorated with "wamp.register"
            on_replies = []
            for k in inspect.getmembers(endpoint.__class__,
                                        is_method_or_function):
                proc = k[1]
                if "_wampuris" in proc.__dict__:
                    for pat in proc.__dict__["_wampuris"]:
                        if pat.is_endpoint():
                            uri = pat.uri()
                            on_replies.append(
                                _register(endpoint, proc, uri, options))

            # XXX needs coverage
            return txaio.gather(on_replies, consume_exceptions=True)
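Analogous to the subscribe case, a hedged usage sketch of registering every decorated method on an object; the class, procedure URI and the session wiring in the trailing comment are invented.

from autobahn import wamp

class Calculator(object):

    @wamp.register(u'com.example.add')
    def add(self, a, b):
        return a + b

# inside an ApplicationSession (e.g. in onJoin), registering the object
# registers every decorated method; the returned future gathers all replies:
#
#     regs = yield self.register(Calculator())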
Example #10
    def subscribe(self, handler, topic=None, options=None):
        """
        Implements :func:`autobahn.wamp.interfaces.ISubscriber.subscribe`
        """
        assert((callable(handler) and topic is not None) or hasattr(handler, '__class__'))
        if topic and six.PY2 and type(topic) == str:
            topic = six.u(topic)
        assert(topic is None or type(topic) == six.text_type)
        assert(options is None or isinstance(options, types.SubscribeOptions))

        if not self._transport:
            raise exception.TransportLost()

        def _subscribe(obj, fn, topic, options):
            request_id = util.id()
            on_reply = txaio.create_future()
            handler_obj = Handler(fn, obj, options.details_arg if options else None)
            self._subscribe_reqs[request_id] = SubscribeRequest(request_id, on_reply, handler_obj)

            if options:
                msg = message.Subscribe(request_id, topic, **options.message_attr())
            else:
                msg = message.Subscribe(request_id, topic)

            self._transport.send(msg)
            return on_reply

        if callable(handler):

            # subscribe a single handler
            return _subscribe(None, handler, topic, options)

        else:

            # subscribe all methods on an object decorated with "wamp.subscribe"
            on_replies = []
            for k in inspect.getmembers(handler.__class__, is_method_or_function):
                proc = k[1]
                if "_wampuris" in proc.__dict__:
                    pat = proc.__dict__["_wampuris"][0]
                    if pat.is_handler():
                        uri = pat.uri()
                        subopts = options or pat.subscribe_options()
                        on_replies.append(_subscribe(handler, proc, uri, subopts))

            # XXX needs coverage
            return txaio.gather(on_replies, consume_exceptions=True)
Example #11
def _run(reactor, components):
    if isinstance(components, Component):
        components = [components]

    if type(components) != list:
        raise ValueError(
            '"components" must be a list of Component objects - encountered'
            ' {0}'.format(type(components))
        )

    for c in components:
        if not isinstance(c, Component):
            raise ValueError(
                '"components" must be a list of Component objects - encountered'
                ' item of type {0}'.format(type(c))
            )

    log = txaio.make_logger()

    def component_success(c, arg):
        log.debug("Component {c} successfully completed: {arg}", c=c, arg=arg)
        return arg

    def component_failure(f):
        log.error("Component error: {msg}", msg=txaio.failure_message(f))
        log.debug("Component error: {tb}", tb=txaio.failure_format_traceback(f))
        return None

    # all components are started in parallel
    dl = []
    for c in components:
        # a component can be of type MAIN or SETUP
        d = c.start(reactor)
        txaio.add_callbacks(d, partial(component_success, c), component_failure)
        dl.append(d)
    d = txaio.gather(dl, consume_exceptions=False)

    def all_done(arg):
        log.debug("All components ended; stopping reactor")
        try:
            reactor.stop()
        except ReactorNotRunning:
            pass

    txaio.add_callbacks(d, all_done, all_done)

    return d
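For context, a hedged sketch of driving components through the public component API that wraps a helper like _run(); the transport URL and realm below are assumptions.

from autobahn.twisted.component import Component, run

comp = Component(
    transports=u"ws://127.0.0.1:8080/ws",
    realm=u"realm1",
)

@comp.on_join
def joined(session, details):
    print("session joined:", details)

if __name__ == '__main__':
    run([comp])  # starts all components in parallel, then runs the reactor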
Example #12
    def onJoin(self, details):
        self.log.info('session joined: {details}', details=details)

        def on_event(pid, seq, ran, details=None):
            self.log.info(
                'event received on topic {topic}: pid={pid}, seq={seq}, ran={ran}, details={details}\n',
                topic=details.topic,
                pid=pid,
                seq=seq,
                ran=binascii.b2a_hex(ran),
                details=details)

        reg = yield self.subscribe(on_event,
                                   SUBTOPIC,
                                   options=SubscribeOptions(match=SUBMATCH,
                                                            details=True))

        self.log.info(
            'subscribed to topic {topic} (match={match}): registration={reg}',
            topic=SUBTOPIC,
            match=SUBMATCH,
            reg=reg)

        pid = os.getpid()
        seq = 1

        while True:
            self.log.info('-' * 60)
            pubs = []
            for PUBTOPIC in [PUBTOPIC1, PUBTOPIC2, PUBTOPIC3, PUBTOPIC4]:
                topic = PUBTOPIC.format(seq)
                pub = self.publish(
                    topic,
                    pid,
                    seq,
                    os.urandom(8),
                    options=PublishOptions(acknowledge=True, exclude_me=False),
                )
                self.log.info(
                    'event published to {topic}: publication={pub}\n',
                    topic=topic,
                    pub=pub)
                pubs.append(pub)
            seq += 1
            yield txaio.gather(pubs)
            yield sleep(1)
Example #13
    def start(self, session):
        """
        Start rotating keys and placing key offers with the XBR market maker.

        :param session: WAMP session over which to communicate with the XBR market maker.
        :param provider_id: The XBR provider ID.
        :return:
        """
        assert self._session is None

        self._session = session
        self._session_regs = []

        procedure = 'xbr.provider.{}.sell'.format(self._provider_id)
        reg = yield session.register(
            self.sell,
            procedure,
            options=RegisterOptions(details_arg='details'))
        self._session_regs.append(reg)
        self.log.info('Registered procedure "{procedure}"',
                      procedure=hl(reg.procedure))

        for key_series in self._keys.values():
            key_series.start()

        if False:
            dl = []
            for func in [self.sell]:
                procedure = 'xbr.provider.{}.{}'.format(
                    self._provider_id, func.__name__)
                d = session.register(
                    func,
                    procedure,
                    options=RegisterOptions(details_arg='details'))
                dl.append(d)
            d = txaio.gather(dl)

            def registered(regs):
                for reg in regs:
                    self.log.info('Registered procedure "{procedure}"',
                                  procedure=hl(reg.procedure))
                self._session_regs = regs

            d.addCallback(registered)

            return d
Example #14
    def register(self, endpoint, procedure=None, options=None):
        """
        Implements :func:`autobahn.wamp.interfaces.ICallee.register`
        """
        assert((callable(endpoint) and procedure is not None) or hasattr(endpoint, '__class__'))
        if procedure and six.PY2 and type(procedure) == str:
            procedure = six.u(procedure)
        assert(procedure is None or type(procedure) == six.text_type)
        assert(options is None or isinstance(options, types.RegisterOptions))

        if not self._transport:
            raise exception.TransportLost()

        def _register(obj, fn, procedure, options):
            request_id = util.id()
            on_reply = txaio.create_future()
            endpoint_obj = Endpoint(fn, obj, options.details_arg if options else None)
            self._register_reqs[request_id] = RegisterRequest(request_id, on_reply, procedure, endpoint_obj)

            if options:
                msg = message.Register(request_id, procedure, **options.message_attr())
            else:
                msg = message.Register(request_id, procedure)

            self._transport.send(msg)
            return on_reply

        if callable(endpoint):

            # register a single callable
            return _register(None, endpoint, procedure, options)

        else:

            # register all methods on an object decorated with "wamp.register"
            on_replies = []
            for k in inspect.getmembers(endpoint.__class__, is_method_or_function):
                proc = k[1]
                if "_wampuris" in proc.__dict__:
                    pat = proc.__dict__["_wampuris"][0]
                    if pat.is_endpoint():
                        uri = pat.uri()
                        on_replies.append(_register(endpoint, proc, uri, options))

            # XXX needs coverage
            return txaio.gather(on_replies, consume_exceptions=True)
Example #15
    def onJoin(self, details):
        self.log.info('session joined: {}'.format(details))

        calls = []

        started = time.time()

        i = 1
        stop = [False]

        def on_error(err):
            if isinstance(
                    err.value, ApplicationError
            ) and err.value.error == u'crossbar.error.max_concurrency_reached':
                if not stop[0]:
                    stop[0] = True
                    self.log.info(
                        'no longer issuing calls - maximum concurrency reached: {}'
                        .format(err.value.args[0]))
            else:
                stop[0] = True
                return err

        while not stop[0]:
            self.log.info('issuing call {i} ..', i=i)
            # d = self.call(u'com.example.compute', i, mode='sleep', runtime=5)
            d = self.call(u'com.example.compute', i, mode='fib', n=30)
            d.addErrback(on_error)
            i += 1
            calls.append(d)
            yield sleep(.01)

        results = yield txaio.gather(calls)

        ended = time.time()
        runtime = ended - started

        self.log.info('total run-time (wallclock): {runtime}', runtime=runtime)
        for result in results:
            if result:
                self.log.info('{result}', result=result)

        yield self.leave()
Example #16
def test_gather_two(framework):
    '''
    Wait for two Futures.
    '''

    errors = []
    results = []
    calls = []

    def foo():
        def codependant(*args, **kw):
            calls.append((args, kw))
            return 42

        return txaio.as_future(codependant)

    def method(*args, **kw):
        calls.append((args, kw))
        return "OHAI"

    f0 = txaio.as_future(method, 1, 2, 3, key='word')
    f1 = txaio.as_future(foo)

    f2 = txaio.gather([f0, f1])

    def done(arg):
        results.append(arg)

    def error(fail):
        errors.append(fail)
        # fail.printTraceback()

    txaio.add_callbacks(f2, done, error)

    for f in [f0, f1, f2]:
        _await(f)

    assert len(results) == 1
    assert len(errors) == 0
    assert results[0] == ['OHAI', 42] or results[0] == [42, 'OHAI']
    assert len(calls) == 2
    assert calls[0] == ((1, 2, 3), dict(key='word'))
    assert calls[1] == (tuple(), dict())
Example #17
    def stop(self):
        """
        Stop rotating/offering keys to the XBR market maker.
        """
        dl = []
        for key_series in self._keys.values():
            d = key_series.stop()
            dl.append(d)

        if self._session_regs:
            if self._session and self._session.is_attached():
                # voluntarily unregister interface
                for reg in self._session_regs:
                    d = reg.unregister()
                    dl.append(d)
            self._session_regs = None

        d = txaio.gather(dl)
        return d
Example #18
def _run(reactor, components):
    if isinstance(components, Component):
        components = [components]

    if type(components) != list:
        raise RuntimeError('"components" must be a list of Component objects - encountered {0}'.format(type(components)))

    for c in components:
        if not isinstance(c, Component):
            raise RuntimeError('"components" must be a list of Component objects - encountered item of type {0}'.format(type(c)))

    # all components are started in parallel
    dl = []
    for c in components:
        # a component can be of type MAIN or SETUP
        dl.append(c.start(reactor))

    d = txaio.gather(dl, consume_exceptions=True)

    return d
Example #19
def test_gather_two():
    '''
    Wait for two Futures.
    '''

    errors = []
    results = []
    calls = []

    def foo():
        def codependant(*args, **kw):
            calls.append((args, kw))
            return 42
        return txaio.as_future(codependant)

    def method(*args, **kw):
        calls.append((args, kw))
        return "OHAI"
    f0 = txaio.as_future(method, 1, 2, 3, key='word')
    f1 = txaio.as_future(foo)

    f2 = txaio.gather([f0, f1])

    def done(arg):
        results.append(arg)

    def error(fail):
        errors.append(fail)
        # fail.printTraceback()
    txaio.add_callbacks(f2, done, error)

    _await(f0)
    _await(f1)
    _await(f2)

    assert len(results) == 1
    assert len(errors) == 0
    assert results[0] == ['OHAI', 42] or results[0] == [42, 'OHAI']
    assert len(calls) == 2
    assert calls[0] == ((1, 2, 3), dict(key='word'))
    assert calls[1] == (tuple(), dict())
Example #20
    def onJoin(self, details):
        self.log.info('session joined: {}'.format(details))

        calls = []

        started = time.time()

        i = 1
        stop = [False]

        def on_error(err):
            if isinstance(err.value, ApplicationError) and err.value.error == u'crossbar.error.max_concurrency_reached':
                if not stop[0]:
                    stop[0] = True
                    self.log.info('no longer issuing calls - maximum concurrency reached: {}'.format(err.value.args[0]))
            else:
                stop[0] = True
                return err

        while not stop[0]:
            self.log.info('issuing call {i} ..', i=i)
            # d = self.call(u'com.example.compute', i, mode='sleep', runtime=5)
            d = self.call(u'com.example.compute', i, mode='fib', n=30)
            d.addErrback(on_error)
            i += 1
            calls.append(d)
            yield sleep(.01)

        results = yield txaio.gather(calls)

        ended = time.time()
        runtime = ended - started

        self.log.info('total run-time (wallclock): {runtime}', runtime=runtime)
        for result in results:
            if result:
                self.log.info('{result}', result=result)

        yield self.leave()
Example #21
    def fire(self, event, *args, **kwargs):
        """
        Fire a particular event.

        :param event: the event to fire. All other args and kwargs are
            passed on to the handler(s) for the event.

        :return: a Deferred/Future gathering all async results from
            all handlers and/or parent handlers.
        """
        # print("firing '{}' from '{}'".format(event, hash(self)))
        if self._listeners is None:
            return txaio.create_future(result=[])

        self._check_event(event)
        res = []
        for handler in self._listeners.get(event, set()):
            future = txaio.as_future(handler, *args, **kwargs)
            res.append(future)
        if self._parent is not None:
            res.append(self._parent.fire(event, *args, **kwargs))
        return txaio.gather(res, consume_exceptions=False)
Example #22
    def fire(self, event, *args, **kwargs):
        """
        Fire a particular event.

        :param event: the event to fire. All other args and kwargs are
            passed on to the handler(s) for the event.

        :return: a Deferred/Future gathering all async results from
            all handlers and/or parent handlers.
        """
        # print("firing '{}' from '{}'".format(event, hash(self)))
        if self._listeners is None:
            return txaio.create_future(result=[])

        self._check_event(event)
        res = []
        for handler in self._listeners.get(event, set()):
            future = txaio.as_future(handler, *args, **kwargs)
            res.append(future)
        if self._parent is not None:
            res.append(self._parent.fire(event, *args, **kwargs))
        return txaio.gather(res, consume_exceptions=False)
Example #23
    def onJoin(self, details):
        self.log.info('session joined: {}'.format(details))

        calls = []

        started = time.time()

        for i in range(20):
            self.log.info('issuing call {i} ..', i=i)
            d = self.call(u'com.example.compute', i, 2)
            calls.append(d)

        results = yield txaio.gather(calls)

        ended = time.time()
        runtime = ended - started

        self.log.info('total run-time (wallclock): {runtime}', runtime=runtime)
        for result in results:
            if result:
                self.log.info('{result}', result=result)

        yield self.leave()
Example #24
def run(entry_points):

    transports = [{"type": "websocket", "url": "ws://127.0.0.1:8080/ws"}]

    done = []

    for main in entry_points:
        connection = Connection(main,
                                realm=u'public',
                                transports=transports,
                                reactor=reactor)
        done.append(connection.connect())

    # deferred that fires when all connections are done
    done = txaio.gather(done)

    def finish(res):
        print("all connections done", res)
        reactor.stop()

    done.addBoth(finish)

    reactor.run()
Example #25
    def onJoin(self, details):
        self.log.info('session joined: {}'.format(details))

        calls = []

        started = time.time()

        for i in range(20):
            self.log.info('issuing call {i} ..', i=i)
            d = self.call(u'com.example.compute', i, 2)
            calls.append(d)

        results = yield txaio.gather(calls)

        ended = time.time()
        runtime = ended - started

        self.log.info('total run-time (wallclock): {runtime}', runtime=runtime)
        for result in results:
            if result:
                self.log.info('{result}', result=result)

        yield self.leave()
Example #26
def _run(reactor, components):
    if isinstance(components, Component):
        components = [components]

    if type(components) != list:
        raise RuntimeError(
            '"components" must be a list of Component objects - encountered {0}'
            .format(type(components)))

    for c in components:
        if not isinstance(c, Component):
            raise RuntimeError(
                '"components" must be a list of Component objects - encountered item of type {0}'
                .format(type(c)))

    # all components are started in parallel
    dl = []
    for c in components:
        # a component can be of type MAIN or SETUP
        dl.append(c.start(reactor))

    d = txaio.gather(dl, consume_exceptions=True)

    return d
Example #27
def _run(reactor, components, done_callback):
    """
    Internal helper. Use "run" method from autobahn.twisted.wamp or
    autobahn.asyncio.wamp

    This is the generic parts of the run() method so that there's very
    little code in the twisted/asyncio specific run() methods.

    This is called by react() (or run_until_complete()) so any errors
    coming out of this should be handled properly. Logging will
    already be started.
    """
    # let user pass a single component to run, too
    # XXX probably want IComponent? only demand it, here and below?
    if isinstance(components, Component):
        components = [components]

    if type(components) != list:
        raise ValueError(
            '"components" must be a list of Component objects - encountered'
            ' {0}'.format(type(components)))

    for c in components:
        if not isinstance(c, Component):
            raise ValueError(
                '"components" must be a list of Component objects - encountered'
                ' item of type {0}'.format(type(c)))

    # validation complete; proceed with startup
    log = txaio.make_logger()

    def component_success(comp, arg):
        log.debug("Component '{c}' successfully completed: {arg}",
                  c=comp,
                  arg=arg)
        return arg

    def component_failure(comp, f):
        log.error("Component '{c}' error: {msg}",
                  c=comp,
                  msg=txaio.failure_message(f))
        log.debug("Component error: {tb}",
                  tb=txaio.failure_format_traceback(f))
        # double-check: is a component-failure still fatal to the
        # startup process (because we passed consume_exceptions=False
        # to gather() below?)
        return None

    def component_start(comp):
        # the future from start() errbacks if we fail, or callbacks
        # when the component is considered "done" (so maybe never)
        d = txaio.as_future(comp.start, reactor)
        txaio.add_callbacks(
            d,
            partial(component_success, comp),
            partial(component_failure, comp),
        )
        return d

    # note that these are started in parallel -- maybe we want to add
    # a "connected" signal to components so we could start them in the
    # order they're given to run() as "a" solution to dependencies.
    dl = []
    for comp in components:
        d = component_start(comp)
        dl.append(d)
    done_d = txaio.gather(dl, consume_exceptions=False)

    def all_done(arg):
        log.debug("All components ended; stopping reactor")
        done_callback(reactor, arg)

    txaio.add_callbacks(done_d, all_done, all_done)
    return done_d
Example #28
            def on_authorize_success(authorization):

                # the call to authorize the action _itself_ succeeded. now go on depending on whether
                # the action was actually authorized or not ..
                #
                if not authorization[u'allow']:

                    if publish.acknowledge:
                        if self._router.is_traced:
                            publish.correlation_is_last = False
                            self._router._factory._worker._maybe_trace_rx_msg(session, publish)

                        reply = message.Error(message.Publish.MESSAGE_TYPE, publish.request, ApplicationError.NOT_AUTHORIZED, [u"session not authorized to publish to topic '{0}'".format(publish.topic)])
                        reply.correlation_id = publish.correlation_id
                        reply.correlation_uri = publish.topic
                        reply.correlation_is_anchor = False
                        reply.correlation_is_last = True
                        self._router.send(session, reply)

                    else:
                        if self._router.is_traced:
                            publish.correlation_is_last = True
                            self._router._factory._worker._maybe_trace_rx_msg(session, publish)

                else:

                    # new ID for the publication
                    #
                    publication = util.id()

                    # publisher disclosure
                    #
                    if authorization[u'disclose']:
                        disclose = True
                    elif (publish.topic.startswith(u"wamp.") or publish.topic.startswith(u"crossbar.")):
                        disclose = True
                    else:
                        disclose = False

                    forward_for = None
                    if disclose:
                        if publish.forward_for:
                            publisher = publish.forward_for[0]['session']
                            publisher_authid = publish.forward_for[0]['authid']
                            publisher_authrole = publish.forward_for[0]['authrole']
                            forward_for = publish.forward_for + [
                                {
                                    'session': session._session_id,
                                    'authid': session._authid,
                                    'authrole': session._authrole,
                                }
                            ]
                        else:
                            publisher = session._session_id
                            publisher_authid = session._authid
                            publisher_authrole = session._authrole
                    else:
                        publisher = None
                        publisher_authid = None
                        publisher_authrole = None

                    # skip publisher
                    #
                    if publish.exclude_me is None or publish.exclude_me:
                        me_also = False
                    else:
                        me_also = True

                    # persist event (this is done only once, regardless of the number of subscriptions
                    # the event matches on)
                    #
                    if store_event:
                        self._event_store.store_event(session, publication, publish)

                    # retain event on the topic
                    #
                    if retain_event:
                        retained_event = RetainedEvent(publish, publisher, publisher_authid, publisher_authrole)

                        observation = self._subscription_map.get_observation(publish.topic)

                        if not observation:
                            # No observation, let's make a new one
                            observation = self._subscription_map.create_observation(publish.topic, extra=SubscriptionExtra())
                        else:
                            # this can happen if event-history is
                            # enabled on the topic: the event-store
                            # creates an observation before any client
                            # could possibly hit the code above
                            if observation.extra is None:
                                observation.extra = SubscriptionExtra()
                            elif not isinstance(observation.extra, SubscriptionExtra):
                                raise Exception(
                                    "incorrect 'extra' for '{}'".format(publish.topic)
                                )

                        if observation.extra.retained_events:
                            if not publish.eligible and not publish.exclude:
                                observation.extra.retained_events = [retained_event]
                            else:
                                observation.extra.retained_events.append(retained_event)
                        else:
                            observation.extra.retained_events = [retained_event]

                    subscription_to_receivers = {}
                    total_receivers_cnt = 0

                    # iterate over all subscriptions and determine actual receivers of the event
                    # under the respective subscription. also persist events (independent of whether
                    # there is any actual receiver right now on the subscription)
                    #
                    for subscription in subscriptions:

                        # initial list of receivers are all subscribers on a subscription ..
                        #
                        receivers = subscription.observers
                        receivers = self._filter_publish_receivers(receivers, publish)

                        # if receivers is non-empty, dispatch event ..
                        #
                        receivers_cnt = len(receivers) - (1 if self in receivers else 0)
                        if receivers_cnt:

                            total_receivers_cnt += receivers_cnt
                            subscription_to_receivers[subscription] = receivers

                    # send publish acknowledge before dispatching
                    #
                    if publish.acknowledge:
                        if self._router.is_traced:
                            publish.correlation_is_last = False
                            self._router._factory._worker._maybe_trace_rx_msg(session, publish)

                        reply = message.Published(publish.request, publication)
                        reply.correlation_id = publish.correlation_id
                        reply.correlation_uri = publish.topic
                        reply.correlation_is_anchor = False
                        reply.correlation_is_last = total_receivers_cnt == 0
                        self._router.send(session, reply)
                    else:
                        if self._router.is_traced and publish.correlation_is_last is None:
                            if total_receivers_cnt == 0:
                                publish.correlation_is_last = True
                            else:
                                publish.correlation_is_last = False

                    # now actually dispatch the events!
                    # for chunked dispatching, this will be filled with deferreds for each chunk
                    # processed. when the complete list of deferreds is done, that means the
                    # event has been sent out to all applicable receivers
                    all_dl = []

                    if total_receivers_cnt:

                        # list of receivers that should have received the event, but we could not
                        # send the event, since the receiver has disappeared in the meantime
                        vanished_receivers = []

                        for subscription, receivers in subscription_to_receivers.items():

                            storing_event = store_event and self._event_store in subscription.observers

                            self.log.debug('dispatching for subscription={subscription}, storing_event={storing_event}',
                                           subscription=subscription, storing_event=storing_event)

                            # for pattern-based subscriptions, the EVENT must contain
                            # the actual topic being published to
                            #
                            if subscription.match != message.Subscribe.MATCH_EXACT:
                                topic = publish.topic
                            else:
                                topic = None

                            if publish.payload:
                                msg = message.Event(subscription.id,
                                                    publication,
                                                    payload=publish.payload,
                                                    publisher=publisher,
                                                    publisher_authid=publisher_authid,
                                                    publisher_authrole=publisher_authrole,
                                                    topic=topic,
                                                    enc_algo=publish.enc_algo,
                                                    enc_key=publish.enc_key,
                                                    enc_serializer=publish.enc_serializer,
                                                    forward_for=forward_for)
                            else:
                                msg = message.Event(subscription.id,
                                                    publication,
                                                    args=publish.args,
                                                    kwargs=publish.kwargs,
                                                    publisher=publisher,
                                                    publisher_authid=publisher_authid,
                                                    publisher_authrole=publisher_authrole,
                                                    topic=topic,
                                                    forward_for=forward_for)

                            # if the publish message had a correlation ID, this will also be the
                            # correlation ID of the event message sent out
                            msg.correlation_id = publish.correlation_id
                            msg.correlation_uri = publish.topic
                            msg.correlation_is_anchor = False
                            msg.correlation_is_last = False

                            chunk_size = self._options.event_dispatching_chunk_size

                            if chunk_size and len(receivers) > chunk_size:
                                self.log.debug('chunked dispatching to {receivers_size} with chunk_size={chunk_size}',
                                               receivers_size=len(receivers), chunk_size=chunk_size)
                            else:
                                self.log.debug('unchunked dispatching to {receivers_size} receivers',
                                               receivers_size=len(receivers))

                            # note that we're using one code-path for both chunked and unchunked
                            # dispatches; the *first* chunk is always done "synchronously" (before
                            # the first call-later) so "un-chunked mode" really just means we know
                            # we'll be done right now and NOT do a call_later...

                            # a Deferred that fires when all chunks are done
                            all_d = txaio.create_future()
                            all_dl.append(all_d)

                            # all the event messages are the same except for the last one, which
                            # needs to have the "is_last" flag set if we're doing a trace
                            if self._router.is_traced:
                                last_msg = copy.deepcopy(msg)
                                last_msg.correlation_id = msg.correlation_id
                                last_msg.correlation_uri = msg.correlation_uri
                                last_msg.correlation_is_anchor = False
                                last_msg.correlation_is_last = True

                            def _notify_some(receivers):

                                # we do a first pass over the proposed chunk of receivers
                                # because not all of them will have a transport, and if this
                                # will be the last chunk of receivers we need to figure out
                                # which event is last...
                                receivers_this_chunk = []
                                for receiver in receivers[:chunk_size]:
                                    if receiver._session_id and receiver._transport:
                                        receivers_this_chunk.append(receiver)
                                    else:
                                        vanished_receivers.append(receiver)

                                receivers = receivers[chunk_size:]

                                # XXX note there's still going to be some edge-cases here .. if
                                # we are NOT the last chunk, but all the next chunk's receivers
                                # (could be only 1 in that chunk!) vanish before we run our next
                                # batch, then a "last" event will never go out ...

                                # we now actually do the deliveries, but now we know which
                                # receiver is the last one
                                if receivers or not self._router.is_traced:

                                    # NOT the last chunk (or we're not traced so don't care)
                                    for receiver in receivers_this_chunk:

                                        # send out WAMP msg to peer
                                        self._router.send(receiver, msg)
                                        if self._event_store or storing_event:
                                            self._event_store.store_event_history(publication, subscription.id, receiver)
                                else:
                                    # last chunk, so last receiver gets the different message
                                    for receiver in receivers_this_chunk[:-1]:
                                        self._router.send(receiver, msg)
                                        if self._event_store or storing_event:
                                            self._event_store.store_event_history(publication, subscription.id, receiver)

                                    # FIXME: I don't get the following comment and code path. when, how? and what to
                                    # do about event store? => storing_event
                                    #
                                    # we might have zero valid receivers
                                    if receivers_this_chunk:
                                        self._router.send(receivers_this_chunk[-1], last_msg)
                                        # FIXME: => storing_event

                                if receivers:
                                    # still more to do ..
                                    return txaio.call_later(0, _notify_some, receivers)
                                else:
                                    # all done! resolve all_d, which represents all receivers
                                    # to a single subscription matching the event
                                    txaio.resolve(all_d, None)

                            _notify_some([
                                recv for recv in receivers
                                if (me_also or recv != session) and recv != self._event_store
                            ])

                    return txaio.gather(all_dl)
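The chunked dispatch above reduces to this standalone sketch (Twisted flavor assumed; send() is a stand-in for the router's send, all names invented): deliver one chunk synchronously, reschedule with call_later(0, ...), and resolve a future once every chunk has gone out.

import txaio
txaio.use_twisted()

def dispatch_chunked(receivers, send, chunk_size=2):
    done = txaio.create_future()  # fires once every chunk has been sent

    def _notify_some(remaining):
        for receiver in remaining[:chunk_size]:
            send(receiver)  # deliver this chunk synchronously
        remaining = remaining[chunk_size:]
        if remaining:
            # yield to the reactor between chunks
            txaio.call_later(0, _notify_some, remaining)
        else:
            txaio.resolve(done, None)  # all chunks delivered

    _notify_some(list(receivers))
    return done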
Example #29
            def on_authorize_success(authorization):

                # the call to authorize the action _itself_ succeeded. now go on depending on whether
                # the action was actually authorized or not ..
                #
                if not authorization[u'allow']:

                    if publish.acknowledge:
                        reply = message.Error(
                            message.Publish.MESSAGE_TYPE, publish.request,
                            ApplicationError.NOT_AUTHORIZED, [
                                u"session not authorized to publish to topic '{0}'"
                                .format(publish.topic)
                            ])
                        reply.correlation_id = publish.correlation_id
                        reply.correlation_uri = publish.topic
                        reply.correlation_is_anchor = False
                        reply.correlation_is_last = True
                        self._router.send(session, reply)

                else:

                    # new ID for the publication
                    #
                    publication = util.id()

                    # publisher disclosure
                    #
                    if authorization[u'disclose']:
                        disclose = True
                    elif (publish.topic.startswith(u"wamp.")
                          or publish.topic.startswith(u"crossbar.")):
                        disclose = True
                    else:
                        disclose = False

                    if disclose:
                        publisher = session._session_id
                        publisher_authid = session._authid
                        publisher_authrole = session._authrole
                    else:
                        publisher = None
                        publisher_authid = None
                        publisher_authrole = None

                    # skip publisher
                    #
                    if publish.exclude_me is None or publish.exclude_me:
                        me_also = False
                    else:
                        me_also = True

                    # persist event (this is done only once, regardless of the number of subscriptions
                    # the event matches on)
                    #
                    if store_event:
                        self._event_store.store_event(session._session_id,
                                                      publication,
                                                      publish.topic,
                                                      publish.args,
                                                      publish.kwargs)

                    # retain event on the topic
                    #
                    if retain_event:
                        retained_event = RetainedEvent(publish, publisher,
                                                       publisher_authid,
                                                       publisher_authrole)

                        observation = self._subscription_map.get_observation(
                            publish.topic)

                        if not observation:
                            # No observation, let's make a new one
                            observation = self._subscription_map.create_observation(
                                publish.topic, extra=SubscriptionExtra())
                        else:
                            # this can happen if event-history is
                            # enabled on the topic: the event-store
                            # creates an observation before any client
                            # could possibly hit the code above
                            if observation.extra is None:
                                observation.extra = SubscriptionExtra()
                            elif not isinstance(observation.extra,
                                                SubscriptionExtra):
                                raise Exception(
                                    "incorrect 'extra' for '{}'".format(
                                        publish.topic))

                        if observation.extra.retained_events:
                            if not publish.eligible and not publish.exclude:
                                observation.extra.retained_events = [
                                    retained_event
                                ]
                            else:
                                observation.extra.retained_events.append(
                                    retained_event)
                        else:
                            observation.extra.retained_events = [
                                retained_event
                            ]

                    subscription_to_receivers = {}
                    total_receivers_cnt = 0

                    # iterate over all subscriptions and determine actual receivers of the event
                    # under the respective subscription. also persist events (independent of whether
                    # there is any actual receiver right now on the subscription)
                    #
                    for subscription in subscriptions:

                        # persist event history, but check if it is persisted on the individual subscription!
                        #
                        if store_event and self._event_store in subscription.observers:
                            self._event_store.store_event_history(
                                publication, subscription.id)

                        # initial list of receivers are all subscribers on a subscription ..
                        #
                        receivers = subscription.observers
                        receivers = self._filter_publish_receivers(
                            receivers, publish)

                        # if receivers is non-empty, dispatch event ..
                        #
                        receivers_cnt = len(receivers) - (1 if self in receivers else 0)
                        if receivers_cnt:

                            total_receivers_cnt += receivers_cnt
                            subscription_to_receivers[subscription] = receivers

                    # send publish acknowledge before dispatching
                    #
                    if publish.acknowledge:
                        if self._router.is_traced:
                            publish.correlation_is_last = False

                        reply = message.Published(publish.request, publication)
                        reply.correlation_id = publish.correlation_id
                        reply.correlation_uri = publish.topic
                        reply.correlation_is_anchor = False
                        reply.correlation_is_last = total_receivers_cnt == 0
                        self._router.send(session, reply)
                    else:
                        if self._router.is_traced and publish.correlation_is_last is None:
                            if total_receivers_cnt == 0:
                                publish.correlation_is_last = True
                            else:
                                publish.correlation_is_last = False

                    # now actually dispatch the events!
                    # for chunked dispatching, this will be filled with deferreds for each chunk
                    # processed. when the complete list of deferreds is done, that means the
                    # event has been sent out to all applicable receivers
                    all_dl = []

                    if total_receivers_cnt:

                        # list of receivers that should have received the event, but we could not
                        # send the event, since the receiver has disappeared in the meantime
                        vanished_receivers = []

                        for subscription, receivers in subscription_to_receivers.items():

                            self.log.debug(
                                'dispatching for subscription={subscription}',
                                subscription=subscription)

                            # for pattern-based subscriptions, the EVENT must contain
                            # the actual topic being published to
                            #
                            if subscription.match != message.Subscribe.MATCH_EXACT:
                                topic = publish.topic
                            else:
                                topic = None

                            if publish.payload:
                                msg = message.Event(
                                    subscription.id,
                                    publication,
                                    payload=publish.payload,
                                    publisher=publisher,
                                    publisher_authid=publisher_authid,
                                    publisher_authrole=publisher_authrole,
                                    topic=topic,
                                    enc_algo=publish.enc_algo,
                                    enc_key=publish.enc_key,
                                    enc_serializer=publish.enc_serializer)
                            else:
                                msg = message.Event(
                                    subscription.id,
                                    publication,
                                    args=publish.args,
                                    kwargs=publish.kwargs,
                                    publisher=publisher,
                                    publisher_authid=publisher_authid,
                                    publisher_authrole=publisher_authrole,
                                    topic=topic)

                            # if the publish message had a correlation ID, this will also be the
                            # correlation ID of the event message sent out
                            msg.correlation_id = publish.correlation_id
                            msg.correlation_uri = publish.topic
                            msg.correlation_is_anchor = False
                            msg.correlation_is_last = False

                            chunk_size = self._options.event_dispatching_chunk_size

                            if chunk_size and len(receivers) > chunk_size:
                                self.log.debug(
                                    'chunked dispatching to {receivers_size} receivers with chunk_size={chunk_size}',
                                    receivers_size=len(receivers),
                                    chunk_size=chunk_size)
                            else:
                                self.log.debug(
                                    'unchunked dispatching to {receivers_size} receivers',
                                    receivers_size=len(receivers))

                            # note that we're using one code-path for both chunked and unchunked
                            # dispatches; the *first* chunk is always done "synchronously" (before
                            # the first call-later) so "un-chunked mode" really just means we know
                            # we'll be done right now and NOT do a call_later...

                            # a Deferred that fires when all chunks are done
                            all_d = txaio.create_future()
                            all_dl.append(all_d)

                            # all the event messages are the same except for the last one, which
                            # needs to have the "is_last" flag set if we're doing a trace
                            if self._router.is_traced:
                                last_msg = copy.deepcopy(msg)
                                last_msg.correlation_id = msg.correlation_id
                                last_msg.correlation_uri = msg.correlation_uri
                                last_msg.correlation_is_anchor = False
                                last_msg.correlation_is_last = True

                            def _notify_some(receivers):

                                # we do a first pass over the proposed chunk of receivers
                                # because not all of them will have a transport, and if this
                                # will be the last chunk of receivers we need to figure out
                                # which event is last...
                                receivers_this_chunk = []
                                for receiver in receivers[:chunk_size]:
                                    if (me_also or receiver != session
                                        ) and receiver != self._event_store:
                                        # the receiving subscriber session might have no transport,
                                        # or no longer be joined
                                        if receiver._session_id and receiver._transport:
                                            receivers_this_chunk.append(
                                                receiver)
                                        else:
                                            vanished_receivers.append(receiver)

                                receivers = receivers[chunk_size:]

                                # XXX note there's still going to be some edge-cases here .. if
                                # we are NOT the last chunk, but all the next chunk's receivers
                                # (could be only 1 in that chunk!) vanish before we run our next
                                # batch, then a "last" event will never go out ...

                                # we now actually do the deliveries, but now we know which
                                # receiver is the last one
                                if receivers or not self._router.is_traced:
                                    # NOT the last chunk (or we're not traced so don't care)
                                    for receiver in receivers_this_chunk:
                                        self._router.send(receiver, msg)
                                else:
                                    # last chunk, so last receiver gets the different message
                                    for receiver in receivers_this_chunk[:-1]:
                                        self._router.send(receiver, msg)
                                    # we might have zero valid receivers
                                    if receivers_this_chunk:
                                        self._router.send(
                                            receivers_this_chunk[-1], last_msg)

                                if receivers:
                                    # still more to do ..
                                    return txaio.call_later(
                                        0, _notify_some, receivers)
                                else:
                                    # all done! resolve all_d, which represents all receivers
                                    # to a single subscription matching the event
                                    txaio.resolve(all_d, None)

                            _notify_some(list(receivers))

                    return txaio.gather(all_dl)
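
A note on the pattern above: the dispatch loop uses one code path for chunked and
unchunked delivery. The first chunk is sent synchronously, each further chunk is
scheduled with txaio.call_later(0, ...), and one future per matching subscription is
resolved once its last chunk has gone out; txaio.gather() then combines those
per-subscription futures into a single "publication dispatched" future. Below is a
minimal standalone sketch of that pattern; the send() callable and the chunk size are
placeholders for illustration, not Crossbar.io APIs.

import txaio
txaio.use_twisted()  # txaio.use_asyncio() works the same way

def dispatch_chunked(receivers, send, chunk_size=10):
    # fires once every chunk for this subscription has been delivered
    done = txaio.create_future()

    def _notify_some(remaining):
        for receiver in remaining[:chunk_size]:
            send(receiver)
        remaining = remaining[chunk_size:]
        if remaining:
            # yield to the event loop before delivering the next chunk
            txaio.call_later(0, _notify_some, remaining)
        else:
            txaio.resolve(done, None)

    _notify_some(list(receivers))
    return done

# one future per subscription, combined into a single "all dispatched" future
all_done = txaio.gather([dispatch_chunked(range(25), print, chunk_size=10)],
                        consume_exceptions=True)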
Example #30
def _run(reactor, components, done_callback):
    """
    Internal helper. Use "run" method from autobahn.twisted.wamp or
    autobahn.asyncio.wamp

    This is the generic parts of the run() method so that there's very
    little code in the twisted/asyncio specific run() methods.

    This is called by react() (or run_until_complete()) so any errors
    coming out of this should be handled properly. Logging will
    already be started.
    """
    # let user pass a single component to run, too
    # XXX probably want IComponent? only demand it, here and below?
    if isinstance(components, Component):
        components = [components]

    if type(components) != list:
        raise ValueError(
            '"components" must be a list of Component objects - encountered'
            ' {0}'.format(type(components))
        )

    for c in components:
        if not isinstance(c, Component):
            raise ValueError(
                '"components" must be a list of Component objects - encountered'
                ' item of type {0}'.format(type(c))
            )

    # validation complete; proceed with startup
    log = txaio.make_logger()

    def component_success(comp, arg):
        log.debug("Component '{c}' successfully completed: {arg}", c=comp, arg=arg)
        return arg

    def component_failure(comp, f):
        log.error("Component '{c}' error: {msg}", c=comp, msg=txaio.failure_message(f))
        log.debug("Component error: {tb}", tb=txaio.failure_format_traceback(f))
        # double-check: is a component failure still fatal to the
        # startup process (because we passed consume_exceptions=False
        # to gather() below)?
        return None

    def component_start(comp):
        # the future from start() errbacks if we fail, or callbacks
        # when the component is considered "done" (so maybe never)
        d = txaio.as_future(comp.start, reactor)
        txaio.add_callbacks(
            d,
            partial(component_success, comp),
            partial(component_failure, comp),
        )
        return d

    # note that these are started in parallel -- maybe we want to add
    # a "connected" signal to components so we could start them in the
    # order they're given to run() as "a" solution to dependencies.
    dl = []
    for comp in components:
        d = component_start(comp)
        dl.append(d)
    done_d = txaio.gather(dl, consume_exceptions=False)

    def all_done(arg):
        log.debug("All components ended; stopping reactor")
        done_callback(reactor, arg)

    txaio.add_callbacks(done_d, all_done, all_done)
    return done_d
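
For orientation, here is a hedged sketch (not the actual autobahn.twisted.wamp code)
of how a Twisted-flavoured run() could drive the generic _run() above: react() starts
the reactor, calls _run() with it, and shuts the reactor down once the deferred
returned by _run() fires, i.e. once every component has ended.

from twisted.internet.task import react

def run(components):
    def done_callback(reactor, arg):
        # react() already stops the reactor when the deferred returned by
        # _run() fires, so this sketch has nothing left to do here
        pass

    # react() calls the given function with the running reactor and waits for
    # the returned deferred before shutting down
    react(lambda reactor: _run(reactor, components, done_callback))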
Example #31
            def on_authorize_success(authorization):

                # the call to authorize the action _itself_ succeeded. now go on depending on whether
                # the action was actually authorized or not ..
                #
                if not authorization[u'allow']:

                    if publish.acknowledge:
                        reply = message.Error(
                            message.Publish.MESSAGE_TYPE, publish.request,
                            ApplicationError.NOT_AUTHORIZED, [
                                u"session not authorized to publish to topic '{0}'"
                                .format(publish.topic)
                            ])
                        self._router.send(session, reply)

                else:

                    # new ID for the publication
                    #
                    publication = util.id()

                    # send publish acknowledge immediately when requested
                    #
                    if publish.acknowledge:
                        reply = message.Published(publish.request, publication)
                        self._router.send(session, reply)

                    # publisher disclosure
                    #
                    if authorization[u'disclose']:
                        disclose = True
                    elif (publish.topic.startswith(u"wamp.")
                          or publish.topic.startswith(u"crossbar.")):
                        disclose = True
                    else:
                        disclose = False

                    if disclose:
                        publisher = session._session_id
                        publisher_authid = session._authid
                        publisher_authrole = session._authrole
                    else:
                        publisher = None
                        publisher_authid = None
                        publisher_authrole = None

                    # skip publisher
                    #
                    if publish.exclude_me is None or publish.exclude_me:
                        me_also = False
                    else:
                        me_also = True

                    # persist event (this is done only once, regardless of the number of subscriptions
                    # the event matches on)
                    #
                    if store_event:
                        self._event_store.store_event(session._session_id,
                                                      publication,
                                                      publish.topic,
                                                      publish.args,
                                                      publish.kwargs)

                    # retain event on the topic
                    #
                    if retain_event:
                        retained_event = RetainedEvent(publish, publisher,
                                                       publisher_authid,
                                                       publisher_authrole)

                        observation = self._subscription_map.get_observation(
                            publish.topic)

                        if not observation:
                            # No observation yet, let's make a new one
                            observation = self._subscription_map.create_observation(
                                publish.topic, extra=SubscriptionExtra())
                        else:
                            # this can happen if event-history is
                            # enabled on the topic: the event-store
                            # creates an observation before any client
                            # could possibly hit the code above
                            if observation.extra is None:
                                observation.extra = SubscriptionExtra()
                            elif not isinstance(observation.extra,
                                                SubscriptionExtra):
                                raise Exception(
                                    "incorrect 'extra' for '{}'".format(
                                        publish.topic))

                        if observation.extra.retained_events:
                            if not publish.eligible and not publish.exclude:
                                observation.extra.retained_events = [
                                    retained_event
                                ]
                            else:
                                observation.extra.retained_events.append(
                                    retained_event)
                        else:
                            observation.extra.retained_events = [
                                retained_event
                            ]

                    all_dl = []

                    # iterate over all subscriptions ..
                    #
                    for subscription in subscriptions:

                        self.log.debug(
                            'dispatching for subscription={subscription}',
                            subscription=subscription)

                        # persist event history, but only if the event store actually observes this individual subscription
                        #
                        if store_event and self._event_store in subscription.observers:
                            self._event_store.store_event_history(
                                publication, subscription.id)

                        # initial list of receivers are all subscribers on a subscription ..
                        #
                        receivers = subscription.observers
                        receivers = self._filter_publish_receivers(
                            receivers, publish)

                        # if receivers is non-empty, dispatch event ..
                        #
                        receivers_cnt = len(receivers) - (1 if self in receivers else 0)
                        if receivers_cnt:

                            # for pattern-based subscriptions, the EVENT must contain
                            # the actual topic being published to
                            #
                            if subscription.match != message.Subscribe.MATCH_EXACT:
                                topic = publish.topic
                            else:
                                topic = None

                            if publish.payload:
                                msg = message.Event(
                                    subscription.id,
                                    publication,
                                    payload=publish.payload,
                                    publisher=publisher,
                                    publisher_authid=publisher_authid,
                                    publisher_authrole=publisher_authrole,
                                    topic=topic,
                                    enc_algo=publish.enc_algo,
                                    enc_key=publish.enc_key,
                                    enc_serializer=publish.enc_serializer)
                            else:
                                msg = message.Event(
                                    subscription.id,
                                    publication,
                                    args=publish.args,
                                    kwargs=publish.kwargs,
                                    publisher=publisher,
                                    publisher_authid=publisher_authid,
                                    publisher_authrole=publisher_authrole,
                                    topic=topic)

                            chunk_size = self._options.event_dispatching_chunk_size

                            if chunk_size:
                                self.log.debug(
                                    'chunked dispatching to {receivers_size} receivers with chunk_size={chunk_size}',
                                    receivers_size=len(receivers),
                                    chunk_size=chunk_size)

                                # a Deferred that fires when all chunks are done
                                all_d = txaio.create_future()
                                all_dl.append(all_d)

                                def _notify_some(receivers):
                                    for receiver in receivers[:chunk_size]:
                                        if (
                                                me_also or receiver != session
                                        ) and receiver != self._event_store:
                                            # the receiving subscriber session
                                            # might have no transport, or no
                                            # longer be joined
                                            if receiver._session_id and receiver._transport:
                                                self._router.send(
                                                    receiver, msg)
                                    receivers = receivers[chunk_size:]
                                    if len(receivers) > 0:
                                        # still more to do ..
                                        return txaio.call_later(
                                            0, _notify_some, receivers)
                                    else:
                                        # all done! resolve all_d, which represents all receivers
                                        # to a single subscription matching the event
                                        txaio.resolve(all_d, None)

                                _notify_some(list(receivers))
                            else:
                                self.log.debug(
                                    'unchunked dispatching to {receivers_size} receivers',
                                    receivers_size=len(receivers))

                                for receiver in receivers:
                                    if (me_also or receiver != session
                                        ) and receiver != self._event_store:
                                        # the receiving subscriber session
                                        # might have no transport, or no
                                        # longer be joined
                                        if receiver._session_id and receiver._transport:
                                            self._router.send(receiver, msg)

                    return txaio.gather(all_dl)
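
The broker logic above reacts to options set by the publishing client: acknowledge
triggers the Published reply path, exclude_me=False turns on me_also so the publisher
receives its own event, and a retain flag asks the broker to keep the event as a
retained event. A hedged client-side sketch of the counterpart, assuming an
asyncio-flavoured ApplicationSession and an illustrative topic:

from autobahn.wamp.types import PublishOptions

async def publish_example(session):
    # acknowledge=True  -> broker replies with a Published message
    # exclude_me=False  -> the publisher itself also receives the event
    # retain=True       -> ask the broker to keep this as a retained event
    pub = await session.publish(
        u'com.example.topic',
        'hello',
        options=PublishOptions(acknowledge=True, exclude_me=False, retain=True))
    print('published, publication id:', pub.id)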