Example #1
    def __init__(self):
        self.hub = MokshaHub()
        self.log = log

        # If the stream specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine for that app, along with a configured DBSession
        app = getattr(self, 'app', None)
        self.engine = self.DBSession = None
        if app:
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()
Example #2
    def __init__(self):
        self.hub = MokshaHub()
        self.log = log
        if self.hub.amqp_broker and not self.hub.stomp_broker:
            for topic in listify(self.topic):
                log.debug('Subscribing to consumer topic %s' % topic)

                if isinstance(self.hub, AMQPLibHub):
                    # AMQPLibHub specific
                    queue_name = str(uuid.uuid4())
                    self.hub.queue_declare(queue=queue_name, exclusive=True)
                    self.hub.exchange_bind(queue_name, binding_key=topic)
                    if self.jsonify:
                        self.hub.queue_subscribe(queue_name,
                                                 self._consume_json)
                    else:
                        self.hub.queue_subscribe(queue_name, self._consume)
                else:
                    # Assume we're using Qpid then.
                    server_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.queue_declare(queue=server_queue_name,
                                           exclusive=True)
                    self.hub.exchange_bind(server_queue_name,
                                           binding_key=topic)
                    local_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.local_queue = self.hub.session.incoming(
                        local_queue_name)
                    self.hub.message_subscribe(queue=server_queue_name,
                                               destination=local_queue_name)
                    self.hub.local_queue.start()
                    if self.jsonify:
                        self.hub.local_queue.listen(self._consume_json)
                    else:
                        self.hub.local_queue.listen(self._consume)

        # If the consumer specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine, along with a configured DBSession
        app = getattr(self, 'app', None)
        self.engine = self.DBSession = None
        if app:
            log.debug("Setting up individual engine for consumer")
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()
Example #3
class DataStream(object):
    """ The parent DataStream class. """
    def __init__(self):
        self.hub = MokshaHub()
        self.log = log

        # If the stream specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine for that app, along with a configured DBSession
        app = getattr(self, 'app', None)
        self.engine = self.DBSession = None
        if app:
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()

    def send_message(self, topic, message):
        try:
            self.hub.send_message(topic, message)
        except Exception as e:
            log.error('Cannot send message: %s' % e)
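
A concrete stream subclasses DataStream, optionally declares an `app` attribute so that `__init__` wires up a per-stream SQLAlchemy engine and session, and publishes with `send_message()`. A minimal sketch follows; the import path, the 'myapp' identifier, and the topic are assumptions, not taken from the examples.

# Hypothetical subclass of the DataStream shown above; 'myapp' and the
# topic are illustrative.
from moksha.api.streams import DataStream   # assumed import path

class WidgetStream(DataStream):
    # Declaring 'app' makes DataStream.__init__ call create_app_engine('myapp')
    # and bind self.DBSession to the resulting engine.
    app = 'myapp'

    def emit_heartbeat(self):
        # send_message() delegates to self.hub and logs any failure.
        self.send_message('myapp.heartbeat', {'alive': True})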
Example #4
    def __init__(self):
        self.hub = MokshaHub()
        self.log = log

        # If the stream specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine for that app, along with a configured DBSession
        app = getattr(self, "app", None)
        self.engine = self.DBSession = None
        if app:
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()
Example #5
class DataStream(object):
    """ The parent DataStream class. """

    def __init__(self):
        self.hub = MokshaHub()
        self.log = log

        # If the stream specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine for that app, along with a configured DBSession
        app = getattr(self, "app", None)
        self.engine = self.DBSession = None
        if app:
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()

    def send_message(self, topic, message):
        try:
            self.hub.send_message(topic, message)
        except Exception as e:
            log.error("Cannot send message: %s" % e)
Example #6
class TestHub(unittest.TestCase):

    def _setUp(self):
        def kernel(config):
            self.hub = MokshaHub(config=config)
            self.topic = str(uuid4())

        for __setup, name in testutils.make_setup_functions(kernel):
            yield __setup, name

    def _tearDown(self):
        self.hub.close()

    @testutils.crosstest
    def test_hub_creation(self):
        """ Test that we can simply create the hub. """
        assert_true(self.hub)
        eq_(self.hub.topics, {})

    @testutils.crosstest
    def test_hub_send_recv(self):
        "Test that we can send a message and receive it."

        messages_received = []

        def callback(json):
            messages_received.append(json.body[1:-1])

        self.hub.subscribe(topic=self.topic, callback=callback)
        sleep(sleep_duration)

        self.hub.send_message(topic=self.topic, message=secret)

        simulate_reactor(sleep_duration)
        sleep(sleep_duration)

        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_hub_no_subscription(self):
        "Test that we don't receive messages we're not subscribed for."

        messages_received = []

        def callback(json):
            messages_received.append(json.body[1:-1])

        self.hub.send_message(topic=self.topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [])
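
Outside the test harness, the same subscribe/send/close cycle looks roughly like the sketch below; the import path, endpoint values, and topic are assumptions, and the callback's `json.body` arrives as a JSON-encoded string, which is why the tests strip the surrounding quotes with `json.body[1:-1]`.

# Rough sketch of the hub API exercised above; config values, topic, and
# import path are assumptions, not taken from the tests.
from time import sleep
from moksha.hub.hub import MokshaHub   # assumed import path

config = {
    'zmq_enabled': True,                          # same keys as the fake test config
    'zmq_subscribe_endpoints': 'tcp://127.0.0.1:6543',
    'zmq_published_endpoints': 'tcp://127.0.0.1:6543',
}

hub = MokshaHub(config=config)

def callback(json):
    # The body arrives as a JSON-encoded string; strip the surrounding quotes.
    print('received: %s' % json.body[1:-1])

hub.subscribe(topic='demo.topic', callback=callback)
hub.send_message(topic='demo.topic', message='hello world')
sleep(1)   # give the message time to arrive
hub.close()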
Example #7
class TestHub(unittest.TestCase):
    def _setUp(self):
        def kernel(config):
            self.hub = MokshaHub(config=config)
            self.topic = str(uuid4())

        for __setup, name in testutils.make_setup_functions(kernel):
            yield __setup, name

    def _tearDown(self):
        self.hub.close()

    @testutils.crosstest
    def test_hub_creation(self):
        """ Test that we can simply create the hub. """
        assert_true(self.hub)
        eq_(self.hub.topics, {})

    @testutils.crosstest
    def test_hub_send_recv(self):
        "Test that we can send a message and receive it."

        messages_received = []

        def callback(json):
            messages_received.append(json.body[1:-1])

        self.hub.subscribe(topic=self.topic, callback=callback)
        sleep(sleep_duration)

        self.hub.send_message(topic=self.topic, message=secret)

        simulate_reactor(sleep_duration)
        sleep(sleep_duration)

        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_hub_no_subscription(self):
        "Test that we don't receive messages we're not subscribed for."

        messages_received = []

        def callback(json):
            messages_received.append(json.body[1:-1])

        self.hub.send_message(topic=self.topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [])
Example #8
    def __init__(self):
        self.hub = MokshaHub()
        self.log = log
        if self.hub.amqp_broker and not self.hub.stomp_broker:
            for topic in listify(self.topic):
                log.debug('Subscribing to consumer topic %s' % topic)

                if isinstance(self.hub, AMQPLibHub):
                    # AMQPLibHub specific 
                    queue_name = str(uuid.uuid4())
                    self.hub.queue_declare(queue=queue_name, exclusive=True)
                    self.hub.exchange_bind(queue_name, binding_key=topic)
                    if self.jsonify:
                        self.hub.queue_subscribe(queue_name, self._consume_json)
                    else:
                        self.hub.queue_subscribe(queue_name, self._consume)
                else:
                    # Assume we're using Qpid then.
                    server_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.queue_declare(queue=server_queue_name, exclusive=True)
                    self.hub.exchange_bind(server_queue_name, binding_key=topic)
                    local_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.local_queue = self.hub.session.incoming(local_queue_name)
                    self.hub.message_subscribe(queue=server_queue_name,
                                           destination=local_queue_name)
                    self.hub.local_queue.start()
                    if self.jsonify:
                        self.hub.local_queue.listen(self._consume_json)
                    else:
                        self.hub.local_queue.listen(self._consume)

        # If the consumer specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine, along with a configured DBSession
        app = getattr(self, 'app', None)
        self.engine = self.DBSession = None
        if app:
            log.debug("Setting up individual engine for consumer")
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()
Example #9
class TestConsumer:
    def _setUp(self):
        def kernel(config):
            self.hub = MokshaHub(config=config)
            self.a_topic = a_topic = str(uuid4())

        for __setup, name in testutils.make_setup_functions(kernel):
            yield __setup, name

    def _tearDown(self):
        self.hub.close()

    def fake_register_consumer(self, cons):
        """ Fake register a consumer, not by entry-point like usual.

        Normally, consumers are identified by the hub by way of entry-points.
        Ideally, this test would register the TestConsumer on the
        moksha.consumers entry point, and the hub would pick it up.
        I'm not sure how to do that, so we're going to fake it and manually
        add this consumer to the list of consumers of which the Hub is aware.
        """
        self.hub.topics[cons.topic] = self.hub.topics.get(cons.topic, [])
        self.hub.topics[cons.topic].append(cons(self.hub).consume)
        sleep(sleep_duration)

    @testutils.crosstest
    def test_abstract(self):
        """ Ensure that conumsers with no consume method raise exceptions. """
        class StillAbstractConsumer(moksha.hub.api.consumer.Consumer):
            pass

        try:
            c = StillAbstractConsumer(self.hub)
            c.consume("foo")
            assert (False)
        except NotImplementedError as e:
            pass

    @testutils.crosstest
    def test_receive_without_json(self):
        """ Try sending/receiving messages without jsonifying. """

        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            jsonify = False
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message)

        self.fake_register_consumer(TestConsumer)

        # Now, send a generic message to that topic, and see if we get one.
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(len(messages_received), 1)

    @testutils.crosstest
    def test_receive_str(self):
        """ Send a message  Consume and verify it. """

        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer)

        # Now, send a generic message to that topic, and see if the consumer
        # processed it.
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_receive_str_double(self):
        """ Send a message.  Have two consumers consume it. """

        messages_received = []

        class TestConsumer1(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        class TestConsumer2(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer1)
        self.fake_register_consumer(TestConsumer2)

        # Now, send a generic message to that topic, and see if the consumer
        # processed it.
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret, secret])

    @testutils.crosstest
    def test_receive_str_near_miss(self):
        """ Send a message.  Three consumers.  Only one receives. """

        messages_received = []

        class BaseConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        class Consumer1(BaseConsumer):
            pass

        class Consumer2(BaseConsumer):
            topic = BaseConsumer.topic[:-1]

        class Consumer3(BaseConsumer):
            topic = BaseConsumer.topic + "X"

        self.fake_register_consumer(Consumer1)
        self.fake_register_consumer(Consumer2)
        self.fake_register_consumer(Consumer3)

        # Now, send a generic message to that topic, and see if Consumer1
        # processed it but that Consumer2 and Consumer3 didn't
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_receive_dict(self):
        """ Send a dict with a message.  Consume, extract, and verify it. """

        obj = {'secret': secret}
        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                obj = message['body']
                messages_received.append(obj['secret'])

        self.fake_register_consumer(TestConsumer)

        # Now, send a generic message to that topic, and see if the consumer
        # processed it.
        self.hub.send_message(topic=self.a_topic, message=obj)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_receive_n_messages(self):
        """ Send `n` messages, receive `n` messages. """

        n_messages = 10
        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer)

        # Now, send n messages and make sure that n messages were consumed.
        for i in range(n_messages):
            self.hub.send_message(topic=self.a_topic, message=secret)

        simulate_reactor(sleep_duration)
        sleep(sleep_duration)

        eq_(len(messages_received), n_messages)

    @testutils.crosstest
    def test_receive_n_dicts(self):
        """ Send `n` dicts, receive `n` dicts. """

        n_messages = 10
        obj = {'secret': secret}
        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer)

        # Now, send n objects and make sure that n objects were consumed.
        for i in range(n_messages):
            self.hub.send_message(topic=self.a_topic, message=obj)

        simulate_reactor(sleep_duration)
        sleep(sleep_duration)

        eq_(len(messages_received), n_messages)

    @testutils.crosstest
    def test_dynamic_topic(self):
        """ Test that a topic can be set at runtime (not import time) """
        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = "bad topic"

            def __init__(self, *args, **kw):
                super(TestConsumer, self).__init__(*args, **kw)
                self.topic = "good topic"

            def consume(self, message):
                pass

        # Just a little fake config.
        config = dict(
            zmq_enabled=True,
            zmq_subscribe_endpoints='',
            zmq_published_endpoints='',
        )
        central = CentralMokshaHub(config, [TestConsumer], [])

        # Guarantee that "bad topic" is not in the topics list.
        eq_(list(central.topics.keys()), ["good topic"])

    @testutils.crosstest
    def test_open_and_close(self):
        """ Test that a central hub with a consumer can be closed.. ;) """
        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = "whatever"

            def consume(self, message):
                pass

        # Just a little fake config.
        config = dict(
            zmq_enabled=True,
            zmq_subscribe_endpoints='',
            zmq_published_endpoints='',
        )
        central = CentralMokshaHub(config, [TestConsumer], [])
        central.close()
Example #10
def kernel(config):
    self.hub = MokshaHub(config=config)
    self.topic = str(uuid4())
Example #11
class TestProducer:
    def _setUp(self):
        def kernel(config):
            self.hub = MokshaHub(config=config)
            self.a_topic = a_topic = str(uuid4())

        for __setup, name in testutils.make_setup_functions(kernel):
            yield __setup, name

    def _tearDown(self):
        self.hub.close()

    def fake_register_producer(self, prod):
        """ Fake register a producer, not by entry-point like usual.

        Registering producers is a little easier than registering consumers.
        The MokshaHub doesn't even keep track of the .poll method callbacks.
        We simply instantiate the producer (and it registers itself with the
        hub).
        """
        return prod(self.hub)

    @testutils.crosstest
    def test_produce_ten_strs(self):
        """ Produce ten-ish strings. """

        messages_received = []

        def callback(json):
            messages_received.append(json.body[1:-1])

        self.hub.subscribe(topic=self.a_topic, callback=callback)

        class TestProducer(moksha.hub.api.producer.PollingProducer):
            topic = self.a_topic
            frequency = sleep_duration / 10.9

            def poll(self):
                self.send_message(self.topic, secret)

        # Ready?
        self.fake_register_producer(TestProducer)

        # Go!
        simulate_reactor(duration=sleep_duration)

        # Ok.

        # We also need to sleep for `sleep_duration` seconds after the reactor
        # has stopped, not because the messages still need to get where they're
        # going, but so that the `messages_received` object can sync between
        # python threads.

        # It has already been updated in callback(json) at this point, but it
        # hasn't yet propagated to this context.
        sleep(sleep_duration)

        # Finally, the check.  Did we get our ten messages? (or about as many)
        assert (len(messages_received) > 8 and len(messages_received) < 12)

    @testutils.crosstest
    def test_idempotence(self):
        """ Test that running the same test twice still works. """
        return self.test_produce_ten_strs()
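
The `TestProducer` above captures the whole `PollingProducer` contract: declare a `topic` and a `frequency`, and implement `poll()`, which is called roughly every `frequency` seconds once the producer has been registered with the hub. A standalone sketch with hypothetical values:

# Sketch of a polling producer modeled on the test above; the topic and
# frequency are made up.
from moksha.hub.api.producer import PollingProducer  # path used in the test above

class HeartbeatProducer(PollingProducer):
    topic = 'myapp.heartbeat'   # hypothetical topic
    frequency = 5               # seconds between poll() calls

    def poll(self):
        # Called periodically by the hub; publish to our topic.
        self.send_message(self.topic, {'alive': True})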
Example #12
class Consumer(object):
    """ A message consumer """
    topic = None

    # Automatically decode JSON data
    jsonify = True

    def __init__(self):
        self.hub = MokshaHub()
        self.log = log
        if self.hub.amqp_broker and not self.hub.stomp_broker:
            for topic in listify(self.topic):
                log.debug('Subscribing to consumer topic %s' % topic)

                if isinstance(self.hub, AMQPLibHub):
                    # AMQPLibHub specific 
                    queue_name = str(uuid.uuid4())
                    self.hub.queue_declare(queue=queue_name, exclusive=True)
                    self.hub.exchange_bind(queue_name, binding_key=topic)
                    if self.jsonify:
                        self.hub.queue_subscribe(queue_name, self._consume_json)
                    else:
                        self.hub.queue_subscribe(queue_name, self._consume)
                else:
                    # Assume we're using Qpid then.
                    server_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.queue_declare(queue=server_queue_name, exclusive=True)
                    self.hub.exchange_bind(server_queue_name, binding_key=topic)
                    local_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.local_queue = self.hub.session.incoming(local_queue_name)
                    self.hub.message_subscribe(queue=server_queue_name,
                                           destination=local_queue_name)
                    self.hub.local_queue.start()
                    if self.jsonify:
                        self.hub.local_queue.listen(self._consume_json)
                    else:
                        self.hub.local_queue.listen(self._consume)

        # If the consumer specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine, along with a configured DBSession
        app = getattr(self, 'app', None)
        self.engine = self.DBSession = None
        if app:
            log.debug("Setting up individual engine for consumer")
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()

    def _consume_json(self, message):
        """ Convert our AMQP messages into a consistent dictionary format.

        This method exists because our STOMP & AMQP message brokers consume
        messages in different formats.  This causes our messaging abstraction
        to leak into the consumers themselves.

        :Note: We do not pass the message headers to the consumer (in this AMQP consumer)
        because the current AMQP.js bindings do not allow the client to change them.
        Thus, we need to throw any topic/queue details into the JSON body itself.
        """
        try:
            body = json.decode(message.body)
        except:
            log.debug("Unable to decode message body to JSON: %r" % message.body)
            body = message.body
        topic = None
        try:
            topic = message.headers[0].routing_key
        except TypeError:
            # We didn't get a JSON dictionary
            pass
        except AttributeError:
            # We didn't get headers or a routing key?
            pass

        self.consume({'body': body, 'topic': topic})

    def _consume(self, message):
        self.consume(message)

    def consume(self, message):
        raise NotImplementedError

    def send_message(self, topic, message):
        try:
            self.hub.send_message(topic, message, jsonify=self.jsonify)
        except Exception as e:
            log.error('Cannot send message: %s' % e)
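
To build on the `Consumer` base class, a subclass declares a `topic` (and optionally `jsonify`) and implements `consume()`, which the base class otherwise leaves raising `NotImplementedError`. A minimal sketch with a hypothetical topic:

# Hypothetical consumer subclass; the topic and log message are illustrative.
from moksha.hub.api.consumer import Consumer  # path used in the tests above

class WidgetConsumer(Consumer):
    topic = 'myapp.widgets'   # hypothetical topic
    jsonify = True            # let _consume_json decode the body first

    def consume(self, message):
        # _consume_json delivers a dict of the form {'body': ..., 'topic': ...}
        self.log.info('got %r on topic %s' % (message['body'], message['topic']))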
Example #13
class Consumer(object):
    """ A message consumer """
    topic = None

    # Automatically decode JSON data
    jsonify = True

    def __init__(self):
        self.hub = MokshaHub()
        self.log = log
        if self.hub.amqp_broker and not self.hub.stomp_broker:
            for topic in listify(self.topic):
                log.debug('Subscribing to consumer topic %s' % topic)

                if isinstance(self.hub, AMQPLibHub):
                    # AMQPLibHub specific
                    queue_name = str(uuid.uuid4())
                    self.hub.queue_declare(queue=queue_name, exclusive=True)
                    self.hub.exchange_bind(queue_name, binding_key=topic)
                    if self.jsonify:
                        self.hub.queue_subscribe(queue_name,
                                                 self._consume_json)
                    else:
                        self.hub.queue_subscribe(queue_name, self._consume)
                else:
                    # Assume we're using Qpid then.
                    server_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.queue_declare(queue=server_queue_name,
                                           exclusive=True)
                    self.hub.exchange_bind(server_queue_name,
                                           binding_key=topic)
                    local_queue_name = 'moksha_consumer_' + self.hub.session.name
                    self.hub.local_queue = self.hub.session.incoming(
                        local_queue_name)
                    self.hub.message_subscribe(queue=server_queue_name,
                                               destination=local_queue_name)
                    self.hub.local_queue.start()
                    if self.jsonify:
                        self.hub.local_queue.listen(self._consume_json)
                    else:
                        self.hub.local_queue.listen(self._consume)

        # If the consumer specifies an 'app', then setup `self.engine` to
        # be a SQLAlchemy engine, along with a configured DBSession
        app = getattr(self, 'app', None)
        self.engine = self.DBSession = None
        if app:
            log.debug("Setting up individual engine for consumer")
            self.engine = create_app_engine(app)
            self.DBSession = sessionmaker(bind=self.engine)()

    def _consume_json(self, message):
        """ Convert our AMQP messages into a consistent dictionary format.

        This method exists because our STOMP & AMQP message brokers consume
        messages in different formats.  This causes our messaging abstraction
        to leak into the consumers themselves.

        :Note: We do not pass the message headers to the consumer (in this AMQP consumer)
        because the current AMQP.js bindings do not allow the client to change them.
        Thus, we need to throw any topic/queue details into the JSON body itself.
        """
        try:
            body = json.decode(message.body)
        except:
            log.debug("Unable to decode message body to JSON: %r" %
                      message.body)
            body = message.body
        topic = None
        try:
            topic = message.headers[0].routing_key
        except TypeError:
            # We didn't get a JSON dictionary
            pass
        except AttributeError:
            # We didn't get headers or a routing key?
            pass

        self.consume({'body': body, 'topic': topic})

    def _consume(self, message):
        self.consume(message)

    def consume(self, message):
        raise NotImplementedError

    def send_message(self, topic, message):
        try:
            self.hub.send_message(topic, message, jsonify=self.jsonify)
        except Exception as e:
            log.error('Cannot send message: %s' % e)
Example #14
def kernel(config):
    self.hub = MokshaHub(config=config)
    self.topic = str(uuid4())
Example #15
class TestProducer:
    def _setUp(self):
        def kernel(config):
            self.hub = MokshaHub(config=config)
            self.a_topic = a_topic = str(uuid4())

        for __setup, name in testutils.make_setup_functions(kernel):
            yield __setup, name

    def _tearDown(self):
        self.hub.close()

    def fake_register_producer(self, prod):
        """ Fake register a producer, not by entry-point like usual.

        Registering producers is a little easier than registering consumers.
        The MokshaHub doesn't even keep track of the .poll method callbacks.
        We simply instantiate the producer (and it registers itself with the
        hub).
        """
        return prod(self.hub)

    @testutils.crosstest
    def test_produce_ten_strs(self):
        """ Produce ten-ish strings. """

        messages_received = []

        def callback(json):
            messages_received.append(json.body[1:-1])

        self.hub.subscribe(topic=self.a_topic, callback=callback)

        class TestProducer(moksha.hub.api.producer.PollingProducer):
            topic = self.a_topic
            frequency = sleep_duration / 10.9

            def poll(self):
                self.send_message(self.topic, secret)

        # Ready?
        self.fake_register_producer(TestProducer)

        # Go!
        simulate_reactor(duration=sleep_duration)

        # Ok.

        # We also need to sleep for `sleep_duration` seconds after the reactor
        # has stopped, not because the messages still need to get where they're
        # going, but so that the `messages_received` object can sync between
        # python threads.

        # It has already been updated in callback(json) at this point, but it
        # hasn't yet propagated to this context.
        sleep(sleep_duration)

        # Finally, the check.  Did we get our ten messages? (or about as many)
        assert (len(messages_received) > 8 and len(messages_received) < 12)

    @testutils.crosstest
    def test_idempotence(self):
        """ Test that running the same test twice still works. """
        return self.test_produce_ten_strs()
Example #16
class TestConsumer:

    def _setUp(self):
        def kernel(config):
            self.hub = MokshaHub(config=config)
            self.a_topic = a_topic = str(uuid4())

        for __setup, name in testutils.make_setup_functions(kernel):
            yield __setup, name

    def _tearDown(self):
        self.hub.close()

    def fake_register_consumer(self, cons):
        """ Fake register a consumer, not by entry-point like usual.

        Normally, consumers are identified by the hub by way of entry-points.
        Ideally, this test would register the TestConsumer on the
        moksha.consumers entry point, and the hub would pick it up.
        I'm not sure how to do that, so we're going to fake it and manually
        add this consumer to the list of consumers of which the Hub is aware.
        """
        self.hub.topics[cons.topic] = self.hub.topics.get(cons.topic, [])
        self.hub.topics[cons.topic].append(cons(self.hub).consume)
        sleep(sleep_duration)

    @testutils.crosstest
    def test_abstract(self):
        """ Ensure that conumsers with no consume method raise exceptions. """

        class StillAbstractConsumer(moksha.hub.api.consumer.Consumer):
            pass

        try:
            c = StillAbstractConsumer(self.hub)
            c.consume("foo")
            assert(False)
        except NotImplementedError as e:
            pass

    @testutils.crosstest
    def test_receive_without_json(self):
        """ Try sending/receiving messages without jsonifying. """

        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            jsonify = False
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message)

        self.fake_register_consumer(TestConsumer)

        # Now, send a generic message to that topic, and see if we get one.
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(len(messages_received), 1)

    @testutils.crosstest
    def test_receive_str(self):
        """ Send a message  Consume and verify it. """

        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer)

        # Now, send a generic message to that topic, and see if the consumer
        # processed it.
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_receive_str_double(self):
        """ Send a message.  Have two consumers consume it. """

        messages_received = []

        class TestConsumer1(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        class TestConsumer2(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer1)
        self.fake_register_consumer(TestConsumer2)

        # Now, send a generic message to that topic, and see if the consumer
        # processed it.
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret, secret])

    @testutils.crosstest
    def test_receive_str_near_miss(self):
        """ Send a message.  Three consumers.  Only one receives. """

        messages_received = []

        class BaseConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        class Consumer1(BaseConsumer):
            pass

        class Consumer2(BaseConsumer):
            topic = BaseConsumer.topic[:-1]

        class Consumer3(BaseConsumer):
            topic = BaseConsumer.topic + "X"

        self.fake_register_consumer(Consumer1)
        self.fake_register_consumer(Consumer2)
        self.fake_register_consumer(Consumer3)

        # Now, send a generic message to that topic, and see if Consumer1
        # processed it but that Consumer2 and Consumer3 didn't
        self.hub.send_message(topic=self.a_topic, message=secret)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_receive_dict(self):
        """ Send a dict with a message.  Consume, extract, and verify it. """

        obj = {'secret': secret}
        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                obj = message['body']
                messages_received.append(obj['secret'])

        self.fake_register_consumer(TestConsumer)

        # Now, send a generic message to that topic, and see if the consumer
        # processed it.
        self.hub.send_message(topic=self.a_topic, message=obj)
        simulate_reactor(sleep_duration)
        sleep(sleep_duration)
        eq_(messages_received, [secret])

    @testutils.crosstest
    def test_receive_n_messages(self):
        """ Send `n` messages, receive `n` messages. """

        n_messages = 10
        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer)

        # Now, send n messages and make sure that n messages were consumed.
        for i in range(n_messages):
            self.hub.send_message(topic=self.a_topic, message=secret)

        simulate_reactor(sleep_duration)
        sleep(sleep_duration)

        eq_(len(messages_received), n_messages)

    @testutils.crosstest
    def test_receive_n_dicts(self):
        """ Send `n` dicts, receive `n` dicts. """

        n_messages = 10
        obj = {'secret': secret}
        messages_received = []

        class TestConsumer(moksha.hub.api.consumer.Consumer):
            topic = self.a_topic

            def consume(self, message):
                messages_received.append(message['body'])

        self.fake_register_consumer(TestConsumer)

        # Now, send n objects and make sure that n objects were consumed.
        for i in range(n_messages):
            self.hub.send_message(topic=self.a_topic, message=obj)

        simulate_reactor(sleep_duration)
        sleep(sleep_duration)

        eq_(len(messages_received), n_messages)