Example #1
0
 def setup_middleware(self):
     """Connect to Redis and Riak and build the message store."""
     config = self.config
     self.redis = yield TxRedisManager.from_config(config.redis_manager)
     riak = TxRiakManager.from_config(config.riak_manager)
     sub_redis = self.redis.sub_manager(config.store_prefix)
     self.store = MessageStore(riak, sub_redis)
     self.store_on_consume = config.store_on_consume
Example #2
0
 def setup_middleware(self):
     """Build Redis/Riak managers and the message store from dict config."""
     config = self.config
     store_prefix = config.get('store_prefix', 'message_store')
     self.redis = yield TxRedisManager.from_config(
         config.get('redis_manager', {}))
     riak = TxRiakManager.from_config(config.get('riak_manager'))
     self.store = MessageStore(riak, self.redis.sub_manager(store_prefix))
     self.store_on_consume = config.get('store_on_consume', True)
Example #3
0
 def setUp(self):
     """Create persistence helpers, a message store and default CLI args."""
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=False))
     self.msg_helper = self.add_helper(MessageHelper())
     # Tombstones can't be detected because the actual objects are never
     # loaded, so give every test a unique bucket prefix.
     self.expected_bucket_prefix = "bucket-%s" % (uuid4().hex,)
     riak_config = {"bucket_prefix": self.expected_bucket_prefix}
     self.riak_manager = self.persistence_helper.get_riak_manager(riak_config)
     self.add_cleanup(self.riak_manager.close_manager)
     self.redis_manager = yield self.persistence_helper.get_redis_manager()
     self.mdb = MessageStore(self.riak_manager, self.redis_manager)
     self.default_args = ["-b", self.expected_bucket_prefix]
Example #4
0
 def setUp(self):
     """Create persistence helpers, a message store and default CLI args."""
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=False))
     self.msg_helper = self.add_helper(MessageHelper())
     # The actual objects are never loaded, so tombstones are invisible;
     # give each test a unique bucket prefix instead.
     riak_config = self.persistence_helper.mk_config({})["riak_manager"].copy()
     riak_config["bucket_prefix"] = "%s-%s" % (
         uuid4().hex, riak_config["bucket_prefix"])
     self.riak_manager = self.persistence_helper.get_riak_manager(riak_config)
     self.redis_manager = yield self.persistence_helper.get_redis_manager()
     self.mdb = MessageStore(self.riak_manager, self.redis_manager)
     self.expected_bucket_prefix = "bucket"
     self.default_args = ["-b", self.expected_bucket_prefix]
Example #5
0
 def __init__(self, options):
     """Keep the parsed CLI options and build a Riak-backed message store."""
     self.options = options
     self.manager = self.get_riak_manager(
         {'bucket_prefix': options['bucket-prefix']})
     # No Redis manager is supplied; only the Riak side is used here.
     self.mdb = MessageStore(self.manager, None)
Example #6
0
 def setup_middleware(self):
     """Set up the Redis/Riak managers and create the message store."""
     prefix = self.config.store_prefix
     self.redis = yield TxRedisManager.from_config(self.config.redis_manager)
     riak_manager = TxRiakManager.from_config(self.config.riak_manager)
     self.store = MessageStore(riak_manager, self.redis.sub_manager(prefix))
     self.store_on_consume = self.config.store_on_consume
Example #7
0
    def setup_worker(self):
        """Build the message store and serve it over the configured endpoint."""
        config = self.get_static_config()
        riak_manager = yield TxRiakManager.from_config(config.riak_manager)
        redis_manager = yield TxRedisManager.from_config(config.redis_manager)
        self.store = MessageStore(riak_manager, redis_manager)

        resources = {
            config.web_path: MessageStoreResource(self.store),
            config.health_path: httprpc.HttpRpcHealthResource(self),
        }
        site = build_web_site(resources)
        service = StreamServerEndpointService(config.twisted_endpoint, site)
        self.addService(service)
Example #8
0
 def setUp(self):
     """Prepare persistence helpers and a message store for the tests."""
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=False))
     self.msg_helper = self.add_helper(MessageHelper())
     # Tombstones can't be detected (objects are never actually loaded),
     # so each test gets a bucket prefix of its own.
     cfg = self.persistence_helper.mk_config({})["riak_manager"].copy()
     cfg["bucket_prefix"] = "%s-%s" % (uuid4().hex, cfg["bucket_prefix"])
     self.riak_manager = self.persistence_helper.get_riak_manager(cfg)
     self.redis_manager = yield self.persistence_helper.get_redis_manager()
     self.mdb = MessageStore(self.riak_manager, self.redis_manager)
     self.expected_bucket_prefix = "bucket"
     self.default_args = [
         "-b", self.expected_bucket_prefix,
     ]
Example #9
0
class TestMessageStoreBase(VumiTestCase):
    """Base test case providing a Riak/Redis-backed :class:`MessageStore`.

    Sets up ``self.store``, ``self.manager`` (Riak), ``self.redis`` and a
    message helper for subclasses to use.
    """

    @inlineCallbacks
    def setUp(self):
        self.persistence_helper = self.add_helper(
            PersistenceHelper(use_riak=True))
        try:
            from vumi.components.message_store import MessageStore
        except ImportError as e:
            # `except ImportError as e` is valid on Python 2.6+ and 3.x,
            # unlike the old `except ImportError, e` form.
            # Skip these tests when the optional riak extra isn't installed.
            import_skip(e, 'riak')
        self.redis = yield self.persistence_helper.get_redis_manager()
        self.manager = self.persistence_helper.get_riak_manager()
        self.store = MessageStore(self.manager, self.redis)
        self.msg_helper = self.add_helper(MessageHelper())
Example #10
0
    def startWorker(self):
        """Create the message store and expose it via web resources."""
        cfg = self.config
        web_path = cfg['web_path']
        web_port = int(cfg['web_port'])
        health_path = cfg['health_path']

        riak_manager = yield TxRiakManager.from_config(cfg['riak_manager'])
        redis_manager = yield TxRedisManager.from_config(cfg['redis_manager'])
        self.store = MessageStore(riak_manager, redis_manager)

        resources = [
            (MessageStoreAPI(self.store), web_path),
            (httprpc.HttpRpcHealthResource(self), health_path),
        ]
        self.webserver = self.start_web_resources(resources, web_port)
Example #11
0
    def __init__(self, manager, redis, sender=None, metric_publisher=None):
        """Wire up the stores and managers backing the API object."""
        # Imported locally to break the circular import: go.api.go_api
        # itself needs access to VumiApi.
        from go.api.go_api.session_manager import SessionManager

        self.manager = manager
        self.redis = redis

        # Each store gets its own keyspace via a Redis sub-manager.
        sub = self.redis.sub_manager
        self.tpm = TagpoolManager(sub('tagpool_store'))
        self.mdb = MessageStore(self.manager, sub('message_store'))
        self.account_store = AccountStore(self.manager)
        self.token_manager = TokenManager(sub('token_manager'))
        self.session_manager = SessionManager(sub('session_manager'))
        self.mapi = sender
        self.metric_publisher = metric_publisher
Example #12
0
class MessageStoreCacheTestCase(VumiTestCase):
    """Tests exercising the message store's cache (``self.store.cache``)."""

    # Whether setUp should start a batch in the cache before each test.
    start_batch = True

    @inlineCallbacks
    def setUp(self):
        self.persistence_helper = self.add_helper(
            PersistenceHelper(use_riak=True))
        try:
            from vumi.components.message_store import MessageStore
        except ImportError as e:
            # `except ImportError as e` is valid on Python 2.6+ and 3.x,
            # unlike the old `except ImportError, e` form.
            # Skip these tests when the optional riak extra isn't installed.
            import_skip(e, 'riak')
        self.redis = yield self.persistence_helper.get_redis_manager()
        self.manager = yield self.persistence_helper.get_riak_manager()
        self.store = yield MessageStore(self.manager, self.redis)
        self.cache = self.store.cache
        self.batch_id = 'a-batch-id'
        if self.start_batch:
            yield self.cache.batch_start(self.batch_id)
        self.msg_helper = self.add_helper(MessageHelper())
Example #13
0
class StoringMiddleware(BaseMiddleware):
    """Middleware that persists inbound/outbound messages and events.

    Failure messages are not persisted here; those are normally handled
    by :class:`vumi.transports.FailureWorker` instances.

    Every message passing through is stored. For messages to be
    associated with a particular batch_id (see
    :class:`vumi.application.MessageStore`), a batch must first be
    created in the message store (typically by the application worker
    that initiates sending outbound messages) and the messages must
    carry a tag belonging to that batch (typically applied by an
    application worker or by middleware such as
    :class:`vumi.middleware.TaggingMiddleware`).

    Configuration options:

    :param string store_prefix:
        Prefix for message store keys in key-value store.
        Default is 'message_store'.
    :param dict redis_manager:
        Redis configuration parameters.
    :param dict riak_manager:
        Riak configuration parameters. Must contain at least
        a bucket_prefix key.
    :param bool store_on_consume:
        ``True`` to store consumed messages as well as published ones,
        ``False`` to store only published messages.
        Default is ``True``.
    """

    CONFIG_CLASS = StoringMiddlewareConfig

    @inlineCallbacks
    def setup_middleware(self):
        """Connect to Redis and Riak and build the message store."""
        config = self.config
        self.redis = yield TxRedisManager.from_config(config.redis_manager)
        riak = TxRiakManager.from_config(config.riak_manager)
        sub_redis = self.redis.sub_manager(config.store_prefix)
        self.store = MessageStore(riak, sub_redis)
        self.store_on_consume = config.store_on_consume

    @inlineCallbacks
    def teardown_middleware(self):
        """Release the Redis connection."""
        yield self.redis.close_manager()

    def handle_consume_inbound(self, message, connector_name):
        if self.store_on_consume:
            return self.handle_inbound(message, connector_name)
        return message

    @inlineCallbacks
    def handle_inbound(self, message, connector_name):
        """Store an inbound message along with any tag mapped from it."""
        tag = TaggingMiddleware.map_msg_to_tag(message)
        yield self.store.add_inbound_message(message, tag=tag)
        returnValue(message)

    def handle_consume_outbound(self, message, connector_name):
        if self.store_on_consume:
            return self.handle_outbound(message, connector_name)
        return message

    @inlineCallbacks
    def handle_outbound(self, message, connector_name):
        """Store an outbound message along with any tag mapped from it."""
        tag = TaggingMiddleware.map_msg_to_tag(message)
        yield self.store.add_outbound_message(message, tag=tag)
        returnValue(message)

    def handle_consume_event(self, event, connector_name):
        if self.store_on_consume:
            return self.handle_event(event, connector_name)
        return event

    @inlineCallbacks
    def handle_event(self, event, connector_name):
        """Store an event, normalising any datetime 'date' metadata first."""
        metadata = event.get('transport_metadata', {})
        # FIXME: The SMPP transport writes a 'datetime' object
        #        in the 'date' of the transport_metadata.
        #        json.dumps() that RiakObject uses is unhappy with that.
        if 'date' in metadata:
            date = metadata['date']
            if not isinstance(date, basestring):
                metadata['date'] = date.isoformat()
        yield self.store.add_event(event)
        returnValue(event)
Example #14
0
class StoringMiddleware(BaseMiddleware):
    """Middleware that persists inbound/outbound messages and events.

    Failure messages are not persisted here; those are normally handled
    by :class:`vumi.transports.FailureWorker` instances.

    Every message passing through is stored. For messages to be
    associated with a particular batch_id (see
    :class:`vumi.application.MessageStore`), a batch must first be
    created in the message store (typically by the application worker
    that initiates sending outbound messages) and the messages must
    carry a tag belonging to that batch (typically applied by an
    application worker or by middleware such as
    :class:`vumi.middleware.TaggingMiddleware`).

    Configuration options:

    :param string store_prefix:
        Prefix for message store keys in key-value store.
        Default is 'message_store'.
    :param dict redis_manager:
        Redis configuration parameters.
    :param dict riak_manager:
        Riak configuration parameters. Must contain at least
        a bucket_prefix key.
    :param bool store_on_consume:
        ``True`` to store consumed messages as well as published ones,
        ``False`` to store only published messages.
        Default is ``True``.
    """

    CONFIG_CLASS = StoringMiddlewareConfig

    @inlineCallbacks
    def setup_middleware(self):
        """Connect to Redis and Riak and build the message store."""
        config = self.config
        self.redis = yield TxRedisManager.from_config(config.redis_manager)
        riak = TxRiakManager.from_config(config.riak_manager)
        sub_redis = self.redis.sub_manager(config.store_prefix)
        self.store = MessageStore(riak, sub_redis)
        self.store_on_consume = config.store_on_consume

    @inlineCallbacks
    def teardown_middleware(self):
        """Release the Redis connection."""
        yield self.redis.close_manager()

    def handle_consume_inbound(self, message, connector_name):
        if self.store_on_consume:
            return self.handle_inbound(message, connector_name)
        return message

    @inlineCallbacks
    def handle_inbound(self, message, connector_name):
        """Store an inbound message along with any tag mapped from it."""
        tag = TaggingMiddleware.map_msg_to_tag(message)
        yield self.store.add_inbound_message(message, tag=tag)
        returnValue(message)

    def handle_consume_outbound(self, message, connector_name):
        if self.store_on_consume:
            return self.handle_outbound(message, connector_name)
        return message

    @inlineCallbacks
    def handle_outbound(self, message, connector_name):
        """Store an outbound message along with any tag mapped from it."""
        tag = TaggingMiddleware.map_msg_to_tag(message)
        yield self.store.add_outbound_message(message, tag=tag)
        returnValue(message)

    def handle_consume_event(self, event, connector_name):
        if self.store_on_consume:
            return self.handle_event(event, connector_name)
        return event

    @inlineCallbacks
    def handle_event(self, event, connector_name):
        """Store an event, normalising any datetime 'date' metadata first."""
        metadata = event.get('transport_metadata', {})
        # FIXME: The SMPP transport writes a 'datetime' object
        #        in the 'date' of the transport_metadata.
        #        json.dumps() that RiakObject uses is unhappy with that.
        if 'date' in metadata:
            date = metadata['date']
            if not isinstance(date, basestring):
                metadata['date'] = date.isoformat()
        yield self.store.add_event(event)
        returnValue(event)
Example #15
0
class TestMessageLister(VumiTestCase):
    """Tests for the message-lister CLI tool backed by a MessageStore."""

    @inlineCallbacks
    def setUp(self):
        """Create async persistence helpers, a message store and default args."""
        self.persistence_helper = self.add_helper(
            PersistenceHelper(use_riak=True, is_sync=False))
        self.msg_helper = self.add_helper(MessageHelper())
        # Since we're never loading the actual objects, we can't detect
        # tombstones. Therefore, each test needs its own bucket prefix.
        config = self.persistence_helper.mk_config({})["riak_manager"].copy()
        config["bucket_prefix"] = "%s-%s" % (
            uuid4().hex, config["bucket_prefix"])
        self.riak_manager = self.persistence_helper.get_riak_manager(config)
        self.redis_manager = yield self.persistence_helper.get_redis_manager()
        self.mdb = MessageStore(self.riak_manager, self.redis_manager)
        # NOTE(review): the "-b" default below is the plain "bucket" prefix
        # that get_sub_riak checks, not the per-test prefix used above.
        self.expected_bucket_prefix = "bucket"
        self.default_args = [
            "-b", self.expected_bucket_prefix,
        ]

    def make_lister(self, args=None, batch=None, direction=None,
                    index_page_size=None):
        """Build a StubbedMessageLister from CLI-style arguments.

        Keyword options are appended to ``args`` (``self.default_args``
        when not given) before parsing them with ``Options``.
        """
        if args is None:
            args = self.default_args
        if batch is not None:
            args.extend(["--batch", batch])
        if direction is not None:
            args.extend(["--direction", direction])
        if index_page_size is not None:
            args.extend(["--index-page-size", str(index_page_size)])
        options = Options()
        options.parseOptions(args)
        return StubbedMessageLister(self, options)

    def get_sub_riak(self, config):
        """Return the shared Riak manager after checking the bucket prefix.

        NOTE(review): presumably called back by StubbedMessageLister when
        it builds its manager -- confirm against the stub's implementation.
        """
        self.assertEqual(config.get('bucket_prefix'),
                         self.expected_bucket_prefix)
        return self.riak_manager

    def make_inbound(self, batch_id, from_addr, timestamp=None):
        """Store an inbound message in the given batch.

        Returns a Deferred that fires with the tuple
        ``(timestamp_str, from_addr, message_id)`` once stored.
        """
        if timestamp is None:
            timestamp = datetime.utcnow()
        msg = self.msg_helper.make_inbound(
            None, from_addr=from_addr, timestamp=timestamp)
        d = self.mdb.add_inbound_message(msg, batch_id=batch_id)
        timestamp_str = timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
        d.addCallback(
            lambda _: (timestamp_str, from_addr, msg["message_id"]))
        return d

    def make_outbound(self, batch_id, to_addr, timestamp=None):
        """Store an outbound message in the given batch.

        Returns a Deferred that fires with the tuple
        ``(timestamp_str, to_addr, message_id)`` once stored.
        """
        if timestamp is None:
            timestamp = datetime.utcnow()
        msg = self.msg_helper.make_outbound(
            None, to_addr=to_addr, timestamp=timestamp)
        d = self.mdb.add_outbound_message(msg, batch_id=batch_id)
        timestamp_str = timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
        d.addCallback(
            lambda _: (timestamp_str, to_addr, msg["message_id"]))
        return d

    def test_batch_required(self):
        """Omitting --batch raises a usage error."""
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--direction", "inbound",
            "-b", self.expected_bucket_prefix,
        ])

    def test_valid_direction_required(self):
        """A missing or unrecognised --direction raises a usage error."""
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--batch", "gingercoookies",
            "-b", self.expected_bucket_prefix,
        ])
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--batch", "gingercoookies",
            "--direction", "widdershins",
            "-b", self.expected_bucket_prefix,
        ])

    def test_bucket_required(self):
        """Omitting the -b bucket prefix raises a usage error."""
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--batch", "gingercoookies",
            "--direction", "inbound",
        ])

    @inlineCallbacks
    def test_main(self):
        """
        The lister runs via `main()`.
        """
        msg_data = yield self.make_inbound("gingercookies", "12345")
        # Capture stdout so main()'s output can be asserted on.
        self.patch(sys, "stdout", StringIO())
        yield main(
            None, "name",
            "--batch", "gingercookies",
            "--direction", "inbound",
            "-b", self.riak_manager.bucket_prefix)
        self.assertEqual(
            sys.stdout.getvalue(),
            "%s\n" % (",".join(msg_data),))

    @inlineCallbacks
    def test_list_inbound(self):
        """
        Inbound messages can be listed.
        """
        start = datetime.utcnow() - timedelta(seconds=10)
        # Messages get strictly increasing timestamps so the listing
        # order is deterministic.
        msg_datas = [
            (yield self.make_inbound(
                "gingercookies", "1234%d" % i, start + timedelta(seconds=i)))
            for i in range(5)
        ]
        lister = self.make_lister(batch="gingercookies", direction="inbound")
        yield lister.run()
        self.assertEqual(
            lister.output, [",".join(msg_data) for msg_data in msg_datas])

    @inlineCallbacks
    def test_list_inbound_small_pages(self):
        """
        Inbound messages can be listed.
        """
        start = datetime.utcnow() - timedelta(seconds=10)
        msg_datas = [
            (yield self.make_inbound(
                "gingercookies", "1234%d" % i, start + timedelta(seconds=i)))
            for i in range(5)
        ]
        # A page size smaller than the message count forces pagination.
        lister = self.make_lister(
            batch="gingercookies", direction="inbound", index_page_size=2)
        yield lister.run()
        self.assertEqual(
            lister.output, [",".join(msg_data) for msg_data in msg_datas])

    @inlineCallbacks
    def test_list_outbound(self):
        """
        Outbound messages can be listed.
        """
        start = datetime.utcnow() - timedelta(seconds=10)
        msg_datas = [
            (yield self.make_outbound(
                "gingercookies", "1234%d" % i, start + timedelta(seconds=i)))
            for i in range(5)
        ]
        lister = self.make_lister(batch="gingercookies", direction="outbound")
        yield lister.run()
        self.assertEqual(
            lister.output, [",".join(msg_data) for msg_data in msg_datas])
Example #16
0
class TestMessageLister(VumiTestCase):
    """Tests for the message-lister CLI tool backed by a MessageStore."""

    @inlineCallbacks
    def setUp(self):
        """Create async persistence helpers, a message store and default args."""
        self.persistence_helper = self.add_helper(
            PersistenceHelper(use_riak=True, is_sync=False))
        self.msg_helper = self.add_helper(MessageHelper())
        # Since we're never loading the actual objects, we can't detect
        # tombstones. Therefore, each test needs its own bucket prefix.
        self.expected_bucket_prefix = "bucket-%s" % (uuid4().hex,)
        self.riak_manager = self.persistence_helper.get_riak_manager({
            "bucket_prefix": self.expected_bucket_prefix,
        })
        self.add_cleanup(self.riak_manager.close_manager)
        self.redis_manager = yield self.persistence_helper.get_redis_manager()
        self.mdb = MessageStore(self.riak_manager, self.redis_manager)
        self.default_args = [
            "-b", self.expected_bucket_prefix,
        ]

    def make_lister(self, args=None, batch=None, direction=None,
                    index_page_size=None):
        """Build a StubbedMessageLister from CLI-style arguments.

        Keyword options are appended to ``args`` (``self.default_args``
        when not given) before parsing them with ``Options``.
        """
        if args is None:
            args = self.default_args
        if batch is not None:
            args.extend(["--batch", batch])
        if direction is not None:
            args.extend(["--direction", direction])
        if index_page_size is not None:
            args.extend(["--index-page-size", str(index_page_size)])
        options = Options()
        options.parseOptions(args)
        return StubbedMessageLister(self, options)

    def get_riak_manager(self, config):
        """Check the bucket prefix, then hand out a helper-managed manager.

        NOTE(review): presumably called back by StubbedMessageLister when
        it builds its manager -- confirm against the stub's implementation.
        """
        self.assertEqual(config["bucket_prefix"], self.expected_bucket_prefix)
        return self.persistence_helper.get_riak_manager(config)

    def make_inbound(self, batch_id, from_addr, timestamp=None):
        """Store an inbound message in the given batch.

        Returns a Deferred that fires with the tuple
        ``(timestamp_str, from_addr, message_id)`` once stored.
        """
        if timestamp is None:
            timestamp = datetime.utcnow()
        msg = self.msg_helper.make_inbound(
            None, from_addr=from_addr, timestamp=timestamp)
        d = self.mdb.add_inbound_message(msg, batch_id=batch_id)
        timestamp_str = timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
        d.addCallback(
            lambda _: (timestamp_str, from_addr, msg["message_id"]))
        return d

    def make_outbound(self, batch_id, to_addr, timestamp=None):
        """Store an outbound message in the given batch.

        Returns a Deferred that fires with the tuple
        ``(timestamp_str, to_addr, message_id)`` once stored.
        """
        if timestamp is None:
            timestamp = datetime.utcnow()
        msg = self.msg_helper.make_outbound(
            None, to_addr=to_addr, timestamp=timestamp)
        d = self.mdb.add_outbound_message(msg, batch_id=batch_id)
        timestamp_str = timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
        d.addCallback(
            lambda _: (timestamp_str, to_addr, msg["message_id"]))
        return d

    def test_batch_required(self):
        """Omitting --batch raises a usage error."""
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--direction", "inbound",
            "-b", self.expected_bucket_prefix,
        ])

    def test_valid_direction_required(self):
        """A missing or unrecognised --direction raises a usage error."""
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--batch", "gingercoookies",
            "-b", self.expected_bucket_prefix,
        ])
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--batch", "gingercoookies",
            "--direction", "widdershins",
            "-b", self.expected_bucket_prefix,
        ])

    def test_bucket_required(self):
        """Omitting the -b bucket prefix raises a usage error."""
        self.assertRaises(usage.UsageError, self.make_lister, [
            "--batch", "gingercoookies",
            "--direction", "inbound",
        ])

    @inlineCallbacks
    def test_main(self):
        """
        The lister runs via `main()`.
        """
        msg_data = yield self.make_inbound("gingercookies", "12345")
        # Capture stdout so main()'s output can be asserted on.
        self.patch(sys, "stdout", StringIO())
        yield main(
            None, "name",
            "--batch", "gingercookies",
            "--direction", "inbound",
            "-b", self.riak_manager.bucket_prefix)
        self.assertEqual(
            sys.stdout.getvalue(),
            "%s\n" % (",".join(msg_data),))

    @inlineCallbacks
    def test_list_inbound(self):
        """
        Inbound messages can be listed.
        """
        start = datetime.utcnow() - timedelta(seconds=10)
        # Messages get strictly increasing timestamps so the listing
        # order is deterministic.
        msg_datas = [
            (yield self.make_inbound(
                "gingercookies", "1234%d" % i, start + timedelta(seconds=i)))
            for i in range(5)
        ]
        lister = self.make_lister(batch="gingercookies", direction="inbound")
        yield lister.run()
        self.assertEqual(
            lister.output, [",".join(msg_data) for msg_data in msg_datas])

    @inlineCallbacks
    def test_list_inbound_small_pages(self):
        """
        Inbound messages can be listed.
        """
        start = datetime.utcnow() - timedelta(seconds=10)
        msg_datas = [
            (yield self.make_inbound(
                "gingercookies", "1234%d" % i, start + timedelta(seconds=i)))
            for i in range(5)
        ]
        # A page size smaller than the message count forces pagination.
        lister = self.make_lister(
            batch="gingercookies", direction="inbound", index_page_size=2)
        yield lister.run()
        self.assertEqual(
            lister.output, [",".join(msg_data) for msg_data in msg_datas])

    @inlineCallbacks
    def test_list_outbound(self):
        """
        Outbound messages can be listed.
        """
        start = datetime.utcnow() - timedelta(seconds=10)
        msg_datas = [
            (yield self.make_outbound(
                "gingercookies", "1234%d" % i, start + timedelta(seconds=i)))
            for i in range(5)
        ]
        lister = self.make_lister(batch="gingercookies", direction="outbound")
        yield lister.run()
        self.assertEqual(
            lister.output, [",".join(msg_data) for msg_data in msg_datas])