Example #1
    def get_worker(self, config=None):
        '''Get a new MessageForwardingWorker with the provided config'''
        if config is None:
            config = {}

        self.app_helper = ApplicationHelper(MessageForwardingWorker)
        yield self.app_helper.setup()
        self.addCleanup(self.app_helper.cleanup)

        persistencehelper = PersistenceHelper()
        yield persistencehelper.setup()
        self.addCleanup(persistencehelper.cleanup)

        config = conjoin(
            persistencehelper.mk_config({
                'transport_name': 'testtransport',
                'mo_message_url': self.url.decode('utf-8'),
                'inbound_ttl': 60,
                'outbound_ttl': 60 * 60 * 24 * 2,
                'metric_window': 1.0,
            }), config)

        worker = yield self.app_helper.get_application(config)
        returnValue(worker)
Example #2
    def get_worker(self, config=None):
        """Get a new MessageForwardingWorker with the provided config"""
        if config is None:
            config = {}

        app_helper = ApplicationHelper(MessageForwardingWorker)
        yield app_helper.setup()
        self.addCleanup(app_helper.cleanup)

        persistencehelper = PersistenceHelper()
        yield persistencehelper.setup()
        self.addCleanup(persistencehelper.cleanup)

        config = conjoin(
            persistencehelper.mk_config(
                {
                    "transport_name": "testtransport",
                    "mo_message_url": self.url.decode("utf-8"),
                    "inbound_ttl": 60,
                    "outbound_ttl": 60 * 60 * 24 * 2,
                }
            ),
            config,
        )

        worker = yield app_helper.get_application(config)
        returnValue(worker)
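The get_worker helpers in Examples #1 and #2 are Twisted generator coroutines: they yield deferreds and finish with returnValue, so in their real test classes they carry an @inlineCallbacks decorator (compare the decorated helpers in Example #42 further down). Below is a minimal, hedged sketch of calling such a helper from a test; it assumes the surrounding class defines get_worker as above (decorated) and provides self.url, and the class and test names are illustrative only.

from twisted.internet.defer import inlineCallbacks
from twisted.trial.unittest import TestCase


class TestMessageForwardingWorkerSketch(TestCase):
    # Assumed: get_worker (decorated with @inlineCallbacks) and self.url are
    # provided by the real test class, as in the examples above.

    @inlineCallbacks
    def test_worker_starts(self):
        worker = yield self.get_worker()
        self.assertTrue(worker is not None)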
Example #3
 def get_redis(self):
     '''Creates and returns a redis manager'''
     if hasattr(self, 'redis'):
         returnValue(self.redis)
     persistencehelper = PersistenceHelper()
     yield persistencehelper.setup()
     self.redis = yield persistencehelper.get_redis_manager()
     self.addCleanup(persistencehelper.cleanup)
     returnValue(self.redis)
Example #4
 def get_redis(self):
     '''Creates and returns a redis manager'''
     if hasattr(self, 'redis'):
         returnValue(self.redis)
     persistencehelper = PersistenceHelper()
     yield persistencehelper.setup()
     self.redis = yield persistencehelper.get_redis_manager()
     self.addCleanup(persistencehelper.cleanup)
     returnValue(self.redis)
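The manual setup() / addCleanup() pairing in these get_redis helpers can also be written with VumiTestCase.add_helper, which many of the later examples use: it runs a helper's setup() and registers its cleanup() in one call. A hedged, equivalent sketch, assuming the test case extends vumi's VumiTestCase:

from twisted.internet.defer import inlineCallbacks, returnValue

from vumi.tests.helpers import PersistenceHelper, VumiTestCase


class RedisManagerSketch(VumiTestCase):

    @inlineCallbacks
    def get_redis(self):
        '''Create (or return the cached) redis manager.'''
        if not hasattr(self, 'redis'):
            # add_helper calls setup() and schedules cleanup() for us.
            persistence_helper = self.add_helper(PersistenceHelper())
            self.redis = yield persistence_helper.get_redis_manager()
        returnValue(self.redis)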
Example #5
    def setUp(self):
        self.workerhelper = WorkerHelper()
        self.addCleanup(self.workerhelper.cleanup)

        self.persistencehelper = PersistenceHelper()
        yield self.persistencehelper.setup()
        self.addCleanup(self.persistencehelper.cleanup)

        self.messagehelper = MessageHelper()
        self.addCleanup(self.messagehelper.cleanup)
Example #6
    def create_channel_config(self, **kw):
        self.persistencehelper = PersistenceHelper()
        yield self.persistencehelper.setup()
        self.addCleanup(self.persistencehelper.cleanup)

        config = deepcopy(self.default_channel_config)
        config.update(kw)
        channel_config = self.persistencehelper.mk_config(config)
        channel_config['redis'] = channel_config['redis_manager']
        returnValue(JunebugConfig(channel_config))
Example #7
    def __init__(self, dispatcher_class, use_riak=False, **msg_helper_args):
        self.dispatcher_class = dispatcher_class
        self.worker_helper = WorkerHelper()
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.dispatch_helper = MessageDispatchHelper(self.msg_helper,
                                                     self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
Example #8
    def __init__(self, is_sync=False, use_riak=True):
        self.is_sync = is_sync
        self._patch_helper = PatchHelper()
        generate_proxies(self, self._patch_helper)

        self._persistence_helper = PersistenceHelper(use_riak=use_riak,
                                                     is_sync=is_sync)
        self.broker = None  # Will be replaced by the first worker_helper.
        self._worker_helpers = {}
        self._users_created = 0
        self._user_helpers = {}
        self._vumi_api = None

        generate_proxies(self, self._persistence_helper)
Example #9
    def __init__(self, application_class, use_riak=False, **msg_helper_args):
        self.application_class = application_class
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.transport_name = self.msg_helper.transport_name
        self.worker_helper = WorkerHelper(self.msg_helper.transport_name)
        self.dispatch_helper = MessageDispatchHelper(self.msg_helper,
                                                     self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
        generate_proxies(self, self.persistence_helper)
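This __init__ is the application-worker flavour of the composite-helper pattern (it matches vumi's ApplicationHelper, which normally lives in vumi.application.tests.helpers). A hedged usage sketch follows; MyApp and the empty config dict are illustrative placeholders, not part of the example above.

from twisted.internet.defer import inlineCallbacks

from vumi.application.tests.helpers import ApplicationHelper
from vumi.tests.helpers import VumiTestCase


class TestMyAppSketch(VumiTestCase):

    @inlineCallbacks
    def test_inbound_sketch(self):
        # MyApp is a hypothetical ApplicationWorker subclass under test.
        app_helper = self.add_helper(ApplicationHelper(MyApp))
        yield app_helper.get_application({})  # plus whatever config MyApp needs
        # make_dispatch_inbound is proxied in from MessageDispatchHelper.
        yield app_helper.make_dispatch_inbound('hello')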
Example #10
 def setUp(self):
     self.persistence_helper = self.add_helper(PersistenceHelper())
     self.worker_helper = self.add_helper(WorkerHelper())
     config = {
         'deadline': 30,
         'redis_manager': {
             'key_prefix': 'heartbeats',
             'db': 5,
             'FAKE_REDIS': True,
         },
         'monitored_systems': {
             'system-1': {
                 'system_name': 'system-1',
                 'system_id': 'system-1',
                 'workers': {
                     'twitter_transport': {
                         'name': 'twitter_transport',
                         'min_procs': 2,
                     }
                 }
             }
         }
     }
     self.worker = yield self.worker_helper.get_worker(
         monitor.HeartBeatMonitor, config, start=False)
Example #11
    def setUp(self):
        self.persistence_helper = self.add_helper(
            PersistenceHelper(use_riak=True))

        # Create and stash a riak manager to clean up afterwards, because we
        # don't get access to the one inside the middleware.
        self.persistence_helper.get_riak_manager()
Example #12
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True))
     try:
         from vumi.components.message_store import MessageStore
     except ImportError, e:
         import_skip(e, 'riak')
Example #13
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True))
     if riak_import_error is not None:
         import_skip(riak_import_error, 'riak')
     self.manager = self.persistence_helper.get_riak_manager()
     self.msg_helper = self.add_helper(MessageHelper())
Example #14
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True))
     try:
         from vumi.components.message_store_resource import (
             MessageStoreResourceWorker)
     except ImportError, e:
         import_skip(e, 'riakasaurus', 'riakasaurus.riak')
Example #15
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True))
     try:
         from vumi.components.message_store_api import (
             MatchResource, MessageStoreAPIWorker)
     except ImportError, e:
         import_skip(e, 'riak')
Example #16
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True))
     self.manager = self.persistence_helper.get_riak_manager()
     self.add_cleanup(self.manager.close_manager)
     self.redis = yield self.persistence_helper.get_redis_manager()
     self.batch_manager = MessageStoreBatchManager(self.manager, self.redis)
     self.backend = self.batch_manager.riak_backend
     self.bi_cache = self.batch_manager.batch_info_cache
Example #17
    def setUp(self):
        self.persistence_helper = self.add_helper(
            PersistenceHelper(use_riak=True))
        self.worker_helper = self.add_helper(WorkerHelper())
        self.msg_helper = self.add_helper(MessageHelper())

        riak, redis = yield self.create_managers()
        self.operational_store = OperationalMessageStore(riak, redis)
        self.batch_manager = MessageStoreBatchManager(riak, redis)
Example #18
    def __init__(self, transport_class, use_riak=False, **msg_helper_args):
        self.transport_class = transport_class
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.transport_name = self.msg_helper.transport_name

        self.worker_helper = WorkerHelper(
            connector_name=self.transport_name,
            status_connector_name="%s.status" % (self.transport_name,))

        self.dispatch_helper = MessageDispatchHelper(self.msg_helper,
                                                     self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
        generate_proxies(self, self.persistence_helper)
Example #19
    def setUp(self):
        self.workerhelper = WorkerHelper()
        self.addCleanup(self.workerhelper.cleanup)

        self.persistencehelper = PersistenceHelper()
        yield self.persistencehelper.setup()
        self.addCleanup(self.persistencehelper.cleanup)

        self.messagehelper = MessageHelper()
        self.addCleanup(self.messagehelper.cleanup)
Example #20
 def setUp(self):
     self.persistence_helper = self.add_helper(PersistenceHelper())
     self.redis = yield self.persistence_helper.get_redis_manager()
     self.tagpool = TagpoolManager(self.redis)
     site = Site(TagpoolApiServer(self.tagpool))
     self.server = yield reactor.listenTCP(0, site, interface='127.0.0.1')
     self.add_cleanup(self.server.loseConnection)
     addr = self.server.getHost()
     self.proxy = Proxy("http://%s:%d/" % (addr.host, addr.port))
     yield self.setup_tags()
Example #21
    def get_worker(self, config=None):
        '''Get a new ChannelStatusWorker with the provided config'''
        if config is None:
            config = {}

        app_helper = ApplicationHelper(ChannelStatusWorker)
        yield app_helper.setup()
        self.addCleanup(app_helper.cleanup)

        persistencehelper = PersistenceHelper()
        yield persistencehelper.setup()
        self.addCleanup(persistencehelper.cleanup)

        config = conjoin(persistencehelper.mk_config({
            'channel_id': 'testchannel',
        }), config)

        worker = yield app_helper.get_application(config)
        returnValue(worker)
Example #22
    def get_worker(self, config=None):
        '''Get a new ChannelStatusWorker with the provided config'''
        if config is None:
            config = {}

        app_helper = ApplicationHelper(ChannelStatusWorker)
        yield app_helper.setup()
        self.addCleanup(app_helper.cleanup)

        persistencehelper = PersistenceHelper()
        yield persistencehelper.setup()
        self.addCleanup(persistencehelper.cleanup)

        config = conjoin(persistencehelper.mk_config({
            'channel_id': 'testchannel',
        }), config)

        worker = yield app_helper.get_application(config)
        returnValue(worker)
Example #23
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True))
     self.manager = self.persistence_helper.get_riak_manager()
     self.add_cleanup(self.manager.close_manager)
     self.redis = yield self.persistence_helper.get_redis_manager()
     self.store = OperationalMessageStore(self.manager, self.redis)
     self.backend = self.store.riak_backend
     self.bi_cache = self.store.batch_info_cache
     self.msg_helper = self.add_helper(MessageHelper())
Example #24
    def create_channel_config(self, **kw):
        self.persistencehelper = PersistenceHelper()
        yield self.persistencehelper.setup()
        self.addCleanup(self.persistencehelper.cleanup)

        config = deepcopy(self.default_channel_config)
        config.update(kw)
        channel_config = self.persistencehelper.mk_config(config)
        channel_config['redis'] = channel_config['redis_manager']
        returnValue(JunebugConfig(channel_config))
Example #25
    def __init__(self, dispatcher_class, use_riak=False, **msg_helper_args):
        self.dispatcher_class = dispatcher_class
        self.worker_helper = WorkerHelper()
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.dispatch_helper = MessageDispatchHelper(self.msg_helper, self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
Example #26
 def setUp(self):
     self.clock = Clock()
     self.persistence_helper = self.add_helper(PersistenceHelper())
     self.redis = yield self.persistence_helper.get_redis_manager()
     self.fake_smsc = FakeSMSC(auto_accept=False)
     self.default_config = {
         'transport_name': 'sphex_transport',
         'twisted_endpoint': self.fake_smsc.endpoint,
         'system_id': 'system_id',
         'password': '******',
     }
Example #27
    def setUp(self):
        self.persistence_helper = self.add_helper(
            PersistenceHelper(use_riak=True))
        riak_manager = self.persistence_helper.get_riak_manager()
        self.account_store = AccountStore(riak_manager)
        self.user = yield self.account_store.new_user(u'testuser')

        # Some old contact proxies for testing migrations.
        per_account_manager = riak_manager.sub_manager(self.user.key)
        self.contacts_vnone = per_account_manager.proxy(ContactVNone)
        self.contacts_v1 = per_account_manager.proxy(ContactV1)
        self.contacts_v2 = per_account_manager.proxy(Contact)
Example #28
    def setUp(self):
        self.persistence_helper = self.add_helper(PersistenceHelper())
        redis = yield self.persistence_helper.get_redis_manager()
        self.window_id = 'window_id'

        # Patch the count_waiting so we can fake the race condition
        self.clock = Clock()
        self.patch(WindowManager, 'count_waiting', lambda _, window_id: 100)

        self.wm = WindowManager(redis, window_size=10, flight_lifetime=10)
        self.add_cleanup(self.wm.stop)
        yield self.wm.create_window(self.window_id)
        self.redis = self.wm.redis
Example #29
    def setUp(self):
        self.persistence_helper = self.add_helper(PersistenceHelper())
        redis = yield self.persistence_helper.get_redis_manager()
        self.window_id = 'window_id'

        # Patch the clock so we can control time
        self.clock = Clock()
        self.patch(WindowManager, 'get_clock', lambda _: self.clock)

        self.wm = WindowManager(redis, window_size=10, flight_lifetime=10)
        self.add_cleanup(self.wm.stop)
        yield self.wm.create_window(self.window_id)
        self.redis = self.wm.redis
Example #30
    def __init__(self, is_sync=False, use_riak=True):
        self.is_sync = is_sync
        self._patch_helper = PatchHelper()
        generate_proxies(self, self._patch_helper)

        self._persistence_helper = PersistenceHelper(
            use_riak=use_riak, is_sync=is_sync)
        self.broker = None  # Will be replaced by the first worker_helper.
        self._worker_helpers = {}
        self._users_created = 0
        self._user_helpers = {}
        self._vumi_api = None

        generate_proxies(self, self._persistence_helper)
Example #31
    def __init__(self, application_class, use_riak=False, **msg_helper_args):
        self.application_class = application_class
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.transport_name = self.msg_helper.transport_name
        self.worker_helper = WorkerHelper(self.msg_helper.transport_name)
        self.dispatch_helper = MessageDispatchHelper(
            self.msg_helper, self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
        generate_proxies(self, self.persistence_helper)
Example #32
    def get_worker(self, config=None):
        '''Get a new MessageForwardingWorker with the provided config'''
        if config is None:
            config = {}

        self.app_helper = ApplicationHelper(MessageForwardingWorker)
        yield self.app_helper.setup()
        self.addCleanup(self.app_helper.cleanup)

        persistencehelper = PersistenceHelper()
        yield persistencehelper.setup()
        self.addCleanup(persistencehelper.cleanup)

        config = conjoin(persistencehelper.mk_config({
            'transport_name': 'testtransport',
            'mo_message_url': self.url.decode('utf-8'),
            'inbound_ttl': 60,
            'outbound_ttl': 60 * 60 * 24 * 2,
            'metric_window': 1.0,
        }), config)

        worker = yield self.app_helper.get_application(config)
        returnValue(worker)
Example #33
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=True))
     self.riak_manager = self.persistence_helper.get_riak_manager()
     self.model = self.riak_manager.proxy(SimpleModel)
     self.model_cls_path = ".".join(
         [SimpleModel.__module__, SimpleModel.__name__])
     self.expected_bucket_prefix = "bucket"
     self.default_args = [
         "-m",
         self.model_cls_path,
         "-b",
         self.expected_bucket_prefix,
     ]
Example #34
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=False))
     self.riak_manager = self.persistence_helper.get_riak_manager()
     self.old_model = self.riak_manager.proxy(SimpleModelOld)
     self.model = self.riak_manager.proxy(SimpleModel)
     self.model_cls_path = fqpn(SimpleModel)
     self.expected_bucket_prefix = "bucket"
     self.default_args = [
         "-m",
         self.model_cls_path,
         "-b",
         self.expected_bucket_prefix,
     ]
Example #35
    def setUp(self):
        super(TestGoLoggingResource, self).setUp()
        self.persistence_helper = self.add_helper(PersistenceHelper())
        self.parent_redis = yield self.persistence_helper.get_redis_manager()
        self.redis = self.parent_redis.sub_manager(
            LogManager.DEFAULT_SUB_STORE)
        yield self.create_resource({
            'redis_manager': {
                'FAKE_REDIS': self.parent_redis,
                'key_prefix': self.parent_redis.get_key_prefix(),
            }
        })

        self.user_account = Mock(key="campaign-1")
        self.conversation = Mock(key="conv-1", user_account=self.user_account)
        self.resource.app_worker.conversation = self.conversation
Example #36
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=False))
     self.msg_helper = self.add_helper(MessageHelper())
     # Since we're never loading the actual objects, we can't detect
     # tombstones. Therefore, each test needs its own bucket prefix.
     self.expected_bucket_prefix = "bucket-%s" % (uuid4().hex,)
     self.riak_manager = self.persistence_helper.get_riak_manager({
         "bucket_prefix": self.expected_bucket_prefix,
     })
     self.add_cleanup(self.riak_manager.close_manager)
     self.redis_manager = yield self.persistence_helper.get_redis_manager()
     self.mdb = MessageStore(self.riak_manager, self.redis_manager)
     self.default_args = [
         "-b", self.expected_bucket_prefix,
     ]
Example #37
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=False))
     self.msg_helper = self.add_helper(MessageHelper())
     # Since we're never loading the actual objects, we can't detect
     # tombstones. Therefore, each test needs its own bucket prefix.
     config = self.persistence_helper.mk_config({})["riak_manager"].copy()
     config["bucket_prefix"] = "%s-%s" % (
         uuid4().hex, config["bucket_prefix"])
     self.riak_manager = self.persistence_helper.get_riak_manager(config)
     self.redis_manager = yield self.persistence_helper.get_redis_manager()
     self.mdb = MessageStore(self.riak_manager, self.redis_manager)
     self.expected_bucket_prefix = "bucket"
     self.default_args = [
         "-b", self.expected_bucket_prefix,
     ]
Example #38
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(use_riak=True, is_sync=False))
     # Since we're never loading the actual objects, we can't detect
     # tombstones. Therefore, each test needs its own bucket prefix.
     self.expected_bucket_prefix = "bucket-%s" % (uuid4().hex,)
     self.riak_manager = self.persistence_helper.get_riak_manager({
         "bucket_prefix": self.expected_bucket_prefix,
     })
     self.add_cleanup(self.riak_manager.close_manager)
     self.model = self.riak_manager.proxy(SimpleModel)
     self.model_cls_path = ".".join([
         SimpleModel.__module__, SimpleModel.__name__])
     self.default_args = [
         "-m", self.model_cls_path,
         "-b", self.expected_bucket_prefix,
     ]
Example #39
    def __init__(self, transport_class, use_riak=False, **msg_helper_args):
        self.transport_class = transport_class
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.transport_name = self.msg_helper.transport_name

        self.worker_helper = WorkerHelper(
            connector_name=self.transport_name,
            status_connector_name="%s.status" % (self.transport_name,))

        self.dispatch_helper = MessageDispatchHelper(
            self.msg_helper, self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
        generate_proxies(self, self.persistence_helper)
Example #40
class TransportHelper(object):
    """
    Test helper for transport workers.

    This helper constructs and wraps several lower-level helpers and provides
    higher-level functionality for transport tests.

    :param transport_class:
        The worker class for the transport being tested.

    :param bool use_riak:
        Set to ``True`` if the test requires Riak. This is passed to the
        underlying :class:`~vumi.tests.helpers.PersistenceHelper`.

    :param \**msg_helper_args:
        All other keyword params are passed to the underlying
        :class:`~vumi.tests.helpers.MessageHelper`.
    """

    implements(IHelper)

    def __init__(self, transport_class, use_riak=False, **msg_helper_args):
        self.transport_class = transport_class
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.transport_name = self.msg_helper.transport_name
        self.worker_helper = WorkerHelper(self.transport_name)
        self.dispatch_helper = MessageDispatchHelper(
            self.msg_helper, self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)
        generate_proxies(self, self.persistence_helper)

    def setup(self):
        self.persistence_helper.setup()
        self.worker_helper.setup()

    @inlineCallbacks
    def cleanup(self):
        yield self.worker_helper.cleanup()
        yield self.persistence_helper.cleanup()

    def get_transport(self, config, cls=None, start=True):
        """
        Get an instance of a transport class.

        :param config: Config dict.
        :param cls: The transport class to instantiate.
                    Defaults to :attr:`transport_class`
        :param start: True to start the transport (default), False otherwise.

        Some default config values are helpfully provided in the
        interests of reducing boilerplate:

        * ``transport_name`` defaults to :attr:`self.transport_name`
        """

        if cls is None:
            cls = self.transport_class
        config = self.mk_config(config)
        config.setdefault('transport_name', self.transport_name)
        return self.get_worker(cls, config, start)

    def get_dispatched_failures(self, connector_name=None):
        """
        Get failures dispatched by a transport.

        :param str connector_name:
            Connector name. If ``None``, the default connector name for the
            helper instance will be used.

        :returns:
            A list of :class:`~vumi.transports.failures.FailureMessage`
            instances.
        """
        return self.get_dispatched(connector_name, 'failures', FailureMessage)
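A hedged usage sketch for the TransportHelper above (it typically lives in vumi.transports.tests.helpers). MyTransport is an illustrative stand-in for the transport class under test, a real transport would need its own config keys, and make_dispatch_outbound is one of the methods proxied in from MessageDispatchHelper.

from twisted.internet.defer import inlineCallbacks

from vumi.tests.helpers import VumiTestCase
from vumi.transports.tests.helpers import TransportHelper


class TestMyTransportSketch(VumiTestCase):

    @inlineCallbacks
    def test_outbound_sketch(self):
        # MyTransport is hypothetical; substitute the transport under test.
        tx_helper = self.add_helper(TransportHelper(MyTransport))
        # transport_name is defaulted by get_transport (see above); any other
        # config the transport needs would go in this dict.
        yield tx_helper.get_transport({})
        # Build an outbound message and dispatch it to the transport.
        yield tx_helper.make_dispatch_outbound('ping')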
Example #41
class VumiApiHelper(object):
    # TODO: Clear bucket properties.
    #       We need two things for this:
    #        * The ability to clear bucket properties in our Riak layer.
    #        * Tracking accounts created so we know which buckets to clear.
    #
    #       The first needs to happen in vumi and requires an updated Riak
    #       client. The second isn't really worth doing until the first is
    #       done.

    implements(IHelper)

    def __init__(self, is_sync=False, use_riak=True):
        self.is_sync = is_sync
        self._patch_helper = PatchHelper()
        generate_proxies(self, self._patch_helper)

        self._persistence_helper = PersistenceHelper(
            use_riak=use_riak, is_sync=is_sync)
        self.broker = None  # Will be replaced by the first worker_helper.
        self._worker_helpers = {}
        self._users_created = 0
        self._user_helpers = {}
        self._vumi_api = None

        generate_proxies(self, self._persistence_helper)

    def setup(self, setup_vumi_api=True):
        self._persistence_helper.setup()
        if self.is_sync:
            self._django_amqp_setup()
        if setup_vumi_api:
            return self.setup_vumi_api()

    @maybe_async
    def cleanup(self):
        for worker_helper in self._worker_helpers.values():
            # All of these will wait for the same broker, but that's fine.
            yield worker_helper.cleanup()
        yield self._persistence_helper.cleanup()
        self._patch_helper.cleanup()

    def _django_amqp_setup(self):
        import go.base.amqp
        import go.base.utils
        # We might need an AMQP connection at some point.
        broker = self.get_worker_helper().broker
        broker.exchange_declare('vumi', 'direct')
        self.django_amqp_connection = FakeAmqpConnection(broker)
        self.monkey_patch(
            go.base.utils, 'connection', self.django_amqp_connection)
        self.monkey_patch(
            go.base.amqp, 'connection', self.django_amqp_connection)

    def get_worker_helper(self, connector_name=None):
        if connector_name not in self._worker_helpers:
            worker_helper = WorkerHelper(connector_name, self.broker)
            # If this is our first worker helper, we need to grab the broker it
            # created. If it isn't, its broker will be self.broker anyway.
            self.broker = worker_helper.broker
            self._worker_helpers[connector_name] = worker_helper
        return self._worker_helpers[connector_name]

    @proxyable
    def get_vumi_api(self):
        assert self._vumi_api is not None, "No vumi_api provided."
        return self._vumi_api

    @proxyable
    def set_vumi_api(self, vumi_api):
        assert self._vumi_api is None, "Can't override existing vumi_api."
        self._vumi_api = vumi_api
        # TODO: Find a nicer way to give everything the same fake redis.
        pcfg = self._persistence_helper._config_overrides
        pcfg['redis_manager']['FAKE_REDIS'] = vumi_api.redis

    @proxyable
    def setup_vumi_api(self):
        if self.is_sync:
            return self.setup_sync_vumi_api()
        else:
            return self.setup_async_vumi_api()

    def setup_sync_vumi_api(self):
        from django.conf import settings
        import go.base.amqp
        self._vumi_api = VumiApi.from_config_sync(
            settings.VUMI_API_CONFIG, go.base.amqp.connection)

    def setup_async_vumi_api(self):
        worker_helper = self.get_worker_helper()
        amqp_client = worker_helper.get_fake_amqp_client(worker_helper.broker)
        d = amqp_client.start_publisher(ApiCommandPublisher)
        d.addCallback(lambda cmd_publisher: VumiApi.from_config_async(
            self.mk_config({}), cmd_publisher))
        return d.addCallback(self.set_vumi_api)

    @proxyable
    @maybe_async
    def make_user(self, username, enable_search=True, django_user_pk=None):
        # NOTE: We use bytes instead of unicode here because that's what the
        #       real new_user gives us.
        key = "test-%s-user" % (len(self._user_helpers),)
        user = self.get_vumi_api().account_store.users(key, username=username)
        yield user.save()
        user_helper = UserApiHelper(self, key, django_user_pk=django_user_pk)
        self._user_helpers[key] = user_helper
        if enable_search:
            contact_store = user_helper.user_api.contact_store
            yield contact_store.contacts.enable_search()
            yield contact_store.groups.enable_search()
        returnValue(self.get_user_helper(user.key))

    @proxyable
    def get_user_helper(self, account_key):
        return self._user_helpers[account_key]

    @proxyable
    @maybe_async
    def get_or_create_user(self):
        assert len(self._user_helpers) <= 1, "Too many users."
        if not self._user_helpers:
            yield self.make_user(u"testuser")
        returnValue(self._user_helpers.values()[0])

    @proxyable
    @maybe_async
    def setup_tagpool(self, pool, tags, metadata=None):
        tags = [(pool, tag) for tag in tags]
        yield self.get_vumi_api().tpm.declare_tags(tags)
        if metadata:
            yield self.get_vumi_api().tpm.set_metadata(pool, metadata)
        returnValue(tags)

    def get_dispatched_commands(self):
        return self.get_worker_helper().get_dispatched(
            'vumi', 'api', VumiApiCommand)
Example #42
class TestBaseRouterWorker(VumiTestCase, JunebugTestBase):
    DEFAULT_ROUTER_WORKER_CONFIG = {
        'inbound_ttl': 60,
        'outbound_ttl': 60 * 60 * 24 * 2,
        'metric_window': 1.0,
        'destinations': [],
    }

    @inlineCallbacks
    def setUp(self):
        self.workerhelper = WorkerHelper()
        self.addCleanup(self.workerhelper.cleanup)

        self.persistencehelper = PersistenceHelper()
        yield self.persistencehelper.setup()
        self.addCleanup(self.persistencehelper.cleanup)

        self.messagehelper = MessageHelper()
        self.addCleanup(self.messagehelper.cleanup)

    @inlineCallbacks
    def get_router_worker(self, config=None):
        if config is None:
            config = {}

        config = conjoin(
            self.persistencehelper.mk_config(
                self.DEFAULT_ROUTER_WORKER_CONFIG),
            config)

        TestRouter._create_worker = self.workerhelper.get_worker
        worker = yield self.workerhelper.get_worker(TestRouter, config)
        returnValue(worker)

    @inlineCallbacks
    def test_start_router_worker_no_destinations(self):
        """
        If there are no destinations specified, no workers should be started.
        The setup_router function should be called on the implementation.
        """
        worker = yield self.get_router_worker()
        self.assertEqual(len(worker.namedServices), 0)
        self.assertTrue(worker.setup_called)

    @inlineCallbacks
    def test_start_router_with_destinations(self):
        """
        If there are destinations specified, then a worker should be started
        for every destination.
        """
        worker = yield self.get_router_worker({
            'destinations': [
                {
                    'id': 'test-destination1',
                },
                {
                    'id': 'test-destination2',
                },
            ],
        })
        self.assertTrue(worker.setup_called)
        self.assertEqual(sorted(worker.namedServices.keys()), [
            'test-destination1', 'test-destination2'])

        for connector in worker.connectors.values():
            self.assertFalse(connector.paused)

    @inlineCallbacks
    def test_teardown_router(self):
        """
        Tearing down a router should pause all connectors, and call the
        teardown method of the router implementation
        """
        worker = yield self.get_router_worker({
            'destinations': [{'id': 'test-destination1'}],
        })

        self.assertFalse(worker.teardown_called)
        for connector in worker.connectors.values():
            self.assertFalse(connector.paused)

        yield worker.teardown_worker()

        self.assertTrue(worker.teardown_called)
        for connector in worker.connectors.values():
            self.assertTrue(connector.paused)

    @inlineCallbacks
    def test_consume_channel(self):
        """
        consume_channel should set up the appropriate connector, as well as
        attach the specified callbacks for messages and events.
        """
        worker = yield self.get_router_worker({})

        messages = []
        events = []

        def message_callback(channelid, message):
            assert channelid == 'testchannel'
            messages.append(message)

        def event_callback(channelid, event):
            assert channelid == 'testchannel'
            events.append(event)

        yield worker.consume_channel(
            'testchannel', message_callback, event_callback)

        # Because this is only called in setup, and we're creating connectors
        # after setup, we need to unpause them
        worker.unpause_connectors()

        self.assertEqual(messages, [])
        inbound = self.messagehelper.make_inbound('test message')
        yield self.workerhelper.dispatch_inbound(inbound, 'testchannel')
        self.assertEqual(messages, [inbound])

        self.assertEqual(events, [])
        event = self.messagehelper.make_ack()
        yield self.workerhelper.dispatch_event(event, 'testchannel')
        self.assertEqual(events, [event])

    @inlineCallbacks
    def test_send_inbound_to_destination(self):
        """
        send_inbound_to_destination should send the provided inbound message
        to the specified destination worker
        """
        worker = yield self.get_router_worker({
            'destinations': [{
                'id': 'test-destination',
                'amqp_queue': 'testqueue',
            }],
        })

        inbound = self.messagehelper.make_inbound('test_message')
        yield worker.send_inbound_to_destination('test-destination', inbound)

        [message] = yield self.workerhelper.wait_for_dispatched_inbound(
            connector_name='testqueue')
        self.assertEqual(message, inbound)

    @inlineCallbacks
    def test_send_event_to_destination(self):
        """
        send_event_to_destination should send the provided event message
        to the specified destination worker
        """
        worker = yield self.get_router_worker({
            'destinations': [{
                'id': 'test-destination',
                'amqp_queue': 'testqueue',
            }],
        })

        ack = self.messagehelper.make_ack()
        yield worker.send_event_to_destination('test-destination', ack)

        [event] = yield self.workerhelper.wait_for_dispatched_events(
            connector_name='testqueue')
        self.assertEqual(event, ack)

    @inlineCallbacks
    def test_consume_destination(self):
        """
        If a callback is attached to a destination, then that callback should
        be called when an outbound is sent from a destination
        """
        worker = yield self.get_router_worker({
            'destinations': [{
                'id': 'test-destination',
                'amqp_queue': 'testqueue',
            }],
        })

        messages = []

        def message_callback(destinationid, message):
            assert destinationid == 'test-destination'
            messages.append(message)

        yield worker.consume_destination('test-destination', message_callback)
        # Because this is only called in setup, and we're creating connectors
        # after setup, we need to unpause them
        worker.unpause_connectors()

        self.assertEqual(messages, [])
        msg = self.messagehelper.make_outbound('testmessage')
        yield self.workerhelper.dispatch_outbound(msg, 'test-destination')
        self.assertEqual(messages, [msg])

    @inlineCallbacks
    def test_send_outbound_to_channel(self):
        """
        send_outbound_to_channel should send the provided outbound message to
        the specified channel
        """
        worker = yield self.get_router_worker({})

        yield worker.consume_channel('testchannel', lambda m: m, lambda e: e)

        outbound = self.messagehelper.make_outbound('test message')
        yield worker.send_outbound_to_channel('testchannel', outbound)

        [message] = yield self.workerhelper.wait_for_dispatched_outbound(
            connector_name='testchannel')
        self.assertEqual(message, outbound)
Example #43
class DispatcherHelper(object):
    """
    Test helper for dispatcher workers.

    This helper constructs and wraps several lower-level helpers and provides
    higher-level functionality for dispatcher tests.

    :param dispatcher_class:
        The worker class for the dispatcher being tested.

    :param bool use_riak:
        Set to ``True`` if the test requires Riak. This is passed to the
        underlying :class:`~vumi.tests.helpers.PersistenceHelper`.

    :param \**msg_helper_args:
        All other keyword params are passed to the underlying
        :class:`~vumi.tests.helpers.MessageHelper`.
    """

    implements(IHelper)

    def __init__(self, dispatcher_class, use_riak=False, **msg_helper_args):
        self.dispatcher_class = dispatcher_class
        self.worker_helper = WorkerHelper()
        self.persistence_helper = PersistenceHelper(use_riak=use_riak)
        self.msg_helper = MessageHelper(**msg_helper_args)
        self.dispatch_helper = MessageDispatchHelper(self.msg_helper, self.worker_helper)

        # Proxy methods from our helpers.
        generate_proxies(self, self.msg_helper)
        generate_proxies(self, self.worker_helper)
        generate_proxies(self, self.dispatch_helper)

    def setup(self):
        self.persistence_helper.setup()
        self.worker_helper.setup()

    @inlineCallbacks
    def cleanup(self):
        yield self.worker_helper.cleanup()
        yield self.persistence_helper.cleanup()

    def get_dispatcher(self, config, cls=None, start=True):
        """
        Get an instance of a dispatcher class.

        :param dict config: Config dict.
        :param cls:
            The transport class to instantiate. Defaults to
            :attr:`dispatcher_class`
        :param bool start:
            ``True`` to start the dispatcher (default), ``False`` otherwise.
        """
        if cls is None:
            cls = self.dispatcher_class
        config = self.persistence_helper.mk_config(config)
        return self.get_worker(cls, config, start)

    def get_connector_helper(self, connector_name):
        """
        Construct a :class:`~DispatcherConnectorHelper` for the provided
        ``connector_name``.
        """
        return DispatcherConnectorHelper(self, connector_name)
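A hedged usage sketch for the DispatcherHelper above (typically importable from vumi.dispatchers.tests.helpers). MyDispatcher and the connector names are illustrative, the config keys depend on the dispatcher class under test, and make_inbound / dispatch_inbound are proxied in from the wrapped MessageHelper and WorkerHelper.

from twisted.internet.defer import inlineCallbacks

from vumi.dispatchers.tests.helpers import DispatcherHelper
from vumi.tests.helpers import VumiTestCase


class TestMyDispatcherSketch(VumiTestCase):

    @inlineCallbacks
    def test_dispatch_sketch(self):
        # MyDispatcher is hypothetical; substitute the dispatcher under test.
        disp_helper = self.add_helper(DispatcherHelper(MyDispatcher))
        # The config keys here are illustrative and dispatcher-specific.
        yield disp_helper.get_dispatcher({
            'receive_inbound_connectors': ['transport1'],
            'receive_outbound_connectors': ['app1'],
        })
        msg = disp_helper.make_inbound('hello')
        yield disp_helper.dispatch_inbound(msg, 'transport1')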
Example #44
 def setUp(self):
     self.persistence_helper = self.add_helper(
         PersistenceHelper(is_sync=False))
     self.redis = yield self.persistence_helper.get_redis_manager()
     self.sm = SessionManager(self.redis)
Example #45
class JunebugTestBase(TestCase):
    '''Base test case that all junebug tests inherit from. Contains useful
    helper functions'''

    default_channel_properties = {
        'type': 'telnet',
        'config': {
            'twisted_endpoint': 'tcp:0',
            'worker_name': 'unnamed',
        },
        'mo_url': 'http://foo.bar',
    }

    default_channel_config = {
        'ttl': 60,
        'amqp': {},
    }

    def patch_logger(self):
        ''' Patches the logger with an in-memory logger, which is accessible
        at "self.logging_handler".'''
        self.logging_handler = logging.handlers.MemoryHandler(100)
        logging.getLogger().addHandler(self.logging_handler)
        self.addCleanup(self._cleanup_logging_patch)

    def _cleanup_logging_patch(self):
        self.logging_handler.close()
        logging.getLogger().removeHandler(self.logging_handler)

    def create_channel_properties(self, **kw):
        properties = deepcopy(self.default_channel_properties)
        properties.update(kw)
        return properties

    @inlineCallbacks
    def create_channel_config(self, **kw):
        self.persistencehelper = PersistenceHelper()
        yield self.persistencehelper.setup()
        self.addCleanup(self.persistencehelper.cleanup)

        config = deepcopy(self.default_channel_config)
        config.update(kw)
        channel_config = self.persistencehelper.mk_config(config)
        channel_config['redis'] = channel_config['redis_manager']
        returnValue(JunebugConfig(channel_config))

    @inlineCallbacks
    def create_channel(
            self, service, redis, transport_class,
            properties=default_channel_properties, id=None):
        '''Creates, starts, and saves a channel with a
        TelnetServerTransport transport'''
        properties = deepcopy(properties)
        config = yield self.create_channel_config()
        channel = Channel(
            redis, config, properties, id=id)
        properties['config']['transport_name'] = channel.id
        yield channel.start(self.service)
        yield channel.save()
        self.addCleanup(channel.stop)
        returnValue(channel)

    @inlineCallbacks
    def create_channel_from_id(self, redis, config, id, service):
        '''Loads an existing channel, given the channel id'''
        config = yield self.create_channel_config(**config)
        channel = yield Channel.from_id(redis, config, id, service)
        returnValue(channel)

    @inlineCallbacks
    def get_redis(self):
        '''Creates and returns a redis manager'''
        if hasattr(self, 'redis'):
            returnValue(self.redis)
        persistencehelper = PersistenceHelper()
        yield persistencehelper.setup()
        self.redis = yield persistencehelper.get_redis_manager()
        self.addCleanup(persistencehelper.cleanup)
        returnValue(self.redis)

    @inlineCallbacks
    def start_server(self):
        '''Starts a junebug server. Stores the service to "self.service", and
        the url at "self.url"'''
        config = yield self.create_channel_config()
        self.service = JunebugService(config)
        self.api = JunebugApi(
            self.service, config)

        redis = yield self.persistencehelper.get_redis_manager()
        self.api = JunebugApi(self.service, config)
        yield self.api.setup(redis, self.get_message_sender())

        self.config = self.api.config
        self.redis = self.api.redis
        self.inbounds = self.api.inbounds
        self.outbounds = self.api.outbounds
        self.message_sender = self.api.message_sender

        port = reactor.listenTCP(
            0, Site(self.api.app.resource()),
            interface='127.0.0.1')
        self.addCleanup(port.stopListening)
        addr = port.getHost()
        self.url = "http://%s:%s" % (addr.host, addr.port)

    def get_message_sender(self):
        '''Creates a new MessageSender object, with a fake amqp client'''
        message_sender = MessageSender('amqp-spec-0-8.xml', None)
        spec = get_spec(vumi_resource_path('amqp-spec-0-8.xml'))
        client = FakeAmqpClient(spec)
        message_sender.client = client
        return message_sender

    def get_dispatched_messages(self, queue):
        '''Gets all messages that have been dispatched to the amqp broker.
        Should only be called after start_server, as it looks in the api for
        the amqp client'''
        amqp_client = self.api.message_sender.client
        return amqp_client.broker.get_messages(
            'vumi', queue)
Example #46
class JunebugTestBase(TestCase):
    '''Base test case that all junebug tests inherit from. Contains useful
    helper functions'''

    default_channel_properties = {
        'type': 'telnet',
        'config': {
            'twisted_endpoint': 'tcp:0',
        },
        'mo_url': 'http://foo.bar',
    }

    default_channel_config = {
        'ttl': 60,
        'amqp': {},
    }

    def patch_logger(self):
        ''' Patches the logger with an in-memory logger, which is accessible
        at "self.logging_handler".'''
        self.logging_handler = logging.handlers.MemoryHandler(100)
        logging.getLogger().addHandler(self.logging_handler)
        self.addCleanup(self._cleanup_logging_patch)

    def patch_message_rate_clock(self):
        '''Patches the message rate clock, and returns the clock'''
        clock = Clock()
        self.patch(MessageRateStore, 'get_seconds', lambda _: clock.seconds())
        return clock

    def _cleanup_logging_patch(self):
        self.logging_handler.close()
        logging.getLogger().removeHandler(self.logging_handler)

    def create_channel_properties(self, **kw):
        properties = deepcopy(self.default_channel_properties)
        properties.update(kw)
        return properties

    @inlineCallbacks
    def create_channel_config(self, **kw):
        self.persistencehelper = PersistenceHelper()
        yield self.persistencehelper.setup()
        self.addCleanup(self.persistencehelper.cleanup)

        config = deepcopy(self.default_channel_config)
        config.update(kw)
        channel_config = self.persistencehelper.mk_config(config)
        channel_config['redis'] = channel_config['redis_manager']
        returnValue(JunebugConfig(channel_config))

    @inlineCallbacks
    def create_channel(
            self, service, redis, transport_class=None,
            properties=default_channel_properties, id=None, config=None,
            plugins=[]):
        '''Creates, starts, and saves a channel with a
        TelnetServerTransport transport'''
        self.patch(junebug.logging_service, 'LogFile', DummyLogFile)
        if transport_class is None:
            transport_class = 'vumi.transports.telnet.TelnetServerTransport'

        properties = deepcopy(properties)
        logpath = self.mktemp()
        if config is None:
            config = yield self.create_channel_config(
                channels={
                    properties['type']: transport_class
                },
                logging_path=logpath)

        channel = Channel(
            redis, config, properties, id=id, plugins=plugins)
        yield channel.start(self.service)

        properties['config']['transport_name'] = channel.id

        yield channel.save()
        self.addCleanup(channel.stop)
        returnValue(channel)

    @inlineCallbacks
    def create_channel_from_id(self, redis, config, id, service):
        '''Loads an existing channel, given the channel id'''
        config = yield self.create_channel_config(**config)
        channel = yield Channel.from_id(redis, config, id, service)
        returnValue(channel)

    @inlineCallbacks
    def get_redis(self):
        '''Creates and returns a redis manager'''
        if hasattr(self, 'redis'):
            returnValue(self.redis)
        persistencehelper = PersistenceHelper()
        yield persistencehelper.setup()
        self.redis = yield persistencehelper.get_redis_manager()
        self.addCleanup(persistencehelper.cleanup)
        returnValue(self.redis)

    @inlineCallbacks
    def start_server(self, config=None):
        '''Starts a junebug server. Stores the service to "self.service", and
        the url at "self.url"'''
        # TODO: This setup is very manual, because we don't call
        # service.startService. This must be fixed to closely mirror the real
        # program with our tests.
        if config is None:
            config = yield self.create_channel_config()
        self.service = JunebugService(config)
        self.api = JunebugApi(
            self.service, config)
        self.service.api = self.api

        redis = yield self.get_redis()
        yield self.api.setup(redis, self.get_message_sender())

        self.config = self.api.config
        self.redis = self.api.redis
        self.inbounds = self.api.inbounds
        self.outbounds = self.api.outbounds
        self.message_sender = self.api.message_sender

        port = reactor.listenTCP(
            0, Site(self.api.app.resource()),
            interface='127.0.0.1')
        self.service._port = port
        self.addCleanup(self.stop_server)
        addr = port.getHost()
        self.url = "http://%s:%s" % (addr.host, addr.port)

    @inlineCallbacks
    def stop_server(self):
        # TODO: This teardown is very messy, because we don't actually call
        # service.startService. This needs to be fixed in order to ensure that
        # our tests are mirroring the real program closely.
        yield self.service.stopService()
        for service in self.service:
            service.disownServiceParent()
        for service in self.service.namedServices.values():
            service.disownServiceParent()

    def get_message_sender(self):
        '''Creates a new MessageSender object, with a fake amqp client'''
        message_sender = MessageSender('amqp-spec-0-8.xml', None)
        spec = get_spec(vumi_resource_path('amqp-spec-0-8.xml'))
        client = FakeAmqpClient(spec)
        message_sender.client = client
        return message_sender

    def get_dispatched_messages(self, queue):
        '''Gets all messages that have been dispatched to the amqp broker.
        Should only be called after start_server, as it looks in the api for
        the amqp client'''
        amqp_client = self.api.message_sender.client
        return amqp_client.broker.get_messages(
            'vumi', queue)

    def assert_was_logged(self, msg):
        self.assertTrue(any(
            msg in log.getMessage()
            for log in self.logging_handler.buffer))

    def assert_request(self, req, method=None, body=None, headers=None):
        if method is not None:
            self.assertEqual(req['request'].method, method)

        if headers is not None:
            for name, values in headers.iteritems():
                self.assertEqual(
                    req['request'].requestHeaders.getRawHeaders(name),
                    values)

        if body is not None:
            self.assertEqual(json.loads(req['body']), body)

    def assert_body_contains(self, req, **fields):
        body = json.loads(req['body'])

        self.assertEqual(
            dict((k, v) for k, v in body.iteritems() if k in fields),
            fields)

    def assert_log(self, log, expected):
        '''Assert that a log matches what is expected.'''
        timestamp = log.pop('timestamp')
        self.assertTrue(isinstance(timestamp, float))
        self.assertEqual(log, expected)

    def generate_status(
            self, level=None, components={}, inbound_message_rate=0,
            outbound_message_rate=0, submitted_event_rate=0,
            rejected_event_rate=0, delivery_succeeded_rate=0,
            delivery_failed_rate=0, delivery_pending_rate=0):
        '''Generates a status that the http API would respond with, given the
        same parameters'''
        return {
            'status': level,
            'components': components,
            'inbound_message_rate': inbound_message_rate,
            'outbound_message_rate': outbound_message_rate,
            'submitted_event_rate': submitted_event_rate,
            'rejected_event_rate': rejected_event_rate,
            'delivery_succeeded_rate': delivery_succeeded_rate,
            'delivery_failed_rate': delivery_failed_rate,
            'delivery_pending_rate': delivery_pending_rate,
        }

    def assert_status(self, status, **kwargs):
        '''Assert that the current channel status is correct'''
        self.assertEqual(status, self.generate_status(**kwargs))
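A hedged sketch of a test built on the JunebugTestBase in this last example (assumed to be importable or in scope): it starts the in-process server, grabs the shared redis manager, and creates a channel with the default telnet properties defined on the base class. The assertion is illustrative only.

from twisted.internet.defer import inlineCallbacks


class TestChannelSketch(JunebugTestBase):

    @inlineCallbacks
    def test_create_channel_sketch(self):
        yield self.start_server()
        redis = yield self.get_redis()
        # Uses default_channel_properties (telnet transport on tcp:0) from
        # JunebugTestBase above.
        channel = yield self.create_channel(self.service, redis)
        self.assertTrue(channel.id is not None)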