def setUp(self):
    """Set up persistence/worker helpers and build a HeartBeatMonitor.

    The worker is created with ``start=False`` so individual tests control
    when (and whether) it starts.
    """
    self.persistence_helper = self.add_helper(PersistenceHelper())
    self.worker_helper = self.add_helper(WorkerHelper())
    # Monitor config: one monitored system with a single worker that is
    # expected to run at least two processes.
    config = {
        'deadline': 30,
        'redis_manager': {
            'key_prefix': 'heartbeats',
            'db': 5,
            'FAKE_REDIS': True,
        },
        'monitored_systems': {
            'system-1': {
                'system_name': 'system-1',
                'system_id': 'system-1',
                'workers': {
                    'twitter_transport': {
                        'name': 'twitter_transport',
                        'min_procs': 2,
                    }
                }
            }
        }
    }
    self.worker = yield self.worker_helper.get_worker(
        monitor.HeartBeatMonitor, config, start=False)
def setUp(self):
    """Create riak-backed persistence plus message helpers.

    Skips the test via ``import_skip`` when the riak client library
    failed to import at module load time.
    """
    helper = PersistenceHelper(use_riak=True)
    self.persistence_helper = self.add_helper(helper)
    if riak_import_error is not None:
        import_skip(riak_import_error, 'riak')
    self.manager = self.persistence_helper.get_riak_manager()
    self.msg_helper = self.add_helper(MessageHelper())
def setUp(self): self.persistence_helper = self.add_helper( PersistenceHelper(use_riak=True)) # Create and stash a riak manager to clean up afterwards, because we # don't get access to the one inside the middleware. self.persistence_helper.get_riak_manager()
def get_worker(self, config=None):
    """Return a new MessageForwardingWorker using ``config`` overrides.

    :param config: optional dict of settings merged over the defaults.
    """
    if config is None:
        config = {}
    self.app_helper = ApplicationHelper(MessageForwardingWorker)
    yield self.app_helper.setup()
    self.addCleanup(self.app_helper.cleanup)
    persistence_helper = PersistenceHelper()
    yield persistence_helper.setup()
    self.addCleanup(persistence_helper.cleanup)
    # Defaults first; caller-supplied config wins on conflicts.
    defaults = persistence_helper.mk_config({
        'transport_name': 'testtransport',
        'mo_message_url': self.url.decode('utf-8'),
        'inbound_ttl': 60,
        'outbound_ttl': 60 * 60 * 24 * 2,
        'metric_window': 1.0,
    })
    worker = yield self.app_helper.get_application(
        conjoin(defaults, config))
    returnValue(worker)
def setUp(self):
    """Set up a riak persistence helper, skipping when riak is unavailable.

    The import of ``MessageStore`` acts purely as an availability probe.
    """
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True))
    try:
        from vumi.components.message_store import MessageStore
    except ImportError as e:
        # `except ... as e` is valid on Python 2.6+ and 3.x; the old
        # comma form is a syntax error on Python 3.
        import_skip(e, 'riak')
def setUp(self):
    """Set up a riak persistence helper, skipping when riak is unavailable.

    The import of the message-store API classes acts purely as an
    availability probe.
    """
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True))
    try:
        from vumi.components.message_store_api import (
            MatchResource, MessageStoreAPIWorker)
    except ImportError as e:
        # `except ... as e` is valid on Python 2.6+ and 3.x; the old
        # comma form is a syntax error on Python 3.
        import_skip(e, 'riak')
def setUp(self):
    """Set up a riak persistence helper, skipping when riak is unavailable.

    The import of ``MessageStoreResourceWorker`` acts purely as an
    availability probe.
    """
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True))
    try:
        from vumi.components.message_store_resource import (
            MessageStoreResourceWorker)
    except ImportError as e:
        # `except ... as e` is valid on Python 2.6+ and 3.x; the old
        # comma form is a syntax error on Python 3.
        # NOTE(review): this skip names 'riakasaurus' while sibling setUps
        # name 'riak' -- presumably intentional; confirm.
        import_skip(e, 'riakasaurus', 'riakasaurus.riak')
def get_redis(self):
    """Return a redis manager, creating and caching it on first use."""
    if hasattr(self, 'redis'):
        # Already built by an earlier call; reuse it.
        returnValue(self.redis)
    persistence_helper = PersistenceHelper()
    yield persistence_helper.setup()
    self.redis = yield persistence_helper.get_redis_manager()
    self.addCleanup(persistence_helper.cleanup)
    returnValue(self.redis)
def setUp(self):
    """Build riak/redis managers plus the stores under test."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True))
    self.worker_helper = self.add_helper(WorkerHelper())
    self.msg_helper = self.add_helper(MessageHelper())
    riak_manager, redis_manager = yield self.create_managers()
    self.operational_store = OperationalMessageStore(
        riak_manager, redis_manager)
    self.batch_manager = MessageStoreBatchManager(
        riak_manager, redis_manager)
def setUp(self):
    """Create a MessageStoreBatchManager and expose its internals."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True))
    manager = self.persistence_helper.get_riak_manager()
    self.manager = manager
    self.add_cleanup(manager.close_manager)
    self.redis = yield self.persistence_helper.get_redis_manager()
    batch_manager = MessageStoreBatchManager(manager, self.redis)
    self.batch_manager = batch_manager
    # Shortcuts into the batch manager's internals for assertions.
    self.backend = batch_manager.riak_backend
    self.bi_cache = batch_manager.batch_info_cache
def setUp(self):
    """Create worker, persistence and message helpers with cleanups."""
    worker_helper = WorkerHelper()
    self.workerhelper = worker_helper
    self.addCleanup(worker_helper.cleanup)
    persistence_helper = PersistenceHelper()
    self.persistencehelper = persistence_helper
    yield persistence_helper.setup()
    self.addCleanup(persistence_helper.cleanup)
    message_helper = MessageHelper()
    self.messagehelper = message_helper
    self.addCleanup(message_helper.cleanup)
def setUp(self):
    """Start a TagpoolApiServer on a random local port and point a proxy at it."""
    self.persistence_helper = self.add_helper(PersistenceHelper())
    self.redis = yield self.persistence_helper.get_redis_manager()
    self.tagpool = TagpoolManager(self.redis)
    factory = Site(TagpoolApiServer(self.tagpool))
    # Port 0 lets the OS pick a free port; read it back from getHost().
    self.server = yield reactor.listenTCP(
        0, factory, interface='127.0.0.1')
    self.add_cleanup(self.server.loseConnection)
    host = self.server.getHost()
    self.proxy = Proxy("http://%s:%d/" % (host.host, host.port))
    yield self.setup_tags()
def create_channel_config(self, **kw):
    """Build a JunebugConfig from the default channel config plus ``kw``."""
    self.persistencehelper = PersistenceHelper()
    yield self.persistencehelper.setup()
    self.addCleanup(self.persistencehelper.cleanup)
    base = deepcopy(self.default_channel_config)
    base.update(kw)
    channel_config = self.persistencehelper.mk_config(base)
    # Junebug reads its redis settings from 'redis', but mk_config
    # writes them under 'redis_manager' -- alias one to the other.
    channel_config['redis'] = channel_config['redis_manager']
    returnValue(JunebugConfig(channel_config))
def setUp(self):
    """Create an OperationalMessageStore and expose its internals."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True))
    manager = self.persistence_helper.get_riak_manager()
    self.manager = manager
    self.add_cleanup(manager.close_manager)
    self.redis = yield self.persistence_helper.get_redis_manager()
    store = OperationalMessageStore(manager, self.redis)
    self.store = store
    # Shortcuts into the store's internals for assertions.
    self.backend = store.riak_backend
    self.bi_cache = store.batch_info_cache
    self.msg_helper = self.add_helper(MessageHelper())
def setUp(self):
    """Set up a fake SMSC endpoint and a default transport config."""
    self.clock = Clock()
    self.persistence_helper = self.add_helper(PersistenceHelper())
    self.redis = yield self.persistence_helper.get_redis_manager()
    # auto_accept=False so tests decide when to accept connections.
    self.fake_smsc = FakeSMSC(auto_accept=False)
    self.default_config = {
        'transport_name': 'sphex_transport',
        'twisted_endpoint': self.fake_smsc.endpoint,
        'system_id': 'system_id',
        'password': '******',
    }
def __init__(self, dispatcher_class, use_riak=False, **msg_helper_args):
    """Set up component helpers for testing ``dispatcher_class``.

    :param dispatcher_class: dispatcher worker class under test.
    :param use_riak: passed through to :class:`PersistenceHelper`.
    :param msg_helper_args: extra keyword args for :class:`MessageHelper`.
    """
    self.dispatcher_class = dispatcher_class
    self.worker_helper = WorkerHelper()
    self.persistence_helper = PersistenceHelper(use_riak=use_riak)
    self.msg_helper = MessageHelper(**msg_helper_args)
    self.dispatch_helper = MessageDispatchHelper(
        self.msg_helper, self.worker_helper)
    # Proxy methods from our helpers.
    generate_proxies(self, self.msg_helper)
    generate_proxies(self, self.worker_helper)
    generate_proxies(self, self.dispatch_helper)
    # Consistency with the sibling helper classes (application/transport
    # helpers), which also proxy the persistence helper so callers can use
    # mk_config() etc. directly on this helper.
    generate_proxies(self, self.persistence_helper)
def setUp(self):
    """Create an account plus versioned contact proxies for migration tests."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True))
    manager = self.persistence_helper.get_riak_manager()
    self.account_store = AccountStore(manager)
    self.user = yield self.account_store.new_user(u'testuser')
    # Some old contact proxies for testing migrations.
    account_manager = manager.sub_manager(self.user.key)
    self.contacts_vnone = account_manager.proxy(ContactVNone)
    self.contacts_v1 = account_manager.proxy(ContactV1)
    self.contacts_v2 = account_manager.proxy(Contact)
def setUp(self):
    """Set up a WindowManager whose count_waiting always reports 100."""
    self.persistence_helper = self.add_helper(PersistenceHelper())
    redis_manager = yield self.persistence_helper.get_redis_manager()
    self.window_id = 'window_id'
    self.clock = Clock()
    # Patch count_waiting so tests can fake the race condition between
    # counting waiters and adding new ones.
    self.patch(WindowManager, 'count_waiting', lambda _, window_id: 100)
    self.wm = WindowManager(
        redis_manager, window_size=10, flight_lifetime=10)
    self.add_cleanup(self.wm.stop)
    yield self.wm.create_window(self.window_id)
    self.redis = self.wm.redis
def setUp(self):
    """Set up a WindowManager driven by a controllable fake clock."""
    self.persistence_helper = self.add_helper(PersistenceHelper())
    redis_manager = yield self.persistence_helper.get_redis_manager()
    self.window_id = 'window_id'
    # Patch get_clock so tests can advance time deterministically.
    self.clock = Clock()
    self.patch(WindowManager, 'get_clock', lambda _: self.clock)
    self.wm = WindowManager(
        redis_manager, window_size=10, flight_lifetime=10)
    self.add_cleanup(self.wm.stop)
    yield self.wm.create_window(self.window_id)
    self.redis = self.wm.redis
def __init__(self, is_sync=False, use_riak=True):
    """Set up patch/persistence helpers and bookkeeping for workers/users.

    :param is_sync: whether managers operate synchronously.
    :param use_riak: whether a riak backend is required.
    """
    self.is_sync = is_sync
    self._patch_helper = PatchHelper()
    generate_proxies(self, self._patch_helper)

    self._persistence_helper = PersistenceHelper(use_riak=use_riak,
                                                 is_sync=is_sync)
    # Will be replaced by the first worker_helper.
    self.broker = None
    self._worker_helpers = {}
    self._users_created = 0
    self._user_helpers = {}
    self._vumi_api = None
    generate_proxies(self, self._persistence_helper)
def __init__(self, application_class, use_riak=False, **msg_helper_args):
    """Set up component helpers for testing ``application_class``.

    :param application_class: application worker class under test.
    :param use_riak: passed through to :class:`PersistenceHelper`.
    :param msg_helper_args: extra keyword args for :class:`MessageHelper`.
    """
    self.application_class = application_class
    self.persistence_helper = PersistenceHelper(use_riak=use_riak)
    msg_helper = MessageHelper(**msg_helper_args)
    self.msg_helper = msg_helper
    self.transport_name = msg_helper.transport_name
    worker_helper = WorkerHelper(msg_helper.transport_name)
    self.worker_helper = worker_helper
    self.dispatch_helper = MessageDispatchHelper(
        msg_helper, worker_helper)
    # Proxy convenience methods from the component helpers onto this one.
    generate_proxies(self, self.msg_helper)
    generate_proxies(self, self.worker_helper)
    generate_proxies(self, self.dispatch_helper)
    generate_proxies(self, self.persistence_helper)
def setUp(self):
    """Create old/new model proxies and default CLI args for migration tests."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True, is_sync=False))
    manager = self.persistence_helper.get_riak_manager()
    self.riak_manager = manager
    self.old_model = manager.proxy(SimpleModelOld)
    self.model = manager.proxy(SimpleModel)
    self.model_cls_path = fqpn(SimpleModel)
    self.expected_bucket_prefix = "bucket"
    # Arguments every invocation of the tool under test needs.
    self.default_args = [
        "-m", self.model_cls_path,
        "-b", self.expected_bucket_prefix,
    ]
def setUp(self):
    """Create a sync model proxy and default CLI args for the tool under test."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True, is_sync=True))
    manager = self.persistence_helper.get_riak_manager()
    self.riak_manager = manager
    self.model = manager.proxy(SimpleModel)
    # Fully-qualified dotted path to the model class.
    self.model_cls_path = "%s.%s" % (
        SimpleModel.__module__, SimpleModel.__name__)
    self.expected_bucket_prefix = "bucket"
    self.default_args = [
        "-m", self.model_cls_path,
        "-b", self.expected_bucket_prefix,
    ]
def setUp(self):
    """Create the logging resource backed by a fake redis sub-manager."""
    super(TestGoLoggingResource, self).setUp()
    self.persistence_helper = self.add_helper(PersistenceHelper())
    parent_redis = yield self.persistence_helper.get_redis_manager()
    self.parent_redis = parent_redis
    self.redis = parent_redis.sub_manager(
        LogManager.DEFAULT_SUB_STORE)
    yield self.create_resource({
        'redis_manager': {
            'FAKE_REDIS': parent_redis,
            'key_prefix': parent_redis.get_key_prefix(),
        }
    })
    # Minimal fake account/conversation for the resource's app worker.
    self.user_account = Mock(key="campaign-1")
    self.conversation = Mock(
        key="conv-1", user_account=self.user_account)
    self.resource.app_worker.conversation = self.conversation
def setUp(self):
    """Create a MessageStore on a per-test unique riak bucket prefix."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True, is_sync=False))
    self.msg_helper = self.add_helper(MessageHelper())
    # Since we're never loading the actual objects, we can't detect
    # tombstones. Therefore, each test needs its own bucket prefix.
    riak_config = self.persistence_helper.mk_config(
        {})["riak_manager"].copy()
    riak_config["bucket_prefix"] = "%s-%s" % (
        uuid4().hex, riak_config["bucket_prefix"])
    self.riak_manager = self.persistence_helper.get_riak_manager(
        riak_config)
    self.redis_manager = yield self.persistence_helper.get_redis_manager()
    self.mdb = MessageStore(self.riak_manager, self.redis_manager)
    self.expected_bucket_prefix = "bucket"
    self.default_args = [
        "-b", self.expected_bucket_prefix,
    ]
def setUp(self):
    """Create a MessageStore whose bucket prefix is unique per test."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True, is_sync=False))
    self.msg_helper = self.add_helper(MessageHelper())
    # Since we're never loading the actual objects, we can't detect
    # tombstones. Therefore, each test needs its own bucket prefix.
    prefix = "bucket-%s" % (uuid4().hex,)
    self.expected_bucket_prefix = prefix
    manager = self.persistence_helper.get_riak_manager({
        "bucket_prefix": prefix,
    })
    self.riak_manager = manager
    self.add_cleanup(manager.close_manager)
    self.redis_manager = yield self.persistence_helper.get_redis_manager()
    self.mdb = MessageStore(manager, self.redis_manager)
    self.default_args = [
        "-b", prefix,
    ]
def setUp(self):
    """Create a model proxy on a per-test unique riak bucket prefix."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(use_riak=True, is_sync=False))
    # Since we're never loading the actual objects, we can't detect
    # tombstones. Therefore, each test needs its own bucket prefix.
    prefix = "bucket-%s" % (uuid4().hex,)
    self.expected_bucket_prefix = prefix
    manager = self.persistence_helper.get_riak_manager({
        "bucket_prefix": prefix,
    })
    self.riak_manager = manager
    self.add_cleanup(manager.close_manager)
    self.model = manager.proxy(SimpleModel)
    # Fully-qualified dotted path to the model class.
    self.model_cls_path = "%s.%s" % (
        SimpleModel.__module__, SimpleModel.__name__)
    self.default_args = [
        "-m", self.model_cls_path,
        "-b", prefix,
    ]
def __init__(self, transport_class, use_riak=False, **msg_helper_args):
    """Set up component helpers for testing ``transport_class``.

    :param transport_class: transport worker class under test.
    :param use_riak: passed through to :class:`PersistenceHelper`.
    :param msg_helper_args: extra keyword args for :class:`MessageHelper`.
    """
    self.transport_class = transport_class
    self.persistence_helper = PersistenceHelper(use_riak=use_riak)
    msg_helper = MessageHelper(**msg_helper_args)
    self.msg_helper = msg_helper
    transport_name = msg_helper.transport_name
    self.transport_name = transport_name
    worker_helper = WorkerHelper(
        connector_name=transport_name,
        status_connector_name="%s.status" % (transport_name,))
    self.worker_helper = worker_helper
    self.dispatch_helper = MessageDispatchHelper(
        msg_helper, worker_helper)
    # Proxy convenience methods from the component helpers onto this one.
    generate_proxies(self, self.msg_helper)
    generate_proxies(self, self.worker_helper)
    generate_proxies(self, self.dispatch_helper)
    generate_proxies(self, self.persistence_helper)
def get_worker(self, config=None):
    """Return a new ChannelStatusWorker using ``config`` overrides.

    :param config: optional dict of settings merged over the defaults.
    """
    if config is None:
        config = {}
    app_helper = ApplicationHelper(ChannelStatusWorker)
    yield app_helper.setup()
    self.addCleanup(app_helper.cleanup)
    persistence_helper = PersistenceHelper()
    yield persistence_helper.setup()
    self.addCleanup(persistence_helper.cleanup)
    # Defaults first; caller-supplied config wins on conflicts.
    defaults = persistence_helper.mk_config({
        'channel_id': 'testchannel',
    })
    worker = yield app_helper.get_application(
        conjoin(defaults, config))
    returnValue(worker)
def setUp(self):
    """Create an async SessionManager backed by a fresh redis manager."""
    self.persistence_helper = self.add_helper(
        PersistenceHelper(is_sync=False))
    self.redis = yield self.persistence_helper.get_redis_manager()
    self.sm = SessionManager(self.redis)