def get_queue_config(settings):
    # set the number of connections to Redis
    pools.set_limit(20)

    class QueueConfiguration(object):
        def __init__(self, settings):
            self.connection = Connection(settings['redis.url'])
            self.exchange = Exchange(settings['redis.exchange'], type='direct')
            self.queue = Queue(settings['redis.queue_es_sync'], self.exchange)

    return QueueConfiguration(settings)
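For readers unfamiliar with kombu's pool registry, here is a minimal, self-contained sketch of the same pattern; the broker URL, settings keys, and names are hypothetical, and set_limit() is called up front, before any pool is populated:

from kombu import Connection, Exchange, Queue, pools

pools.set_limit(20)  # cap every pool group (connections, producers) at 20

# Hypothetical settings mirroring the snippet above.
settings = {
    'redis.url': 'redis://localhost:6379/0',
    'redis.exchange': 'es_sync',
    'redis.queue_es_sync': 'es-sync-queue',
}

connection = Connection(settings['redis.url'])
exchange = Exchange(settings['redis.exchange'], type='direct')
queue = Queue(settings['redis.queue_es_sync'], exchange)

# Pools are keyed by the connection object; acquire() returns a pooled connection.
with pools.connections[connection].acquire(block=True) as conn:
    conn.ensure_connection(max_retries=3)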
def pool(self):
    """Broker connection pool: :class:`~@pool`.

    This attribute is not related to the workers concurrency pool.
    """
    if self._pool is None:
        self._ensure_after_fork()

        limit = self.conf.broker_pool_limit
        pools.set_limit(limit)
        self._pool = pools.connections[self.connection_for_write()]
    return self._pool
def test_set_limit(self):
    pools.reset()
    pools.set_limit(34576)
    limit = pools.get_limit()
    self.assertEqual(limit, 34576)

    pools.connections[Connection('memory://')]
    pools.set_limit(limit + 1)
    self.assertEqual(pools.get_limit(), limit + 1)
    limit = pools.get_limit()

    with self.assertRaises(RuntimeError):
        pools.set_limit(limit - 1)
    pools.set_limit(limit - 1, force=True)
    self.assertEqual(pools.get_limit(), limit - 1)

    pools.set_limit(pools.get_limit())
def test_set_limit(self):
    pools.reset()
    pools.set_limit(34576)
    limit = pools.get_limit()
    assert limit == 34576

    conn = Connection('memory://')
    pool = pools.connections[conn]
    with pool.acquire():
        pools.set_limit(limit + 1)
        assert pools.get_limit() == limit + 1
        limit = pools.get_limit()

        with pytest.raises(RuntimeError):
            pools.set_limit(limit - 1)
        pools.set_limit(limit - 1, force=True)
        assert pools.get_limit() == limit - 1

    pools.set_limit(pools.get_limit())
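Taken together, the two test variants above pin down the semantics of set_limit(): raising the limit is always allowed, while lowering it once a pool is in use raises RuntimeError unless force=True is passed. A condensed sketch of that contract:

from kombu import Connection, pools

pools.reset()        # start from a clean pool registry
pools.set_limit(10)
assert pools.get_limit() == 10

pool = pools.connections[Connection('memory://')]
with pool.acquire(block=True):
    pools.set_limit(11)             # raising the limit is always fine
    pools.set_limit(5, force=True)  # lowering it while in use needs force=True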
from kombu import pools
pools.set_limit(1024)  # default is 200

from dino.config import ConfigKeys
from dino.endpoint.base import BasePublisher

from kombu import Exchange
from kombu import Queue
from kombu import Connection

import logging

logger = logging.getLogger(__name__)


class AmqpPublisher(BasePublisher):
    def __init__(self, env, is_external_queue: bool):
        super().__init__(env, is_external_queue, queue_type='amqp', logger=logger)
        conf = env.config

        queue_host = conf.get(ConfigKeys.HOST, domain=self.domain_key, default='')
        if queue_host is None or len(queue_host.strip()) == 0:
            return

        queue_port = conf.get(ConfigKeys.PORT, domain=self.domain_key, default=None)
        queue_vhost = conf.get(ConfigKeys.VHOST, domain=self.domain_key, default=None)
        queue_user = conf.get(ConfigKeys.USER, domain=self.domain_key, default=None)
        queue_pass = conf.get(ConfigKeys.PASSWORD, domain=self.domain_key, default=None)

        queue_host = ';'.join(['amqp://%s' % host for host in queue_host.split(';')])
        queue_exchange = '%s_%s' % (
"""A base class implementing AM service architecture and its requirements
for a synchronous publisher Endpoint.
"""
import logging
import os
import socket
from typing import Any, Dict, List, Optional

import cbor
from kombu import Connection, Exchange
from kombu.pools import producers, set_limit

from eventail.gelf import GELF

JSON_MODEL = Dict[str, Any]
LOGGER = logging.getLogger("sync_endpoint")

set_limit(2)  # two connections are more than enough; `async_service` uses just one.


class Endpoint:
    """A synchronous publishing endpoint for AlloMedia EDA."""

    ID = os.getpid()
    HOSTNAME = socket.gethostname()

    EVENT_EXCHANGE = "events"
    CONFIGURATION_EXCHANGE = "configurations"
    LOG_EXCHANGE = "logs"
    EVENT_EXCHANGE_TYPE = "topic"
    CONFIGURATION_EXCHANGE_TYPE = "topic"
    LOG_EXCHANGE_TYPE = "topic"

    def __init__(
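Elsewhere in a module like this, the producers pool imported next to set_limit is typically drained via acquire(). A sketch of that publishing pattern, where the broker URL, routing key, and payload are assumptions (the exchange name follows the class constants above):

import cbor
from kombu import Connection, Exchange
from kombu.pools import producers, set_limit

set_limit(2)  # same tiny pool as the module above

connection = Connection('amqp://guest:guest@localhost:5672//')  # hypothetical URL
events = Exchange('events', type='topic')

with producers[connection].acquire(block=True) as producer:
    producer.publish(
        cbor.dumps({'status': 'ok'}),    # CBOR payload, as the import suggests
        exchange=events,
        routing_key='service.event.ok',  # hypothetical routing key
        declare=[events],
        content_type='application/cbor',
    )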
def setup_pool_limit(self, **kwargs):
    from kombu import pools
    from celery import current_app as celery

    limit = kwargs.get('limit', celery.conf.BROKER_POOL_LIMIT)
    pools.set_limit(limit if self.needs_eventlet else 1)
    celery._pool = pools.connections[celery.broker_connection()]
def test_resource_resizing(self):
    p = pools.producers[Connection('memory://localhost:777')]
    c = pools.connections[Connection('memory://localhost:678')]

    def in_use(pool):
        return len(pool._resource.queue) + len(pool._dirty)

    pools.set_forced_resize(True)
    assert p.forced_resize is True

    pools.set_limit(10)
    assert in_use(p) == 10
    assert p.limit == 10
    assert c.limit == 10
    assert p.forced_resize is True

    pools.set_limit(1)
    assert p.forced_resize is True
    assert in_use(p) == 1

    pools.set_limit(10)
    assert in_use(p) == 10
    pools.set_limit(1)
    pools.set_limit(0)
    assert in_use(p) == 0

    with p.acquire():
        pools.set_limit(5)
        pools.set_limit(0)
    with c.acquire():
        pools.set_limit(20)
def process_task(self, body, message):
    try:
        x = get_response_time.delay(body['url'])
        logger.info("%s: %s, %s" % (datetime.datetime.today(), x.id, body))
    except Exception as exc:
        logger.error('task raised exception: %r', exc)
    message.ack()


if __name__ == '__main__':
    from kombu import Connection
    from kombu.pools import connections
    from kombu import pools
    from kombu.utils.debug import setup_logging

    pools.set_limit(500)

    # setup root logger
    setup_logging(loglevel='INFO', loggers=[''])

    megaphone = Register()
    connection = Connection(config['consumer']['connection'],
                            heartbeat=int(config['consumer']['heartbeat']))
    connection.heartbeat_check(rate=1)

    # with Connection('amqp://*****:*****@localhost:5672//') as conn:
    with connections[connection].acquire(block=True, timeout=60) as conn:
        try:
            megaphone.add()
            worker = Worker(conn)
            worker.run()
        except KeyboardInterrupt:
            conn.release()
            megaphone.remove()
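The Worker class consumed above is not part of the snippet; a minimal sketch of what it plausibly looks like, built on kombu's ConsumerMixin (the queue wiring and all names here are assumptions):

from kombu import Exchange, Queue
from kombu.mixins import ConsumerMixin


class Worker(ConsumerMixin):
    """Hypothetical consumer: one queue, one callback."""

    task_queue = Queue('tasks', Exchange('tasks', type='direct'),
                       routing_key='tasks')

    def __init__(self, connection):
        self.connection = connection  # ConsumerMixin requires this attribute

    def get_consumers(self, Consumer, channel):
        return [Consumer(queues=[self.task_queue],
                         callbacks=[self.process_task])]

    def process_task(self, body, message):
        # handle the message body, then acknowledge it
        message.ack()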