def ready(self):
    """Initialize module-level messaging globals when the Django app registry is ready.

    Builds kombu connection/producer pools and a broker connection from
    Django settings, defaulting to an in-memory transport when no broker
    URL is configured.
    """
    super(MessagingAppConfig, self).ready()
    from kombu import pools
    from kombu import BrokerConnection

    global connections
    global producers
    global url
    global task_serializer
    global broker_transport_options
    global broker_socket_timeout
    global connection

    connections = pools.Connections(limit=100)
    producers = pools.Producers(limit=connections.limit)

    # run in-memory if broker is not available
    # see producer code for synchronous queue
    url = getattr(settings, 'BROKER_URL', 'memory://')
    task_serializer = getattr(settings, 'CELERY_TASK_SERIALIZER', 'pickle')
    broker_transport_options = getattr(settings, 'BROKER_TRANSPORT_OPTIONS',
                                       {'socket_timeout': 10})
    # BUG FIX: broker_transport_options is a dict, so getattr() never found
    # the key and always returned the default 10 — use dict.get() so a
    # configured socket_timeout is actually honoured.
    broker_socket_timeout = broker_transport_options.get('socket_timeout', 10)
    connection = BrokerConnection(url, connect_timeout=broker_socket_timeout)
def __init__(self, connection='amqp:///', name=None, logger=None, limit=None):
    """Initialization of Client instance

    :param connection: connection for broker
    :type connection: str, None, kombu.connections.Connection, dict
    :param name: human-readable client name; derived from the connection
        URI (or transport class) when omitted
    :param logger: logger to wrap; a module-level logger is used when omitted
    :param limit: connection-pool size; defaults to the global kombu limit
    """
    self.connection = self._get_connection(connection)
    self.exchanges = {}
    if name is None:
        try:
            name = '<client: {}>'.format(self.connection.as_uri())
        # BUG FIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit — narrowed to Exception.
        except Exception:  # pragma: no cover
            # Errors with filesystem transport: as_uri() is unsupported,
            # fall back to the transport class name.
            name = '<client: {}>'.format(self.connection.transport_cls)
    if logger is None:
        logger = get_logger(__name__)
    self.logger = InstanceLogger(self, logger)
    self.name = name
    self.logger.debug('%s built', self.name)
    if limit is None:
        # Set limit as global kombu limit.
        limit = pools.get_limit()
    self.limit = limit
    self.connections = pools.Connections(self.limit)
def purge_queue(self, name):
    """Remove all messages from queue

    :param name: name of queue
    :type name: str
    """
    # PERF FIX: reuse the pool built in __init__ (self.connections) when
    # present instead of constructing a fresh Connections pool — and the
    # connections it holds — on every purge call.
    pool = getattr(self, 'connections', None)
    if pool is None:
        pool = pools.Connections(self.limit)
    with pool[self.connection].acquire() as conn:
        Queue(name=name, channel=conn).purge()
    self.logger.debug('Queue "%s" was purged', name)
def __init__(self, config):
    # type: (dict) -> None
    """Open the outgoing AMQP connection and build a per-object producer pool."""
    self.config = config
    # FIX: `self.name` was assigned twice (self.config.name, then
    # config.name — the same object); keep a single assignment.
    self.name = config.name
    self.get_conn_class_func = config.get_conn_class_func
    self.conn = self.get_conn_class_func(
        'out/{}'.format(self.config.name))(
            self.config.conn_url, frame_max=self.config.frame_max)

    # Kombu uses a global object to keep all connections in (pools.connections)
    # but we cannot use it because multiple channels or outgoing connections
    # may be using the same definition, thus we need to create a new
    # connection group for each _AMQPProducers object.
    connections = pools.register_group(pools.Connections(limit=self.config.pool_size))

    class _Producers(pools.Producers):
        def create(self, connection, limit):
            # Bind producers to this object's private connection group.
            return pools.ProducerPool(connections[connection], limit=limit)

    self.pool = _Producers(limit=self.config.pool_size)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################

from django.conf import settings
from kombu import pools
from kombu import BrokerConnection

# Module-level kombu pools shared by producers in this app.
connections = pools.Connections(limit=100)
producers = pools.Producers(limit=connections.limit)

# run in-memory if broker is not available
# see producer code for synchronous queue
url = getattr(settings, 'BROKER_URL', 'memory://')
task_serializer = getattr(settings, 'CELERY_TASK_SERIALIZER', 'pickle')
broker_transport_options = getattr(settings, 'BROKER_TRANSPORT_OPTIONS',
                                   {'socket_timeout': 10})
# BUG FIX: broker_transport_options is a dict, so getattr() never found the
# key and always returned the default 10 — use dict.get() so a configured
# socket_timeout is actually honoured.
broker_socket_timeout = broker_transport_options.get('socket_timeout', 10)
connection = BrokerConnection(url, connect_timeout=broker_socket_timeout)