def start(self, worker):
    dead_letter_queue = Queue(
        'gen_primes_queue_dl_queue',
        Exchange('gen_primes_queue_dl_exchange', type='direct'),
        routing_key='gen_primes_queue_dl_routing_key')
    with worker.app.pool.acquire() as conn:
        dead_letter_queue.bind(conn).declare()

def start(self, worker):
    app = worker.app
    # Declare DLX and DLQ
    dlx = Exchange(deadletter_exchange_name, type='direct')
    dead_letter_queue = Queue(
        deadletter_queue_name, dlx,
        routing_key=deadletter_routing_key)
    with worker.app.pool.acquire() as conn:
        dead_letter_queue.bind(conn).declare()

def start(self, worker): """Declare deadletter queue and exchange on worker pool start.""" dlx_exchange = Exchange(FILES_PROCESSOR_EXCHANGE_DLX, type='direct') files_processor_dlx = Queue( name=FILES_PROCESSOR_QUEUE_DLX, exchange=dlx_exchange, routing_key=FILES_PROCESSOR_QUEUE_DLX ) with worker.app.pool.acquire() as conn: files_processor_dlx.bind(conn).declare()
def start(self, worker):
    app = worker.app
    capp = self.celery_app
    dl_exchange_name = capp.conf.deadletter_prefix + capp.conf.default_exchange
    dlx = Exchange(dl_exchange_name, type=capp.conf.default_exchange_type)
    with worker.app.pool.acquire() as conn:
        for q in self.queues:
            dl_queue = Queue(
                capp.conf.deadletter_prefix + q,
                exchange=dlx,
                routing_key=capp.conf.deadletter_prefix + q)
            dl_queue.bind(conn).declare()

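# Note: the bootstep examples above only declare the dead-letter exchange
# and queue. For rejected messages to actually land there, the primary
# queue has to reference them. A minimal sketch, assuming a broker that
# honors RabbitMQ's x-dead-letter-* queue arguments; all names here are
# illustrative and must match whatever start() declared:
from kombu import Connection, Exchange, Queue

dlx = Exchange('gen_primes_queue_dl_exchange', type='direct')

# Rejected or expired messages on this queue are re-routed through the
# DLX, which delivers them to the dead-letter queue bound with this key.
primary_queue = Queue(
    'gen_primes_queue',
    Exchange('gen_primes_exchange', type='direct'),
    routing_key='gen_primes_queue',
    queue_arguments={
        'x-dead-letter-exchange': dlx.name,
        'x-dead-letter-routing-key': 'gen_primes_queue_dl_routing_key',
    })

with Connection('amqp://guest:guest@localhost//') as conn:
    primary_queue.bind(conn).declare()
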
def connect(self):
    """
    'Connects' to the bus.

    :returns: The same instance.
    :rtype: commissaire_http.bus.Bus
    """
    if self.connection is not None:
        self.logger.warning('Bus already connected.')
        return self
    self.connection = Connection(self.connection_url)
    self._channel = self.connection.channel()
    self._exchange = Exchange(
        self.exchange_name, type='topic').bind(self._channel)
    self._exchange.declare()
    # Create queues
    self._queues = []
    for kwargs in self.qkwargs:
        queue = Queue(**kwargs)
        queue.exchange = self._exchange
        queue = queue.bind(self._channel)
        self._queues.append(queue)
        self.logger.debug('Created queue {}'.format(queue.as_dict()))
    # Create producer for publishing on topics
    self.producer = Producer(self._channel, self._exchange)
    self.logger.debug('Bus connection finished')
    return self

def __init__(self, exchange_name, connection_url, qkwargs):
    """
    Initializes a new Service instance.

    :param exchange_name: Name of the topic exchange.
    :type exchange_name: str
    :param connection_url: Kombu connection url.
    :type connection_url: str
    :param qkwargs: One or more dicts of keyword arguments for queue creation.
    :type qkwargs: list
    """
    name = self.__class__.__name__
    self.logger = logging.getLogger(name)
    self.logger.debug('Initializing {0}'.format(name))
    self.connection = Connection(connection_url)
    self._channel = self.connection.channel()
    self._exchange = Exchange(
        exchange_name, type='topic').bind(self._channel)
    self._exchange.declare()
    # Set up queues
    self._queues = []
    for kwargs in qkwargs:
        queue = Queue(**kwargs)
        queue.exchange = self._exchange
        queue = queue.bind(self._channel)
        self._queues.append(queue)
        self.logger.debug(queue.as_dict())
    # Create producer for publishing on topics
    self.producer = Producer(self._channel, self._exchange)
    self.logger.debug('Initializing of {} finished'.format(name))

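# Once the Bus/Service examples above have bound the topic exchange and
# producer, publishing a message is a one-liner. A hedged sketch, assuming
# `service` is an instance of the class above; the payload and routing key
# are illustrative:
service.producer.publish(
    {'event': 'node.created', 'name': 'node-01'},  # hypothetical payload
    routing_key='node.created',
    serializer='json',
)
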
def test_also_binds_exchange(self):
    chan = get_conn().channel()
    b = Queue('foo', self.exchange)
    assert not b.is_bound
    assert not b.exchange.is_bound
    b = b.bind(chan)
    assert b.is_bound
    assert b.exchange.is_bound
    assert b.channel is b.exchange.channel
    assert b.exchange is not self.exchange

def test_also_binds_exchange(self):
    chan = get_conn().channel()
    b = Queue('foo', self.exchange)
    self.assertFalse(b.is_bound)
    self.assertFalse(b.exchange.is_bound)
    b = b.bind(chan)
    self.assertTrue(b.is_bound)
    self.assertTrue(b.exchange.is_bound)
    self.assertIs(b.channel, b.exchange.channel)
    self.assertIsNot(b.exchange, self.exchange)

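# Both test variants above pin down the same contract: Queue.bind() returns
# a bound copy, also binding the nested exchange, while the originals stay
# unbound. A quick self-contained check against kombu's in-memory transport
# (names are illustrative):
from kombu import Connection, Exchange, Queue

exchange = Exchange('foo_exchange', type='direct')
queue = Queue('foo', exchange)

with Connection('memory://') as conn:
    bound = queue.bind(conn.channel())
    # bind() returned a copy: the bound objects share a channel ...
    assert bound.is_bound and bound.exchange.is_bound
    assert bound.channel is bound.exchange.channel
    # ... and the originals are untouched.
    assert not queue.is_bound and bound.exchange is not exchange
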
def __init__(self, exchange_name, connection_url, qkwargs, config_file=None):
    """
    Initializes a new Service instance.

    :param exchange_name: Name of the topic exchange.
    :type exchange_name: str
    :param connection_url: Kombu connection url.
    :type connection_url: str
    :param qkwargs: One or more dicts of keyword arguments for queue creation.
    :type qkwargs: list
    :param config_file: Path to the configuration file location.
    :type config_file: str or None
    """
    name = self.__class__.__name__
    self.logger = logging.getLogger(name)
    self.logger.debug('Initializing {}'.format(name))
    # Read the configuration file; if none is given, fall back to the
    # class default
    self._config_data = read_config_file(
        config_file, self._default_config_file)
    if connection_url is None and 'bus_uri' in self._config_data:
        connection_url = self._config_data.get('bus_uri')
        self.logger.debug(
            'Using connection_url=%s from config file', connection_url)
    if exchange_name is None and 'exchange_name' in self._config_data:
        exchange_name = self._config_data.get('exchange_name')
        self.logger.debug(
            'Using exchange_name=%s from config file', exchange_name)
    self.connection = Connection(connection_url)
    self._channel = self.connection.default_channel
    self._exchange = Exchange(
        exchange_name, type='topic').bind(self._channel)
    self._exchange.declare()
    # Set up queues
    self._queues = []
    for kwargs in qkwargs:
        queue = Queue(**kwargs)
        queue.exchange = self._exchange
        queue = queue.bind(self._channel)
        self._queues.append(queue)
        self.logger.debug(queue.as_dict())
    # Create producer for publishing on topics
    self.producer = Producer(self._channel, self._exchange)
    self.logger.debug('Initializing of {} finished'.format(name))

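# A minimal sketch of wiring a subclass of the Service base class above.
# The subclass name, config path, connection URL, and queue kwargs are all
# hypothetical; the base class only requires _default_config_file:
class StorageService(Service):
    _default_config_file = '/etc/commissaire/storage.conf'  # illustrative

service = StorageService(
    exchange_name='commissaire',                # topic exchange to declare
    connection_url='redis://127.0.0.1:6379/',   # kombu connection URL
    qkwargs=[{'name': 'storage', 'routing_key': 'storage.*'}],
)
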
class StoreWorker(ConsumerMixin, LoggingMixin, PrometheusWorkerMixin):
    def __init__(self, connection, event_store):
        self.connection = connection
        self.event_store = event_store
        self.name = "store worker {}".format(self.event_store.name)
        self.input_queue = Queue(
            ('store_events_{}'.format(self.event_store.name)).replace(" ", "_"),
            exchange=enriched_events_exchange,
            durable=True)

    @contextmanager
    def extra_context(self, connection, channel):
        # TODO: migration! remove ?
        logger.info("StoreWorker migration")
        bound_store_worker_input_queue = self.input_queue.bind(channel)
        bound_store_worker_input_queue.unbind_from(events_exchange)
        yield

    def setup_prometheus_metrics(self):
        self.stored_events_counter = Counter(
            "stored_events", "Stored events", ["event_type"])

    def run(self, *args, **kwargs):
        self.log_info("run")
        prometheus_port = kwargs.pop("prometheus_port", None)
        if prometheus_port:
            self.start_prometheus_server(prometheus_port)
        super().run(*args, **kwargs)

    def get_consumers(self, _, default_channel):
        return [Consumer(default_channel,
                         queues=[self.input_queue],
                         accept=['json'],
                         callbacks=[self.do_store_event])]

    def do_store_event(self, body, message):
        self.log_debug("store event")
        try:
            event_type = body['_zentral']['type']
            self.event_store.store(body)
        except Exception:
            logger.exception("Could not add event to store %s",
                             self.event_store.name)
            save_dead_letter(
                body, "event store {} error".format(self.event_store.name))
            message.reject()
        else:
            message.ack()
            if self.prometheus_setup_done:
                self.stored_events_counter.labels(event_type).inc()

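# Stripped of the store and Prometheus specifics, the consuming pattern in
# StoreWorker reduces to kombu's ConsumerMixin contract: implement
# get_consumers() and ack or reject in the callback. A minimal runnable
# sketch; queue, exchange, and broker URL are illustrative:
from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin

demo_queue = Queue('demo_events', Exchange('demo', type='fanout'),
                   durable=True)

class MinimalWorker(ConsumerMixin):
    """Bare-bones version of the consuming loop used by StoreWorker."""

    def __init__(self, connection):
        self.connection = connection  # ConsumerMixin expects this attribute

    def get_consumers(self, Consumer, channel):
        # Consumer is pre-bound to a channel by the mixin.
        return [Consumer(queues=[demo_queue],
                         accept=['json'],
                         callbacks=[self.on_event])]

    def on_event(self, body, message):
        try:
            print('got event:', body)
        except Exception:
            message.reject()  # leave failures to a DLQ or drop them
        else:
            message.ack()

if __name__ == '__main__':
    with Connection('amqp://guest:guest@localhost//') as conn:
        MinimalWorker(conn).run()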