def run(self):
    """Publish an 'update_schedule' event onto the pypo-fetch RabbitMQ queue.

    Two transport paths are selected by ``self.action``:

    * ``'update_schedule'``       -- raw pika (BlockingConnection)
    * ``'update_schedule_kombu'`` -- kombu (BrokerConnection + Producer)

    Both send the same payload::

        {'schedule': {'media': {}}, 'event_type': 'update_schedule'}
    """
    import json

    print('pypo Pusher')
    # Shared event payload for both transports.
    message = {'schedule': {'media': {}}, 'event_type': 'update_schedule'}

    if self.action == 'update_schedule':
        print('update_schedule!!')
        credentials = pika.PlainCredentials(MQ_USER, MQ_PASS)
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(MQ_HOST, 5672, '/airtime', credentials))
        channel = connection.channel()
        channel.queue_declare(queue='pypo-fetch', durable=True)
        # BUG FIX: the serialized payload used to be clobbered by a leftover
        # debug assignment (message = 'hallo'), so the real schedule event
        # was never delivered.  Publish the actual JSON body.
        body = json.dumps(message)
        channel.basic_publish(exchange='airtime-pypo',
                              routing_key='pypo-fetch',
                              body=body)
        channel.close()
        connection.close()

    if self.action == 'update_schedule_kombu':
        print('update_schedule!!')
        exchange = Exchange("airtime-pypo", "direct",
                            durable=True, auto_delete=True)
        queue = Queue("pypo-fetch", exchange=exchange, key="foo", durable=True)
        connection = BrokerConnection(MQ_HOST, MQ_USER, MQ_PASS, MQ_VHOST)
        channel = connection.channel()
        simple_queue = SimpleQueue(channel, queue)
        print(simple_queue.qsize())
        print('do:')
        producer = Producer(channel, exchange=exchange,
                            routing_key=None, serializer="json")
        producer.publish(message, routing_key='pypo-fetch')
        print(simple_queue.qsize())
        channel.close()
        # BUG FIX: the broker connection was previously leaked; close it
        # along with the channel.
        connection.close()
class KombuQueueReceiver(KombuMessageQueue):
    """Receive messages from a RabbitMQ queue via kombu's SimpleQueue.

    Builds a ``SimpleQueue`` on top of the channel/queue objects created by
    ``KombuMessageQueue`` and exposes blocking (:meth:`get`) and
    non-blocking (:meth:`get_nowait`) fetch helpers plus queue cleanup on
    :meth:`close`.
    """

    def __init__(self, dc, exchange, exchange_type, queue, binding_key,
                 **kwargs):
        # Allow a subclass to pre-set class_name; default to this class.
        if self.class_name is None:
            self.class_name = "KombuQueueReceiver"
        KombuMessageQueue.__init__(self, dc, exchange, exchange_type,
                                   queue, binding_key, **kwargs)
        # Fail fast BEFORE allocating the SimpleQueue wrapper if the parent
        # did not end up with a usable queue (previously this check ran
        # last, after the resources were already created).
        if not self.queue:
            raise RuntimeError("queue is required")
        self.queue_obj = SimpleQueue(self.channel, self.Queue_obj)
        self.uuid = gen_uuid()

    def start_consuming(self):
        """No-op: SimpleQueue pulls messages on demand, nothing to start."""
        pass

    def dump_cache(self):
        """Purge every message currently sitting in the queue."""
        return self.queue_obj.clear()

    def stop_consuming(self):
        """Close the underlying SimpleQueue, stopping its consumer."""
        self.queue_obj.close()

    @recover_connect
    def get_cached_number(self):
        """Return the number of messages currently in the queue."""
        return self.queue_obj.qsize()

    @recover_connect
    def get_nowait(self):
        """Fetch one message without blocking.

        Returns the raw message body, or None when the queue is empty.
        The message is acked (removed from the queue) before returning.
        """
        try:
            log_message = self.queue_obj.get_nowait()
        except Empty:
            return None
        log_message.ack()
        # NOTE: returns the raw .body, not the deserialized .payload —
        # callers are expected to decode it themselves.
        return log_message.body

    @recover_connect
    def get(self, block=True, timeout=None):
        """Fetch one message, optionally blocking up to ``timeout`` seconds.

        Raises ``Empty`` when no message arrives in time.  The message is
        acked (removed from the queue) before its body is returned.
        """
        try:
            # BUG FIX: the caller's `block` flag used to be ignored
            # (hard-coded to block=True); honor it now.
            log_message = self.queue_obj.get(block=block, timeout=timeout)
            debug_message(log_message)
        except Empty:
            logger.debug("Empty error when get @todo infos..")
            # BUG FIX: .format() was never applied, so the raw
            # "{cn}[{id:#x}]" placeholders were raised verbatim.
            raise Empty("{cn}[{id:#x}] is raise Empty when fetch.".format(
                cn=self.class_name, id=id(self)))
        log_message.ack()  # remove message from queue
        return log_message.body

    def __del__(self):
        # Best-effort cleanup if the receiver was never closed explicitly.
        if not self.closed:
            logger.debug(
                "=============== {class_name}[{id:#x}] Enter closing... ".
                format(class_name=self.class_name, id=id(self)))
            self.close()

    def close(self):
        """Drain, delete and close the RabbitMQ queue, then close the base."""
        logger.debug(
            "{cn}[{id:#x}] is calling KombuQueueReceiver close method".format(
                cn=self.class_name, id=id(self)))
        try:
            logger.debug(
                "{cn}[{id:#x}] is about to closing.. Queue size: {qsize}".
                format(cn=self.class_name, id=id(self),
                       qsize=self.get_cached_number()))
            # clean Rabbitmq Queue
            self.dump_cache()
            logger.debug(
                "{cn}[{id:#x}] is cleared. Queue size: {qsize}".format(
                    cn=self.class_name, id=id(self),
                    qsize=self.get_cached_number()))
            # stop all active consumers
            self.stop_consuming()  # how to measure stop all consumers @todo
            # delete Rabbitmq Queue
            r = self.Queue_obj.delete()
            logger.debug(
                "{cn}[{id:#x}] delete Queue[{qid:#x}] return: {r}".format(
                    cn=self.class_name, id=id(self),
                    qid=id(self.Queue_obj), r=r))
        except ChannelError as e:
            # A 404 simply means the queue is already gone; anything else
            # is a real error worth logging.  str(e) replaces the
            # deprecated/py3-removed e.message attribute.
            if '404' not in str(e):
                logger.error("Error when {cn}[{id:#x}] close: {msg}".format(
                    cn=self.class_name, id=id(self),
                    msg=traceback.format_exc()))
        KombuMessageQueue.close(self)