class Producer:
    """Publishes messages to fanout exchanges over a blocking AMQP connection."""

    def __init__(self):
        # HOST is expected to be defined at module level — TODO confirm.
        self._connection = BlockingConnection(ConnectionParameters(HOST))

    def produce(self, message: str, queue: str) -> None:
        """Declare a fanout exchange named *queue* and publish *message* to it.

        The routing key is empty because fanout exchanges ignore it.
        """
        channel = self.get_channel()
        try:
            channel.exchange_declare(
                exchange=queue,
                exchange_type='fanout'
            )
            channel.basic_publish(
                exchange=queue,
                routing_key='',
                body=message
            )
            print(f'Message: {message} published on queue: {queue}')
        finally:
            # Fix: the original opened a fresh channel per publish and never
            # released it, leaking one channel per produce() call.
            channel.close()

    def get_channel(self):
        """Open and return a fresh channel on the current connection."""
        return self._connection.channel()

    def open(self) -> None:
        """(Re)establish the blocking connection."""
        self._connection = BlockingConnection(ConnectionParameters(HOST))

    def close(self) -> None:
        """Close the underlying connection."""
        self._connection.close()
class Manager:
    """AMQP manager: declares the "ovress" fanout exchange, then consumes a
    private request queue and a private response queue, each on its own
    connection and background thread.
    """

    def __init__(self):
        print("AMQP init")
        self.connParams = ConnectionParameters(
            host="localhost",
            credentials=PlainCredentials("guest", "guest"),
        )
        # Short-lived connection used only to declare the shared exchange.
        self.connectionInit = Connection(parameters=self.connParams)
        self.connectionInit.channel().exchange_declare(
            exchange="ovress", type="fanout", durable=True, auto_delete=False
        )
        self.connectionInit.close()
        self.setupRequestQueue()
        self.setupResponseQueue()

    def setupRequestQueue(self):
        """Declare an exclusive, auto-deleted request queue bound to "ovress"
        and start consuming it on a background thread."""
        self.connectionRequests = Connection(parameters=self.connParams)
        self.channelRequests = self.connectionRequests.channel()
        self.requestQueue = "requests-" + str(uuid())
        self.channelRequests.queue_declare(
            queue=self.requestQueue,
            exclusive=True,
            auto_delete=True,
            durable=False,
        )
        self.channelRequests.queue_bind(
            queue=self.requestQueue, exchange="ovress", routing_key="*"
        )
        Thread(target=self.startConsumeRequests, name="amqpReqQ").start()

    def startConsumeRequests(self):
        """Blocking consume loop for the request queue (runs on its thread)."""
        self.channelRequests.basic_consume(self.onRequest, queue=self.requestQueue)
        self.channelRequests.start_consuming()
        print("stopping requests")

    def setupResponseQueue(self):
        """Declare an exclusive, auto-deleted response queue (not bound to the
        exchange; it is addressed directly via reply_to) and start consuming it
        on a background thread."""
        self.connectionResponses = Connection(parameters=self.connParams)
        self.channelResponses = self.connectionResponses.channel()
        self.responseQueue = "responses-" + str(uuid())
        self.channelResponses.queue_declare(
            exclusive=True,
            queue=self.responseQueue,
            auto_delete=True,
            durable=False,
        )
        Thread(target=self.startConsumeResponses, name="amqpRespQ").start()

    def startConsumeResponses(self):
        """Blocking consume loop for the response queue (runs on its thread)."""
        self.channelResponses.basic_consume(self.onResponse, queue=self.responseQueue)
        self.channelResponses.start_consuming()
        print("stopping responses")

    def onRequest(self, channel, delivery, properties, body):
        """Log and acknowledge a request delivery."""
        print("request", body)
        self.channelRequests.basic_ack(delivery.delivery_tag)

    def onResponse(self, channel, delivery, properties, body):
        """Log and acknowledge a response delivery.

        Fix: the original acked on channelRequests, but an ack is only valid
        on the channel the delivery arrived on (channelResponses).
        """
        print("response", body)
        self.channelResponses.basic_ack(delivery.delivery_tag)

    def sendHello(self):
        """Publish a hello message."""
        self.send(MessageHello())

    def send(self, baseMessage):
        """Publish *baseMessage* as JSON to "ovress", requesting replies on our
        response queue via the reply_to property."""
        properties = BasicProperties(reply_to=self.responseQueue)
        self.channelRequests.basic_publish(
            "ovress",
            "route-all-the-things",
            baseMessage.toJson(),
            properties=properties,
        )

    def stop(self):
        """Close both channels and both connections."""
        self.channelRequests.close()
        self.connectionRequests.close()
        self.channelResponses.close()
        self.connectionResponses.close()
class Client(object):
    """Blocking AMQP task client.

    Publishes serialized tasks to "crew.tasks.<channel>" queues and collects
    results (and dead-lettered expirations) on a private reply queue that is
    consumed on a background thread.
    """

    SERIALIZERS = {
        'json': 'application/json',
        'pickle': 'application/python-pickle',
        'text': 'text/plain',
    }

    _CHNUM = 1

    def __init__(self, host='127.0.0.1', port=5672, user=None,
                 password=None, vhost='/'):
        if user:
            credentials = PlainCredentials(username=user, password=password)
        else:
            credentials = None
        self.__conn_params = ConnectionParameters(
            host=host, port=port, virtual_host=vhost, credentials=credentials)
        # correlation_id -> Result future for every in-flight call.
        self.callbacks_hash = {}
        self._res_queue = "crew.master.%s" % uuid()
        self.__active = True
        self._connect()

    def parse_body(self, body, props):
        """Decode *body* according to the message properties.

        Gzip-encoded payloads are decompressed first; JSON and pickle content
        types are deserialized. Returns None for unrecognized content types.
        """
        content_type = getattr(props, 'content_type', 'text/plain')
        if props.content_encoding == 'gzip':
            body = zlib.decompress(body)
        if 'application/json' in content_type:
            return json.loads(body)
        elif 'application/python-pickle' in content_type:
            # NOTE(review): unpickling trusts the broker — do not expose these
            # queues to untrusted producers.
            return pickle.loads(body)

    def _connect(self):
        """Open a connection/channel, declare queues, start the consumer thread."""
        log.debug("Starting new connection to amqp://%s:%d/%s",
                  self.__conn_params.host, self.__conn_params.port,
                  self.__conn_params.virtual_host)
        self.connection = BlockingConnection(self.__conn_params)
        # NOTE: this rebinds _CHNUM as an instance attribute shadowing the
        # class attribute, so each reconnect bumps this instance's own copy.
        self._CHNUM += 1
        log.debug("Opening channel %d", self._CHNUM)
        self.channel = self.connection.channel(self._CHNUM)
        self.channel.exchange_declare("crew.DLX", auto_delete=True,
                                      exchange_type="headers")
        self.channel.queue_declare(queue="crew.DLX", auto_delete=False)
        self.channel.queue_declare(
            queue=self._res_queue, exclusive=True, durable=False,
            auto_delete=True, arguments={"x-message-ttl": 60000})
        self.channel.basic_qos(prefetch_count=1)
        self.channel.queue_bind(
            "crew.DLX", "crew.DLX",
            arguments={"x-original-sender": self._res_queue})
        self.channel.basic_consume(self._on_dlx_received, queue="crew.DLX")
        self.channel.basic_consume(self._on_result, queue=self._res_queue)
        self.__connected = True
        t = Thread(target=self._consumer)
        t.daemon = True
        t.start()
        while not self.__connected:
            time.sleep(0.0001)

    def _on_result(self, channel, method, props, body):
        """Resolve the pending callback matching this result's correlation id."""
        log.debug('PikaCient: Result message received, tag #%i len %d',
                  method.delivery_tag, len(body))
        correlation_id = getattr(props, 'correlation_id', None)
        try:
            if correlation_id not in self.callbacks_hash:
                # Fix: correlation ids are strings — the original "%d" raised
                # inside the logging machinery when this path fired.
                log.info('Got result for task "%s", but no has callback',
                         correlation_id)
            else:
                cb = self.callbacks_hash.pop(correlation_id)
                body = self.parse_body(body, props)
                if isinstance(body, Exception):
                    cb.set_exception(body)
                else:
                    cb.set_result(body, headers=props.headers)
                return
        except Exception as e:
            log.exception(e)
        finally:
            # Always ack so the reply queue drains even on handler errors.
            channel.basic_ack(delivery_tag=method.delivery_tag)

    def _on_dlx_received(self, channel, method, props, body):
        """Turn a dead-lettered (expired) task into an ExpirationError result."""
        correlation_id = getattr(props, 'correlation_id', None)
        if correlation_id in self.callbacks_hash:
            cb = self.callbacks_hash.pop(correlation_id)
            try:
                dl = props.headers['x-death'][0]
                body = ExpirationError(
                    "Dead letter received. Reason: {0}".format(dl.get('reason')))
                body.reason = dl.get('reason')
                body.time = dl.get('time')
                body.expiration = int(dl.get('original-expiration')) / 1000
                cb.set_exception(body)
            finally:
                channel.basic_ack(delivery_tag=method.delivery_tag)
        else:
            log.error("Method callback %s is not found", correlation_id)
            channel.basic_ack(delivery_tag=method.delivery_tag)
        return

    def _consumer(self):
        """Consume until close(); on failure, reconnect every 5 seconds."""
        while self.__active:
            try:
                self.channel.start_consuming()
            except Exception:
                # Fix: the original bare except also swallowed SystemExit /
                # KeyboardInterrupt; keep the best-effort reconnect behavior.
                log.exception("Consumer loop failed; reconnecting")
                self.__connected = False
                while not self.__connected:
                    try:
                        self._connect()
                    except Exception:
                        time.sleep(5)

    def close(self):
        """Stop the consumer loop and close channel and connection."""
        self.__active = False
        self.channel.close()
        self.connection.close()

    def call(self, channel, data=None, serializer='pickle', headers=None,
             persistent=True, priority=0, expiration=86400, timestamp=None,
             gzip=None, gzip_level=6, set_cid=None, routing_key=None):
        """Publish a task and return a Result future keyed by correlation id.

        Raises DuplicateTaskId if *set_cid* is already in flight.
        """
        assert priority <= 255
        assert isinstance(expiration, int) and expiration > 0
        headers = headers or {}
        qname = "crew.tasks.%s" % channel
        serializer, content_type = self.get_serializer(serializer)
        if set_cid:
            cid = str(set_cid)
            if cid in self.callbacks_hash:
                raise DuplicateTaskId(
                    'Task ID: {0} already exists'.format(cid))
        else:
            cid = "{0}.{1}".format(channel, uuid())
        data = serializer(data)
        # Auto-gzip payloads over 32 KiB unless the caller decided explicitly.
        if gzip is None and data is not None and len(data) > 1024 * 32:
            gzip = True
        data = zlib.compress(data, gzip_level) if gzip else data
        headers.update({"x-original-sender": self._res_queue})
        props = pika.BasicProperties(
            content_encoding='gzip' if gzip else 'plain',
            content_type=content_type,
            reply_to=self._res_queue if not routing_key else routing_key,
            correlation_id=cid,
            headers=headers,
            timestamp=int(time.time()),
            delivery_mode=2 if persistent else None,
            priority=priority,
            expiration="%d" % (expiration * 1000),
        )
        callback = Result()
        self.callbacks_hash[props.correlation_id] = callback
        self.channel.basic_publish(exchange='', routing_key=qname,
                                   properties=props, body=data)
        return callback

    def get_serializer(self, name):
        """Return a (serializer_callable, content_type) pair for *name*.

        Fix: the 'text' branch previously returned a bare callable instead of
        a tuple, breaking the tuple unpacking in call().
        """
        assert name in self.SERIALIZERS
        if name == 'pickle':
            return (lambda x: pickle.dumps(x, protocol=2),
                    self.SERIALIZERS[name])
        elif name == 'json':
            return (json.dumps, self.SERIALIZERS[name])
        elif name == 'text':
            return (lambda x: str(x).encode('utf-8'), self.SERIALIZERS[name])
class Client(object):
    """Blocking AMQP task client (second variant).

    Publishes serialized tasks to "crew.tasks.<channel>" queues and collects
    results (and dead-lettered expirations) on a private reply queue that is
    consumed on a background thread.
    """

    SERIALIZERS = {
        'json': 'application/json',
        'pickle': 'application/python-pickle',
        'text': 'text/plain',
    }

    _CHNUM = 1

    def __init__(self, host='127.0.0.1', port=5672, user=None,
                 password=None, vhost='/'):
        if user:
            credentials = PlainCredentials(username=user, password=password)
        else:
            credentials = None
        self.__conn_params = ConnectionParameters(
            host=host, port=port, virtual_host=vhost, credentials=credentials
        )
        # correlation_id -> Result future for every in-flight call.
        self.callbacks_hash = {}
        self._res_queue = "crew.master.%s" % uuid()
        self.__active = True
        self._connect()

    def parse_body(self, body, props):
        """Decode *body* according to the message properties.

        Gzip-encoded payloads are decompressed first; JSON and pickle content
        types are deserialized. Returns None for unrecognized content types.
        """
        content_type = getattr(props, 'content_type', 'text/plain')
        if props.content_encoding == 'gzip':
            body = zlib.decompress(body)
        if 'application/json' in content_type:
            return json.loads(body)
        elif 'application/python-pickle' in content_type:
            # NOTE(review): unpickling trusts the broker — do not expose these
            # queues to untrusted producers.
            return pickle.loads(body)

    def _connect(self):
        """Open a connection/channel, declare queues, start the consumer thread."""
        log.debug(
            "Starting new connection to amqp://%s:%d/%s",
            self.__conn_params.host, self.__conn_params.port,
            self.__conn_params.virtual_host
        )
        self.connection = BlockingConnection(self.__conn_params)
        # NOTE: this rebinds _CHNUM as an instance attribute shadowing the
        # class attribute, so each reconnect bumps this instance's own copy.
        self._CHNUM += 1
        log.debug("Opening channel %d", self._CHNUM)
        self.channel = self.connection.channel(self._CHNUM)
        self.channel.exchange_declare("crew.DLX", auto_delete=True,
                                      exchange_type="headers")
        self.channel.queue_declare(queue="crew.DLX", auto_delete=False)
        self.channel.queue_declare(
            queue=self._res_queue, exclusive=True, durable=False,
            auto_delete=True, arguments={"x-message-ttl": 60000}
        )
        self.channel.basic_qos(prefetch_count=1)
        self.channel.queue_bind("crew.DLX", "crew.DLX",
                                arguments={"x-original-sender": self._res_queue})
        self.channel.basic_consume(self._on_dlx_received, queue="crew.DLX")
        self.channel.basic_consume(self._on_result, queue=self._res_queue)
        self.__connected = True
        t = Thread(target=self._consumer)
        t.daemon = True
        t.start()
        while not self.__connected:
            time.sleep(0.0001)

    def _on_result(self, channel, method, props, body):
        """Resolve the pending callback matching this result's correlation id."""
        log.debug('PikaCient: Result message received, tag #%i len %d',
                  method.delivery_tag, len(body))
        correlation_id = getattr(props, 'correlation_id', None)
        try:
            if correlation_id not in self.callbacks_hash:
                # Fix: correlation ids are strings — the original "%d" raised
                # inside the logging machinery when this path fired.
                log.info('Got result for task "%s", but no has callback',
                         correlation_id)
            else:
                cb = self.callbacks_hash.pop(correlation_id)
                body = self.parse_body(body, props)
                if isinstance(body, Exception):
                    cb.set_exception(body)
                else:
                    cb.set_result(body, headers=props.headers)
                return
        except Exception as e:
            log.exception(e)
        finally:
            # Always ack so the reply queue drains even on handler errors.
            channel.basic_ack(delivery_tag=method.delivery_tag)

    def _on_dlx_received(self, channel, method, props, body):
        """Turn a dead-lettered (expired) task into an ExpirationError result."""
        correlation_id = getattr(props, 'correlation_id', None)
        if correlation_id in self.callbacks_hash:
            cb = self.callbacks_hash.pop(correlation_id)
            try:
                dl = props.headers['x-death'][0]
                body = ExpirationError(
                    "Dead letter received. Reason: {0}".format(dl.get('reason'))
                )
                body.reason = dl.get('reason')
                body.time = dl.get('time')
                body.expiration = int(dl.get('original-expiration')) / 1000
                cb.set_exception(body)
            finally:
                channel.basic_ack(delivery_tag=method.delivery_tag)
        else:
            log.error("Method callback %s is not found", correlation_id)
            channel.basic_ack(delivery_tag=method.delivery_tag)
        return

    def _consumer(self):
        """Consume until close(); on failure, reconnect every 5 seconds."""
        while self.__active:
            try:
                self.channel.start_consuming()
            except Exception:
                # Fix: the original bare except also swallowed SystemExit /
                # KeyboardInterrupt; keep the best-effort reconnect behavior.
                log.exception("Consumer loop failed; reconnecting")
                self.__connected = False
                while not self.__connected:
                    try:
                        self._connect()
                    except Exception:
                        time.sleep(5)

    def close(self):
        """Stop the consumer loop and close channel and connection."""
        self.__active = False
        self.channel.close()
        self.connection.close()

    def call(self, channel, data=None, serializer='pickle', headers=None,
             persistent=True, priority=0, expiration=86400, timestamp=None,
             gzip=None, gzip_level=6, set_cid=None, routing_key=None):
        """Publish a task and return a Result future keyed by correlation id.

        Fix: *headers* previously defaulted to a mutable {} that was mutated
        via headers.update(...), leaking state across calls; it now defaults
        to None and a fresh dict is created per call.

        Raises DuplicateTaskId if *set_cid* is already in flight.
        """
        assert priority <= 255
        assert isinstance(expiration, int) and expiration > 0
        headers = headers or {}
        qname = "crew.tasks.%s" % channel
        serializer, content_type = self.get_serializer(serializer)
        if set_cid:
            cid = str(set_cid)
            if cid in self.callbacks_hash:
                raise DuplicateTaskId('Task ID: {0} already exists'.format(cid))
        else:
            cid = "{0}.{1}".format(channel, uuid())
        data = serializer(data)
        # Auto-gzip payloads over 32 KiB unless the caller decided explicitly.
        if gzip is None and data is not None and len(data) > 1024 * 32:
            gzip = True
        data = zlib.compress(data, gzip_level) if gzip else data
        headers.update({"x-original-sender": self._res_queue})
        props = pika.BasicProperties(
            content_encoding='gzip' if gzip else 'plain',
            content_type=content_type,
            reply_to=self._res_queue if not routing_key else routing_key,
            correlation_id=cid,
            headers=headers,
            timestamp=int(time.time()),
            delivery_mode=2 if persistent else None,
            priority=priority,
            expiration="%d" % (expiration * 1000),
        )
        callback = Result()
        self.callbacks_hash[props.correlation_id] = callback
        self.channel.basic_publish(
            exchange='', routing_key=qname, properties=props, body=data
        )
        return callback

    def get_serializer(self, name):
        """Return a (serializer_callable, content_type) pair for *name*.

        Fix: the 'text' branch previously returned a bare callable instead of
        a tuple, breaking the tuple unpacking in call().
        """
        assert name in self.SERIALIZERS
        if name == 'pickle':
            return (lambda x: pickle.dumps(x, protocol=2),
                    self.SERIALIZERS[name])
        elif name == 'json':
            return (json.dumps, self.SERIALIZERS[name])
        elif name == 'text':
            return (lambda x: str(x).encode('utf-8'), self.SERIALIZERS[name])
class RabbitProducer(object):
    """
    A RabbitMQ consumer that provides data in batch.
    If channel is given, host and port are ignored. If channel
    is not given, host and port are used to create a new channel.

    Args:
        channel (BlockingChannel): the channel instance with ack enabled.
        queue (str): the name of a queue.
    """

    def __init__(
        self,
        queue: str,
        host: str = "localhost",
        port: int = 5672,
        channel: BlockingChannel = None,
    ):
        if not _has_pika:
            raise RuntimeError("Please install the python module: pika")
        if channel is not None:
            # Caller owns the connection; we only borrow the channel.
            self.conn = None
            self.channel = channel
        else:
            self.conn = BlockingConnection(
                pika.ConnectionParameters(host=host, port=port))
            self.channel = self.conn.channel()
        self.queue = queue
        self.channel.queue_declare(queue=queue)
        # Enable publisher confirms so send() can report delivery success.
        self.channel.confirm_delivery()

    def send(self, msg: str) -> bool:
        """
        send the message. (sync)

        Args:
            msg: a single msg(str)

        Return:
            bool: True on success
        """
        persistent_props = pika.BasicProperties(
            content_type="text/plain",
            delivery_mode=2,  # persistent
        )
        return self.channel.basic_publish(
            "",
            self.queue,
            msg,
            properties=persistent_props,
            mandatory=True,
        )

    def close(self):
        """
        Close the connection. After the call to this method, you cannot `send`.
        """
        if self.conn is not None:
            self.conn.close()
from pika.adapters import SelectConnection
from pika.adapters.blocking_connection import BlockingConnection
from pika.connection import ConnectionParameters
from pika import BasicProperties
from json import dumps
import time

# Simple blocking consumer: drain the "test" queue and print each body.
connection = BlockingConnection(ConnectionParameters('127.0.0.1'))
channel = connection.channel()
channel.queue_declare(queue="test")


def consume(channel, method, properties, body):
    """Print each delivered message body."""
    # Fix: the Python 2 statement `print "got",body` is a SyntaxError under
    # Python 3, which the rest of this file (f-strings) targets.
    print("got", body)


# no_ack=True: deliveries are not acknowledged, so the broker forgets them
# immediately after sending.
channel.basic_consume(consume, queue='test', no_ack=True)
channel.start_consuming()
connection.close()