def exchange_with_queue_and_binding(props, channel: "pika.adapters.blocking_connection.BlockingChannel"):
    """Declare an exchange and a queue, bind them, and publish one message.

    Declares ``work.exchange`` and the ``add.to.newsletter`` queue, binds
    them on the ``new_user_registration`` routing key, then publishes a
    fixed test body with the supplied properties.

    Args:
        props: ``pika.BasicProperties`` (or ``None``) attached to the
            published message.
        channel: an open pika channel.  NOTE(review): the original
            annotation said ``pika.BlockingConnection``, but every call
            below (``exchange_declare`` .. ``basic_publish``) is a channel
            method, so the annotation was corrected to the channel type.

    All errors are caught and reported to stdout; the function never raises.
    """
    try:
        channel.exchange_declare('work.exchange')
        channel.queue_declare('add.to.newsletter')
        channel.queue_bind(exchange='work.exchange',
                           queue='add.to.newsletter',
                           routing_key='new_user_registration')
        # With publisher confirms enabled on the channel, basic_publish
        # reports whether the broker confirmed delivery.
        result = channel.basic_publish(exchange='work.exchange',
                                       routing_key='new_user_registration',
                                       body=b'message body value',
                                       properties=props)
        if result:
            print('confirmed')
        else:
            print('not confirmed')
    except Exception as exc:
        # Narrowed from a bare `except:` (which also swallowed SystemExit
        # and KeyboardInterrupt).  Prints the exception type then its
        # value, matching the original sys.exc_info()-based output.
        print(type(exc))
        print(exc)
def __init__(self, host=None, port=None, vhost=None, username=None,
             password=None, exchange='oplog', queue=None, dump=DUMP_JSON):
    """Connect to RabbitMQ and prepare this handler's exchange/queue.

    Args:
        host: broker hostname; pika's default is used when None.
        port: broker port; pika's default is used when None.
        vhost: virtual host; pika's default is used when None.
        username: credentials user; only applied together with `password`.
        password: credentials password; only applied together with `username`.
        exchange: exchange name to publish to (default 'oplog').
        queue: optional queue name.  When None, the exchange is merely
            checked for existence; when given, the exchange is recreated
            as 'direct' and the queue is declared and bound to it.
        dump: DUMP_JSON or DUMP_BSON, selecting the message serializer.

    Raises:
        ValueError: if `dump` is neither DUMP_JSON nor DUMP_BSON.
    """
    super(QueueHandler, self).__init__()
    parameters = ConnectionParameters()
    # Only override pika's defaults for the parameters actually supplied.
    if host is not None:
        parameters.host = host
    if port is not None:
        parameters.port = port
    if vhost is not None:
        parameters.virtual_host = vhost
    if username is not None and password is not None:
        parameters.credentials = PlainCredentials(username, password)
    logger.info('Connect to queue.')
    channel = BlockingConnection(parameters).channel()
    if queue is None:
        # passive=True: assert the exchange already exists without
        # creating it (raises if it does not).
        channel.exchange_declare(exchange, 'fanout', passive=True)
    else:
        # NOTE(review): this deletes any pre-existing exchange before
        # re-declaring it as 'direct' — destructive on purpose, presumably
        # to guarantee the exchange type; confirm against callers.
        channel.exchange_delete(exchange)
        channel.exchange_declare(exchange, 'direct')
        channel.queue_declare(queue, durable=True)
        channel.queue_bind(queue, exchange)
    self.exchange = exchange
    self.channel = channel
    self.queue = queue
    # Select the serializer used when publishing operations.
    if dump == DUMP_JSON:
        self.dump = lambda op: json.dumps(op, default=json_util.default)
    elif dump == DUMP_BSON:
        self.dump = lambda op: BSON.encode(op)
    else:
        raise ValueError('Invalid `dump` parameter for QueueHandler.')
class PubSub(object):
    """Protobuf-over-RabbitMQ publish/subscribe helper.

    Each event name is modelled as its own 'fanout' exchange.  Publishers
    send serialized protobuf messages to that exchange; subscribers bind an
    application-specific durable queue to it and register a handler with
    the `listen` decorator.
    """

    @staticmethod
    def _build_ampq_url():
        """Build an AMQP URL from RABBITMQ_USER/PASS/HOST env vars.

        Falls back to guest:guest@rabbitmq when the variables are unset.
        """
        return 'amqp://{user}:{password}@{host}:5672/%2F?connection_attempts=3&heartbeat_interval=3600'.format(
            user=getenv('RABBITMQ_USER', 'guest'),
            password=getenv('RABBITMQ_PASS', 'guest'),
            host=getenv('RABBITMQ_HOST', 'rabbitmq'))

    def __init__(self, ampq_url=None):
        """Open a blocking channel with publisher confirms enabled.

        Args:
            ampq_url: AMQP connection URL; built from environment
                variables when falsy.
        """
        if not ampq_url:
            ampq_url = self._build_ampq_url()
        parameters = URLParameters(ampq_url)
        self.channel = BlockingConnection(parameters).channel()
        self.channel.confirm_delivery()
        # event_name -> (handler function, protobuf event class)
        self.process_functions = {}

    def publish(self, event_name, event=None):
        """Publish a protobuf `event` on the `event_name` fanout exchange.

        An empty `Empty` message is sent when `event` is None.
        """
        if event is None:
            event = Empty()
        # from google.protobuf.message import Message
        # assert parent class is Message
        # BUG FIX: the original built an unused local
        # f'{event_name}.{event.__class__.__name__}' here; the exchange
        # actually used has always been `event_name`, so the dead
        # assignment was removed.
        self.channel.exchange_declare(exchange_type='fanout', exchange=event_name)
        properties = BasicProperties(app_id=APP_NAME, content_type=PB_CONTENT_TYPE)
        self.channel.basic_publish(
            exchange=event_name,
            routing_key='ALL',  # routing key is ignored for 'fanout' exchanges
            body=event.SerializeToString(),
            properties=properties)

    def on_message_callback(self, channel, method_frame, header_frame, body):
        """Decode an incoming message and dispatch it to its handler.

        The exchange name identifies the event; the registered protobuf
        class parses the body, and the message is acked only after the
        handler returns without raising.
        """
        # TODO: if content_type !== PB_CONTENT_TYPE
        # TODO: maybe use google.protobuf.reflection ?
        # https://developers.google.com/protocol-buffers/docs/reference/python/google.protobuf.reflection-module#MakeClass
        event_name = method_frame.exchange
        func, EventClass = self.process_functions[event_name]
        event = EventClass()
        event.ParseFromString(body)
        func(event)
        self.channel.basic_ack(delivery_tag=method_frame.delivery_tag)

    def listen(self, event_name, EventClass):
        """A decorator that is used to register events listener function for a given channel. 
        """
        def decorator(func):
            queue_name = f"{APP_NAME}.{event_name}"
            self.channel.queue_declare(queue=queue_name, durable=True,
                                       exclusive=False, auto_delete=False)
            self.channel.exchange_declare(exchange_type='fanout', exchange=event_name)
            self.channel.queue_bind(queue=queue_name, exchange=event_name)
            self.process_functions[event_name] = (func, EventClass)
            # NOTE(review): (callback, queue=...) is the pika < 1.0
            # basic_consume signature; pika 1.x expects
            # basic_consume(queue, on_message_callback) — confirm the
            # pinned pika version before touching this.
            self.channel.basic_consume(self.on_message_callback, queue=queue_name)
            return func
        return decorator

    def run(self):
        """Block forever, consuming all registered queues."""
        self.channel.start_consuming()