def queryTask(config, device_id, project_code, test_type, branch):
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # The dedicated queue is consumed first
                q = rabbitpy.Queue(channel, device_id)
                q.durable = True
                q.declare()
                if len(q) > 0:
                    msg = q.get()
                    msg.ack()
                    return json.loads(msg.body)
                # The common queues are consumed in priority order
                for t in test_type:
                    for b in branch:
                        q = rabbitpy.Queue(
                            channel, '{}_{}_{}'.format(project_code, b, t))
                        q.durable = True
                        q.declare()
                        if len(q) > 0:
                            msg = q.get()
                            msg.ack()
                            return json.loads(msg.body)
    except Exception:
        LOGGER.critical(traceback.format_exc())
    return None
def putget(project):
    channel = get_channel()
    properties = {
        'delivery_mode': 2,
        'message_id': str(uuid.uuid4())
    }
    if request.method == 'PUT':
        job = request.get_json()
        message = rabbitpy.Message(channel, job, properties=properties)
        message.publish('', project)
        data = job
    if request.method == 'GET':
        queue = rabbitpy.Queue(channel, project)
        try:
            message = queue.get()
        except rabbitpy.exceptions.AMQPNotFound:
            return "", 204
        if message is not None:
            msg = message.json()
            msg.update(message.properties)
            data = loads(dumps(msg, default=lambda obj: str(obj)))
            message.ack()
        else:
            return "", 204
    return jsonify(data)
def create_msg(body, type):
    try:
        url = ('amqp://' + setting.MQUSER + ':' + setting.MQPASSWORD + '@' +
               setting.MQSERVER + ':' + setting.MQPORT + '/%2f')
        with rabbitpy.Connection(url) as conn:
            with conn.channel() as channel:
                exchange = rabbitpy.Exchange(channel=channel,
                                             name=setting.MQEXCHANGENAME,
                                             durable=True)
                exchange.declare()
                queue = rabbitpy.Queue(channel=channel,
                                       name=setting.MQQUEUENAME,
                                       durable=True)
                queue.declare()
                # Bind the queue to the exchange
                queue.bind(exchange, setting.MQROUTINGKEY)
                message = rabbitpy.Message(
                    channel, body, {
                        'content_type': 'text/plain',
                        'delivery_mode': 2,
                        'message_type': type
                    })
                message.publish(exchange, setting.MQROUTINGKEY)
    except Exception:
        # Publish errors are silently ignored
        pass
def __init__(self, queue_name):
    super(Publisher, self).__init__(queue_name)
    queue = rabbitpy.Queue(self._channel, queue_name)
    queue.durable = True
    queue.declare()
    self.queue = queue
    self._channel.enable_publisher_confirms()
def cancelTask(config, queue, task_id):
    import threading
    lock = threading.Lock()
    lock.acquire()
    rc = 0
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # The dedicated queue is consumed first
                q = rabbitpy.Queue(channel, queue)
                q.durable = True
                q.declare()
                channel.enable_publisher_confirms()
                _t = []
                for i in range(0, len(q)):
                    msg = q.get()
                    task = json.loads(msg.body)
                    if task["task_id"] == task_id:
                        msg.ack()
                        break
                else:
                    # No matching task was found in the queue
                    rc = -1
    except Exception:
        LOGGER.error(traceback.format_exc())
        rc = -1
    finally:
        lock.release()
    return rc
def consume(self, callback):
    with self.acquire() as conn:
        # logger.debug('consume conn = {}'.format(conn))
        queue = rabbitpy.Queue(conn.channel, self._name)
        for message in queue.consume(prefetch=1):
            callback(message)
            message.ack()
def __register_on_task(self, queue_name=''):
    """
    Registers task. This might be called in parallel.

    :param queue_name: task queue that will receive the request.
                       Defaults to a random uuid queue name.
    """
    with self.b_rabbit.connection.channel() as channel:
        task_queue = rabbitpy.Queue(channel,
                                    name=queue_name,
                                    durable=True,
                                    exclusive=True)
        task_queue.declare()
        task_queue.bind(self.exchange_name, self.routing_key)
        self.b_rabbit.add_active_queues(task_queue)
        for message in task_queue.consume():
            self.channel = channel
            self.corr_id = message.properties['correlation_id']
            self.replyTo = message.properties['reply_to']
            self.msg, self.deliveryTag = message, message.delivery_tag
            message.pprint()
            message.ack()
            try:
                self.task_listener(self, message.body)
            except Exception as e:
                logger.critical(
                    'Error in custom implementation of TaskExecuter')
                logger.exception(e.args, exc_info=False)
def setUp(self):
    self.connection = rabbitpy.Connection()
    self.channel = self.connection.channel()
    self.queue = rabbitpy.Queue(self.channel, exclusive=True,
                                auto_delete=True)
    self.queue.declare()
    self.input = b'\n'.join(self.INPUT) + b'\n'
def __init__(self, addr, queue):
    super(MqClient, self).__init__()
    self.addr = addr
    self.q_name = queue
    self.conn = None
    self.ch = None
    self.queue = rabbitpy.Queue(self.channel(), self.q_name)
    self.queue.declare()
def declare_queue(channel):
    print('Declaring the queue')
    queue = rabbitpy.Queue(channel, 'benchmark_queue', durable=False,
                           arguments={'x-message-ttl': 300000})
    queue.declare()
    return queue
def test_delete(self):
    name = 'delete-queue-name'
    rabbitpy.create_queue(os.environ['RABBITMQ_URL'], queue_name=name)
    rabbitpy.delete_queue(os.environ['RABBITMQ_URL'], queue_name=name)
    with rabbitpy.Connection(os.environ['RABBITMQ_URL']) as conn:
        with conn.channel() as channel:
            obj = rabbitpy.Queue(channel, name)
            self.assertRaises(exceptions.AMQPNotFound, obj.declare, True)
def create_local_queue(self):
    """Create a local queue

    :rtype: queue
    """
    _queue = rabbitpy.Queue(self._ch, exclusive=True)
    _queue.declare()
    return _queue
def _connect(self):
    assert self.active_connections == 0
    self.active_connections = 1
    qs = urllib.parse.urlencode({
        'cacertfile': self.config['sslopts']['ca_certs'],
        'certfile': self.config['sslopts']['certfile'],
        'keyfile': self.config['sslopts']['keyfile'],
        'verify': 'ignore',
        'heartbeat': self.config['heartbeat'],
        'connection_timeout': self.config['socket_timeout'],
    })
    uri = '{scheme}://{username}:{password}@{host}:{port}/{virtual_host}?{query_str}'.format(
        scheme='amqps',
        username=self.config['userid'],
        password=self.config['password'],
        host=self.config['host'].split(":")[0],
        port=self.config['host'].split(":")[1],
        virtual_host=self.config['virtual_host'],
        query_str=qs,
    )
    self.log.info("Initializing AMQP connection.")
    self.connection = rabbitpy.Connection(uri)
    # self.connection.connect()
    self.log.info("Connected. Creating channel.")
    # Channel and exchange setup
    self.channel = rabbitpy.AMQP(
        self.connection.channel(blocking_read=True))
    self.log.info("Setting QoS.")
    self.log.info("Connection established. Setting up consumer.")
    if self.config['flush_queues']:
        self.log.info("Flushing items in queue.")
        self.channel.queue_purge(self.config['task_queue_name'])
        self.channel.queue_purge(self.config['response_queue_name'])
    self.log.info("Configuring queues.")
    self._setupQueues()
    if self.config['master']:
        self.in_queue = self.config['response_queue_name']
    else:
        self.in_queue = self.config['task_queue_name']
    qchan = self.connection.channel()
    qchan.prefetch_count(self.config['prefetch'])
    self.in_q = rabbitpy.Queue(qchan, self.in_queue)
def create_queue(self, name=None):
    """Create queue for messages.

    :type name: str
    :rtype: queue
    """
    _queue = rabbitpy.Queue(self._ch, name=name, durable=True)
    _queue.declare()
    return _queue
def test_create_queue(self):
    name = 'simple-create-queue'
    rabbitpy.create_queue(os.environ['RABBITMQ_URL'], queue_name=name)
    with rabbitpy.Connection(os.environ['RABBITMQ_URL']) as conn:
        with conn.channel() as channel:
            queue = rabbitpy.Queue(channel, name)
            response = queue.declare(True)
            self.assertEqual(response, (0, 0))
            queue.delete()
def __init__(self, exchange_name, uri=DEFAULT_RABBIT_URI):
    self.conn = rabbitpy.Connection(uri)
    self.channel = self.conn.channel()
    self.exchange = rabbitpy.Exchange(self.channel, exchange_name,
                                      exchange_type='fanout')
    self.exchange.declare()
    self.queue = rabbitpy.Queue(self.channel, exclusive=True)
    self.queue.declare()
    self.queue.bind(self.exchange)
def factory(exchange_name, name):
    channel = rabbitmq.channel()
    exchange = rabbitpy.Exchange(channel, exchange_name,
                                 auto_delete=False, durable=True)
    exchange.declare()
    queue = rabbitpy.Queue(channel, name, auto_delete=False, durable=True)
    queue.declare()
    queue.bind(exchange, routing_key=name)
    return queue, exchange
def create_queue():
    channel = get_channel()
    name = uuid.uuid4()
    queue = rabbitpy.Queue(
        channel,
        name=str(name),
        durable=True,
        arguments=get_queue_settings()
    )
    queue.declare()
    return jsonify(queue=str(name))
def __init__(self, b_rabbit, routing_key: str, publisher_name: str,
             exchange_type: str = 'topic', external: bool = False,
             important_subscription: bool = True,
             event_listener: Callable = None):
    """
    Subscribe to events sent by a publisher.

    Parameters:
        :param str routing_key: routing key that the publisher set for the event
        :param str publisher_name: name of the publisher
        :param str exchange_type: type of the exchange
        :param bool external: is the publisher external?
        :param callable event_listener: user event listener (eventListener(body))
    """
    if not b_rabbit.connection:
        raise Exception(
            'Create an instance of the RabbitMqCommunicationInterface class first')
    self.b_rabbit = b_rabbit
    self.publisher_name = publisher_name
    with b_rabbit.connection.channel() as channel:
        self.exchange_name = ('External_' + publisher_name + '_events'
                              if external else publisher_name + '_events')
        self.exchange = rabbitpy.Exchange(channel=channel,
                                          name=self.exchange_name,
                                          exchange_type=exchange_type,
                                          durable=True)
        self.exchange.declare()
        logger.info(
            f'Exchange is declared successfully from Subscriber: {__name__} | with the name: {self.exchange_name}')
        subscriber_name = (self.exchange_name + '_' + routing_key + '_' +
                           self.__get_subscriber_name() + '_queue')
        logger.info(f'subscriber name: {subscriber_name}')
        queue = rabbitpy.Queue(channel,
                               name=subscriber_name,
                               durable=important_subscription,
                               message_ttl=self.__msg_lifetime(),
                               exclusive=False)
        queue.declare()
        logger.info(
            f'{queue.name} was successfully declared from subscriber: {subscriber_name}')
        queue.bind(self.exchange_name, routing_key)
        self.queue_name = queue.name
        self.event_listener = event_listener
def consume_message(queue_name='hello', all=False):
    with rabbitpy.Connection(API_KEY) as conn:
        with conn.channel() as channel:
            queue = rabbitpy.Queue(channel, queue_name)
            if all:
                # Consume all the messages
                for message in queue:
                    message.pprint(True)
                    message.ack()
            else:
                queue[0].pprint(True)
                return queue[0]
def setup(self):
    self.connection = rabbitpy.Connection(config.get(AMQP_URI_KEY))
    self.channel = self.connection.channel()
    self.exchange = rabbitpy.Exchange(self.channel, 'exch_pi')
    self.exchange.declare()
    queue = rabbitpy.Queue(
        self.channel, 'q_pi_buzzer',
        arguments={'x-message-ttl': 3600000})  # 1h
    queue.declare()
    queue.bind(self.exchange, ROUTING_KEY)
def wait_for_job(self):
    with self._rabbit_connection.connection.channel() as channel:
        self._queue = rabbitpy.Queue(
            channel=channel,
            name=self._server_name + "_" + self._routing_key + "_queue",
            durable=True,
            message_ttl=5 * 24 * 60 * 60 * 1000  # 5 days
        )
        self._queue.declare()
        self._queue.bind(self._exchange, self._routing_key)
        self._consume(channel)
def __subscribe(self):
    '''Start waiting on events. You may do this in parallel.'''
    with self.b_rabbit.connection.channel() as channel:
        queue = rabbitpy.Queue(channel, self.queue_name)
        self.b_rabbit.add_active_queues(queue)
        for message in queue.consume():
            message.pprint(True)
            message.ack()
            self.event_listener(message.body)
def moveToTop(config, queue, task_id):
    import threading
    lock = threading.Lock()
    lock.acquire()
    rc = 0
    try:
        with rabbitpy.Connection('amqp://{}:{}@{}:{}/%2F'.format(
                g_username, g_password, config["rabbitMQ_address"],
                config["rabbitMQ_port"])) as conn:
            with conn.channel() as channel:
                # The dedicated queue is consumed first
                q = rabbitpy.Queue(channel, queue)
                q.durable = True
                q.declare()
                channel.enable_publisher_confirms()
                _t = []
                _r = ''
                for i in range(0, len(q)):
                    msg = q.get()
                    msg.ack()
                    task = json.loads(msg.body)
                    if task["task_id"] == task_id:
                        _r = msg.body
                    else:
                        _t.append(msg.body)
                _t.insert(0, _r)
                for i in _t:
                    msg = rabbitpy.Message(channel, i)
                    # Publish the message; with publisher confirms enabled the
                    # return value is a bool True/False
                    if msg.publish("", queue, mandatory=True):
                        LOGGER.debug(
                            'Message {} publish confirmed by RabbitMQ'.format(
                                msg.body))
                    else:
                        LOGGER.error(
                            'Message {} publish not confirmed by RabbitMQ'.format(
                                msg.body))
                        rc = -1
    except Exception:
        LOGGER.error(traceback.format_exc())
        rc = -1
    finally:
        lock.release()
    return rc

#print queryTask({"rabbitMQ_address":'127.0.0.1', 'rabbitMQ_port':5672}, '127.0.0.1', 'APL', ['PIT', 'CIT'], ['OTM', 'PV'])
#print queryTask1({"rabbitMQ_address":'10.239.111.152', 'rabbitMQ_port':5672},"10.239.132.227", "APL",["CIT", "PIT"], ["OTM", "PV"])
#a = getDedicatedTaskQueue({"rabbitMQ_address":'10.239.153.126', 'rabbitMQ_port':5672},"10.239.132.227")
#b = getDedicatedTaskQueue_pika({"rabbitMQ_address":'10.239.153.126', 'rabbitMQ_port':5672},"APL_OTM_CIT")
#print(type(b[0]), b)
def _connect(self):
    self._connection = rabbitpy.Connection()
    self._channel = self._connection.channel()
    self._channel.enable_publisher_confirms()
    self._exchange = rabbitpy.Exchange(self._channel, self.EXCHANGE_NAME,
                                       self.EXCHANGE_TYPE)
    self._exchange.declare()
    self._queue = rabbitpy.Queue(self._channel, self.QUEUE_NAME)
    self._queue.declare()
    self._queue.bind(self._exchange, self.ROUTING_KEY)
def create_queue(self, name=None, expires=1200000):
    # def create_queue(self, name=None, expires=100000):
    """Create queue for messages.

    :type name: str
    :type expires: int (time, in milliseconds, for queue to expire)
    :rtype: queue
    """
    _queue = rabbitpy.Queue(self._ch, name=name, durable=True,
                            expires=expires)
    _queue.declare()
    return _queue
def setUp(self):
    self.connection = rabbitpy.Connection(os.environ['RABBITMQ_URL'])
    self.channel = self.connection.channel()
    self.queue = rabbitpy.Queue(self.channel, 'redeliver-test')
    self.queue.declare()
    # Publish the message that will be rejected
    message = rabbitpy.Message(self.channel, 'Payload Value')
    message.publish('', 'redeliver-test')
    # Get and reject the message
    msg1 = self.queue.get()
    msg1.reject(requeue=True)
def setUp(self):
    self.connection = rabbitpy.Connection(os.environ['RABBITMQ_URL'])
    self.channel = self.connection.channel()
    self.channel.enable_publisher_confirms()
    self.exchange = rabbitpy.TopicExchange(self.channel, 'pql-test')
    self.exchange.declare()
    self.queue = rabbitpy.Queue(self.channel, 'pql-queue')
    self.queue.declare()
    self.queue.bind(self.exchange, 'test.#')
    for iteration in range(0, self.ITERATIONS):
        message = rabbitpy.Message(self.channel, str(uuid.uuid4()))
        if not message.publish(self.exchange, 'test.publish.pql'):
            LOGGER.error('Error publishing message %i', iteration)
def initConsumer(self):
    self.initChannel()
    self.exchange = rabbitpy.TopicExchange(channel=self.channel,
                                           name=self.exchangeName,
                                           durable=True,
                                           auto_delete=False,
                                           arguments=self.arguments)
    self.exchange.declare()
    self.queue = rabbitpy.Queue(self.channel, exclusive=True)
    self.queue.declare()
    # Bind the queue to the exchange with the appropriate binding keys
    for key in self.bindingKeys:
        self.queue.bind(self.exchange, key)
def setup_connection(queuename):
    """Setup and run."""
    def consume(message):
        run(message.body)
        message.ack()

    # Use context managers as we had some strange thread issues otherwise
    with rabbitpy.Connection(URL) as conn:
        with conn.channel() as channel:
            channel.prefetch_count(10)
            queue = rabbitpy.Queue(channel, name=queuename, durable=True)
            print(queue)
            for message in queue:
                consume(message)
            print("done")