def connect(self):
    """
    Connect to RabbitMQ using `self.rabbitmq_url`.

    You usually do not have to call this yourself, as it is already
    done by the constructor.
    """
    self._connection = amqpstorm.UriConnection(self.rabbitmq_url)
    return self._connection
def get_results(url, queue, delete):
    """Consume the result objects (formatted as JSON) from the specified queue.

    Args:
        url (str): The URL for the queue, including user/password
        queue (str): The name of the queue
        delete (bool): Indicates if an ACK should be sent to the queue,
            thus deleting the message from the queue

    Returns:
        list: List of dictionaries
    """
    connection = amqpstorm.UriConnection(url)
    channel = connection.channel()
    channel.queue.declare(queue, durable=True)
    received = []
    while True:
        msg = channel.basic.get(queue=queue, no_ack=False)
        if msg is None:
            break
        result = json.loads(msg.body)
        if any(result):  # Skip empty JSON objects
            received.append(result)
        if delete:
            msg.ack()
    connection.close()
    LOG.debug('Got %d elements', len(received))
    return received
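# A minimal usage sketch for get_results() above, assuming the module already
# has `import json`, `import amqpstorm` and a module-level LOG logger set up.
# The broker URL and queue name below are placeholders, not values taken from
# the original code; delete=False leaves the messages on the queue.
if __name__ == '__main__':
    for entry in get_results('amqp://guest:guest@localhost:5672/%2F',
                             'results', delete=False):
        print(entry)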
def load_results(queue_url, queue_name):
    """Load the JSON objects that represent a result from the provided queue.

    Args:
        queue_url (str): The URI for the results queue
        queue_name (str): The name of the queue

    Returns:
        list: A list of dicts, where each one contains the results
    """
    connection = amqpstorm.UriConnection(queue_url)
    channel = connection.channel()
    channel.queue.declare(queue_name, durable=True)
    received_messages = []
    data = []
    while True:
        message = channel.basic.get(queue=queue_name, no_ack=False)
        if message is None:
            break
        result = json.loads(message.body)
        if any(result):  # Skip empty JSON objects
            received_messages.append(message)
            data.append(result)
    # Requeue the messages so the queue is left untouched
    for message in received_messages:
        message.nack()
    connection.close()
    return data
def init_broker(self):
    # Alternative keyword-argument form:
    # username=app_config.RABBITMQ_USER, password=app_config.RABBITMQ_PASS,
    # host=app_config.RABBITMQ_HOST, port=app_config.RABBITMQ_PORT,
    # virtual_host=app_config.RABBITMQ_VIRTUAL_HOST, heartbeat=60 * 10
    self.logger.warning('Using the AmqpStorm package to connect to MQ')
    self.connection = amqpstorm.UriConnection(
        f'amqp://{frame_config.RABBITMQ_USER}:{frame_config.RABBITMQ_PASS}'
        f'@{frame_config.RABBITMQ_HOST}:{frame_config.RABBITMQ_PORT}'
        f'/{frame_config.RABBITMQ_VIRTUAL_HOST}?heartbeat={60 * 10}'
    )
    self.channel = self.connection.channel()  # type: amqpstorm.Channel
    self.channel_wrapper_by_ampqstormbaic = AmqpStormBasic(self.channel)
    self.queue = AmqpStormQueue(self.channel)
    self.queue.declare(queue=self._queue_name, durable=True)
def open(self):
    """Open Connection."""
    self.connection = amqpstorm.UriConnection(self.url)
    self.channel = self.connection.channel()
    self.channel.queue.declare(self.rpc_queue)
    result = self.channel.queue.declare(exclusive=True)
    self.callback_queue = result['queue']
    self.channel.basic.consume(self._on_response, no_ack=True,
                               queue=self.callback_queue)
    self._create_process_thread()
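# A possible shape for the two helpers referenced by open() above, modelled on
# the common amqpstorm RPC-client pattern. The `self.queue` dict keyed by
# correlation_id and the `threading` import are assumptions, not original code.
def _create_process_thread(self):
    """Consume the callback queue in a daemon thread."""
    thread = threading.Thread(target=self.channel.start_consuming)
    thread.daemon = True
    thread.start()

def _on_response(self, message):
    """Store each reply body under its correlation id for later lookup."""
    self.queue[message.correlation_id] = message.body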
def __ensure_connection(self):
    if self.is_open:
        return
    if not self.__connection_lock.acquire(timeout=0.1):
        raise ConnectionError('Transport is closed.')
    try:
        if self.is_open:
            return
        self.__connection = amqpstorm.UriConnection(self.__url)
        self.__closed_by_user = None
        self.__channel_pool = ChannelPool(self.__min_channels,
                                          self.__max_channels,
                                          self.__connection)
    finally:
        self.__connection_lock.release()
def start(config):
    """Create the TaskCreator object specified in the configuration file,
    call it, and push the tasks to the message queue.

    Args:
        config (RawConfigParser): The configuration reader
    """
    task_creator = config.get('coordinator', 'taskcreator')
    csv_file = config.get('coordinator', 'csvfile')
    url = config.get('coordinator', 'queue_url')
    queue_name = config.get('general', 'queue_name')
    creator_class = getattr(sys.modules[__name__], task_creator)
    creator = creator_class(csv_file, config)
    tasks = creator.create_tasks()
    connection = amqpstorm.UriConnection(url)
    channel = connection.channel(rpc_timeout=120)
    channel.queue.declare(queue_name, durable=True)
    for task in tasks:
        print('Pushing into queue:\n{0}'.format(task))
        channel.basic.publish(task, queue_name, exchange='',
                              properties={'delivery_mode': 2})
    connection.close()
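# A hedged invocation sketch for start() above; the configuration file name is
# a hypothetical placeholder, and the file is expected to provide the sections
# and options read by start() ('coordinator' and 'general').
if __name__ == '__main__':
    from configparser import RawConfigParser

    config = RawConfigParser()
    config.read('coordinator.ini')  # hypothetical path
    start(config)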
def connect(self, amqp_uri, exchange_name):
    self._exchange_name = exchange_name
    self._connection = amqpstorm.UriConnection(amqp_uri)
    self._channel = self._connection.channel()
    self._channel.exchange.bind(source=exchange_name)
def worker_thread(url, queue_name, results_queue):
    """Worker thread, for each instance."""
    global log
    empty_queue = False
    while not empty_queue:
        connection = amqpstorm.UriConnection(url)
        channel = connection.channel(rpc_timeout=120)
        channel.queue.declare(queue_name, durable=True)
        channel.basic.qos(1)  # Fetch one message at a time
        log.info('Waiting for tasks')
        while True:
            # If the queue is empty, message will be None
            message = channel.basic.get(queue=queue_name, no_ack=False)
            if message is None:
                log.info('Nothing else to do.')
                connection.close()
                empty_queue = True
                break
            work = task.Task(message.body)
            log.info('Got a task %s', work.get_id())
            ret_code = 0
            if work.get_id() in IDS_DONE:
                log.warning('Task ID already done. Skipping')
                work = JOBS_DONE[work.get_id()]
            else:
                ret_code = work.run()
                JOBS_DONE[work.get_id()] = work
            try:
                if ret_code != 0:
                    log.warning('Unexpected exit code: %d', ret_code)
                    stdout = work.get_stdout()
                    stderr = work.get_stderr()
                    if stdout is not None:
                        log.error('STDOUT: %s', stdout)
                    if stderr is not None:
                        log.error('STDERR: %s', stderr)
                    message.nack()
                    continue
                log.debug('Task execution finished')
                message.ack()
            except amqpstorm.AMQPConnectionError:
                log.error('Connection to server died before publish')
                IDS_DONE.append(work.get_id())
                break
            except Exception as ex:
                log.exception(ex)
                break
            with amqpstorm.UriConnection(url) as res_conn:
                with res_conn.channel() as res_channel:
                    res_channel.queue.declare(results_queue, durable=True)
                    for result in work.result():
                        props = {'delivery_mode': 2}
                        res = amqpstorm.Message.create(res_channel, result, props)
                        res.publish(results_queue, exchange='')
            log.debug('Task and result processing completed')
        connection.close()
    log.info('Thread exiting. (empty queue? %s)', repr(empty_queue))
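# A hedged sketch of how worker_thread() above might be launched in parallel.
# It assumes the module-level `log`, `IDS_DONE`, `JOBS_DONE` and the `task`
# module used by worker_thread() already exist; the thread count, broker URL
# and queue names are placeholders, not values from the original code.
if __name__ == '__main__':
    import threading

    threads = [
        threading.Thread(target=worker_thread,
                         args=('amqp://guest:guest@localhost:5672/%2F',
                               'tasks', 'results'))
        for _ in range(4)
    ]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()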