def __init__(self, context: Context, publish_queue: str = None,
             subscribe_queue: str = None):
    """
    Class initializer

    :param context: connection details
    :type context: :class:`.Context`
    :param publish_queue: name of the publish queue
    :type publish_queue: `str`
    :param subscribe_queue: name of the subscribe queue; when omitted the
        subscriber's auto-generated queue doubles as the command queue
    :type subscribe_queue: `str`
    """
    super().__init__(context)

    # Keep a copy here - lots of re-use
    self.timeout = context.timeout()

    # Initialise the catalog
    self.catalog = catalog.MessageCatalog()

    if not publish_queue:
        # Publish not over-ridden so use context version
        publish_queue = context.feeds()

    self.start_subscriber(queue=rabbitmq.RabbitQueue(subscribe_queue))
    self.start_publisher(queue=rabbitmq.RabbitQueue(publish_queue))

    if subscribe_queue:
        # An explicit subscribe queue is in use, so command traffic gets
        # its own temporary queue
        self.command_queue = super().mktemp_queue()
    else:
        # No explicit subscribe queue: reuse the subscriber's
        # auto-generated queue for commands
        self.command_queue = self.subscriber.sub_queue
def test_round_trip(self):
    """
    Round-trip a large payload through the broker: the client publishes
    to the feed queue, a server consumes it and replies, and the client
    receives the reply.

    Previously this test only logged the traffic (and most of the logging
    was commented out) — it now asserts that both legs actually deliver.
    """
    context = rabbitmq.RabbitContext.from_credentials_file(
        self.credentials)

    with rabbitmq.RabbitClient(context) as client:
        client.start(publish=rabbitmq.RabbitQueue(context.feeds(),
                                                  purge=True,
                                                  durable=True),
                     subscribe=rabbitmq.RabbitQueue(context.replies(),
                                                    durable=True))

        # ~100KB payload exercises a larger-than-trivial message body
        message = {'action': 'Outbound', 'payload': '0' * 1024 * 100}
        client.publish(json.dumps(message))

        # Now start the server side and handle the message
        with rabbitmq.RabbitClient(context) as server:
            server.start(subscribe=rabbitmq.RabbitQueue(context.feeds()))
            recv = server.receive(timeout=.01)
            # receive() raises on timeout, but make delivery explicit
            self.assertIsNotNone(recv)

            # And send a reply to the client
            reply = {'action': 'Inbound', 'reply': 'the reply'}
            LOGGER.info(f'Server sending: {reply}')
            server.publish(json.dumps(reply),
                           rabbitmq.RabbitQueue(context.replies()))

        # Now catch the reply in the client
        message = client.receive(timeout=.1)
        self.assertIsNotNone(message)
        LOGGER.info(f"Client received: {message}")
def test_users(self):
    """
    Exercise a full client/server exchange over the broker using the
    context's configured users, asserting both directions deliver.

    Fixes: ``raise err`` replaced with a bare ``raise`` (preserves the
    original traceback), and the failure path now logs with traceback
    via ``LOGGER.exception``.
    """
    context = rabbitmq.RabbitContext.from_credentials_file(
        self.credentials)
    try:
        with rabbitmq.RabbitClient(context) as client:
            client.start(publish=rabbitmq.RabbitQueue(context.feeds(),
                                                      purge=True),
                         subscribe=rabbitmq.RabbitQueue(context.replies()))

            message = {'action': 'Outbound', 'payload': 'some data'}
            LOGGER.info(f'Client sending: {message}')
            client.publish(json.dumps(message))

            # Now start the server side and handle the message
            with rabbitmq.RabbitClient(context) as server:
                server.start(
                    subscribe=rabbitmq.RabbitQueue(context.feeds()))
                recv = server.receive()
                self.assertIsNotNone(recv)
                LOGGER.info(f'Server received: {recv}')

                # And send a reply to the client
                reply = {'action': 'Inbound', 'reply': 'the reply'}
                LOGGER.info(f'Server sending: {reply}')
                server.publish(json.dumps(reply),
                               rabbitmq.RabbitQueue(context.replies()))

            # Now catch the reply in the client
            message = client.receive()
            self.assertIsNotNone(message)
            LOGGER.info(f"Client received: {message}")
    except Exception:
        LOGGER.exception("Error")
        raise  # bare raise keeps the original traceback intact
def __init__(self, context: Context, publish_queue: str = None,
             subscribe_queue: str = None,
             max_msg_size: int = 2*1024*1024):
    """
    Class initializer

    :param context: connection details
    :type context: :class:`.Context`
    :param publish_queue: name of the publish queue
    :type publish_queue: `str`
    :param subscribe_queue: name of the subscribe queue (auto-generated
        when omitted)
    :type subscribe_queue: `str`
    :param max_msg_size: maximum permissible message length
    :type max_msg_size: `int`
    """
    # Zero-argument super() — modern form, consistent with the rest of
    # the codebase; behaviorally identical to super(Messenger, self)
    super().__init__(context)

    # Max size of a message for dispatch
    self.max_msg_size = max_msg_size

    # Keep a copy here - lots of re-use
    self.timeout = context.timeout()

    if not publish_queue:
        # Publish not over-ridden so use context version
        publish_queue = context.feeds()

    self.start_subscriber(queue=rabbitmq.RabbitQueue(subscribe_queue))
    self.start_publisher(queue=rabbitmq.RabbitQueue(publish_queue))

    # Initialise the catalog with the target subscribe queue
    self.catalog = catalog.MessageCatalog(context.user(),
                                          self.get_subscribe_queue())
def test_timeout(self):
    """
    Verify that receive() raises RabbitTimedOutException when no message
    arrives within the timeout.

    Fixes: dropped the unused ``message =`` assignment — a value bound
    inside an ``assertRaises`` body is unreachable after the raise.
    """
    context = rabbitmq.RabbitContext.from_credentials_file(
        self.credentials)

    with rabbitmq.RabbitClient(context) as client:
        client.start(publish=rabbitmq.RabbitQueue(context.feeds(),
                                                  purge=True,
                                                  durable=True),
                     subscribe=rabbitmq.RabbitQueue(context.replies(),
                                                    durable=True))

        # Nothing was published to the replies queue, so this must time out
        with self.assertRaises(rabbitmq.RabbitTimedOutException):
            client.receive(timeout=1)
def __init__(self, context, publish_queue: str = None,
             subscribe_queue: str = None):
    """
    Class initializer

    :param context: connection details
    :param publish_queue: name of the publish queue; defaults to the
        context's feed queue when omitted
    :type publish_queue: `str`
    :param subscribe_queue: name of the subscribe queue (auto-generated
        when omitted)
    :type subscribe_queue: `str`
    """
    # Zero-argument super() — modern form, behaviorally identical to
    # super(CastorMessenger, self)
    super().__init__(context)

    # Correlation counter for matching requests to replies
    self.correlation = 0
    # Unique identifier for this client instance
    self.client_id = str(uuid.uuid4())

    if not publish_queue:
        # Publish not over-ridden so use context version
        publish_queue = context.feeds()

    self.start_subscriber(queue=rabbitmq.RabbitQueue(subscribe_queue))
    self.start_publisher(queue=rabbitmq.RabbitQueue(publish_queue))
def test_round_trip(self):
    """
    Round-trip a message between two RabbitDualClients: the client
    publishes to the feed queue, the server subscribes to it, and the
    result is logged (no assertion beyond a successful receive).
    """
    context = rabbitmq.RabbitContext.from_credentials_file(
        self.credentials)

    with rabbitmq.RabbitDualClient(context) as client:
        # Client listens on replies and publishes to a purged feed queue
        client.start_subscriber(
            queue=rabbitmq.RabbitQueue(context.replies(), durable=True))
        client.start_publisher(queue=rabbitmq.RabbitQueue(
            context.feeds(), purge=True, durable=True))
        client.send_message(json.dumps({'blah': 'blah'}))

        with rabbitmq.RabbitDualClient(context) as server:
            # Server mirrors the client: publishes replies, consumes feeds
            server.start_publisher(queue=rabbitmq.RabbitQueue(
                context.replies(), durable=True))
            server.start_subscriber(
                queue=rabbitmq.RabbitQueue(context.feeds(), durable=True))
            # 5 is presumably a timeout in seconds — TODO confirm against
            # receive_message's signature
            message = server.receive_message(5)
            LOGGER.info(message)
def test_round_trip_no_declare(self):
    """
    Round-trip using an auto-generated exclusive subscribe queue on the
    client side, verifying the server can address that queue by name
    without re-declaring it (declare=False).
    """
    context = rabbitmq.RabbitContext.from_credentials_file(
        self.credentials)

    with rabbitmq.RabbitDualClient(context) as client:
        subscribe = rabbitmq.RabbitQueue(
        )  #Force generation of temp exclusive queue
        client.start_subscriber(queue=subscribe)
        client.start_publisher(queue=rabbitmq.RabbitQueue(
            context.feeds(), purge=True, durable=True))
        client.send_message(json.dumps({'blah': 'blah'}))

        with rabbitmq.RabbitDualClient(context) as server:
            #Ensure this queue is not redeclared as it is exclusive to the client
            server.start_publisher(
                queue=rabbitmq.RabbitQueue(subscribe.name, declare=False))
            server.start_subscriber(
                queue=rabbitmq.RabbitQueue(context.feeds(), durable=True))
            # 5 is presumably a timeout in seconds — TODO confirm against
            # receive_message's signature
            message = server.receive_message(5)
            LOGGER.info(message)
def main():
    """
    Command-line entry point: round-trip a message through the broker.

    Parses credentials/queue names from the command line, publishes a
    message on the feed queue, consumes it server-side via
    ServerMessageHandler, sends a reply, and receives the reply back on
    the client.

    Fixes: ``raise err`` replaced with a bare ``raise`` so the original
    traceback is preserved.
    """
    parser = argparse.ArgumentParser(description='Messaging Client')
    parser.add_argument('--credentials', required=True)
    parser.add_argument('--feed_queue', required=True)
    parser.add_argument('--reply_queue', required=True)
    parser.add_argument('--broker_user', help='Defaults to credentials file')
    parser.add_argument('--broker_password',
                        help='Defaults to credentials file')
    cmdline = parser.parse_args()

    LOGGER.info("Starting...")
    context = rabbitmq.RabbitContext.from_credentials_file(
        cmdline.credentials, cmdline.broker_user, cmdline.broker_password)

    try:
        with rabbitmq.RabbitClient(context) as client:
            client.start(publish=rabbitmq.RabbitQueue(cmdline.feed_queue,
                                                      purge=True),
                         subscribe=rabbitmq.RabbitQueue(cmdline.reply_queue))

            message = {'action': 'Outbound', 'payload': 'some data'}
            LOGGER.info(f'sending {message}')
            client.publish(json.dumps(message))

            with rabbitmq.RabbitClient(context) as server:
                server.start(
                    subscribe=rabbitmq.RabbitQueue(cmdline.feed_queue))
                mh = ServerMessageHandler()
                server.receive(mh.handler, max_messages=1)

                #And send a reply to the client
                reply = {'action': 'Inbound', 'reply': 'the reply'}
                server.publish(json.dumps(reply),
                               rabbitmq.RabbitQueue(cmdline.reply_queue))

            #Now catch the reply in the client
            message = client.receive()
            LOGGER.info(f"client got {message}")
    except Exception as err:
        LOGGER.info("Error %r", err)
        raise  # bare raise keeps the original traceback intact
def _send(self, message: dict, queue: str = None) -> None:
    """
    Serialize a message and dispatch it, returning immediately.

    Throws: An exception on failure

    :param message: message to be sent
    :type message: `dict`
    :param queue: name of the publish queue; the established publisher
        queue is used when omitted
    :type queue: `str`
    """
    payload = self.context.serializer().serialize(message)
    if queue:
        destination = rabbitmq.RabbitQueue(queue)
    else:
        destination = None
    super().send_message(payload, destination)
def _send(self, message: dict, queue: str = None) -> None:
    """
    Send a message and return immediately.

    Throws: An exception on failure

    :param message: message to be sent
    :type message: `dict`
    :param queue: name of the publish queue
    :type queue: `str`
    """
    # Fix: the function returns nothing — the previous `-> dict`
    # annotation and "Returns: dict" docstring were wrong (the identical
    # sibling implementation is annotated -> None)
    message = serializer.Serializer.serialize(message)
    if len(message) > self.max_msg_size:
        raise BufferError(f"Messenger: payload too large: {len(message)}.")

    pub_queue = rabbitmq.RabbitQueue(queue) if queue else None
    super(Messenger, self).send_message(message, pub_queue)
def __init__(self, context: Context, publish_queue: str = None,
             subscribe_queue: str = None,
             max_msg_size: int = 2 * 1024 * 1024):
    """
    Class initializer

    :param context: connection details
    :type context: :class:`.Context`
    :param publish_queue: name of the publish queue; defaults to the
        context's feed queue when omitted
    :type publish_queue: `str`
    :param subscribe_queue: name of the subscribe queue (auto-generated
        when omitted)
    :type subscribe_queue: `str`
    :param max_msg_size: maximum permissible message length
    :type max_msg_size: `int`
    """
    # Zero-argument super() — modern form, behaviorally identical to
    # super(Messenger, self)
    super().__init__(context)

    # Max size of a message for dispatch
    self.max_msg_size = max_msg_size

    # Keep a copy here - lots of re-use
    self.timeout = context.timeout()

    if not publish_queue:
        # Publish not over-ridden so use context version
        publish_queue = context.feeds()

    self.start_subscriber(queue=rabbitmq.RabbitQueue(subscribe_queue))
    self.start_publisher(queue=rabbitmq.RabbitQueue(publish_queue))

    # Initialise the catalog with the target subscribe queue
    self.catalog = catalog.MessageCatalog(context.user(),
                                          self.get_subscribe_queue())
def _send(self, message: dict, queue: str = None) -> None:
    """
    Serialize *message*, enforce the size limit, and dispatch it without
    waiting for a reply.

    Throws: An exception on failure (BufferError when the serialized
    payload exceeds the configured maximum)

    :param message: message to be sent
    :type message: `dict`
    :param queue: name of the publish queue; the established publisher
        queue is used when omitted
    :type queue: `str`
    """
    message = serializer.Serializer.serialize(message)

    # Guard against oversized payloads before touching the broker
    if len(message) > self.max_msg_size:
        raise BufferError(f"Messenger: payload too large: {len(message)}.")

    if queue:
        destination = rabbitmq.RabbitQueue(queue)
    else:
        destination = None
    super(Messenger, self).send_message(message, destination)
def __enter__(self):
    """
    Context-manager entry: start the subscriber and publisher on the
    configured queues, initialise the message catalog, and return self.
    """
    sub_queue = rabbitmq.RabbitQueue(self.subscribe_queue)
    pub_queue = rabbitmq.RabbitQueue(self.publish_queue)
    self.start_subscriber(queue=sub_queue)
    self.start_publisher(queue=pub_queue)
    # Catalog is keyed on the live subscribe queue (which may have been
    # auto-generated during start_subscriber)
    self.catalog = catalog.MessageCatalog(self.get_subscribe_queue())
    return self
def __enter__(self):
    """
    Context-manager entry: start the subscriber and publisher on the
    configured queues and return self.
    """
    sub_queue = rabbitmq.RabbitQueue(self.subscribe_queue)
    pub_queue = rabbitmq.RabbitQueue(self.publish_queue)
    self.start_subscriber(queue=sub_queue)
    self.start_publisher(queue=pub_queue)
    return self