Example #1
def broker_s_edge(iqa: Instance) -> Broker:
    """
    Returns the slave broker instance connected to the edge 3 router
    :param iqa:
    :return:
    """
    return iqa.get_brokers('Broker.S.E3')[0]
Example #2
def broker_s_internal(iqa: Instance) -> Broker:
    """
    Returns the slave broker instance connected to the internal 2 router
    :param iqa:
    :return:
    """
    return iqa.get_brokers('Broker.S.I2')[0]
Example #3
def broker_m_edge(iqa: Instance) -> Broker:
    """
    Returns the master broker instance connected to the edge 3 router
    :param iqa:
    :return:
    """
    return iqa.get_brokers('Broker.M.E3')[0]
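The accessors above take the iqa fixture as an argument; in a py.test suite such helpers are typically registered as fixtures themselves so that tests can request them by name. A minimal sketch of how such a registration and a consumer might look, assuming the iqa fixture yields the topology Instance (the decorator and the sample test are illustrative additions, not part of the original code):

import pytest

@pytest.fixture
def broker_m_edge(iqa: Instance) -> Broker:
    # Same lookup as above, exposed as a named fixture (assumed registration)
    return iqa.get_brokers('Broker.M.E3')[0]

def test_master_edge_broker_resolves(broker_m_edge):
    # Hypothetical consumer: py.test injects the Broker returned by the fixture
    assert broker_m_edge is not None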
Example #4
    def test_asynchronous_durable_subscription(self, topic_durable, broker,
                                               iqa: Instance):
        """
        This test must be defined as the second one, because tests defined in classes run sequentially in py.test.
        That means the previous test has already left durable subscriptions in place on the related Broker instance.
        So when this test runs, the Publishers run first and publish a pre-defined number (self.MESSAGES)
        of messages into the related multicast address (topic_durable).
        The test waits till all publishers are done sending (or timed out).
        Next it connects one Durable Subscriber instance to the "topic_durable" address on every router
        in the topology, then waits till all receivers are done receiving (or timed out).
        Then it validates:
        - Number of messages sent
        - Number of messages received by each receiver (expecting self.MESSAGES * len(routers))
        - Integrity of received messages, by comparing the SHA1 of each received body with the SHA1 of the sent body
        :param topic_durable: Fixture that provides the topic to send/receive from
        :param broker: A Broker component instance (not used yet, but illustrates which broker is being used)
        :param iqa: IQAInstance fixture that provides the list of all routers that will be used
        :return:
        """

        # Broker instance
        broker_instance = iqa.get_brokers(broker)[0]
        assert broker_instance

        # List of routers to use
        routers = iqa.get_routers()

        # Create publisher list
        publishers = self.create_publishers(routers, topic_durable)

        # Wait till all senders have delivered their messages
        [p.join() for p in publishers]

        # Assert all senders sent correct amount of messages
        self.validate_all_messages_sent(publishers)

        # Create subscriber list
        # At this point, as previous test (synchronous) completed, a durable subscription already
        # exists, so subscribers should be able to retrieve their messages properly now
        subscribers = self.create_subscribers(routers,
                                              topic_durable,
                                              durable=True)

        # Wait till all subscribers are done
        # the stopped flag will be set to True if any of them times out
        [s.join() for s in subscribers]

        # Assert that all receivers received expected amount of messages
        self.validate_all_messages_received(publishers[0].message_body,
                                            routers, subscribers)
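The helpers validate_all_messages_sent and validate_all_messages_received called above are not part of this listing. A minimal sketch of what they might look like, assuming the publisher and subscriber wrappers expose sent, received and messages attributes (those names are assumptions made for illustration):

    def validate_all_messages_sent(self, publishers):
        # Every publisher is expected to have delivered exactly self.MESSAGES messages
        assert all(p.sent == self.MESSAGES for p in publishers)

    def validate_all_messages_received(self, message_body, routers, subscribers):
        import hashlib  # imported locally to keep the sketch self-contained
        # Each subscriber is expected to get one copy per publisher (one publisher per router)
        expected = self.MESSAGES * len(routers)
        sent_sha1 = hashlib.sha1(message_body.encode('utf-8')).hexdigest()
        for subscriber in subscribers:
            assert subscriber.received == expected
            # Integrity: every received body must hash to the same SHA1 as the sent body
            assert all(hashlib.sha1(body.encode('utf-8')).hexdigest() == sent_sha1
                       for body in subscriber.messages)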
Example #5
    def test_synchronous_nondurable_subscription(self, topic_nondurable,
                                                 broker, iqa: Instance):
        """
        Connects one Non-Durable Subscriber instance to the "topic_nondurable" address on every router.
        Once all subscribers are connected, it starts one Publisher against each router in the topology.
        Next the test waits till all senders are done sending and all receivers are done receiving (or timed out).
        Then it validates:
        - Number of messages sent
        - Number of messages received by each receiver (expecting self.MESSAGES * len(routers))
        - Integrity of received messages, by comparing the SHA1 of each received body with the SHA1 of the sent body
        :param topic_nondurable: Fixture that provides the topic to send/receive from
        :param broker: A Broker component instance (not used yet, but illustrates which broker is being used)
        :param iqa: IQAInstance fixture that provides the list of all routers that will be used
        :return:
        """

        # Broker instance
        broker_instance = iqa.get_brokers(broker)[0]
        assert broker_instance

        # List of routers to use
        routers = iqa.get_routers()

        # Create subscriber list
        subscribers = self.create_subscribers(routers,
                                              topic_nondurable,
                                              durable=False)

        # Wait till all receivers have been created
        while not all(s.receiver for s in subscribers):
            time.sleep(TestDurableNonDurableSubscription.DELAY)

        # Create publisher list
        publishers = self.create_publishers(routers, topic_nondurable)

        # Wait till all publishers and subscribers done
        # the stopped flag will be set to True if any of them times out
        [p.join() for p in publishers]
        [s.join() for s in subscribers]

        # Assert all senders sent correct amount of messages
        self.validate_all_messages_sent(publishers)

        # Assert that all receivers received expected amount of messages
        self.validate_all_messages_received(publishers[0].message_body,
                                            routers, subscribers)
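The create_publishers and create_subscribers helpers used by these tests are likewise not shown. A rough sketch of the publisher side, assuming a thread-based client wrapper (PublisherThread is a hypothetical stand-in for the suite's own client class):

    def create_publishers(self, routers, topic):
        publishers = []
        for router in routers:
            # One publisher per router, all sending to the same multicast address
            url = "amqp://%s:%s/%s" % (router.node.get_ip(), router.port, topic)
            publisher = PublisherThread(url=url, message_count=self.MESSAGES)
            publisher.start()
            publishers.append(publisher)
        return publishers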
Example #6
def broker_master(request, iqa: Instance) -> BrokerType:
    if "Broker.M." in request.param:
        broker_hostname = request.param
        return iqa.get_brokers(broker_hostname)[0]
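For broker_master to receive request.param, the fixture has to be parametrized, either directly or indirectly via pytest.mark.parametrize(..., indirect=True). A minimal sketch of direct parametrization, with the hostname values assumed from the naming scheme used above:

import pytest

@pytest.fixture(params=['Broker.M.E3', 'Broker.M.I2'])
def broker_master(request, iqa: Instance) -> BrokerType:
    # request.param carries one broker hostname per parametrized run (assumed values)
    if "Broker.M." in request.param:
        return iqa.get_brokers(request.param)[0]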
Example #7
    def test_asynchronous_nondurable_subscription(self, topic_nondurable,
                                                  broker, iqa: Instance):
        """
        Publishers run first and publish a pre-defined number (self.MESSAGES) of messages into the related
        multicast address (topic_nondurable).
        The test waits till all publishers are done sending (or timed out).
        Next it connects one Non-Durable Subscriber instance to the "topic_nondurable" address on every router
        in the topology, then waits till all receivers time out.
        Then it validates:
        - Number of messages sent
        - That all receivers time out
        - Number of messages received by each receiver (expecting 0)
        :param topic_nondurable: Fixture that provides the topic to send/receive from
        :param broker: A Broker component instance (not used yet, but illustrates which broker is being used)
        :param iqa: IQAInstance fixture that provides the list of all routers that will be used
        :return:
        """

        async_timeout = 30

        broker_instance = iqa.get_brokers(broker)[0]
        assert broker_instance

        # List of routers to use
        routers = iqa.get_routers()

        # Create subscriber list
        subscribers = self.create_subscribers(routers,
                                              topic_nondurable,
                                              durable=False,
                                              timeout=async_timeout)

        # Wait till all receivers have been created
        while not all(s.receiver for s in subscribers):
            time.sleep(TestDurableNonDurableSubscription.DELAY)

        # Now stop all receivers to ensure the non-durable subscription is discarded
        [s.stop_receiver() for s in subscribers]

        # Create publisher list
        publishers = self.create_publishers(routers, topic_nondurable)

        # Wait till all publishers are done sending
        [p.join() for p in publishers]

        # Create subscribers now with a small timeout and expect nothing to be received
        subscribers = self.create_subscribers(routers,
                                              topic_nondurable,
                                              durable=False,
                                              timeout=async_timeout)

        # Wait till all subscribers timeout
        [s.join() for s in subscribers]

        # Assert all senders sent correct amount of messages
        self.validate_all_messages_sent(publishers)

        # Assert that all receivers did not receive any message and that all of them timed out
        assert all(s.received == 0 for s in subscribers), \
            "Expecting no message received"
        assert all(s.timeout_handler.timed_out() for s in subscribers), \
            "Expecting all receivers to timeout"
Example #8
    def test_address_translation_sending(self, address, translates_to, sender,
                                         broker, router, iqa: Instance):
        """
        Send messages to the given "address" through the provided "router" instance. It uses the given
        "sender" (ClientExternal) instance and expects the queue named "translates_to" to exist in
        the "broker" instance, and that the number of messages in it will increase by the value defined
        in the SEND_COUNT constant over the initial message count.
        :param address:
        :param translates_to:
        :param sender:
        :param broker:
        :param router:
        :param iqa:
        :return:
        """

        # if not router.node.hostname.startswith('Router.I'):
        #     return

        # Get broker instance for given broker name
        broker_instance: Broker = iqa.get_brokers(broker)[0]
        assert broker_instance

        # Retrieve the current number of messages in the destination address (queue)
        queue = self._get_queue(broker_instance, translates_to)

        # Assert the queue has been found before reading its message count
        assert queue
        initial_message_count = int(queue.message_count)

        # URL to be used by the sender
        url = "amqp://%s:%s/%s" % (router.node.get_ip(), router.port, address)

        # Preparing the external sender
        logging.info("Sending messages to %s - using %s" %
                     (url, sender.implementation))
        sender.reset_command()
        sender.set_url(url)
        sender.command.control.count = self.SEND_COUNT
        sender.command.control.timeout = self.TIMEOUT  # Timeout flag passed to the command being executed
        sender.command.timeout = self.TIMEOUT  # Timeout for the command itself (needed because the timeout flag is not working properly)

        # Defining the message to be sent
        message = Message()
        message.body = self.MESSAGE_BODY

        # Sending and waiting for app to finish
        sender.send(message)
        sender.execution.wait()

        # Validating sender completed successfully
        logging.debug(
            "Sender exit code: %s - timed out: %s" %
            (sender.execution.returncode, sender.execution.timed_out))
        assert sender.execution.completed_successfully()

        # Delay 5 seconds to allow everything to settle
        time.sleep(5)

        # Validate that all messages have been delivered
        queue = self._get_queue(broker_instance, translates_to)
        logging.info(
            "Message count at queue %s - after senders completed = %s" %
            (translates_to, queue.message_count))
        assert (self.SEND_COUNT + initial_message_count) == int(
            queue.message_count)
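The _get_queue helper used above and in the next test is not shown in this listing. A sketch of its likely shape, assuming the Broker component can list its queues (the queues() call and the returned object's name/message_count attributes are assumptions):

    def _get_queue(self, broker_instance: Broker, queue_name: str):
        # Look the queue up by name; return None if the broker does not know it
        for queue in broker_instance.queues():
            if queue.name == queue_name:
                return queue
        return None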
Example #9
    def test_address_translation_receiving(self, address, translates_to,
                                           receiver, broker, router,
                                           iqa: Instance):
        """
        Receive messages from the provided "address", connecting through the "router" instance.
        This test executes an external client using the "receiver" instance and expects it to
        consume RECV_COUNT messages from the given "address". The "address" used for receiving should resolve
        to the "translates_to" value, which must be a queue name on the "broker" instance.
        The number of messages in the respective queue must be equal to or greater than RECV_COUNT.
        The test validates the number of received messages and also computes a SHA1 sum of each
        received message body, which must match the SHA1 sum of the message body sent earlier.
        :param address:
        :param translates_to:
        :param receiver:
        :param broker:
        :param router:
        :param iqa:
        :return:
        """

        # if not router.node.hostname.startswith('Router.I'):
        #     return

        # Get broker instance for given broker name
        broker_instance: Broker = iqa.get_brokers(broker)[0]
        assert broker_instance

        # Retrieve the current number of messages in the destination address (queue)
        queue = self._get_queue(broker_instance, translates_to)

        # Assert the queue has been found and that senders were able to send something
        assert queue
        initial_message_count = int(queue.message_count)
        logging.info(
            "Initial message count at queue %s - before receivers started = %s"
            % (translates_to, initial_message_count))
        assert initial_message_count >= self.RECV_COUNT

        # URL to be used by the receiver
        url = "amqp://%s:%s/%s" % (router.node.get_ip(), router.port, address)

        # Preparing receiver
        logging.info("Receiving messages from %s - using %s" %
                     (url, receiver.implementation))
        receiver.reset_command()
        receiver.set_url(url)
        receiver.command.control.count = self.RECV_COUNT
        # cannot be used with cli-rhea as it is "waiting" for the given amount of time (causing a timeout to happen)
        if receiver.implementation != 'nodejs':
            receiver.command.control.timeout = self.TIMEOUT  # Timeout flag for command to be executed
        receiver.command.logging.log_msgs = 'dict'
        receiver.command.timeout = self.TIMEOUT  # Timeout for command

        # Executes external receiver and waits until it finishes (or times out)
        receiver.receive()
        receiver.execution.wait()

        # Validating results
        logging.info(
            "Receiver exit code: %s - timed out: %s" %
            (receiver.execution.returncode, receiver.execution.timed_out))
        assert receiver.execution.completed_successfully()

        # Validating message integrity
        stdout_lines = receiver.execution.read_stdout(lines=True)
        assert len(stdout_lines) == self.RECV_COUNT

        # Reading each message body and comparing SHA1 sum
        for recv_msg in stdout_lines:
            # Failing if a blank line was received
            if not recv_msg:
                pytest.fail("Not expecting an empty message")
                continue
            try:
                recv_msg_dict = ast.literal_eval(recv_msg)
            except ValueError:
                pytest.fail("Invalid message body returned. Expecting a dict.")

            # Failing if invalid content received from external client
            if 'content' not in recv_msg_dict.keys():
                pytest.fail(
                    'Expecting a content element as part of message dict.')
                continue

            # Failing if message returned with empty body
            body = recv_msg_dict['content']
            if not body:
                pytest.fail("No message body available")
                continue

            # Validate integrity
            assert hashlib.sha1(
                body.encode('utf-8')).hexdigest() == self.MESSAGE_SHA1SUM

        # Delay 5 seconds to allow everything to settle
        time.sleep(5)

        # Validate that all messages have been received
        queue = self._get_queue(broker_instance, translates_to)
        logging.info(
            "Message count at queue %s - after receivers completed = %s" %
            (translates_to, queue.message_count))
        assert (initial_message_count - self.RECV_COUNT) == int(
            queue.message_count)
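The integrity checks in these two tests compare each received body against a pre-computed digest. The expected constant can be derived once from the body used by the sending test; in the suite these live as class constants (self.MESSAGE_BODY / self.MESSAGE_SHA1SUM), shown here at module level for brevity and with an illustrative payload:

import hashlib

MESSAGE_BODY = "abcdefgh" * 512  # illustrative payload, not the suite's actual body
MESSAGE_SHA1SUM = hashlib.sha1(MESSAGE_BODY.encode('utf-8')).hexdigest()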