Example #1
 def msg_factory() -> object:
     msg = None
     if np.random.random() > 0.5:
         msg = Message1(field1=UniqueRef().ref,
                        field2=np.random.randint(0, 5000))
     else:
         msg = Message2(field3=UniqueRef().ref,
                        field4=np.random.random() * 1000)
     return msg
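
A note on the common thread: every example on this page builds on UniqueRef().ref. The class itself is not shown here, but judging from the docstring in Example #5 ("default is a UUID"), a minimal stand-in could look like the sketch below - an assumption for readability, not the library's actual implementation.

    import uuid

    class UniqueRef:
        """Hypothetical stand-in: exposes a UUID4 hex string via .ref"""

        def __init__(self):
            self._ref = uuid.uuid4().hex  # assumed representation

        @property
        def ref(self) -> str:
            return self._ref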
Example #2
 def __init__(self, responder_srcsink: SrcSink, address_book: List[SrcSink],
              sender_workref: UniqueWorkRef):
     self._work_ref = UniqueWorkRef(responder_srcsink.name, UniqueRef().ref)
     self._sender_srcsink = responder_srcsink
     self._address_book = address_book
     self._sender_workref = sender_workref
     return
Example #3
    def kpubsub_test(msg_factory: Callable[[], Any], num_msg: int,
                     msg_map_url: str) -> Tuple[List, List]:
        """
        Use message factory to create num_msg messages and send them over Kafka. This verifies that the message type
        is correctly set up to be serialised.

        Utility method that can be called by other object test classes to verify serialisation.

        :param msg_factory: Callable that creates instances of the messages type under test
        :param num_msg: The number of messages to send.
        :param msg_map_url: The URL of the Message Map YAML.
        """
        kps = TestKPubSub._bootstrap_kpubsub(msg_map_utl=msg_map_url)
        topic = UniqueRef().ref
        sent = list()
        rxed = list()
        kps.subscribe(topic=topic,
                      listener=ConsumerListener('Consumer', messages=rxed))
        time.sleep(2)
        ptc = ProducerTestClient(kps=kps,
                                 topic=topic,
                                 num_msg=num_msg,
                                 messages=sent,
                                 msg_factory=msg_factory)
        time.sleep(num_msg * 0.35)
        del ptc
        del kps
        return sent, rxed
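
A sketch of how kpubsub_test might be driven from a test case, pairing it with the msg_factory from Example #1; the message-map URL is a placeholder, not a real endpoint.

    def test_round_trip(self):
        # Send ten factory-generated messages and expect them all back, in order.
        sent, rxed = TestKPubSub.kpubsub_test(msg_factory=TestKPubSub.msg_factory,
                                              num_msg=10,
                                              msg_map_url='https://example.com/message-map.yml')  # placeholder
        self.assertEqual(sent, rxed)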
Example #4
    def test_kpubsub_single_topic_single_group(self):
        """
        Test random messages being sent over single topic being consumed by a single consumer in a single group
        """
        kps = self._bootstrap_kpubsub()
        topic = UniqueRef().ref  # Topic not seen by kafka before to keep test clean
        messages_sent = list()  # Keep a chronological list of messages sent
        messages_rx = list()  # Keep a chronological list of messages received

        # Single consumer - should see all messages once in the order sent.
        kps.subscribe(topic=topic,
                      listener=ConsumerListener("Consumer-1",
                                                messages=messages_rx))
        time.sleep(2)
        ptc = ProducerTestClient(kps=kps,
                                 topic=topic,
                                 num_msg=10,
                                 messages=messages_sent,
                                 msg_factory=TestKPubSub.msg_factory)
        time.sleep(8)  # Wait for messages to flow.
        # Expect rx = sent, same number, same order
        self.assertEqual(messages_rx, messages_sent)
        del ptc
        del kps
        return
Example #5
 def __init__(self,
              listener,
              topic: str,
              server: str,
              port: str,
              protoc: ProtoCopy,
              message_type_map: MessageTypeMap,
              group: str = UniqueRef().ref):
     """
     Bootstrap a Kafka listener
     :param listener: The callable object that will be passed the message as keyword arg 'msg'
     :param topic: The Topic to subscribe to
     :param server: The Kafka Server
     :param port: The Kafka Server Port
     :param protoc: The Protoc instance to handle serialise/de-serialise
     :param message_type_map: The mapping between message types and protobuf object handlers
     :param group: The Kafka group to listen as - default is a UUID
     """
     self.consumer = KafkaConsumer(bootstrap_servers='{}:{}'.format(server, port),
                                   group_id=group)
     self.consumer.subscribe([topic])
     self._stop = True
     self._listener = listener
     self._group_id = group
     self._protoc = protoc
     self._message_type_map = message_type_map
     self._runner = self._new_daemon_timer()  # keep the Timer; .start() returns None
     self._runner.start()
     self._stop = False
     return
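
Wiring such a listener up might look like the sketch below. The enclosing class name is not shown above, so KafkaListener is assumed, as are the server/port values and the pre-built MessageTypeMap.

    pc = ProtoCopy()
    pc.register(native_object_type=Message1, proto_buf_type=PBMessage1)

    listener = KafkaListener(listener=lambda msg: logging.info(msg),  # assumed class name; only __init__ is shown
                             topic=UniqueRef().ref,
                             server='localhost',          # placeholder
                             port='9092',                 # placeholder
                             protoc=pc,
                             message_type_map=type_map)   # assumed pre-built MessageTypeMap
    # group is omitted, so the listener joins under a fresh UUID group id.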
Example #6
 def topic(cls, prefix: str = None) -> str:
     """
     Generate a universally unique topic name
     :param prefix: Optional prefix; when given it is joined to the unique suffix with '.'
     :return: Universally unique topic name
     """
     if prefix is None:
         prefix = ''
         sep = ''
     else:
         sep = '.'
     return "{}{}{}".format(prefix, sep, UniqueRef().ref)
Example #7
 def __init__(self,
              initial_interval: float = None,
              max_interval: float = None,
              with_back_off: bool = False):
     self._ref = UniqueRef().ref
     self._lock = threading.RLock()
     self._count = 0
     self._with_back_off = with_back_off
     self._initial_interval = initial_interval
     self._max_interval = max_interval
     return
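
The constructor above only stores the settings; one plausible reading of with_back_off is an exponentially growing interval capped at max_interval. The helper below is an assumption about that behaviour, not the library's confirmed logic.

    def _next_interval(self) -> float:
        # Hypothetical: double the interval on each firing when back-off is
        # enabled, never exceeding max_interval; otherwise stay fixed.
        with self._lock:
            if not self._with_back_off:
                return self._initial_interval
            return min(self._initial_interval * (2 ** self._count), self._max_interval)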
Example #8
    def test_kpubsub_single_topic_multi_group_multi_consumer(self):
        """
        Create a TestPublisher that pushes various message types on a timer with a random delay between
        0.0 and 0.5 seconds, where all messages are pushed to the same topic.
        """
        kps = self._bootstrap_kpubsub()
        topic = UniqueRef().ref  # Topic not seen by kafka before to keep test clean
        group = UniqueRef().ref
        messages_sent = list()  # Keep a chronological list of messages sent
        messages_rx1 = list()  # Keep a chronological list of messages received - consumer 1
        messages_rx2 = list()  # Keep a chronological list of messages received - consumer 2

        # Two consumers in the same group - only one should see the messages, in the order sent.
        kps.subscribe(topic=topic,
                      listener=ConsumerListener("Consumer-1",
                                                messages=messages_rx1),
                      group=group)
        kps.subscribe(topic=topic,
                      listener=ConsumerListener("Consumer-2",
                                                messages=messages_rx2),
                      group=group)
        time.sleep(2)
        ptc = ProducerTestClient(kps=kps,
                                 topic=topic,
                                 num_msg=10,
                                 messages=messages_sent,
                                 msg_factory=TestKPubSub.msg_factory)
        time.sleep(10)  # Wait for messages to flow.
        # Expect rx = sent, same number, same order
        # but only one consumer should have got messages
        if len(messages_rx1) == 0:
            self.assertEqual(messages_rx2, messages_sent)
        else:
            self.assertEqual(messages_rx1, messages_sent)
        del ptc
        del kps
        return
Example #9
    def test_class_over_kafka(self):
        # Start the Kafka service with /AI-Intuition/docker/run-kafka.ps1.
        # You will need a docker environment (or Docker Desktop on Windows). The containers are on DockerHub,
        # but they can also be built locally, as the Dockerfiles for them are in the same directory as the run
        # script - which also contains the build script for the containers.
        logging.info("Run large scale test Class over Kafka")
        pc = ProtoCopy()
        pc.register(native_object_type=Message1, proto_buf_type=PBMessage1)

        kafka_topic = UniqueRef().ref  # Unique topic to ensure queue is empty
        logging.info("Setting up to use topic: {}".format(kafka_topic))
        kafka_pub = KafkaTestProducer()
        kafka_con = KafkaTestConsumer([kafka_topic])

        # Push Random Messages
        num_msg = 1000
        states = [State.S1, State.S2, State.S3]
        state_choice = np.random.choice(3, num_msg)
        num_tasks = 1 + np.random.choice(50, num_msg)

        messages = dict()
        for i in range(num_msg):
            task_list = list()
            for j in range(num_tasks[i]):
                rint = int(np.random.randint(10000, size=1)[0])
                task_list.append(Task(task_name="Task-{}".format(rint), task_id=rint))

            msg = Message1(field="{}-{}".format(np.random.random() * 10000, Gibberish.more_gibber()),
                           state=states[state_choice[i]],
                           tasks=task_list)
            messages[msg.field] = msg
            kafka_pub.pub(topic=kafka_topic, msg=pc.serialize(msg))
            logging.info("Sent message : {}".format(msg.field[0:100]))

        rx_msg = ""
        while rx_msg is not None:
            rx_msg = kafka_con.sub()
            if rx_msg is not None:
                rx_deserialized = pc.deserialize(rx_msg, Message1)
                expected = messages[rx_deserialized.field]
                self.assertEqual(expected, rx_deserialized)
                logging.info("Rx'ed and passed message :{}".format(expected.field[0:100]))

        return
Example #10
    def register_activity(self,
                          handler_for_activity: Callable[[float], float],
                          activity_interval: float,
                          activity_name: str = None) -> None:
        """
        Create a self-resetting timer that sends a message to the Handler such that the given callback is
        invoked at the given interval.
        Activity events are injected into the main handler function so that there is a single stream of events
        for the handler to manage.
        :param handler_for_activity: Callable handler for the timer event
        :param activity_interval: the timer interval
        :param activity_name: (optional) name for the activity.
        :return:
        """
        if not callable(handler_for_activity):
            raise ValueError("Handler for message must be callable")
        if not activity_interval > float(0):
            raise ValueError("timer must be greater than zero - {} was passed".format(activity_interval))
        if activity_name is None:
            activity_name = UniqueRef().ref

        if not self.__handler_registered_for_type(self.ActivityNotification):
            self.register_handler(self.do_activity, self.ActivityNotification)  # TODO look at annotation warning <-

        activity = self.ActivityNotification(name=activity_name,
                                             interval=activity_interval,
                                             func=self.call_handler,
                                             activity_handler=handler_for_activity)
        with self._activity_lock:
            if activity_name in self._activities:
                raise ValueError(
                    "Cannot register activity with same name as existing activity {}".format(activity_name))
            self._activities[activity_name] = activity
        activity.go()

        return
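
Registering a simple heartbeat activity might then look like the sketch below; the handler signature follows the Callable[[float], float] type hint, and the handler instance name is illustrative.

    def on_heartbeat(interval: float) -> float:
        logging.info("Heartbeat fired after {} seconds".format(interval))
        return interval  # hand the interval back unchanged

    handler.register_activity(handler_for_activity=on_heartbeat,  # 'handler' is a hypothetical instance
                              activity_interval=5.0,
                              activity_name='heartbeat')  # omit the name to get a UniqueRef-generated one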
Example #11
 def __init__(self, sender_srcsink: SrcSink,
              required_capabilities: List[Capability]):
     self._work_ref = UniqueWorkRef(sender_srcsink.name, UniqueRef().ref)
     self._sender_srcsink = sender_srcsink
     self._required_capabilities = required_capabilities
     return
Example #12
 def _new_ref(prefix: str, suffix: str) -> str:
     """
     Generate a universally unique work reference
     :return: Universally unique work reference
     """
     return "{}-{}-{}".format(str(prefix), UniqueRef().ref, str(suffix))