def run(
    duration: int,
    runtime_mode: str,
    runner_mode: str,
    start_messages: int,
    num_of_agents: int,
) -> List[Tuple[str, Union[int, float]]]:
    """Test multiagent message exchange."""
    # pylint: disable=import-outside-toplevel,unused-import
    # import manually due to some lazy imports in decision_maker
    import aea.decision_maker.default  # noqa: F401

    local_node = LocalNode()
    local_node.start()

    handler_name = "httpingpong"
    agents = []
    skills = {}
    for idx in range(num_of_agents):
        name = f"agent{idx}"
        agent = make_agent(agent_name=name, runtime_mode=runtime_mode)
        connection = OEFLocalConnection(
            local_node,
            configuration=ConnectionConfig(
                connection_id=OEFLocalConnection.connection_id,
            ),
            identity=agent.identity,
        )
        agent.resources.add_connection(connection)
        skill = make_skill(agent, handlers={handler_name: HttpPingPongHandler})
        agent.resources.add_skill(skill)
        agents.append(agent)
        skills[name] = skill

    runner = AEARunner(agents, runner_mode)
    runner.start(threaded=True)

    # Wait until every agent and the runner report running before load starts.
    for agent in agents:
        wait_for_condition(lambda: agent.is_running, timeout=5)
    wait_for_condition(lambda: runner.is_running, timeout=5)
    time.sleep(1)

    # Kick off ping-pong exchanges between every ordered pair of agents.
    # NOTE(review): skills is keyed by agent name but looked up by identity
    # address — assumes make_agent sets address == name; verify.
    for initiator, target in itertools.permutations(agents, 2):
        handler = cast(
            HttpPingPongHandler,
            skills[initiator.identity.address].handlers[handler_name],
        )
        for _ in range(int(start_messages)):
            handler.make_request(target.identity.address)

    time.sleep(duration)
    mem_usage = get_mem_usage_in_mb()
    local_node.stop()
    runner.stop()

    def handler_total(attribute: str) -> Union[int, float]:
        # Sum one numeric counter attribute across all agents' handlers.
        return sum(
            getattr(cast(HttpPingPongHandler, skill.handlers[handler_name]), attribute)
            for skill in skills.values()
        )

    total_messages = handler_total("count")
    rate = total_messages / duration
    rtt_total_time = handler_total("rtt_total_time")
    rtt_count = handler_total("rtt_count")
    if rtt_count == 0:
        # Sentinel: avoids ZeroDivisionError; yields a negative RTT to flag "no data".
        rtt_count = -1
    latency_total_time = handler_total("latency_total_time")
    latency_count = handler_total("latency_count")
    if latency_count == 0:
        latency_count = -1

    return [
        ("Total Messages handled", total_messages),
        ("Messages rate(envelopes/second)", rate),
        ("Mem usage(Mb)", mem_usage),
        ("RTT (ms)", rtt_total_time / rtt_count),
        ("Latency (ms)", latency_total_time / latency_count),
    ]
class TestSimpleSearchResult:
    """Test that a simple search result return the expected result."""

    def setup(self):
        """Set up the test.

        Starts an in-process local OEF node, connects a multiplexer with a
        single local connection, and registers one service so that the search
        in the test below has exactly one hit.
        """
        self.node = LocalNode()
        self.node.start()

        self.address_1 = "address"
        self.multiplexer = Multiplexer(
            [_make_local_connection(self.address_1, self.node,)]
        )
        self.multiplexer.connect()

        # register a service.
        self.dialogues = OefSearchDialogues(self.address_1)
        self.data_model = DataModel(
            "foobar",
            attributes=[Attribute("foo", int, True), Attribute("bar", str, True)],
        )
        service_description = Description(
            {"foo": 1, "bar": "baz"}, data_model=self.data_model
        )
        # Dialogue tracks the registration exchange; kept on self so the test
        # method shares the same dialogues store.
        register_service_request, self.sending_dialogue = self.dialogues.create(
            counterparty=str(OEFLocalConnection.connection_id),
            performative=OefSearchMessage.Performative.REGISTER_SERVICE,
            service_description=service_description,
        )
        envelope = Envelope(
            to=register_service_request.to,
            sender=register_service_request.sender,
            message=register_service_request,
        )
        self.multiplexer.put(envelope)

    @pytest.mark.flaky(reruns=MAX_FLAKY_RERUNS)  # TODO: check reasons! quite unstable test
    def test_not_empty_search_result(self):
        """Test that the search result contains one entry after a successful registration."""
        # build and send the request
        search_services_request, sending_dialogue = self.dialogues.create(
            counterparty=str(OEFLocalConnection.connection_id),
            performative=OefSearchMessage.Performative.SEARCH_SERVICES,
            query=Query(constraints=[], model=self.data_model),
        )
        envelope = Envelope(
            to=search_services_request.to,
            sender=search_services_request.sender,
            message=search_services_request,
        )
        self.multiplexer.put(envelope)

        # check the result
        # Blocking get with a timeout: the local node answers asynchronously.
        response_envelope = self.multiplexer.get(block=True, timeout=2.0)
        assert (
            response_envelope.protocol_specification_id
            == OefSearchMessage.protocol_specification_id
        )
        search_result = cast(OefSearchMessage, response_envelope.message)
        response_dialogue = self.dialogues.update(search_result)
        assert response_dialogue == sending_dialogue
        assert search_result.performative == OefSearchMessage.Performative.SEARCH_RESULT
        # Exactly the one agent registered in setup() is expected.
        assert search_result.agents == (self.address_1,)

    def teardown(self):
        """Teardown the test."""
        self.multiplexer.disconnect()
        self.node.stop()
def run(duration, runtime_mode, runner_mode, start_messages, num_of_agents):
    """Test multiagent message exchange."""
    # pylint: disable=import-outside-toplevel,unused-import
    # import manually due to some lazy imports in decision_maker
    import aea.decision_maker.default  # noqa: F401

    local_node = LocalNode()
    local_node.start()

    agents = []
    skills = []
    for idx in range(num_of_agents):
        agent = make_agent(agent_name=f"agent{idx}", runtime_mode=runtime_mode)
        connection = OEFLocalConnection(
            local_node,
            configuration=ConnectionConfig(
                connection_id=OEFLocalConnection.connection_id,
            ),
            identity=agent.identity,
        )
        agent.resources.add_connection(connection)
        skill = make_skill(agent, handlers={"test": TestHandler})
        agent.resources.add_skill(skill)
        agents.append(agent)
        skills.append(skill)

    runner = AEARunner(agents, runner_mode)
    runner.start(threaded=True)

    # Block until every agent and the runner report running.
    for agent in agents:
        wait_for_condition(lambda: agent.is_running, timeout=5)
    wait_for_condition(lambda: runner.is_running, timeout=5)
    time.sleep(1)

    # Seed the exchange: each ordered pair of agents gets start_messages envelopes.
    for sender, receiver in itertools.permutations(agents, 2):
        envelope = make_envelope(sender.identity.address, receiver.identity.address)
        for _ in range(int(start_messages)):
            sender.outbox.put(envelope)

    time.sleep(duration)
    mem_usage = get_mem_usage_in_mb()
    local_node.stop()
    runner.stop()

    def stat_total(attribute):
        # Aggregate a numeric handler counter over every agent's skill.
        return sum(getattr(skill.handlers["test"], attribute) for skill in skills)

    total_messages = stat_total("count")
    rate = total_messages / duration
    rtt_total_time = stat_total("rtt_total_time")
    rtt_count = stat_total("rtt_count")
    if rtt_count == 0:
        # Sentinel: prevents ZeroDivisionError; negative RTT flags "no samples".
        rtt_count = -1
    latency_total_time = stat_total("latency_total_time")
    latency_count = stat_total("latency_count")
    if latency_count == 0:
        latency_count = -1

    return [
        ("Total Messages handled", total_messages),
        ("Messages rate(envelopes/second)", rate),
        ("Mem usage(Mb)", mem_usage),
        ("RTT (ms)", rtt_total_time / rtt_count),
        ("Latency (ms)", latency_total_time / latency_count),
    ]