def __build_report(
    network: Network,
    results: TestResult,
    main_graph_path: Path,
    query_graph_paths: Dict[str, Path],
) -> Dict[str, Any]:
    report: Dict[str, Any] = {
        "key_ids": [node_id.key_id for node_id in network.ids()],
        "graph": str(main_graph_path),
        "success_percentage": results.success_percentage,
        "average_num_requests": results.average_num_requests,
        "average_search_times_sec": results.average_search_times_sec,
        "searches": [],
    }
    for search_result in results.search_results:
        report["searches"].append({
            "from_id": search_result.from_id.key_id,
            "to_id": search_result.to_id.key_id,
            "success": search_result.success,
            "message_id": search_result.message_id,
            "num_requests": search_result.num_requests,
            "search_times_sec": search_result.search_times_sec,
            "graph": str(query_graph_paths[search_result.message_id]),
        })
    return report
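# Hypothetical usage sketch (not part of the original module): the dict
# returned by __build_report contains only JSON-serialisable values, so it
# could be persisted next to the rendered graphs roughly like this. The
# "report.json" file name and output_directory variable are assumptions for
# illustration only.
#
#     import json
#
#     report = __build_report(
#         network, results, main_graph_path, query_graph_paths)
#     with open(output_directory / "report.json", "w") as report_file:
#         json.dump(report, report_file, indent=2)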
def ensure_all_alive(network: Network, backend: Backend) -> None:
    log.debug("Ensuring all nodes in the network are alive")
    ids = network.ids()
    for attempt in range(NUM_ATTEMPTS):
        commands = [
            CliCommand(node_id, ["list-neighbours"]) for node_id in ids
        ]
        results = backend.run_commands(commands)
        # Only retry the nodes that failed to respond in this round.
        ids = [
            result.command.node_id
            for result in results
            if not result.successful()
        ]
        if not ids:
            log.debug("All nodes ensured alive")
            return
        log.debug(
            "At attempt %d/%d, still %d nodes not responding. "
            "Sleeping %d seconds.",
            attempt + 1,
            NUM_ATTEMPTS,
            len(ids),
            ATTEMPT_WAIT_SECS,
        )
        time.sleep(ATTEMPT_WAIT_SECS)
    raise AssertionError(
        f"{len(ids)} nodes still didn't reply after all attempts")
def main():
    parser = argparse.ArgumentParser("simulation")
    parser.add_argument(
        "-c",
        "--network_config",
        type=str,
        required=True,
        help="The file to read the simulation configuration from",
    )
    parser.add_argument(
        "-o",
        "--output_directory",
        type=str,
        default="simulation_output",
        help="Where to output simulation results",
    )
    parser.add_argument(
        "--comparison",
        type=str,
        choices=["angle"],
        default=None,
        help="Run a comparison of the performance on a variable",
    )
    parser.add_argument(
        "--benchmark",
        type=str,
        choices=["reliability", "resilience", "performance", "scalability"],
        default=None,
        help="Run a benchmark to see how well a configuration performs under "
        "varying conditions",
    )
    args = parser.parse_args()

    network_config = Path(args.network_config)
    output_directory = Path(args.output_directory)
    key_creator = KeyCreator()
    with open(str(network_config), "r") as file:
        # safe_load avoids executing arbitrary YAML tags and does not require
        # an explicit Loader argument on recent PyYAML versions.
        network = Network.from_config(yaml.safe_load(file), key_creator)

    if args.benchmark is not None:
        if args.benchmark == "reliability":
            benchmark = benchmarks.ReliabilityBenchmark(output_directory)
        elif args.benchmark == "resilience":
            benchmark = benchmarks.ResilienceBenchmark(output_directory)
        elif args.benchmark == "performance":
            benchmark = benchmarks.PerformanceBenchmark(output_directory)
        elif args.benchmark == "scalability":
            benchmark = benchmarks.ScalabilityBenchmark(output_directory)
        else:
            raise ValueError(f"Unrecognized benchmark type: {args.benchmark}")
        log.info(f"Running {args.benchmark} benchmark")
        benchmark.create(network)
        return

    log.info("Running configuration")
    simulate(network, output_directory / "cli" / utils.get_formatted_time())
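# Assumed entry point for running the module as a script; the original file
# may already define this elsewhere.
if __name__ == "__main__":
    main()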
def get_network(self, network: Network, quality_rating: float) -> Network:
    quality = ConnectionQuality(
        loss_perc=quality_rating * 0.15,
        delay_millis=quality_rating * 250,
        rate_kbps=(1 - quality_rating) * 1000,
    )
    quality_network = network._replace(connection_quality=quality)
    # Set the timeout high so that the daemon does not exit searches that
    # take too long.
    return quality_network.map_nodes(
        lambda n: n.replace(daemon_args={
            **n.daemon_args,
            "search_timeout_sec": 10000,
        }))
def connect_network(network: Network, backend: Backend) -> None:
    log.info("Connecting network together")
    ensure_all_alive(network, backend)
    for i in range(network.num_connects):
        log.info(f"Performing connection {i + 1}/{network.num_connects}")
        ids = network.ids()
        if network.connect_type == ConnectType.CYCLICAL:
            # Connect each node to the next one in the list, forming a chain.
            connections = list(zip(ids[:-1], ids[1:]))
        elif network.connect_type == ConnectType.ROOTED:
            # Connect every node to a single randomly chosen root node.
            [root_id] = network.random_ids(1)
            connections = [(node_id, root_id) for node_id in ids]
        else:
            raise AssertionError()
        commands = [
            CliCommand(
                a,
                [
                    "connect",
                    "--key", b.key_id,
                    "--address", backend.get_ip_address(b),
                ],
            )
            for a, b in connections
        ]
        results = backend.run_commands(commands)
        num_failed = sum(not result.successful() for result in results)
        log.info(
            "Out of %d connections, %d failed", len(connections), num_failed)
def test_all_searches(self):
    network = Network.from_config(
        {
            "num_search_tests": 10,
            "ipv6": True,
            "groups": [{"size": 10}],
        },
        KeyCreator(),
    )
    path = Path(
        "simulation_output/tests/ipv6") / f"{utils.get_formatted_time()}"
    result = simulator.simulate(network, path)
    self.assertEqual(result.success_percentage, 1)
def get_network(self, network: Network,
                disconnect_probability: float) -> Network:
    return network.map_nodes(
        lambda n: n.replace(
            disconnect_before_tests=random.random() < disconnect_probability))
def get_network(self, network: Network,
                malicious_probability: float) -> Network:
    return network.map_nodes(
        lambda n: _to_malicious_node(n)
        if random.random() < malicious_probability
        else n)
def get_network(self, network: Network, network_size: int) -> Network:
    assert len(network.nodes) >= max(NETWORK_SIZES), (
        "Configured network must be at least as large as the largest "
        "tested network size.")
    nodes = random.sample(network.nodes, network_size)
    return network.replace(nodes=nodes)