def test_big(self):
    path = os.path.dirname(sys.modules[self.__module__].__file__)
    table_path = os.path.join(path, "table1.csv.gz")
    table = from_csv(table_path)
    compressor = RangeCompressor()
    compressed = compressor.compress_table(table)
    compare_tables(table, compressed)
def test_tables(self):
    tables = MulticastRoutingTables()
    path = os.path.dirname(sys.modules[self.__module__].__file__)
    table_path = os.path.join(path, "table2.csv.gz")
    table = from_csv(table_path)
    tables.add_routing_table(table)
    compressed = range_compressor(tables)
    c_table = compressed.get_routing_table_for_chip(0, 0)
    compare_tables(table, c_table)
def test_pair_big(self):
    class_file = sys.modules[self.__module__].__file__
    path = os.path.dirname(os.path.abspath(class_file))
    j_router = os.path.join(path, "many_to_one.json.gz")
    original_tables = from_json(j_router)
    compressed_tables = pair_compressor(original_tables)
    for original in original_tables:
        compressed = compressed_tables.get_routing_table_for_chip(
            original.x, original.y)
        compare_tables(original, compressed)
def test_big(self):
    set_config(
        "Mapping", "router_table_compress_as_far_as_possible", True)
    class_file = sys.modules[self.__module__].__file__
    path = os.path.dirname(os.path.abspath(class_file))
    j_router = os.path.join(path, "malloc_hard_routing_tables.json.gz")
    original_tables = from_json(j_router)
    mundy_compressor = OrderedCoveringCompressor()
    pre_compressor = UnorderedPairCompressor()
    # Run each compressor, and the two chained together, timing each stage.
    start = time.time()
    mundy_tables = mundy_compressor(original_tables, True)
    mundy_time = time.time()
    pre_tables = pre_compressor(original_tables, True)
    pre_time = time.time()
    both_tables = mundy_compressor(pre_tables, True)
    both_time = time.time()
    # Check that every compressed table still routes the same as the original.
    for original in original_tables:
        org_routes = set()
        for entry in original.multicast_routing_entries:
            org_routes.add(entry.spinnaker_route)
        mundy = mundy_tables.get_routing_table_for_chip(
            original.x, original.y)
        mundy_routes = set()
        for entry in mundy.multicast_routing_entries:
            mundy_routes.add(entry.spinnaker_route)
        compare_tables(original, mundy)
        pre = pre_tables.get_routing_table_for_chip(original.x, original.y)
        pre_routes = set()
        for entry in pre.multicast_routing_entries:
            pre_routes.add(entry.spinnaker_route)
        compare_tables(original, pre)
        both = both_tables.get_routing_table_for_chip(
            original.x, original.y)
        both_routes = set()
        for entry in both.multicast_routing_entries:
            both_routes.add(entry.spinnaker_route)
        compare_tables(original, both)
        print("org:", original.number_of_entries, len(org_routes),
              "mundy:", mundy.number_of_entries, len(mundy_routes),
              "pre:", pre.number_of_entries, len(pre_routes),
              "both:", both.number_of_entries, len(both_routes))
    print("Mundy", mundy_time - start)
    print("Unordered", pre_time - mundy_time)
    print("Mundy after Unordered", both_time - pre_time)
if MUNDY and PRE:
    both_tables = ordered_covering_compressor(pre_tables)
    both_time = time.time()
if PAIR:
    pair_tables = pair_compressor(original_tables)
    pair_time = time.time()
for original in original_tables:
    org_routes = set()
    for entry in original.multicast_routing_entries:
        org_routes.add(entry.spinnaker_route)
    if MUNDY:
        mundy = mundy_tables.get_routing_table_for_chip(
            original.x, original.y)
        mundy_routes = set()
        for entry in mundy.multicast_routing_entries:
            mundy_routes.add(entry.spinnaker_route)
        compare_tables(original, mundy)
    if PRE:
        pre = pre_tables.get_routing_table_for_chip(original.x, original.y)
        pre_routes = set()
        for entry in pre.multicast_routing_entries:
            pre_routes.add(entry.spinnaker_route)
        compare_tables(original, pre)
    if MUNDY and PRE:
        both = both_tables.get_routing_table_for_chip(
            original.x, original.y)
        both_routes = set()
        for entry in both.multicast_routing_entries:
            both_routes.add(entry.spinnaker_route)
        compare_tables(original, both)
    if PAIR:
        pair = pair_tables.get_routing_table_for_chip(
            original.x, original.y)
        pair_routes = set()
        for entry in pair.multicast_routing_entries:
            pair_routes.add(entry.spinnaker_route)
        compare_tables(original, pair)
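
# Sketch only: the blocks above repeatedly build a set of SpiNNaker routes from
# a table. Assuming the same table API (multicast_routing_entries and
# spinnaker_route), that pattern could be factored into a small helper like
# this; the helper name is hypothetical and not part of the original tests.
def _unique_routes(table):
    """Collect the distinct SpiNNaker routes used by a routing table."""
    return {entry.spinnaker_route for entry in table.multicast_routing_entries}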
def check_compression(self, compressed_tables):
    for original in self.original_tables:
        compressed = compressed_tables.get_routing_table_for_chip(
            original.x, original.y)
        # Each compressed table must be smaller but route identically.
        assert compressed.number_of_entries < original.number_of_entries
        compare_tables(original, compressed)
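
# Sketch of how check_compression might be driven, assuming this test case
# loads self.original_tables elsewhere (for example with from_json in setUp);
# the test name is hypothetical and the compressor call mirrors the
# pair_compressor usage above.
def test_pair_compression(self):
    compressed_tables = pair_compressor(self.original_tables)
    self.check_compression(compressed_tables)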