def test_initialized_namespaces():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"
    with open('config.json') as config_file:
        data = json.load(config_file)
    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)

    # After scheduling, the 'ND' and 'NW' namespaces of PEs 1 and 2 should be
    # initialized with the expected source node ids.
    init_nd = new_arch.cat_component_map['pe'][1].get_namespace('ND')
    nd_storage = init_nd.get_cycle_storage()
    assert nd_storage[0].src_id == 11

    init_nw = new_arch.cat_component_map['pe'][1].get_namespace('NW')
    nw_storage = init_nw.get_cycle_storage()
    assert nw_storage[0].src_id == 18

    init_nd = new_arch.cat_component_map['pe'][2].get_namespace('ND')
    nd_storage = init_nd.get_cycle_storage()
    assert nd_storage[0].src_id == 13

    init_nw = new_arch.cat_component_map['pe'][2].get_namespace('NW')
    nw_storage = init_nw.get_cycle_storage()
    assert nw_storage[0].src_id == 20
def run_benchmark(package_name, gen_schedule, gen_mem_instr, is_training_algorithm):
    Component.reset_ids()
    dfg_name = f"{package_name}.json"
    package_path = f"{DFG_ROOT}/{package_name}"
    optimizations = {
        'reorder_instr': True,
        'unused_ni_opt': True,
        'apply_reuse': True
    }
    file_path = dfg_name
    cfg_path = 'config.json'

    # Remove any stale compilation output for this package before recompiling.
    output_dir = f"{DFG_ROOT}/../../../compilation_output/{package_name}"
    if Path(output_dir).exists():
        shutil.rmtree(output_dir)

    compile(Path(f"{DFG_ROOT}/{dfg_name}").resolve(),
            cfg_path,
            f"{package_name}_input_data.txt",
            f"{package_name}_input_weights.txt",
            "meta.txt",
            sort_alg="custom",
            gen_sched_file=gen_schedule,
            gen_mem_instr=gen_mem_instr,
            save_data=True,
            debug=False,
            optimizations=optimizations,
            show_ns_utilization=["NI", "NW", "ND"],
            is_training_algorithm=is_training_algorithm)
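# A minimal usage sketch for run_benchmark. The package name "linear_dfg" and the
# flag values are assumptions for illustration (the DFG and its input/weight files
# are expected to live under DFG_ROOT); this is not a benchmark from the existing suite.
def test_run_benchmark_linear():
    run_benchmark("linear_dfg", gen_schedule=True, gen_mem_instr=True,
                  is_training_algorithm=True)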
def test_path_creation():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/logistic_dfg.json"
    with open('config.json') as config_file:
        data = json.load(config_file)
    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)
def test_node_depth():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/logistic_dfg.json"
    with open('config.json') as config_file:
        data = json.load(config_file)
    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)

    sched_node = test_sched.get_schedule_node(15)  # This is the sigmoid operation node
    assert sched_node.depth == 5
def test_data_insertion():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"
    with open('config.json') as config_file:
        data = json.load(config_file)
    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)

    # Give a single schedule edge a ready cycle and source/destination components,
    # then check that it can be inserted into its namespace on PE 0.
    pe_id = new_arch.cat_component_map['pe'][0].component_id
    sched_edge = test_sched.get_schedule_edge(14)
    sched_edge.set_ready_cycle(0)
    sched_edge.add_source_component(pe_id)
    sched_edge.add_dest_component(pe_id)
    _ = new_arch.add_namespace_data(1, pe_id, sched_edge.namespace_name, sched_edge)
def test_reco():
    Component.reset_ids()
    base_path = "./test_dfgs"
    dfg_name = "reco_dfg.json"
    file_path = f"{base_path}/{dfg_name}"
    with open('config.json') as config_file:
        data = json.load(config_file)
    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)

    test_sched.print_schedule_graph(f"./test_outputs/schedule_{dfg_name}")
    validate_graph(test_sched, new_arch)
    print(f"Graph width: {test_sched.get_max_width()}\n"
          f"Average pe utilization: {new_arch.pe_utilization()}")
    pprint.pprint(new_arch.pu_utilization())
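# The config-load / TablaTemplate / Schedule setup above is duplicated in every test.
# A minimal sketch of how it could be factored into a shared fixture, assuming pytest
# is the test runner; the fixture name "build_schedule" is hypothetical, not part of
# the existing suite.
import pytest


@pytest.fixture
def build_schedule():
    def _build(dfg_file):
        Component.reset_ids()
        with open('config.json') as config_file:
            data = json.load(config_file)
        arch = TablaTemplate(data)
        sched = Schedule()
        sched.load_dfg(f"./test_dfgs/{dfg_file}")
        sched.schedule_graph(arch)
        return sched, arch
    return _build

# A test would then request the fixture as an argument, e.g.
#   def test_linear(build_schedule):
#       sched, arch = build_schedule("linear_dfg.json")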