def test_initialized_namespaces():
    """After scheduling the linear DFG, PEs 1 and 2 must have their ND/NW
    namespaces initialized with the expected first source ids."""
    Component.reset_ids()
    dfg_path = "./test_dfgs/linear_dfg.json"
    with open('config.json') as cfg:
        arch_config = json.load(cfg)
    arch = TablaTemplate(arch_config)
    sched = Schedule()
    sched.load_dfg(dfg_path)
    sched.schedule_graph(arch)

    # (pe index, namespace name, expected src_id of first stored entry)
    expected_entries = [
        (1, 'ND', 11),
        (1, 'NW', 18),
        (2, 'ND', 13),
        (2, 'NW', 20),
    ]
    for pe_idx, ns_name, expected_src in expected_entries:
        namespace = arch.cat_component_map['pe'][pe_idx].get_namespace(ns_name)
        storage = namespace.get_cycle_storage()
        assert storage[0].src_id == expected_src
def test_path_creation():
    """Smoke test: scheduling the logistic DFG completes without raising."""
    Component.reset_ids()
    dfg_path = "./test_dfgs/logistic_dfg.json"
    with open('config.json') as cfg:
        arch_config = json.load(cfg)
    arch = TablaTemplate(arch_config)
    sched = Schedule()
    sched.load_dfg(dfg_path)
    sched.schedule_graph(arch)
def test_node_depth():
    """The sigmoid node of the logistic DFG must land at schedule depth 5."""
    Component.reset_ids()
    dfg_path = "./test_dfgs/logistic_dfg.json"
    with open('config.json') as cfg:
        arch_config = json.load(cfg)
    arch = TablaTemplate(arch_config)
    sched = Schedule()
    sched.load_dfg(dfg_path)
    sched.schedule_graph(arch)

    # Node 15 is the sigmoid operation node
    sigmoid_node = sched.get_schedule_node(15)
    assert sigmoid_node.depth == 5
def test_data_insertion():
    """Adding namespace data for a prepared schedule edge must succeed."""
    Component.reset_ids()
    dfg_path = "./test_dfgs/linear_dfg.json"
    with open('config.json') as cfg:
        arch_config = json.load(cfg)
    arch = TablaTemplate(arch_config)
    sched = Schedule()
    sched.load_dfg(dfg_path)

    # Wire edge 14 to the first PE on both ends, ready at cycle 0.
    pe_id = arch.cat_component_map['pe'][0].component_id
    edge = sched.get_schedule_edge(14)
    edge.set_ready_cycle(0)
    edge.add_source_component(pe_id)
    edge.add_dest_component(pe_id)
    _ = arch.add_namespace_data(1, pe_id, edge.namespace_name, edge)
def test_reco():
    """Schedule the reco DFG, dump its schedule graph, and report utilization."""
    Component.reset_ids()
    dfg_name = "reco_dfg.json"
    dfg_path = f"./test_dfgs/{dfg_name}"
    with open('config.json') as cfg:
        arch_config = json.load(cfg)
    arch = TablaTemplate(arch_config)
    sched = Schedule()
    sched.load_dfg(dfg_path)
    sched.schedule_graph(arch)

    sched.print_schedule_graph(f"./test_outputs/schedule_{dfg_name}")
    validate_graph(sched, arch)
    print(f"Graph width: {sched.get_max_width()}\nAverage pe utilization: {arch.pe_utilization()}")
    pprint.pprint(arch.pu_utilization())
def main(args):
    """Schedule the DFG in ``args.dfg_file`` on a TABLA architecture built from
    ``args.config_file`` and emit memory-interface artifacts (weight files,
    input-data files, memory instructions, and a Verilog metadata loader)
    under ``args.output_directory``.
    """
    with open(args.config_file) as config_file:
        config = json.load(config_file)

    # Instantiate an architecture for scheduling
    architecture = TablaTemplate(config)
    schedule = Schedule()
    schedule.load_dfg(args.dfg_file)
    schedule.schedule_graph(architecture)

    # Create the output tree up front; exist_ok avoids the racy
    # exists()/makedirs() pairs the original used.
    artifacts_dir = args.output_directory
    axi_dir = os.path.join(artifacts_dir, 'axi')
    os.makedirs(axi_dir, exist_ok=True)

    _gen_weight_artifacts(args, config, schedule, architecture, artifacts_dir, axi_dir)
    _gen_input_data_artifacts(args, config, schedule, architecture, artifacts_dir, axi_dir)
    _gen_metadata_artifacts(args, schedule, architecture, artifacts_dir)


def _gen_weight_artifacts(args, config, schedule, architecture, artifacts_dir, axi_dir):
    """Write random weight values, per-AXI weight files, and weight_insts.txt."""
    print('Generating instructions for Weight Data')
    # Get a list of weights (DFG nodes)
    weights = get_input_weight_nodes(schedule, architecture)
    weight_file = args.weight_file
    if weight_file is None:
        weight_file = "input_weights.txt"
    # Random placeholder weight values in [0, 5).
    weight_data = np.random.randint(0, 5, len(weights))
    write_to_input_file(weight_data, weight_file)
    # Set data values to DFG nodes
    set_values_to_nodes(weight_file, weights)
    print_pe_assignments(weights, architecture)

    # Print weight to PE assignment for each AXI
    meminst_gen = MemoryInstructionGenerator(
        weights, Dtype.WEIGHT, config['num_axi'], config['num_lanes'],
        config['pes_per_lane'], architecture)
    meminst_gen.axi_controller.print_axi_contents()

    axi_weight_dir = os.path.join(axi_dir, 'weights')
    os.makedirs(axi_weight_dir, exist_ok=True)

    # Pad every lane with zeros up to the longest lane, then write one
    # weight file per AXI.
    print('After filling in placeholders')
    for axi_index in range(config['num_axi']):
        axi_weights = meminst_gen.axi_controller.gen_matrix_for_axi(axi_index)
        max_num_weights = meminst_gen.axi_controller.find_max_number_of_weights(axi_weights)
        for lane in axi_weights:
            if len(lane) < max_num_weights:
                lane.extend([0] * (max_num_weights - len(lane)))
        axi_weights = np.array(axi_weights)
        print(f'AXI {axi_index}')
        print(axi_weights)
        print()
        meminst_gen.axi_controller.write_weights_from_axi(
            axi_weights, os.path.join(axi_weight_dir, f'axi_{axi_index}.txt'))

    # TODO Write weights to corresponding PE files
    # write_pe_files(weight_file, weights, artifacts_dir, architecture, Dtype.WEIGHT)

    # Generate weight config file (weight_insts.txt)
    wconf_gen = WeightConfigGenerator(architecture)
    wconf_gen.gen_weightconf(weights, os.path.join(artifacts_dir, 'weight_insts.txt'))


def _gen_input_data_artifacts(args, config, schedule, architecture, artifacts_dir, axi_dir):
    """Write random input-data values, AXI data files, memory instructions, and PE files."""
    print('Generating instructions for Input Data')
    # Get a list of input data (DFG nodes)
    input_data_nodes = get_input_data_nodes(schedule)
    input_data_file = args.input_data_file
    if input_data_file is None:
        input_data_file = "input_data.txt"
    # NOTE(review): 784 random values plus a trailing 500 looks specific to a
    # 28x28 (MNIST-like) model rather than len(input_data_nodes) -- confirm
    # before reusing this driver on other DFGs.
    input_data = np.random.randint(0, 5, 784)
    input_data = np.append(input_data, [500])
    write_to_input_file(input_data, input_data_file)
    # Set data values to DFG nodes
    set_values_to_nodes(input_data_file, input_data_nodes)
    print_pe_assignments(input_data_nodes, architecture)

    meminst_gen = MemoryInstructionGenerator(
        input_data_nodes, Dtype.DATA, config['num_axi'], config['num_lanes'],
        config['pes_per_lane'], architecture)

    # Write AXI data to file
    axi_input_data_dir = os.path.join(axi_dir, 'input_data')
    os.makedirs(axi_input_data_dir, exist_ok=True)
    meminst_gen.axi_controller.write_axi_data(axi_input_data_dir)

    # Generate memory instructions
    meminst_gen.gen_inst(os.path.join(artifacts_dir, 'meminst.json'))
    meminst_gen.gen_binary(os.path.join(artifacts_dir, 'meminst.txt'))

    # Write input data to corresponding PE files
    write_pe_files(input_data_file, input_data_nodes, artifacts_dir, architecture, Dtype.DATA)


def _gen_metadata_artifacts(args, schedule, architecture, artifacts_dir):
    """Generate the Verilog metadata loader (meta.v) for the scheduled graph."""
    print('Generating Verilog file for metadata')
    meta_nodes = get_input_meta_nodes(schedule)
    meta_file = args.meta_file
    if meta_file is None:
        meta_file = 'meta.txt'
    meta_data = [1]
    write_to_input_file(meta_data, meta_file)
    # Set meta value to meta nodes
    for meta in meta_nodes:
        meta.value = meta_data[0]
    print_pe_assignments(meta_nodes, architecture)

    meta_gen = MetadataLoadGenerator(architecture)
    meta_gen.assign_meta_to_pe(meta_nodes)
    meta_loader = os.path.join(artifacts_dir, 'meta.v')
    meta_gen.generate_pe_instructions(schedule, architecture, meta_loader)