Example #1
def test_load_schedule():
    base_path = "./test_dfgs"
    test_files = os.listdir(base_path)
    for file in test_files:
        if os.path.isfile(os.path.join(base_path, file)) and file.endswith(".json"):
            test_sched = Schedule()
            test_sched.load_dfg(f"{base_path}/{file}")
Example #2
def test_data_insertion():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"

    with open('config.json') as config_file:
        data = json.load(config_file)

    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
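    # Attach schedule edge 14 to the first PE as both source and destination,
    # then insert the edge's data into that PE's namespace.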
    pe_id = new_arch.cat_component_map['pe'][0].component_id
    sched_edge = test_sched.get_schedule_edge(14)
    sched_edge.set_ready_cycle(0)
    sched_edge.add_source_component(pe_id)
    sched_edge.add_dest_component(pe_id)
    _ = new_arch.add_namespace_data(1, pe_id, sched_edge.namespace_name, sched_edge)
Example #3
def test_initialized_namespaces():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"

    with open('config.json') as config_file:
        data = json.load(config_file)

    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)
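    # After scheduling, the 'ND' and 'NW' namespaces of PEs 1 and 2 should hold
    # the expected DFG source nodes in their first cycle-storage slot.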
    init_nd = new_arch.cat_component_map['pe'][1].get_namespace('ND')
    nd_storage = init_nd.get_cycle_storage()
    assert nd_storage[0].src_id == 11

    init_nw = new_arch.cat_component_map['pe'][1].get_namespace('NW')
    nw_storage = init_nw.get_cycle_storage()
    assert nw_storage[0].src_id == 18


    init_nd = new_arch.cat_component_map['pe'][2].get_namespace('ND')
    nd_storage = init_nd.get_cycle_storage()
    assert nd_storage[0].src_id == 13

    init_nw = new_arch.cat_component_map['pe'][2].get_namespace('NW')
    nw_storage = init_nw.get_cycle_storage()
    assert nw_storage[0].src_id == 20
Example #4
def test_schedule_printing():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/logistic_dfg.json"

    with open('config.json') as config_file:
        data = json.load(config_file)

    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)

    test_sched.print_schedule_graph("./test_outputs/schedule_logistic_dfg.json")
Example #5
def test_src_child_edge():
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"

    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    sched_edge_id = test_sched.get_parent_edge_id(10, 3)
    sched_edge = test_sched.get_schedule_edge(sched_edge_id)
    assert sched_edge.is_src_edge
Example #6
def test_reco():
    Component.reset_ids()
    base_path = "./test_dfgs"
    dfg_name = "reco_dfg.json"
    file_path = f"{base_path}/{dfg_name}"

    with open('config.json') as config_file:
        data = json.load(config_file)

    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)
    test_sched.print_schedule_graph(f"./test_outputs/schedule_{dfg_name}")
    validate_graph(test_sched, new_arch)
    print(f"Graph width: {test_sched.get_max_width()}\nAverage pe utilization: {new_arch.pe_utilization()}")
    pprint.pprint(new_arch.pu_utilization())
Example #7
def test_data_node():
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"

    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    sched_node = test_sched.get_schedule_node(2)
    non_data_node = test_sched.get_schedule_node(11)
    assert sched_node.is_data_node()
    assert not non_data_node.is_data_node()
Example #8
def test_get_node():
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"

    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    sched_node = test_sched.get_schedule_node(0)
    assert sched_node.op_name == 'source'
    with pytest.raises(KeyError):
        test_sched.get_schedule_node(1000)
Example #9
def test_graph_width():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/logistic_dfg.json"

    with open('config.json') as config_file:
        data = json.load(config_file)

    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)

    assert test_sched.get_max_width() == 3
Example #10
def test_node_depth():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/logistic_dfg.json"

    with open('config.json') as config_file:
        data = json.load(config_file)

    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)
    sched_node = test_sched.get_schedule_node(15) # This is the sigmoid operation node

    assert sched_node.depth == 5
Example #11
def test_create_schedule():
    Component.reset_ids()
    base_path = "./test_dfgs"
    file_path = f"{base_path}/linear_dfg.json"

    with open('config.json') as config_file:
        data = json.load(config_file)

    new_arch = TablaTemplate(data)
    test_sched = Schedule()
    test_sched.load_dfg(file_path)
    test_sched.schedule_graph(new_arch)
    validate_graph(test_sched, new_arch)
Example #12
    def generate_pe_instructions(self,
                                 schedule: Schedule,
                                 arch: TablaTemplate,
                                 filename,
                                 debug="values"):
        if not schedule.is_dfg_loaded():
            raise RuntimeError(f"Schedule has not loaded a DFG yet.")
        pes = [
            pe for _, pe in arch.category_component_dict["pe"].items()
            if isinstance(pe, PE)
        ]
        config = self.compute_instr_widths(arch.config, debug=debug)
        address_width = int(np.ceil(np.log2(arch.max_instr)))

        pe_blocks = ["generate\n"]
        max_pe = -1
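        # Emit one case block per PE inside the Verilog generate region: each
        # meta-node value becomes one rdata word (instr_len bits wide) selected
        # by its index on the address bus. PEs must arrive in ascending
        # component_id order, which the check below enforces.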
        for pe_id, pe in enumerate(pes):
            if pe.component_id <= max_pe:
                raise RuntimeError(
                    "Not iterating over components in the correct sequence")
            else:
                max_pe = pe.component_id
            pe_str = f"if(peId == {pe_id}) begin\n" \
                     f"\talways @(*) begin\n" \
                     f"\t\tcase(address)\n"
            pe_meta_nodes = pe.meta_nodes
            for index, meta_node in enumerate(pe_meta_nodes):
                bin_str = f"\t\t\t{address_width}'d{index} : rdata = {config['instr_len']}'b{np.binary_repr(meta_node.value, width=config['instr_len'])};\n"
                pe_str += bin_str
            pe_str += f"\t\t\tdefault : rdata = {config['instr_len']}'b{np.binary_repr(0, width=config['instr_len'])};\n"
            pe_str += f"\t\tendcase\n"
            pe_str += f"\tend\n"
            pe_str += f"end\n"
            pe_str += f"\n"
            pe_blocks.append(pe_str)
        pe_blocks.append("endgenerate")
        self.write_instr_file(pe_blocks, filename)
Example #13
def main(args):
    with open(args.config_file) as config_file:
        config = json.load(config_file)

    # Instantiate an architecture for scheduling
    architecture = TablaTemplate(config)
    schedule = Schedule()
    schedule.load_dfg(args.dfg_file)
    schedule.schedule_graph(architecture)

    print('Generating instructions for Weight Data')
    # Get a list of weights (DFG nodes)
    weights = get_input_weight_nodes(schedule, architecture)
    weight_file = args.weight_file
    if weight_file is None:
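        # No weight file supplied: generate random placeholder weights and
        # write them to a default file so the rest of the flow can run.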
        weight_file = "input_weights.txt"
        n_data_points = len(weights)
        #weight_data = [n for n in range(n_data_points)]
        weight_data = np.random.randint(0, 5, n_data_points)
        write_to_input_file(weight_data, weight_file)

    # Set data values to DFG nodes
    set_values_to_nodes(weight_file, weights)
    print_pe_assignments(weights, architecture)

    # Print weight to PE assignment for each AXI
    meminst_gen = MemoryInstructionGenerator(weights, Dtype.WEIGHT,
                                             config['num_axi'],
                                             config['num_lanes'],
                                             config['pes_per_lane'],
                                             architecture)
    meminst_gen.axi_controller.print_axi_contents()

    # Prepare directories to write outputs in
    mem_interface_artifacts_dir = args.output_directory
    if not os.path.exists(mem_interface_artifacts_dir):
        os.makedirs(mem_interface_artifacts_dir)
    axi_dir = os.path.join(mem_interface_artifacts_dir, 'axi')
    if not os.path.exists(axi_dir):
        os.makedirs(axi_dir)
    axi_weight_dir = os.path.join(axi_dir, 'weights')
    if not os.path.exists(axi_weight_dir):
        os.makedirs(axi_weight_dir)

    # Put placeholder values for each AXI and write to AXI files
    print('After filling in placeholders')
    for axi_index in range(config['num_axi']):
        weight_data = meminst_gen.axi_controller.gen_matrix_for_axi(axi_index)
        max_num_weights = meminst_gen.axi_controller.find_max_number_of_weights(
            weight_data)
        for lane in weight_data:
            if len(lane) < max_num_weights:
                num_placeholders = max_num_weights - len(lane)
                lane.extend([0 for _ in range(num_placeholders)])
        weight_data = np.array(weight_data)
        print(f'AXI {axi_index}')
        print(weight_data)
        print()
        meminst_gen.axi_controller.write_weights_from_axi(
            weight_data, os.path.join(axi_weight_dir, f'axi_{axi_index}.txt'))

    # TODO Write weights to corresponding PE files
    # write_pe_files(weight_file, weights, mem_interface_artifacts_dir, architecture, Dtype.WEIGHT)

    # Generate weight config file (weightInst.txt)
    wconf_gen = WeightConfigGenerator(architecture)
    wconf_gen.gen_weightconf(
        weights, os.path.join(mem_interface_artifacts_dir, 'weight_insts.txt'))

    print('Generating instructions for Input Data')
    # Get a list of input data (DFG nodes)
    input_data_nodes = get_input_data_nodes(schedule)
    input_data_file = args.input_data_file
    if input_data_file is None:
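        # No input-data file supplied: generate 784 random placeholder values
        # (with a final 500 appended) and write them to a default file.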
        input_data_file = "input_data.txt"
        n_data_points = len(input_data_nodes)
        #input_data = [n for n in range(n_data_points)]
        input_data = np.random.randint(0, 5, 784)
        input_data = np.append(input_data, [500])
        write_to_input_file(input_data, input_data_file)

    # Set data values to DFG nodes
    set_values_to_nodes(input_data_file, input_data_nodes)
    print_pe_assignments(input_data_nodes, architecture)

    meminst_gen = MemoryInstructionGenerator(input_data_nodes, Dtype.DATA,
                                             config['num_axi'],
                                             config['num_lanes'],
                                             config['pes_per_lane'],
                                             architecture)
    # Write AXI data to file
    axi_input_data_dir = os.path.join(axi_dir, 'input_data')
    if not os.path.exists(axi_input_data_dir):
        os.makedirs(axi_input_data_dir)
    meminst_gen.axi_controller.write_axi_data(axi_input_data_dir)

    # Generate memory instructions
    meminst_gen.gen_inst(
        os.path.join(mem_interface_artifacts_dir, 'meminst.json'))
    meminst_gen.gen_binary(
        os.path.join(mem_interface_artifacts_dir, 'meminst.txt'))

    # Write input data to corresponding PE files
    write_pe_files(input_data_file, input_data_nodes,
                   mem_interface_artifacts_dir, architecture, Dtype.DATA)

    print('Generating Verilog file for metadata')
    # Generate Verilog files for loading metadata
    meta_nodes = get_input_meta_nodes(schedule)
    meta_file = args.meta_file
    if meta_file is None:
        meta_file = 'meta.txt'
        meta_data = [1]
        write_to_input_file(meta_data, meta_file)

    # Set meta value to meta nodes
    for meta in meta_nodes:
        meta.value = meta_data[0]
    print_pe_assignments(meta_nodes, architecture)

    meta_gen = MetadataLoadGenerator(architecture)
    meta_gen.assign_meta_to_pe(meta_nodes)
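    # generate_pe_instructions (shown in Example #12) writes the per-PE Verilog
    # generate block for the metadata loader to meta.v.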
    meta_loader = os.path.join(mem_interface_artifacts_dir, 'meta.v')
    meta_gen.generate_pe_instructions(schedule, architecture, meta_loader)