Example #1
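# Excerpt from a unittest-style test case: Overlay.tile_in_roi(loc) returns
# False for tiles that fall inside any named region and True for tiles outside
# all of them.  Each region maps a name to (x_min, x_max, y_min, y_max) grid
# bounds.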
def test_in_roi_overlay(self):
    region_dict = {}
    region_dict['pr1'] = (10, 58, 0, 51)
    region_dict['pr2'] = (10, 58, 52, 103)
    overlay = Overlay(region_dict)
    self.assertFalse(overlay.tile_in_roi(GridLoc(18, 50)))
    self.assertFalse(overlay.tile_in_roi(GridLoc(18, 84)))
    self.assertTrue(overlay.tile_in_roi(GridLoc(8, 50)))
    self.assertTrue(overlay.tile_in_roi(GridLoc(18, 112)))
    self.assertTrue(overlay.tile_in_roi(GridLoc(80, 40)))
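
# prjxray routing import: patch an input rr_graph with tracks and edges from
# the fabric connection database, optionally restricted to an ROI or Overlay,
# and write the result plus a graph_node_pkey -> rr inode map.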
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--db_root',
                        required=True,
                        help='Project X-Ray Database')
    parser.add_argument('--part', required=True, help='FPGA part')
    parser.add_argument('--read_rr_graph',
                        required=True,
                        help='Input rr_graph file')
    parser.add_argument('--write_rr_graph',
                        required=True,
                        help='Output rr_graph file')
    parser.add_argument('--write_rr_node_map',
                        required=True,
                        help='Output map of graph_node_pkey to rr inode file')
    parser.add_argument('--connection_database',
                        help='Database of fabric connectivity',
                        required=True)
    parser.add_argument(
        '--synth_tiles',
        help=
        'If using an ROI, synthetic tile definition from prjxray-arch-import')
    parser.add_argument('--overlay',
                        action='store_true',
                        required=False,
                        help='Use synth tiles for Overlay instead of ROI')
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )
    parser.add_argument(
        '--vpr_capnp_schema_dir',
        help='Directory containing VPR schema files',
    )

    print('{} Starting routing import'.format(now()))
    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root, args.part)
    # The device grid is needed later when adding synthetic edges.
    grid = db.grid()
    populate_hclk_cmt_tiles(db)

    synth_tiles = None
    if args.overlay:
        assert args.synth_tiles
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

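        # Each overlay region is keyed by name and spans
        # (x_min, x_max, y_min, y_max) in grid coordinates.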
        region_dict = dict()
        for r in synth_tiles['info']:
            bounds = (r['GRID_X_MIN'], r['GRID_X_MAX'], r['GRID_Y_MIN'],
                      r['GRID_Y_MAX'])
            region_dict[r['name']] = bounds

        roi = Overlay(region_dict=region_dict)

        print('{} generating routing graph for Overlay.'.format(now()))
    elif args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )

        print('{} generating routing graph for ROI.'.format(now()))
    elif args.graph_limit:
        use_roi = True
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )
    else:
        use_roi = False
        roi = None
        synth_tiles = None

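    # Load the input rr_graph through the Cap'n Proto schema; the patched
    # graph is serialized back out to --write_rr_graph at the end.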
    capnp_graph = capnp_graph2.Graph(
        rr_graph_schema_fname=os.path.join(args.vpr_capnp_schema_dir,
                                           'rr_graph_uxsdcxx.capnp'),
        input_file_name=args.read_rr_graph,
        progressbar=progressbar_utils.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = capnp_graph.graph

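    # Without explicit synth tiles, fall back to the constant network found in
    # the graph; with --overlay, merge the constant-network tiles into the
    # user-provided synth tiles.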
    if synth_tiles is None:
        synth_tiles = find_constant_network(graph)

    if args.overlay:
        synth_tiles_const = find_constant_network(graph)
        synth_tiles['tiles'].update(synth_tiles_const['tiles'])

    with sqlite3.connect("file:{}?mode=ro".format(args.connection_database),
                         uri=True) as conn:

        populate_bufg_rebuf_map(conn)

        cur = conn.cursor()
        for name, internal_capacitance, drive_resistance, intrinsic_delay, penalty_cost, \
                switch_type in cur.execute("""
SELECT
    name,
    internal_capacitance,
    drive_resistance,
    intrinsic_delay,
    penalty_cost,
    switch_type
FROM
    switch;"""):
            # Add back missing switches, which were unused in the arch xml and
            # so were not emitted in the rr_graph XML.
            #
            # TODO: This can be removed once
            # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
            # is fixed.

            try:
                graph.get_switch_id(name)
                continue
            except KeyError:
                capnp_graph.add_switch(
                    graph2.Switch(
                        id=None,
                        name=name,
                        type=graph2.SwitchType[switch_type.upper()],
                        timing=graph2.SwitchTiming(
                            r=drive_resistance,
                            c_in=0.0,
                            c_out=0.0,
                            c_internal=internal_capacitance,
                            t_del=intrinsic_delay,
                            p_cost=penalty_cost,
                        ),
                        sizing=graph2.SwitchSizing(
                            mux_trans_size=0,
                            buf_size=0,
                        ),
                    ))

        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        print('{} Creating connection box list'.format(now()))
        connection_box_map = create_connection_boxes(conn, graph)

        # Match site pins rr nodes with graph_node's in the connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping, connection_box_map)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(conn, graph, node_mapping, use_roi, roi,
                              synth_tiles, segment_id)

        # Set of (src, sink, switch_id) tuples for pip edges that have already
        # been sent to VPR.  VPR cannot handle duplicate paths with the same
        # switch id.
        print('{} Adding synthetic edges'.format(now()))
        add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles,
                            args.overlay)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)

        node_remap = create_node_remap(capnp_graph.graph.nodes, channels_obj)

        x_dim, y_dim = phy_grid_dims(conn)
        connection_box_obj = graph.create_connection_box_object(x_dim=x_dim,
                                                                y_dim=y_dim)

        num_edges = get_number_graph_edges(conn, graph, node_mapping)
        print('{} Serializing to disk.'.format(now()))

        capnp_graph.serialize_to_capnp(
            channels_obj=channels_obj,
            connection_box_obj=connection_box_obj,
            num_nodes=len(capnp_graph.graph.nodes),
            nodes_obj=yield_nodes(capnp_graph.graph.nodes),
            num_edges=num_edges,
            edges_obj=import_graph_edges(conn, graph, node_mapping),
            node_remap=node_remap,
        )

        for k in node_mapping:
            node_mapping[k] = node_remap(node_mapping[k])

        print('{} Writing node map.'.format(now()))
        with open(args.write_rr_node_map, 'wb') as f:
            pickle.dump(node_mapping, f)
        print('{} Done writing node map.'.format(now()))
Example #3
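# prjxray arch.xml generator: stitches per-tile model, pb_type, and tile XML
# together via XInclude, emits a fixed layout from the connection database
# grid, and adds the switch, segment, device, and direct-connection sections
# VPR needs.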
def main():
    parser = argparse.ArgumentParser(description="Generate arch.xml")
    parser.add_argument(
        '--db_root', required=True, help="Project X-Ray database to use."
    )
    parser.add_argument('--part', required=True, help="FPGA part")
    parser.add_argument(
        '--output-arch',
        nargs='?',
        type=argparse.FileType('w'),
        help="""File to output arch."""
    )
    parser.add_argument(
        '--tile-types', required=True, help="Comma-separated tile types."
    )
    parser.add_argument(
        '--pb_types',
        required=True,
        help="Comma-separated pb_type names."
    )
    parser.add_argument(
        '--pin_assignments', required=True, type=argparse.FileType('r')
    )
    parser.add_argument('--use_roi', required=False)
    parser.add_argument('--use_overlay', required=False)
    parser.add_argument('--device', required=True)
    parser.add_argument('--synth_tiles', required=False)
    parser.add_argument('--connection_database', required=True)
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )

    args = parser.parse_args()

    tile_types = args.tile_types.split(',')
    pb_types = args.pb_types.split(',')

    model_xml_spec = "../../tiles/{0}/{0}.model.xml"
    pbtype_xml_spec = "../../tiles/{0}/{0}.pb_type.xml"
    tile_xml_spec = "../../tiles/{0}/{0}.tile.xml"

    xi_url = "http://www.w3.org/2001/XInclude"
    ET.register_namespace('xi', xi_url)
    xi_include = "{%s}include" % xi_url

    arch_xml = ET.Element(
        'architecture',
        {},
        nsmap={'xi': xi_url},
    )

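    # Pull each pb_type's model definitions into <models> via XInclude, using
    # an xpointer so only the child nodes of the included <models> are spliced
    # in.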
    model_xml = ET.SubElement(arch_xml, 'models')
    for pb_type in pb_types:
        ET.SubElement(
            model_xml, xi_include, {
                'href': model_xml_spec.format(pb_type.lower()),
                'xpointer': "xpointer(models/child::node())",
            }
        )

    tiles_xml = ET.SubElement(arch_xml, 'tiles')
    tile_capacity = {}
    for tile_type in tile_types:
        uri = tile_xml_spec.format(tile_type.lower())
        ET.SubElement(tiles_xml, xi_include, {
            'href': uri,
        })

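        # A tile's total capacity is the sum of its sub_tile capacities
        # (a sub_tile without a capacity attribute counts as 1).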
        with open(uri) as f:
            tile_xml = ET.parse(f, ET.XMLParser())

            tile_root = tile_xml.getroot()
            assert tile_root.tag == 'tile'
            tile_capacity[tile_type] = 0

            for sub_tile in tile_root.iter('sub_tile'):
                if 'capacity' in sub_tile.attrib:
                    tile_capacity[tile_type] += int(
                        sub_tile.attrib['capacity']
                    )
                else:
                    tile_capacity[tile_type] += 1

    complexblocklist_xml = ET.SubElement(arch_xml, 'complexblocklist')
    for pb_type in pb_types:
        ET.SubElement(
            complexblocklist_xml, xi_include, {
                'href': pbtype_xml_spec.format(pb_type.lower()),
            }
        )

    layout_xml = ET.SubElement(arch_xml, 'layout')
    db = prjxray.db.Database(args.db_root, args.part)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}
    synth_loc_map = {}
    synth_tile_map = {}
    roi = None
    if args.use_roi:
        with open(args.use_roi) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map
            tile_name = tile_info['tile_name']
            num_input = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'output', tile_info['pins']
                    )
                )
            )
            num_output = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'input', tile_info['pins']
                    )
                )
            )

            create_synth_io_tile(
                complexblocklist_xml, tiles_xml, tile_name, num_input,
                num_output
            )

            synth_loc_map[tuple(tile_info['loc'])] = tile_name

        create_synth_pb_types(model_xml, complexblocklist_xml)

        synth_tile_map = add_constant_synthetic_tiles(
            model_xml, complexblocklist_xml, tiles_xml
        )

        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] not in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map

            vpr_tile_type = synth_tile_map[tile_info['pins'][0]['port_type']]
            synth_loc_map[tuple(tile_info['loc'])] = vpr_tile_type

    elif args.graph_limit:
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )
    elif args.use_overlay:
        with open(args.use_overlay) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        region_dict = dict()
        for r in synth_tiles['info']:
            bounds = (
                r['GRID_X_MIN'], r['GRID_X_MAX'], r['GRID_Y_MIN'],
                r['GRID_Y_MAX']
            )
            region_dict[r['name']] = bounds

        roi = Overlay(region_dict=region_dict)

        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map
            tile_name = tile_info['tile_name']
            num_input = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'output', tile_info['pins']
                    )
                )
            )
            num_output = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'input', tile_info['pins']
                    )
                )
            )

            create_synth_io_tile(
                complexblocklist_xml, tiles_xml, tile_name, num_input,
                num_output
            )

            synth_loc_map[tuple(tile_info['loc'])] = tile_name

        create_synth_pb_types(model_xml, complexblocklist_xml, True)

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        c = conn.cursor()

        if 'GND' not in synth_tile_map:
            synth_tile_map, synth_loc_map_const = insert_constant_tiles(
                conn, model_xml, complexblocklist_xml, tiles_xml
            )

            synth_loc_map.update(synth_loc_map_const)

        # Find the grid extent.
        y_max = 0
        x_max = 0
        for grid_x, grid_y in c.execute("SELECT grid_x, grid_y FROM tile"):
            x_max = max(grid_x + 2, x_max)
            y_max = max(grid_y + 2, y_max)

        name = '{}-test'.format(args.device)
        fixed_layout_xml = ET.SubElement(
            layout_xml, 'fixed_layout', {
                'name': name,
                'height': str(y_max),
                'width': str(x_max),
            }
        )

        for vpr_tile_type, grid_x, grid_y, metadata_function in get_tiles(
                conn=conn,
                g=g,
                roi=roi,
                synth_loc_map=synth_loc_map,
                synth_tile_map=synth_tile_map,
                tile_types=tile_types,
                tile_capacity=tile_capacity,
        ):
            single_xml = ET.SubElement(
                fixed_layout_xml, 'single', {
                    'priority': '1',
                    'type': vpr_tile_type,
                    'x': str(grid_x),
                    'y': str(grid_y),
                }
            )
            metadata_function(single_xml)

        switchlist_xml = ET.SubElement(arch_xml, 'switchlist')

        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in c.execute("""
SELECT
    name,
    internal_capacitance,
    drive_resistance,
    intrinsic_delay,
    switch_type
FROM
    switch
WHERE
    name != "__vpr_delayless_switch__";"""):

            attrib = {
                'type': switch_type,
                'name': name,
                "R": str(drive_resistance),
                "Cin": str(0),
                "Cout": str(0),
                "Tdel": str(intrinsic_delay),
            }

            if internal_capacitance != 0:
                attrib["Cinternal"] = str(internal_capacitance)

            if False:
                attrib["mux_trans_size"] = str(0)
                attrib["buf_size"] = str(0)

            ET.SubElement(switchlist_xml, 'switch', attrib)

        segmentlist_xml = ET.SubElement(arch_xml, 'segmentlist')

        # VPR requires a segment, so add one.
        dummy_xml = ET.SubElement(
            segmentlist_xml, 'segment', {
                'name': 'dummy',
                'length': '2',
                'freq': '1.0',
                'type': 'bidir',
                'Rmetal': '0',
                'Cmetal': '0',
            }
        )
        ET.SubElement(dummy_xml, 'wire_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'opin_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'sb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(3))
        ET.SubElement(dummy_xml, 'cb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(2))

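        # One <segment> per row of the segment table: the sb pattern needs
        # length + 1 entries (one per switch block the wire passes) and the cb
        # pattern needs length entries (one per tile the wire spans).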
        for (name, length) in c.execute("SELECT name, length FROM segment"):
            if length is None:
                length = 1

            segment_xml = ET.SubElement(
                segmentlist_xml, 'segment', {
                    'name': name,
                    'length': str(length),
                    'freq': '1.0',
                    'type': 'bidir',
                    'Rmetal': '0',
                    'Cmetal': '0',
                }
            )
            ET.SubElement(segment_xml, 'wire_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'opin_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'sb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length + 1))
            ET.SubElement(segment_xml, 'cb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length))

    ET.SubElement(
        switchlist_xml,
        'switch',
        {
            'type': 'mux',
            'name': 'buffer',
            "R": "551",
            "Cin": ".77e-15",
            "Cout": "4e-15",
            # TODO: This value should be the "typical" pip switch delay.
            # This value is the dominant term in the inter-cluster delay
            # estimate.
            "Tdel": "0.178e-9",
            "mux_trans_size": "2.630740",
            "buf_size": "27.645901"
        }
    )

    device_xml = ET.SubElement(arch_xml, 'device')

    ET.SubElement(
        device_xml, 'sizing', {
            "R_minW_nmos": "6065.520020",
            "R_minW_pmos": "18138.500000",
        }
    )
    ET.SubElement(device_xml, 'area', {
        "grid_logic_tile_area": "14813.392",
    })
    ET.SubElement(
        device_xml, 'connection_block', {
            "input_switch_name": "buffer",
        }
    )
    ET.SubElement(device_xml, 'switch_block', {
        "type": "wilton",
        "fs": "3",
    })
    chan_width_distr_xml = ET.SubElement(device_xml, 'chan_width_distr')

    ET.SubElement(
        chan_width_distr_xml, 'x', {
            'distr': 'uniform',
            'peak': '1.0',
        }
    )
    ET.SubElement(
        chan_width_distr_xml, 'y', {
            'distr': 'uniform',
            'peak': '1.0',
        }
    )

    directlist_xml = ET.SubElement(arch_xml, 'directlist')

    pin_assignments = json.load(args.pin_assignments)

    # Choose the smallest distance for block-to-block connections with
    # multiple direct connections.  VPR cannot handle more than one direct
    # connection between the same pair of blocks.
    directs = {}
    for direct in pin_assignments['direct_connections']:
        key = (direct['from_pin'], direct['to_pin'])

        if key not in directs:
            directs[key] = []

        directs[key].append(
            (abs(direct['x_offset']) + abs(direct['y_offset']), direct)
        )

    ALLOWED_ZERO_OFFSET_DIRECT = [
        "GTP_CHANNEL_0",
        "GTP_CHANNEL_1",
        "GTP_CHANNEL_2",
        "GTP_CHANNEL_3",
        "GTP_CHANNEL_0_MID_LEFT",
        "GTP_CHANNEL_1_MID_LEFT",
        "GTP_CHANNEL_2_MID_LEFT",
        "GTP_CHANNEL_3_MID_LEFT",
        "GTP_CHANNEL_0_MID_RIGHT",
        "GTP_CHANNEL_1_MID_RIGHT",
        "GTP_CHANNEL_2_MID_RIGHT",
        "GTP_CHANNEL_3_MID_RIGHT",
        "GTP_COMMON_MID_LEFT",
        "GTP_COMMON_MID_RIGHT",
    ]

    zero_offset_directs = dict()

    for direct in directs.values():
        _, direct = min(direct, key=lambda v: v[0])
        from_tile = direct['from_pin'].split('.')[0]
        to_tile = direct['to_pin'].split('.')[0]

        if from_tile not in tile_types:
            continue
        if to_tile not in tile_types:
            continue

        # In general, the Z offset is 0, except for special cases such as the
        # GTP tiles, where there are direct connections within the same (x, y)
        # coordinates, but between different sub_tiles.
        direct['z_offset'] = 0

        if direct['x_offset'] == 0 and direct['y_offset'] == 0:
            if from_tile == to_tile and from_tile in ALLOWED_ZERO_OFFSET_DIRECT:
                if from_tile not in zero_offset_directs:
                    zero_offset_directs[from_tile] = list()

                zero_offset_directs[from_tile].append(direct)

            continue

        add_direct(directlist_xml, direct)

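    # Zero-offset directs connect sub_tiles at the same grid location; the
    # z_offset is recovered from the relative sub_tile index (capacity) of the
    # two ports in the tile's .tile.xml.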
    for tile, directs in zero_offset_directs.items():
        uri = tile_xml_spec.format(tile.lower())
        ports = list()

        with open(uri) as f:
            tile_xml = ET.parse(f, ET.XMLParser())

            tile_root = tile_xml.getroot()

            for capacity, sub_tile in enumerate(tile_root.iter('sub_tile')):
                for in_port in sub_tile.iter('input'):
                    ports.append((in_port.attrib["name"], capacity))
                for out_port in sub_tile.iter('output'):
                    ports.append((out_port.attrib["name"], capacity))
                for clk_port in sub_tile.iter('clock'):
                    ports.append((clk_port.attrib["name"], capacity))

        for direct in directs:
            tile_type, from_port = direct['from_pin'].split('.')
            _, to_port = direct['to_pin'].split('.')

            if tile != tile_type:
                continue

            from_port_capacity = None
            to_port_capacity = None
            for port, capacity in ports:
                if port == from_port:
                    from_port_capacity = capacity
                if port == to_port:
                    to_port_capacity = capacity

            assert from_port_capacity is not None and to_port_capacity is not None, (
                tile, from_port, to_port
            )
            direct["z_offset"] = to_port_capacity - from_port_capacity

            add_direct(directlist_xml, direct)

    arch_xml_str = ET.tostring(arch_xml, pretty_print=True).decode('utf-8')
    args.output_arch.write(arch_xml_str)
    args.output_arch.close()