def main():
    """Build the fabric connection database from a Project X-Ray database.

    Pipeline: create tables, import the tile grid, import nodes and their
    connections, count sites/pips per node, classify nodes, and form tracks.
    Each stage prints a timestamped progress message.  Any pre-existing
    output database is deleted so the build always starts from scratch.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', help='Project X-Ray Database', required=True)
    parser.add_argument(
        '--connection_database', help='Connection database', required=True)

    args = parser.parse_args()

    # Rebuild from scratch: a stale database would cause table-creation
    # conflicts and mixed old/new rows.
    if os.path.exists(args.connection_database):
        os.remove(args.connection_database)

    with DatabaseCache(args.connection_database) as conn:
        create_tables(conn)

        print("{}: About to load database".format(datetime.datetime.now()))
        db = prjxray.db.Database(args.db_root)
        grid = db.grid()
        import_grid(db, grid, conn)
        print("{}: Initial database formed".format(datetime.datetime.now()))
        import_nodes(db, grid, conn)
        print("{}: Connections made".format(datetime.datetime.now()))
        count_sites_and_pips_on_nodes(conn)
        print("{}: Counted sites and pips".format(datetime.datetime.now()))
        classify_nodes(conn)
        print("{}: Nodes classified".format(datetime.datetime.now()))
        form_tracks(conn)
        print("{}: Tracks formed".format(datetime.datetime.now()))

        # DatabaseCache writes the cached DB back to disk on context exit.
        print(
            '{} Flushing database back to file "{}"'.format(
                datetime.datetime.now(), args.connection_database))
def main():
    """Assign tile-edge pin directions for every (tile_type, wire) pair.

    Reads the fabric connectivity database (read-only), collects the sets of
    allowed edge pins for each logical wire from direct connections, channel
    tracks, and edges-to-channels, then greedily minimizes the number of pins
    per wire and writes the result (plus direct connections) to a JSON file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', help='Project X-Ray Database', required=True)
    parser.add_argument(
        '--connection_database',
        help='Database of fabric connectivity',
        required=True)
    parser.add_argument(
        '--pin_assignments',
        help=
        'Output JSON assigning pins to tile types and direction connections',
        required=True)

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)

    edge_assignments = {}

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        c = conn.cursor()

        # edge_assignments: (tile_type, wire) -> list of candidate pin sets.
        # wires_in_tile_types: set of (tile_type, wire) not yet accounted for.
        edge_assignments, wires_in_tile_types = initialize_edge_assignments(
            db, conn)

        direct_connections = set()
        print('{} Processing direct connections.'.format(now()))
        handle_direction_connections(
            conn, direct_connections, edge_assignments)

        # (tile_type, wire) -> NodeClassification for wires whose nodes are
        # not routing channels; used below to cross-check channel membership.
        wires_not_in_channels = {}
        c = conn.cursor()
        print('{} Processing non-channel nodes.'.format(now()))
        for node_pkey, classification in progressbar_utils.progressbar(
                c.execute(
                    """
SELECT pkey, classification FROM node WHERE classification != ?;
""", (NodeClassification.CHANNEL.value, ))):
            reason = NodeClassification(classification)

            for (tile_type, wire) in yield_logical_wire_info_from_node(
                    conn, node_pkey):
                key = (tile_type, wire)

                # Sometimes nodes in particular tile instances are
                # disconnected; disregard classification changes if this is
                # the case.
                if reason != NodeClassification.NULL:
                    if key not in wires_not_in_channels:
                        wires_not_in_channels[key] = reason
                    else:
                        # A logical wire must be classified consistently
                        # across all tile instances.
                        other_reason = wires_not_in_channels[key]
                        assert reason == other_reason, (
                            tile_type, wire, reason, other_reason)

                if key in wires_in_tile_types:
                    wires_in_tile_types.remove(key)

        # List of nodes that are channels.
        channel_nodes = []

        # Map of track_pkey to track model.  This will be used to find
        # channels for pips that come from EDGES_TO_CHANNEL.
        channel_wires_to_tracks = {}

        # Generate track models and verify that wires are either in a channel
        # or not in a channel.
        print('{} Creating models from tracks.'.format(now()))
        for node_pkey, track_pkey in progressbar_utils.progressbar(
                c.execute(
                    """
SELECT pkey, track_pkey FROM node WHERE classification = ?;
""", (NodeClassification.CHANNEL.value, ))):
            # Every CHANNEL-classified node must already have a track.
            assert track_pkey is not None

            tracks_model, _ = get_track_model(conn, track_pkey)
            channel_nodes.append(tracks_model)
            channel_wires_to_tracks[track_pkey] = tracks_model

            for (tile_type, wire) in yield_logical_wire_info_from_node(
                    conn, node_pkey):
                key = (tile_type, wire)
                # Make sure all wires in channels always are in channels.
                assert key not in wires_not_in_channels

                if key in wires_in_tile_types:
                    wires_in_tile_types.remove(key)

        # Make sure all wires appear to have been assigned; dump leftovers
        # before asserting so failures are diagnosable.
        if len(wires_in_tile_types) > 0:
            for tile_type, wire in sorted(wires_in_tile_types):
                print(tile_type, wire)

        assert len(wires_in_tile_types) == 0

        # Verify that all tracks are sane.
        for node in channel_nodes:
            node.verify_tracks()

        null_tile_wires = set()

        # Verify that all nodes that are classified as edges to channels have
        # at least one site, and at least one live connection to a channel.
        #
        # If no live connections from the node are present, this node
        # should've been marked as NULL during channel formation.
        print('{} Handling edges to channels.'.format(now()))
        handle_edges_to_channels(
            conn, null_tile_wires, edge_assignments, channel_wires_to_tracks)

        print('{} Processing edge assignments.'.format(now()))
        final_edge_assignments = {}
        for key, available_pins in progressbar_utils.progressbar(
                edge_assignments.items()):
            (tile_type, wire) = key
            # Drop empty constraint sets; they impose no restriction.
            available_pins = [pins for pins in available_pins if len(pins) > 0]
            if len(available_pins) == 0:
                if (tile_type, wire) not in null_tile_wires:
                    # TODO: Figure out what is going on with these wires.
                    # Appear to be tile-internal connections sometimes?
                    print((tile_type, wire))

                # Unconstrained wire: arbitrarily pin it to the RIGHT edge.
                final_edge_assignments[key] = [tracks.Direction.RIGHT]
                continue

            # Try the intersection first: a single pin satisfying every
            # constraint set.
            pins = set(available_pins[0])
            for p in available_pins[1:]:
                pins &= set(p)

            if len(pins) > 0:
                # NOTE(review): `list(pins)[0]` picks an arbitrary element of
                # a set, so the chosen pin may vary between runs — confirm
                # whether deterministic output matters here.
                final_edge_assignments[key] = [list(pins)[0]]
            else:
                # No single pin works.  More than 2 pins are required; find
                # the minimal number of pins via greedy elimination from the
                # union of all constraint sets.
                pins = set()
                for p in available_pins:
                    pins |= set(p)

                while len(pins) > 2:
                    pins = list(pins)
                    prev_len = len(pins)

                    # Try removing one pin at a time; keep the first removal
                    # that still satisfies every constraint set.
                    for idx in range(len(pins)):
                        pins_subset = list(pins)
                        del pins_subset[idx]

                        pins_subset = set(pins_subset)

                        bad_subset = False
                        for p in available_pins:
                            if len(pins_subset & set(p)) == 0:
                                bad_subset = True
                                break

                        if not bad_subset:
                            pins = list(pins_subset)
                            break

                    # Failed to remove any pins, stop.
                    if len(pins) == prev_len:
                        break

                final_edge_assignments[key] = pins

        # Final consistency check: every constraint set must share at least
        # one pin with the final assignment.
        for key, available_pins in edge_assignments.items():
            (tile_type, wire) = key
            pins = set(final_edge_assignments[key])

            for required_pins in available_pins:
                if len(required_pins) == 0:
                    continue

                assert len(pins & set(required_pins)) > 0, (
                    tile_type, wire, pins, required_pins, available_pins)

        # Re-shape into {tile_type: {wire: [pin names]}} for JSON output.
        pin_directions = {}
        for key, pins in progressbar_utils.progressbar(
                final_edge_assignments.items()):
            (tile_type, wire) = key
            if tile_type not in pin_directions:
                pin_directions[tile_type] = {}

            pin_directions[tile_type][wire] = [pin._name_ for pin in pins]

        with open(args.pin_assignments, 'w') as f:
            json.dump(
                {
                    'pin_directions': pin_directions,
                    'direct_connections':
                    [d._asdict() for d in direct_connections],
                },
                f,
                indent=2)

        print('{} Flushing database back to file "{}"'.format(
            now(), args.connection_database))
def main():
    """Create routing graph nodes and pip edges in the connection database.

    Adds graph nodes for every (tile_type, wire) pin assignment, then walks
    all (optionally ROI-restricted) tiles and inserts one graph_edge row per
    unique directional, non-pseudo pip connection.  Finally builds indices
    and marks track liveness.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument(
        '--connection_database',
        help='Database of fabric connectivity',
        required=True)
    parser.add_argument(
        '--pin_assignments', help='Pin assignments JSON', required=True)
    parser.add_argument(
        '--synth_tiles',
        help=
        'If using an ROI, synthetic tile defintion from prjxray-arch-import')

    args = parser.parse_args()

    # Worker pool used by mark_track_liveness below.
    # NOTE(review): the pool is never close()d/join()ed; relies on process
    # exit for cleanup — confirm this is intentional.
    pool = multiprocessing.Pool(20)

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    with DatabaseCache(args.connection_database) as conn:
        with open(args.pin_assignments) as f:
            pin_assignments = json.load(f)

        # Flatten the pin-direction map into (tile_type, wire) work items.
        tile_wires = []
        for tile_type, wire_map in pin_assignments['pin_directions'].items():
            for wire in wire_map.keys():
                tile_wires.append((tile_type, wire))

        for tile_type, wire in progressbar.progressbar(tile_wires):
            pins = [
                direction_to_enum(pin)
                for pin in pin_assignments['pin_directions'][tile_type][wire]
            ]
            add_graph_nodes_for_pins(conn, tile_type, wire, pins)

        if args.synth_tiles:
            use_roi = True
            with open(args.synth_tiles) as f:
                synth_tiles = json.load(f)

            roi = Roi(
                db=db,
                x1=synth_tiles['info']['GRID_X_MIN'],
                y1=synth_tiles['info']['GRID_Y_MIN'],
                x2=synth_tiles['info']['GRID_X_MAX'],
                y2=synth_tiles['info']['GRID_Y_MAX'],
            )

            print('{} generating routing graph for ROI.'.format(now()))
        else:
            use_roi = False

        output_only_nodes = set()
        input_only_nodes = set()

        # Cached lookup closures bound to this connection.
        find_pip = create_find_pip(conn)
        find_wire = create_find_wire(conn)
        find_connector = create_find_connector(conn)

        print('{} Finding nodes belonging to ROI'.format(now()))
        if use_roi:
            for loc in progressbar.progressbar(grid.tile_locations()):
                gridinfo = grid.gridinfo_at_loc(loc)
                tile_name = grid.tilename_at_loc(loc)

                if tile_name in synth_tiles['tiles']:
                    assert len(synth_tiles['tiles'][tile_name]['pins']) == 1
                    for pin in synth_tiles['tiles'][tile_name]['pins']:
                        _, _, node_pkey = find_wire(
                            tile_name, gridinfo.tile_type, pin['wire'])

                        if pin['port_type'] == 'input':
                            # Node feeds a synth input pin: it may only be
                            # used as a sink.
                            input_only_nodes |= set((node_pkey, ))
                        elif pin['port_type'] == 'output':
                            # Node is driven by a synth output pin: it may
                            # only be used as a source.
                            output_only_nodes |= set((node_pkey, ))
                        else:
                            assert False, pin

        c = conn.cursor()
        c.execute('SELECT pkey FROM switch WHERE name = ?;', ('routing', ))
        switch_pkey = c.fetchone()[0]

        edges = []
        edge_set = set()

        for loc in progressbar.progressbar(grid.tile_locations()):
            gridinfo = grid.gridinfo_at_loc(loc)
            tile_name = grid.tilename_at_loc(loc)

            # Not a synth node, check if in ROI.
            if use_roi and not roi.tile_in_roi(loc):
                continue

            tile_type = db.get_tile_type(gridinfo.tile_type)

            for pip in tile_type.get_pips():
                if pip.is_pseudo:
                    continue

                if not pip.is_directional:
                    # TODO: Handle bidirectional pips?
                    continue

                connections = make_connection(
                    conn=conn,
                    input_only_nodes=input_only_nodes,
                    output_only_nodes=output_only_nodes,
                    find_pip=find_pip,
                    find_wire=find_wire,
                    find_connector=find_connector,
                    tile_name=tile_name,
                    loc=loc,
                    tile_type=gridinfo.tile_type,
                    pip=pip,
                    switch_pkey=switch_pkey)

                if connections:
                    # TODO: Skip duplicate connections, until they have
                    # unique switches
                    for connection in connections:
                        # Dedupe on (src, dest, switch) — the first three
                        # elements of the edge tuple.
                        key = tuple(connection[0:3])
                        if key in edge_set:
                            continue

                        edge_set.add(key)
                        edges.append(connection)

        print('{} Created {} edges, inserting'.format(now(), len(edges)))

        # Bulk insert inside one exclusive transaction for speed.
        c.execute("""BEGIN EXCLUSIVE TRANSACTION;""")

        for edge in progressbar.progressbar(edges):
            c.execute(
                """
INSERT INTO graph_edge(
    src_graph_node_pkey, dest_graph_node_pkey, switch_pkey,
    tile_pkey, pip_in_tile_pkey) VALUES (?, ?, ?, ?, ?)""", edge)

        c.execute("""COMMIT TRANSACTION;""")

        print('{} Inserted edges'.format(now()))

        # Index after the bulk insert — much faster than maintaining the
        # index during insertion.
        c.execute(
            """CREATE INDEX src_node_index ON graph_edge(src_graph_node_pkey);"""
        )
        c.execute(
            """CREATE INDEX dest_node_index ON graph_edge(dest_graph_node_pkey);"""
        )
        c.connection.commit()

        print('{} Indices created, marking track liveness'.format(now()))

        mark_track_liveness(conn, pool, input_only_nodes, output_only_nodes)

        print('{} Flushing database back to file "{}"'.format(
            now(), args.connection_database))
def main():
    """Generate a VPR arch.xml from tile definitions and the connection DB.

    Builds the <architecture> tree: xi:include'd models/tiles/pb_types per
    tile type, a fixed layout derived from the tile grid, switch and segment
    lists from the database, device sizing constants, and direct connections
    from the pin-assignments JSON.  Writes the pretty-printed XML to
    --output-arch.

    Fix: the --tile-types help text claimed "Semi-colon seperated" while the
    code splits on ',' — help now matches the actual comma-separated format.
    """
    mydir = os.path.dirname(__file__)
    prjxray_db = os.path.abspath(
        os.path.join(mydir, "..", "..", "third_party", "prjxray-db"))

    db_types = prjxray.db.get_available_databases(prjxray_db)

    parser = argparse.ArgumentParser(description="Generate arch.xml")
    parser.add_argument(
        '--part',
        choices=[os.path.basename(db_type) for db_type in db_types],
        help="""Project X-Ray database to use.""")
    parser.add_argument(
        '--output-arch',
        nargs='?',
        type=argparse.FileType('w'),
        help="""File to output arch.""")
    # Help text fixed: values are split on ',' below, not ';'.
    parser.add_argument('--tile-types', help="Comma-separated tile types.")
    parser.add_argument(
        '--pin_assignments', required=True, type=argparse.FileType('r'))
    parser.add_argument('--use_roi', required=False)
    parser.add_argument('--device', required=True)
    parser.add_argument('--synth_tiles', required=False)
    parser.add_argument('--connection_database', required=True)
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )

    args = parser.parse_args()

    tile_types = args.tile_types.split(',')

    # Relative include paths for the per-tile XML fragments.
    tile_model = "../../tiles/{0}/{0}.model.xml"
    tile_pbtype = "../../tiles/{0}/{0}.pb_type.xml"
    tile_tile = "../../tiles/{0}/{0}.tile.xml"

    xi_url = "http://www.w3.org/2001/XInclude"
    ET.register_namespace('xi', xi_url)
    xi_include = "{%s}include" % xi_url

    arch_xml = ET.Element(
        'architecture',
        {},
        nsmap={'xi': xi_url},
    )

    model_xml = ET.SubElement(arch_xml, 'models')
    for tile_type in tile_types:
        ET.SubElement(
            model_xml, xi_include, {
                'href': tile_model.format(tile_type.lower()),
                'xpointer': "xpointer(models/child::node())",
            })

    tiles_xml = ET.SubElement(arch_xml, 'tiles')
    for tile_type in tile_types:
        ET.SubElement(tiles_xml, xi_include, {
            'href': tile_tile.format(tile_type.lower()),
        })

    complexblocklist_xml = ET.SubElement(arch_xml, 'complexblocklist')
    for tile_type in tile_types:
        ET.SubElement(
            complexblocklist_xml, xi_include, {
                'href': tile_pbtype.format(tile_type.lower()),
            })

    layout_xml = ET.SubElement(arch_xml, 'layout')

    db = prjxray.db.Database(os.path.join(prjxray_db, args.part))
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}
    synth_loc_map = {}
    synth_tile_map = {}
    roi = None
    if args.use_roi:
        with open(args.use_roi) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        synth_tile_map = add_synthetic_tiles(
            model_xml, complexblocklist_xml, tiles_xml, need_io=True)

        for _, tile_info in synth_tiles['tiles'].items():
            assert tuple(tile_info['loc']) not in synth_loc_map
            # Each synthetic IO tile carries exactly one pin.
            assert len(tile_info['pins']) == 1

            vpr_tile_type = synth_tile_map[tile_info['pins'][0]['port_type']]

            synth_loc_map[tuple(tile_info['loc'])] = vpr_tile_type

    elif args.graph_limit:
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        c = conn.cursor()

        # Without an ROI, synthetic constant (VCC/GND) tiles come from the
        # connection database instead of the synth_tiles JSON.
        if 'GND' not in synth_tile_map:
            synth_tile_map, synth_loc_map = insert_constant_tiles(
                conn, model_xml, complexblocklist_xml, tiles_xml)

        # Find the grid extent (+2 leaves a border around the fabric).
        y_max = 0
        x_max = 0
        for grid_x, grid_y in c.execute("SELECT grid_x, grid_y FROM tile"):
            x_max = max(grid_x + 2, x_max)
            y_max = max(grid_y + 2, y_max)

        name = '{}-test'.format(args.device)
        fixed_layout_xml = ET.SubElement(
            layout_xml, 'fixed_layout', {
                'name': name,
                'height': str(y_max),
                'width': str(x_max),
            })

        for vpr_tile_type, grid_x, grid_y, metadata_function in get_tiles(
                conn=conn,
                g=g,
                roi=roi,
                synth_loc_map=synth_loc_map,
                synth_tile_map=synth_tile_map,
                tile_types=tile_types,
        ):
            single_xml = ET.SubElement(
                fixed_layout_xml, 'single', {
                    'priority': '1',
                    'type': vpr_tile_type,
                    'x': str(grid_x),
                    'y': str(grid_y),
                })
            metadata_function(single_xml)

        switchlist_xml = ET.SubElement(arch_xml, 'switchlist')

        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in c.execute("""
SELECT name, internal_capacitance, drive_resistance, intrinsic_delay,
    switch_type
FROM switch;"""):
            attrib = {
                'type': switch_type,
                'name': name,
                "R": str(drive_resistance),
                "Cin": str(0),
                "Cout": str(0),
                "Tdel": str(intrinsic_delay),
            }

            if internal_capacitance != 0:
                attrib["Cinternal"] = str(internal_capacitance)

            # Deliberately disabled: mux sizing attributes are not emitted
            # for database switches.  Kept for reference.
            if False:
                attrib["mux_trans_size"] = str(0)
                attrib["buf_size"] = str(0)

            ET.SubElement(switchlist_xml, 'switch', attrib)

        segmentlist_xml = ET.SubElement(arch_xml, 'segmentlist')

        # VPR requires a segment, so add one.
        dummy_xml = ET.SubElement(
            segmentlist_xml, 'segment', {
                'name': 'dummy',
                'length': '2',
                'freq': '1.0',
                'type': 'bidir',
                'Rmetal': '0',
                'Cmetal': '0',
            })
        ET.SubElement(dummy_xml, 'wire_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'opin_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'sb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(3))
        ET.SubElement(dummy_xml, 'cb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(2))

        for (name, length) in c.execute("SELECT name, length FROM segment"):
            if length is None:
                length = 1

            segment_xml = ET.SubElement(
                segmentlist_xml, 'segment', {
                    'name': name,
                    'length': str(length),
                    'freq': '1.0',
                    'type': 'bidir',
                    'Rmetal': '0',
                    'Cmetal': '0',
                })
            ET.SubElement(segment_xml, 'wire_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'opin_switch', {
                'name': 'buffer',
            })
            # Switch-block pattern needs length+1 entries; connection-block
            # pattern needs length entries.
            ET.SubElement(segment_xml, 'sb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length + 1))
            ET.SubElement(segment_xml, 'cb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length))

        ET.SubElement(
            switchlist_xml, 'switch', {
                'type': 'mux',
                'name': 'buffer',
                "R": "551",
                "Cin": ".77e-15",
                "Cout": "4e-15",
                # TODO: This value should be the "typical" pip switch delay
                # from the timing model.  This value is the dominate term in
                # the inter-cluster delay estimate.
                "Tdel": "0.178e-9",
                "mux_trans_size": "2.630740",
                "buf_size": "27.645901"
            })

        device_xml = ET.SubElement(arch_xml, 'device')

        ET.SubElement(
            device_xml, 'sizing', {
                "R_minW_nmos": "6065.520020",
                "R_minW_pmos": "18138.500000",
            })
        ET.SubElement(device_xml, 'area', {
            "grid_logic_tile_area": "14813.392",
        })
        ET.SubElement(
            device_xml, 'connection_block', {
                "input_switch_name": "buffer",
            })
        ET.SubElement(device_xml, 'switch_block', {
            "type": "wilton",
            "fs": "3",
        })
        chan_width_distr_xml = ET.SubElement(device_xml, 'chan_width_distr')

        ET.SubElement(chan_width_distr_xml, 'x', {
            'distr': 'uniform',
            'peak': '1.0',
        })
        ET.SubElement(chan_width_distr_xml, 'y', {
            'distr': 'uniform',
            'peak': '1.0',
        })

        directlist_xml = ET.SubElement(arch_xml, 'directlist')

        pin_assignments = json.load(args.pin_assignments)

        # Choose smallest distance for block to block connections with
        # multiple direct_connections.  VPR cannot handle multiple block to
        # block connections.
        directs = {}
        for direct in pin_assignments['direct_connections']:
            key = (direct['from_pin'], direct['to_pin'])

            if key not in directs:
                directs[key] = []

            directs[key].append(
                (abs(direct['x_offset']) + abs(direct['y_offset']), direct))

        for direct in directs.values():
            _, direct = min(direct, key=lambda v: v[0])

            # Skip connections touching tile types outside this arch.
            if direct['from_pin'].split('.')[0] not in tile_types:
                continue
            if direct['to_pin'].split('.')[0] not in tile_types:
                continue

            # Zero-offset directs are intra-tile; VPR directs require an
            # offset.
            if direct['x_offset'] == 0 and direct['y_offset'] == 0:
                continue

            ET.SubElement(
                directlist_xml, 'direct', {
                    'name': '{}_to_{}_dx_{}_dy_{}'.format(
                        direct['from_pin'], direct['to_pin'],
                        direct['x_offset'], direct['y_offset']),
                    'from_pin': add_vpr_tile_prefix(direct['from_pin']),
                    'to_pin': add_vpr_tile_prefix(direct['to_pin']),
                    'x_offset': str(direct['x_offset']),
                    'y_offset': str(direct['y_offset']),
                    'z_offset': '0',
                    'switch_name': direct['switch_name'],
                })

    # pretty_print is an lxml extension (ET here is lxml.etree).
    arch_xml_str = ET.tostring(arch_xml, pretty_print=True).decode('utf-8')
    args.output_arch.write(arch_xml_str)
    args.output_arch.close()
def main():
    """Import the connection database into a VPR rr_graph XML file.

    Reads an input rr_graph, re-adds switches that were dropped by VPR,
    maps connection-database graph nodes onto rr nodes, creates track rr
    nodes and edges (optionally restricted to an ROI), serializes the
    result, and pickles the graph_node_pkey -> rr inode map.

    Fixes:
    - `grid` was passed to add_synthetic_edges() but never defined in this
      function (sibling scripts bind it via db.grid()); it is now bound
      right after the database is opened, so the ROI path no longer raises
      NameError.
    - Typo in --synth_tiles help text ("defintion").
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument(
        '--read_rr_graph', required=True, help='Input rr_graph file')
    parser.add_argument(
        '--write_rr_graph', required=True, help='Output rr_graph file')
    parser.add_argument(
        '--write_rr_node_map',
        required=True,
        help='Output map of graph_node_pkey to rr inode file')
    parser.add_argument(
        '--connection_database',
        help='Database of fabric connectivity',
        required=True)
    parser.add_argument(
        '--synth_tiles',
        help=
        'If using an ROI, synthetic tile definition from prjxray-arch-import')
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    # Fix: bind the grid; add_synthetic_edges() below requires it.
    grid = db.grid()

    synth_tiles = None

    if args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )

        print('{} generating routing graph for ROI.'.format(now()))
    elif args.graph_limit:
        use_roi = True
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )
    else:
        use_roi = False
        roi = None
        synth_tiles = None

    # Convert input rr graph into graph2.Graph object.
    input_rr_graph = read_xml_file(args.read_rr_graph)

    if synth_tiles is None:
        synth_tiles = find_constant_network(input_rr_graph)

    xml_graph = xml_graph2.Graph(
        input_rr_graph,
        progressbar=progressbar_utils.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = xml_graph.graph

    tool_version = input_rr_graph.getroot().attrib['tool_version']
    tool_comment = input_rr_graph.getroot().attrib['tool_comment']

    with DatabaseCache(args.connection_database, True) as conn:
        cur = conn.cursor()
        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in cur.execute("""
SELECT name, internal_capacitance, drive_resistance, intrinsic_delay,
    switch_type
FROM switch;"""):
            # Add back missing switchs, which were unused in arch xml, and
            # so were not emitted in rrgraph XML.
            #
            # TODO: This can be removed once
            # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
            # is fixed.
            try:
                graph.get_switch_id(name)
                continue
            except KeyError:
                xml_graph.add_switch(
                    graph2.Switch(
                        id=None,
                        name=name,
                        type=graph2.SwitchType[switch_type.upper()],
                        timing=graph2.SwitchTiming(
                            r=drive_resistance,
                            c_in=0.0,
                            c_out=0.0,
                            c_internal=internal_capacitance,
                            t_del=intrinsic_delay,
                        ),
                        sizing=graph2.SwitchSizing(
                            mux_trans_size=0,
                            buf_size=0,
                        ),
                    ))

        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        print('{} Creating connection box list'.format(now()))
        connection_box_map = create_connection_boxes(conn, graph)

        # Match site pins rr nodes with graph_node's in the
        # connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping, connection_box_map)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(
            conn, graph, node_mapping, use_roi, roi, synth_tiles, segment_id)

        # Set of (src, sink, switch_id) tuples that pip edges have been sent
        # to VPR.  VPR cannot handle duplicate paths with the same switch id.
        if use_roi:
            print('{} Adding synthetic edges'.format(now()))
            add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)
        x_dim, y_dim = phy_grid_dims(conn)
        connection_box_obj = graph.create_connection_box_object(
            x_dim=x_dim, y_dim=y_dim)

        print('{} Serializing to disk.'.format(now()))
        with xml_graph:
            xml_graph.start_serialize_to_xml(
                tool_version=tool_version,
                tool_comment=tool_comment,
                channels_obj=channels_obj,
                connection_box_obj=connection_box_obj,
            )

            xml_graph.serialize_nodes(yield_nodes(xml_graph.graph.nodes))
            xml_graph.serialize_edges(
                import_graph_edges(conn, graph, node_mapping))

    print('{} Writing node map.'.format(now()))
    with open(args.write_rr_node_map, 'wb') as f:
        pickle.dump(node_mapping, f)
    print('{} Done writing node map.'.format(now()))
def main():
    """Generate synth_tiles.json for either an ROI or overlay design.

    Maps the design's boundary ports onto fabric tiles/wires, emitting one
    synthetic IOPAD tile per used fabric tile.  For ROI designs, two unused
    VBRK tiles are additionally claimed as synthetic VCC and GND sources.
    """
    parser = argparse.ArgumentParser(description="Generate synth_tiles.json")
    parser.add_argument('--db_root', required=True)
    parser.add_argument('--part', required=True)
    parser.add_argument('--roi', required=False)
    parser.add_argument('--overlay', required=False)
    parser.add_argument(
        '--connection_database', help='Connection database', required=True)
    parser.add_argument('--synth_tiles', required=True)

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root, args.part)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}

    # Map of Roi -> its source JSON; one entry for --roi, one per region for
    # --overlay.
    rois = dict()
    if args.roi:
        with open(args.roi) as f:
            j = json.load(f)

        synth_tiles['info'] = j['info']
        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        rois[roi] = j
    elif args.overlay:
        with open(args.overlay) as f:
            j = json.load(f)

        synth_tiles['info'] = list()
        for r in j:
            roi = Roi(
                db=db,
                x1=r['info']['GRID_X_MIN'],
                y1=r['info']['GRID_Y_MIN'],
                x2=r['info']['GRID_X_MAX'],
                y2=r['info']['GRID_Y_MAX'],
            )
            rois[roi] = r
    else:
        assert False, 'Synth tiles must be for roi or overlay'

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        tile_in_use = set()
        for roi, j in rois.items():
            if args.overlay:
                synth_tiles['info'].append(j['info'])

            # NOTE(review): both counters reset per region, so overlapping
            # regions could reuse SYN-IOPAD names across regions — confirm
            # region tile sets are disjoint.
            tile_pin_count = dict()
            num_synth_tiles = 0
            for port in sorted(j['ports'],
                               key=lambda i: (i['type'], i['name'])):
                # Port direction is inverted between ROI and overlay flows:
                # a design 'out' is a fabric 'input' for an ROI, and vice
                # versa for an overlay.
                if port['type'] == 'out':
                    port_type = 'input' if not args.overlay else 'output'
                    is_clock = False
                elif port['type'] == 'in':
                    is_clock = False
                    port_type = 'output' if not args.overlay else 'input'
                elif port['type'] == 'clk':
                    port_type = 'output' if not args.overlay else 'input'
                    is_clock = True
                else:
                    assert False, port

                if 'wire' not in port:
                    tile, wire = find_wire_from_node(
                        conn, g, roi, port['node'],
                        overlay=bool(args.overlay))
                else:
                    tile, wire = port['wire'].split('/')

                tile_in_use.add(tile)

                # Make sure connecting wire is not in ROI!
                loc = g.loc_of_tilename(tile)

                # XOR: for ROI flows the tile must be outside; for overlay
                # flows it must be inside.
                if bool(args.overlay) ^ roi.tile_in_roi(loc):
                    # Or if in the ROI, make sure it has no sites.
                    gridinfo = g.gridinfo_at_tilename(tile)
                    assert len(
                        db.get_tile_type(gridinfo.tile_type).get_sites()
                    ) == 0, "{}/{}".format(tile, wire)

                vpr_loc = map_tile_to_vpr_coord(conn, tile)

                if tile not in synth_tiles['tiles']:
                    tile_name = 'SYN-IOPAD-{}'.format(num_synth_tiles)
                    synth_tiles['tiles'][tile] = {
                        'pins': [],
                        'loc': vpr_loc,
                        'tile_name': tile_name,
                    }
                    num_synth_tiles += 1
                    tile_pin_count[tile] = 0

                synth_tiles['tiles'][tile]['pins'].append({
                    'roi_name':
                    port['name'].replace('[', '_').replace(']', '_'),
                    'wire':
                    wire,
                    'pad':
                    port['pin'],
                    'port_type':
                    port_type,
                    'is_clock':
                    is_clock,
                    'z_loc':
                    tile_pin_count[tile],
                })
                tile_pin_count[tile] += 1

        if not args.overlay:
            # Find two VBRK's in the corner of the fabric to use as the
            # synthetic VCC/GND source.
            vbrk_loc = None
            vbrk_tile = None
            vbrk2_loc = None
            vbrk2_tile = None
            for tile in g.tiles():
                if tile in tile_in_use:
                    continue

                loc = g.loc_of_tilename(tile)
                if not roi.tile_in_roi(loc):
                    continue

                gridinfo = g.gridinfo_at_tilename(tile)
                if 'VBRK' not in gridinfo.tile_type:
                    continue

                # VBRK tiles should never carry sites.
                assert len(
                    db.get_tile_type(gridinfo.tile_type).get_sites()
                ) == 0, tile

                # Track the best (most corner-ward) candidate and runner-up.
                if vbrk_loc is None:
                    vbrk2_loc = vbrk_loc
                    vbrk2_tile = vbrk_tile
                    vbrk_loc = loc
                    vbrk_tile = tile
                else:
                    if (loc.grid_x < vbrk_loc.grid_x and
                            loc.grid_y < vbrk_loc.grid_y) or vbrk2_loc is None:
                        vbrk2_loc = vbrk_loc
                        vbrk2_tile = vbrk_tile
                        vbrk_loc = loc
                        vbrk_tile = tile

            assert vbrk_loc is not None
            assert vbrk_tile is not None
            assert vbrk_tile not in synth_tiles['tiles']

            vbrk_vpr_loc = map_tile_to_vpr_coord(conn, vbrk_tile)
            synth_tiles['tiles'][vbrk_tile] = {
                'loc':
                vbrk_vpr_loc,
                'pins': [
                    {
                        'wire': 'VCC',
                        'pad': 'VCC',
                        'port_type': 'VCC',
                        'is_clock': False,
                        'z_loc': '0',
                    },
                ],
            }

            assert vbrk2_loc is not None
            assert vbrk2_tile is not None
            assert vbrk2_tile not in synth_tiles['tiles']

            vbrk2_vpr_loc = map_tile_to_vpr_coord(conn, vbrk2_tile)
            synth_tiles['tiles'][vbrk2_tile] = {
                'loc':
                vbrk2_vpr_loc,
                'pins': [
                    {
                        'wire': 'GND',
                        'pad': 'GND',
                        'port_type': 'GND',
                        'is_clock': False,
                        'z_loc': '0',
                    },
                ],
            }

    with open(args.synth_tiles, 'w') as f:
        json.dump(synth_tiles, f, indent=2)
def main():
    """Generate synth_tiles.json for an ROI design (older, ROI-only flow).

    Ports are classified by name prefix (dout/din/clk), mapped onto fabric
    tiles/wires, and two unused VBRK tiles inside the ROI are claimed as
    synthetic VCC and GND sources.
    """
    parser = argparse.ArgumentParser(description="Generate synth_tiles.json")
    parser.add_argument('--db_root', required=True)
    parser.add_argument('--part', required=True)
    parser.add_argument('--roi', required=True)
    parser.add_argument(
        '--connection_database', help='Connection database', required=True)
    parser.add_argument('--synth_tiles', required=True)

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root, args.part)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}

    with open(args.roi) as f:
        j = json.load(f)

    roi = Roi(
        db=db,
        x1=j['info']['GRID_X_MIN'],
        y1=j['info']['GRID_Y_MIN'],
        x2=j['info']['GRID_X_MAX'],
        y2=j['info']['GRID_Y_MAX'],
    )

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        synth_tiles['info'] = j['info']
        vbrk_in_use = set()
        for port in j['ports']:
            # Direction is inverted relative to the design: a design output
            # (dout) is an input pad on the synthetic tile, and vice versa.
            if port['name'].startswith('dout['):
                port_type = 'input'
                is_clock = False
            elif port['name'].startswith('din['):
                is_clock = False
                port_type = 'output'
            elif port['name'].startswith('clk'):
                port_type = 'output'
                is_clock = True
            else:
                assert False, port

            tile, wire = port['wire'].split('/')

            vbrk_in_use.add(tile)

            # Make sure connecting wire is not in ROI!
            loc = g.loc_of_tilename(tile)
            if roi.tile_in_roi(loc):
                # Or if in the ROI, make sure it has no sites.
                gridinfo = g.gridinfo_at_tilename(tile)
                assert len(
                    db.get_tile_type(gridinfo.tile_type).get_sites()
                ) == 0, tile

            vpr_loc = map_tile_to_vpr_coord(conn, tile)

            if tile not in synth_tiles['tiles']:
                synth_tiles['tiles'][tile] = {
                    'pins': [],
                    'loc': vpr_loc,
                }

            synth_tiles['tiles'][tile]['pins'].append({
                'roi_name':
                port['name'].replace('[', '_').replace(']', '_'),
                'wire':
                wire,
                'pad':
                port['pin'],
                'port_type':
                port_type,
                'is_clock':
                is_clock,
            })

        # Find two VBRK's in the corner of the fabric to use as the
        # synthetic VCC/GND source.
        vbrk_loc = None
        vbrk_tile = None
        vbrk2_loc = None
        vbrk2_tile = None
        for tile in g.tiles():
            if tile in vbrk_in_use:
                continue

            loc = g.loc_of_tilename(tile)
            if not roi.tile_in_roi(loc):
                continue

            gridinfo = g.gridinfo_at_tilename(tile)
            if 'VBRK' not in gridinfo.tile_type:
                continue

            # VBRK tiles should never carry sites.
            assert len(
                db.get_tile_type(gridinfo.tile_type).get_sites()
            ) == 0, tile

            # Track the best (most corner-ward) candidate and runner-up.
            if vbrk_loc is None:
                vbrk2_loc = vbrk_loc
                vbrk2_tile = vbrk_tile
                vbrk_loc = loc
                vbrk_tile = tile
            else:
                if (loc.grid_x < vbrk_loc.grid_x and
                        loc.grid_y < vbrk_loc.grid_y) or vbrk2_loc is None:
                    vbrk2_loc = vbrk_loc
                    vbrk2_tile = vbrk_tile
                    vbrk_loc = loc
                    vbrk_tile = tile

        assert vbrk_loc is not None
        assert vbrk_tile is not None
        assert vbrk_tile not in synth_tiles['tiles']

        vbrk_vpr_loc = map_tile_to_vpr_coord(conn, vbrk_tile)
        synth_tiles['tiles'][vbrk_tile] = {
            'loc':
            vbrk_vpr_loc,
            'pins': [
                {
                    'wire': 'VCC',
                    'pad': 'VCC',
                    'port_type': 'VCC',
                    'is_clock': False,
                },
            ],
        }

        assert vbrk2_loc is not None
        assert vbrk2_tile is not None
        assert vbrk2_tile not in synth_tiles['tiles']

        vbrk2_vpr_loc = map_tile_to_vpr_coord(conn, vbrk2_tile)
        synth_tiles['tiles'][vbrk2_tile] = {
            'loc':
            vbrk2_vpr_loc,
            'pins': [
                {
                    'wire': 'GND',
                    'pad': 'GND',
                    'port_type': 'GND',
                    'is_clock': False,
                },
            ],
        }

    with open(args.synth_tiles, 'w') as f:
        json.dump(synth_tiles, f, indent=2)
def main():
    """Generate a VPR arch.xml for a prjxray-based part.

    Stitches together per-tile model/pb_type/tile XML fragments via XInclude,
    emits the fixed layout grid from the connection database, and writes
    switch, segment, device and direct-connection sections.

    Fixes vs. previous revision:
      * --tile-types / --pb_types help text said "Semi-colon seperated" but
        the values are split on ',' — help now matches the actual behavior.
      * Removed the unreachable `if False:` dead-code branch in the switch
        emission loop (it never executed, so output is unchanged).
      * Renamed the inner loop variable that shadowed the `directs` dict.
    """
    parser = argparse.ArgumentParser(description="Generate arch.xml")
    parser.add_argument(
        '--db_root', required=True, help="Project X-Ray database to use."
    )
    parser.add_argument('--part', required=True, help="FPGA part")
    parser.add_argument(
        '--output-arch',
        nargs='?',
        type=argparse.FileType('w'),
        help="""File to output arch."""
    )
    parser.add_argument(
        '--tile-types', required=True, help="Comma separated tile types."
    )
    parser.add_argument(
        '--pb_types', required=True, help="Comma separated pb_types types."
    )
    parser.add_argument(
        '--pin_assignments', required=True, type=argparse.FileType('r')
    )
    parser.add_argument('--use_roi', required=False)
    parser.add_argument('--use_overlay', required=False)
    parser.add_argument('--device', required=True)
    parser.add_argument('--synth_tiles', required=False)
    parser.add_argument('--connection_database', required=True)
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )

    args = parser.parse_args()

    tile_types = args.tile_types.split(',')
    pb_types = args.pb_types.split(',')

    # Relative URIs of the per-tile XML fragments pulled in via XInclude.
    model_xml_spec = "../../tiles/{0}/{0}.model.xml"
    pbtype_xml_spec = "../../tiles/{0}/{0}.pb_type.xml"
    tile_xml_spec = "../../tiles/{0}/{0}.tile.xml"

    xi_url = "http://www.w3.org/2001/XInclude"
    ET.register_namespace('xi', xi_url)
    xi_include = "{%s}include" % xi_url

    arch_xml = ET.Element(
        'architecture',
        {},
        nsmap={'xi': xi_url},
    )

    model_xml = ET.SubElement(arch_xml, 'models')
    for pb_type in pb_types:
        ET.SubElement(
            model_xml, xi_include, {
                'href': model_xml_spec.format(pb_type.lower()),
                'xpointer': "xpointer(models/child::node())",
            }
        )

    tiles_xml = ET.SubElement(arch_xml, 'tiles')
    tile_capacity = {}
    for tile_type in tile_types:
        uri = tile_xml_spec.format(tile_type.lower())
        ET.SubElement(tiles_xml, xi_include, {
            'href': uri,
        })

        # Pre-parse the tile fragment to learn its total sub_tile capacity;
        # needed later to lay out the fixed grid.
        with open(uri) as f:
            tile_xml = ET.parse(f, ET.XMLParser())

            tile_root = tile_xml.getroot()
            assert tile_root.tag == 'tile'
            tile_capacity[tile_type] = 0
            for sub_tile in tile_root.iter('sub_tile'):
                if 'capacity' in sub_tile.attrib:
                    tile_capacity[tile_type] += int(
                        sub_tile.attrib['capacity']
                    )
                else:
                    tile_capacity[tile_type] += 1

    complexblocklist_xml = ET.SubElement(arch_xml, 'complexblocklist')
    for pb_type in pb_types:
        ET.SubElement(
            complexblocklist_xml, xi_include, {
                'href': pbtype_xml_spec.format(pb_type.lower()),
            }
        )

    layout_xml = ET.SubElement(arch_xml, 'layout')
    db = prjxray.db.Database(args.db_root, args.part)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}
    # (grid_x, grid_y) -> synthetic VPR tile type name.
    synth_loc_map = {}
    # 'VCC'/'GND' -> synthetic constant tile type name.
    synth_tile_map = {}
    roi = None
    if args.use_roi:
        with open(args.use_roi) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        # First pass: IO synth tiles (constant tiles handled separately).
        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map

            tile_name = tile_info['tile_name']
            # Pin directions are from the ROI's point of view, so an
            # 'output' port is an input of the synth tile and vice versa.
            num_input = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'output',
                        tile_info['pins']
                    )
                )
            )
            num_output = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'input',
                        tile_info['pins']
                    )
                )
            )

            create_synth_io_tile(
                complexblocklist_xml, tiles_xml, tile_name, num_input,
                num_output
            )

            synth_loc_map[tuple(tile_info['loc'])] = tile_name

        create_synth_pb_types(model_xml, complexblocklist_xml)

        synth_tile_map = add_constant_synthetic_tiles(
            model_xml, complexblocklist_xml, tiles_xml
        )

        # Second pass: place the constant (VCC/GND) synth tiles.
        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] not in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map

            vpr_tile_type = synth_tile_map[tile_info['pins'][0]['port_type']]

            synth_loc_map[tuple(tile_info['loc'])] = vpr_tile_type

    elif args.graph_limit:
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )
    elif args.use_overlay:
        with open(args.use_overlay) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        region_dict = dict()
        for r in synth_tiles['info']:
            bounds = (
                r['GRID_X_MIN'], r['GRID_X_MAX'], r['GRID_Y_MIN'],
                r['GRID_Y_MAX']
            )
            region_dict[r['name']] = bounds

        roi = Overlay(region_dict=region_dict)

        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map

            tile_name = tile_info['tile_name']
            num_input = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'output',
                        tile_info['pins']
                    )
                )
            )
            num_output = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'input',
                        tile_info['pins']
                    )
                )
            )

            create_synth_io_tile(
                complexblocklist_xml, tiles_xml, tile_name, num_input,
                num_output
            )

            synth_loc_map[tuple(tile_info['loc'])] = tile_name

        create_synth_pb_types(model_xml, complexblocklist_xml, True)

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        c = conn.cursor()

        if 'GND' not in synth_tile_map:
            synth_tile_map, synth_loc_map_const = insert_constant_tiles(
                conn, model_xml, complexblocklist_xml, tiles_xml
            )

            synth_loc_map.update(synth_loc_map_const)

        # Find the grid extent.
        y_max = 0
        x_max = 0
        for grid_x, grid_y in c.execute("SELECT grid_x, grid_y FROM tile"):
            x_max = max(grid_x + 2, x_max)
            y_max = max(grid_y + 2, y_max)

        name = '{}-test'.format(args.device)
        fixed_layout_xml = ET.SubElement(
            layout_xml, 'fixed_layout', {
                'name': name,
                'height': str(y_max),
                'width': str(x_max),
            }
        )

        for vpr_tile_type, grid_x, grid_y, metadata_function in get_tiles(
                conn=conn,
                g=g,
                roi=roi,
                synth_loc_map=synth_loc_map,
                synth_tile_map=synth_tile_map,
                tile_types=tile_types,
                tile_capacity=tile_capacity,
        ):
            single_xml = ET.SubElement(
                fixed_layout_xml, 'single', {
                    'priority': '1',
                    'type': vpr_tile_type,
                    'x': str(grid_x),
                    'y': str(grid_y),
                }
            )
            metadata_function(single_xml)

        switchlist_xml = ET.SubElement(arch_xml, 'switchlist')

        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in c.execute("""
SELECT
    name,
    internal_capacitance,
    drive_resistance,
    intrinsic_delay,
    switch_type
FROM
    switch
WHERE
    name != "__vpr_delayless_switch__";"""):

            attrib = {
                'type': switch_type,
                'name': name,
                "R": str(drive_resistance),
                "Cin": str(0),
                "Cout": str(0),
                "Tdel": str(intrinsic_delay),
            }

            if internal_capacitance != 0:
                attrib["Cinternal"] = str(internal_capacitance)

            # NOTE(review): a dead `if False:` branch that set
            # mux_trans_size/buf_size to 0 was removed here; it never ran.
            ET.SubElement(switchlist_xml, 'switch', attrib)

        segmentlist_xml = ET.SubElement(arch_xml, 'segmentlist')

        # VPR requires a segment, so add one.
        dummy_xml = ET.SubElement(
            segmentlist_xml, 'segment', {
                'name': 'dummy',
                'length': '2',
                'freq': '1.0',
                'type': 'bidir',
                'Rmetal': '0',
                'Cmetal': '0',
            }
        )
        ET.SubElement(dummy_xml, 'wire_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'opin_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'sb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(3))
        ET.SubElement(dummy_xml, 'cb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(2))

        for (name, length) in c.execute("SELECT name, length FROM segment"):
            if length is None:
                length = 1

            segment_xml = ET.SubElement(
                segmentlist_xml, 'segment', {
                    'name': name,
                    'length': str(length),
                    'freq': '1.0',
                    'type': 'bidir',
                    'Rmetal': '0',
                    'Cmetal': '0',
                }
            )
            ET.SubElement(segment_xml, 'wire_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'opin_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'sb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length + 1))
            ET.SubElement(segment_xml, 'cb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length))

        ET.SubElement(
            switchlist_xml, 'switch', {
                'type': 'mux',
                'name': 'buffer',
                "R": "551",
                "Cin": ".77e-15",
                "Cout": "4e-15",
                # TODO: This value should be the "typical" pip switch delay
                # from This value is the dominate term in the inter-cluster
                # delay estimate.
                "Tdel": "0.178e-9",
                "mux_trans_size": "2.630740",
                "buf_size": "27.645901"
            }
        )

        device_xml = ET.SubElement(arch_xml, 'device')

        ET.SubElement(
            device_xml, 'sizing', {
                "R_minW_nmos": "6065.520020",
                "R_minW_pmos": "18138.500000",
            }
        )
        ET.SubElement(device_xml, 'area', {
            "grid_logic_tile_area": "14813.392",
        })
        ET.SubElement(
            device_xml, 'connection_block', {
                "input_switch_name": "buffer",
            }
        )
        ET.SubElement(device_xml, 'switch_block', {
            "type": "wilton",
            "fs": "3",
        })
        chan_width_distr_xml = ET.SubElement(device_xml, 'chan_width_distr')

        ET.SubElement(
            chan_width_distr_xml, 'x', {
                'distr': 'uniform',
                'peak': '1.0',
            }
        )
        ET.SubElement(
            chan_width_distr_xml, 'y', {
                'distr': 'uniform',
                'peak': '1.0',
            }
        )

        directlist_xml = ET.SubElement(arch_xml, 'directlist')

        pin_assignments = json.load(args.pin_assignments)

        # Choose smallest distance for block to block connections with
        # multiple direct_connections.  VPR cannot handle multiple block to
        # block connections.
        directs = {}
        for direct in pin_assignments['direct_connections']:
            key = (direct['from_pin'], direct['to_pin'])

            if key not in directs:
                directs[key] = []

            directs[key].append(
                (abs(direct['x_offset']) + abs(direct['y_offset']), direct)
            )

        # Tile types for which same-(x, y) direct connections between
        # sub_tiles are legitimate.
        ALLOWED_ZERO_OFFSET_DIRECT = [
            "GTP_CHANNEL_0",
            "GTP_CHANNEL_1",
            "GTP_CHANNEL_2",
            "GTP_CHANNEL_3",
            "GTP_CHANNEL_0_MID_LEFT",
            "GTP_CHANNEL_1_MID_LEFT",
            "GTP_CHANNEL_2_MID_LEFT",
            "GTP_CHANNEL_3_MID_LEFT",
            "GTP_CHANNEL_0_MID_RIGHT",
            "GTP_CHANNEL_1_MID_RIGHT",
            "GTP_CHANNEL_2_MID_RIGHT",
            "GTP_CHANNEL_3_MID_RIGHT",
            "GTP_COMMON_MID_LEFT",
            "GTP_COMMON_MID_RIGHT",
        ]

        zero_offset_directs = dict()
        for direct in directs.values():
            # Keep only the shortest connection for each (from, to) pin pair.
            _, direct = min(direct, key=lambda v: v[0])

            from_tile = direct['from_pin'].split('.')[0]
            to_tile = direct['to_pin'].split('.')[0]

            if from_tile not in tile_types:
                continue
            if to_tile not in tile_types:
                continue

            # In general, the Z offset is 0, except for special cases
            # such as for the GTP tiles, where there are direct connections
            # within the same (x, y) cooredinates, but between different
            # sub_tiles
            direct['z_offset'] = 0

            if direct['x_offset'] == 0 and direct['y_offset'] == 0:
                if from_tile == to_tile and \
                        from_tile in ALLOWED_ZERO_OFFSET_DIRECT:
                    if from_tile not in zero_offset_directs:
                        zero_offset_directs[from_tile] = list()

                    zero_offset_directs[from_tile].append(direct)

                continue

            add_direct(directlist_xml, direct)

        # `tile_directs` (not `directs`) to avoid shadowing the dict above.
        for tile, tile_directs in zero_offset_directs.items():
            uri = tile_xml_spec.format(tile.lower())
            ports = list()

            # Collect each port name with the index of the sub_tile that
            # declares it, so the z_offset between sub_tiles can be derived.
            with open(uri) as f:
                tile_xml = ET.parse(f, ET.XMLParser())

                tile_root = tile_xml.getroot()

                for capacity, sub_tile in enumerate(
                        tile_root.iter('sub_tile')):
                    for in_port in sub_tile.iter('input'):
                        ports.append((in_port.attrib["name"], capacity))

                    for out_port in sub_tile.iter('output'):
                        ports.append((out_port.attrib["name"], capacity))

                    for clk_port in sub_tile.iter('clock'):
                        ports.append((clk_port.attrib["name"], capacity))

            for direct in tile_directs:
                tile_type, from_port = direct['from_pin'].split('.')
                _, to_port = direct['to_pin'].split('.')

                if tile != tile_type:
                    continue

                from_port_capacity = None
                to_port_capacity = None
                for port, capacity in ports:
                    if port == from_port:
                        from_port_capacity = capacity
                    if port == to_port:
                        to_port_capacity = capacity

                assert from_port_capacity is not None \
                    and to_port_capacity is not None, (
                        tile, from_port, to_port
                    )
                direct["z_offset"] = to_port_capacity - from_port_capacity

                add_direct(directlist_xml, direct)

        arch_xml_str = ET.tostring(arch_xml,
                                   pretty_print=True).decode('utf-8')
        args.output_arch.write(arch_xml_str)
        args.output_arch.close()
def main():
    """Import fabric connectivity into a VPR routing resource graph.

    Reads a virtual rr_graph XML (--read_rr_graph), attaches tracks and
    edges from the connection database, and serializes the routed graph
    to --write_rr_graph.

    Fix vs. previous revision: `roi` and `synth_tiles` are now initialized
    to None up front.  Previously, when --synth_tiles was not supplied,
    both names were unbound yet still passed to create_track_rr_graph(),
    raising NameError.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument(
        '--read_rr_graph', required=True, help='Input rr_graph file')
    parser.add_argument(
        '--write_rr_graph', required=True, help='Output rr_graph file')
    parser.add_argument(
        '--connection_database',
        help='Database of fabric connectivity',
        required=True)
    parser.add_argument(
        '--synth_tiles',
        help=
        'If using an ROI, synthetic tile defintion from prjxray-arch-import')

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    # Default both so they are always bound when passed to
    # create_track_rr_graph() below, even without --synth_tiles.
    synth_tiles = None
    roi = None

    if args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )

        print('{} generating routing graph for ROI.'.format(now()))
    else:
        use_roi = False

    # Convert input rr graph into graph2.Graph object.
    input_rr_graph = read_xml_file(args.read_rr_graph)

    xml_graph = xml_graph2.Graph(
        input_rr_graph,
        progressbar=progressbar.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = xml_graph.graph

    # Add back short switch, which is unused in arch xml, so is not emitted
    # in rrgraph XML.
    #
    # TODO: This can be removed once
    # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
    # is fixed.
    try:
        short = graph.get_switch_id('short')
    except KeyError:
        # Registering the switch is the important side effect here; the
        # returned id itself is not used further in this function.
        short = xml_graph.add_switch(
            graph2.Switch(
                id=None,
                name='short',
                type=graph2.SwitchType.SHORT,
                timing=None,
                sizing=graph2.SwitchSizing(
                    mux_trans_size=0,
                    buf_size=0,
                ),
            ))

    tool_version = input_rr_graph.getroot().attrib['tool_version']
    tool_comment = input_rr_graph.getroot().attrib['tool_comment']

    with DatabaseCache(args.connection_database, True) as conn:
        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        # Match site pins rr nodes with graph_node's in the
        # connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(
            conn, graph, node_mapping, use_roi, roi, synth_tiles, segment_id)

        # Set of (src, sink, switch_id) tuples that pip edges have been sent
        # to VPR.  VPR cannot handle duplicate paths with the same switch id.
        if use_roi:
            print('{} Adding synthetic edges'.format(now()))
            add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)

        print('{} Serializing to disk.'.format(now()))
        with xml_graph:
            xml_graph.start_serialize_to_xml(
                tool_version=tool_version,
                tool_comment=tool_comment,
                channels_obj=channels_obj,
            )
            xml_graph.serialize_nodes(yield_nodes(xml_graph.graph.nodes))
            xml_graph.serialize_edges(
                import_graph_edges(conn, graph, node_mapping))