def rebuild_graph(fn, fn_out, rcw=6, verbose=False):
    """
    Add rcw tracks spanning full channel to both X and Y channels
    Connect all of those to all the adjacent pins
    Fully connect tracks at intersections
    For intersections this means we actually have two edges per intersection
    since source and sink must be specified

    :param fn: input rr_graph XML file name.
    :param fn_out: output rr_graph XML file name.
    :param rcw: routing channel width (number of tracks per channel).
    :param verbose: forwarded to track creation/connection helpers.
    """
    print('Importing input g')
    xml_graph = xml_graph2.Graph(
        fn,
        output_file_name=fn_out,
    )
    graph = xml_graph.graph

    # Grid dimensions are derived from the maximum coordinates present in
    # the grid; +1 converts the max index to a count.
    grid_width = max(p.x for p in graph.grid) + 1
    grid_height = max(p.y for p in graph.grid) + 1

    # 'mux' must already exist in the input graph; 'short' may have been
    # pruned, so it is re-added on demand.
    mux = graph.get_switch_id('mux')
    try:
        short = graph.get_switch_id('short')
    except KeyError:
        short = xml_graph.add_switch(
            graph2.Switch(
                id=None,
                name='short',
                type=graph2.SwitchType.SHORT,
                timing=None,
                sizing=graph2.SwitchSizing(
                    mux_trans_size=0,
                    buf_size=0,
                ),
            )
        )

    # Build the routing fabric: tracks, constant network, pin connections,
    # and track-to-track intersections (two directed edges per crossing).
    create_tracks(graph, grid_width, grid_height, rcw, verbose=verbose)
    create_global_constant_tracks(graph, mux, short, grid_width, grid_height)
    connect_blocks_to_tracks(graph, grid_width, grid_height, rcw, switch=mux)
    connect_tracks_to_tracks(graph, switch=mux, verbose=verbose)
    print("Completed rebuild")

    xml_graph.root_attrib["tool_version"] = "dev"
    xml_graph.root_attrib["tool_comment"] = "Generated from black magic"

    # Channels are derived from the tracks created above; the first segment
    # is used for padding.
    channels_obj = graph.create_channels(pad_segment=graph.segments[0].id)

    xml_graph.serialize_to_xml(
        channels_obj=channels_obj,
        connection_box_obj=None,
        nodes_obj=graph.nodes,
        edges_obj=graph.edges
    )
def read_switch(sw):
    """Convert a foreign switch description *sw* into a graph2.Switch.

    Reads id/name/type plus the timing and sizing sub-records of *sw*
    (note the camelCase field names on the input side).
    """
    switch_timing = graph2.SwitchTiming(
        r=sw.timing.r,
        c_in=sw.timing.cin,
        c_out=sw.timing.cout,
        c_internal=sw.timing.cinternal,
        t_del=sw.timing.tdel,
    )
    switch_sizing = graph2.SwitchSizing(
        mux_trans_size=sw.sizing.muxTransSize,
        buf_size=sw.sizing.bufSize,
    )
    return graph2.Switch(
        id=sw.id,
        name=str(sw.name),
        type=enum_from_string(graph2.SwitchType, sw.type),
        timing=switch_timing,
        sizing=switch_sizing,
    )
def graph_from_xml(input_xml, progressbar=None):
    """Extract switches, segments, block types, grid and pin-class nodes
    from a parsed rr_graph XML tree (DOM-style access via find/iter).

    :param input_xml: ElementTree-like root of an rr_graph document.
    :param progressbar: optional wrapper around the node iterator
        (e.g. a progress bar); identity when None.
    :returns: dict with keys switches, segments, block_types, grid, nodes.
        Only SOURCE/SINK/OPIN/IPIN nodes are retained.
    """
    if progressbar is None:
        progressbar = lambda x: x

    # --- Switches -----------------------------------------------------
    switches = []
    for switch in input_xml.find('switches').iter('switch'):
        # <timing> and <sizing> are optional children.
        timing_xml = switch.find('timing')
        if timing_xml is not None:
            timing = graph2.SwitchTiming(
                r=float(timing_xml.attrib['R']),
                c_in=float(timing_xml.attrib['Cin']),
                c_out=float(timing_xml.attrib['Cout']),
                t_del=float(timing_xml.attrib['Tdel']),
            )
        else:
            timing = None
        sizing_xml = switch.find('sizing')
        if sizing_xml is not None:
            sizing = graph2.SwitchSizing(
                mux_trans_size=float(sizing_xml.attrib['mux_trans_size']),
                buf_size=float(sizing_xml.attrib['buf_size']),
            )
        else:
            sizing = None
        switches.append(
            graph2.Switch(
                id=int(switch.attrib['id']),
                type=enum_from_string(graph2.SwitchType, switch.attrib['type']),
                name=switch.attrib['name'],
                timing=timing,
                sizing=sizing,
            ))

    # --- Segments -----------------------------------------------------
    segments = []
    for segment in input_xml.find('segments').iter('segment'):
        timing_xml = segment.find('timing')
        if timing_xml is not None:
            timing = graph2.SegmentTiming(
                r_per_meter=float(timing_xml.attrib['R_per_meter']),
                c_per_meter=float(timing_xml.attrib['C_per_meter']),
            )
        else:
            timing = None
        segments.append(
            graph2.Segment(
                id=int(segment.attrib['id']),
                name=segment.attrib['name'],
                timing=timing,
            ))

    # --- Block types (with nested pin classes and pins) ---------------
    block_types = []
    for block_type in input_xml.find('block_types').iter('block_type'):
        pin_classes = []
        for pin_class in block_type.iter('pin_class'):
            pins = []
            for pin in pin_class.iter('pin'):
                pins.append(
                    graph2.Pin(
                        ptc=int(pin.attrib['ptc']),
                        name=pin.text,
                    ))
            pin_classes.append(
                graph2.PinClass(
                    type=enum_from_string(graph2.PinType, pin_class.attrib['type']),
                    pin=pins,
                ))
        block_types.append(
            graph2.BlockType(
                id=int(block_type.attrib['id']),
                name=block_type.attrib['name'],
                width=int(block_type.attrib['width']),
                height=int(block_type.attrib['height']),
                pin_class=pin_classes,
            ))

    # --- Grid ---------------------------------------------------------
    grid = []
    for grid_loc in input_xml.find('grid').iter('grid_loc'):
        grid.append(
            graph2.GridLoc(
                x=int(grid_loc.attrib['x']),
                y=int(grid_loc.attrib['y']),
                block_type_id=int(grid_loc.attrib['block_type_id']),
                width_offset=int(grid_loc.attrib['width_offset']),
                height_offset=int(grid_loc.attrib['height_offset']),
            ))

    # --- Nodes --------------------------------------------------------
    # Only pin-related nodes (SOURCE/SINK/OPIN/IPIN) are kept; routing
    # nodes (CHANX/CHANY) are dropped here.
    nodes = []
    for node in progressbar(input_xml.find('rr_nodes').iter('node')):
        node_type = enum_from_string(graph2.NodeType, node.attrib['type'])
        if node_type in [
                graph2.NodeType.SOURCE, graph2.NodeType.SINK,
                graph2.NodeType.OPIN, graph2.NodeType.IPIN
        ]:
            loc_xml = node.find('loc')
            if loc_xml is not None:
                # 'side' is only present for pin nodes.
                if 'side' in loc_xml.attrib:
                    side = enum_from_string(tracks.Direction, loc_xml.attrib['side'])
                else:
                    side = None
                loc = graph2.NodeLoc(x_low=int(loc_xml.attrib['xlow']),
                                     y_low=int(loc_xml.attrib['ylow']),
                                     x_high=int(loc_xml.attrib['xhigh']),
                                     y_high=int(loc_xml.attrib['yhigh']),
                                     ptc=int(loc_xml.attrib['ptc']),
                                     side=side)
            else:
                loc = None
            timing_xml = node.find('timing')
            if timing_xml is not None:
                timing = graph2.NodeTiming(
                    r=float(timing_xml.attrib['R']),
                    c=float(timing_xml.attrib['C']),
                )
            else:
                timing = None
            # Not expecting any metadata on the input.
            assert node.find('metadata') is None
            metadata = None
            nodes.append(
                graph2.Node(
                    id=int(node.attrib['id']),
                    type=node_type,
                    direction=graph2.NodeDirection.NO_DIR,
                    capacity=int(node.attrib['capacity']),
                    loc=loc,
                    timing=timing,
                    metadata=metadata,
                    segment=None,
                ))
    return dict(switches=switches,
                segments=segments,
                block_types=block_types,
                grid=grid,
                nodes=nodes)
def graph_from_xml(input_file_name, progressbar=None, filter_nodes=True):
    """
    Loads relevant information about the routing resource graph from an XML
    file.

    Streaming variant: iterates (path, element) pairs from iterate_xml
    instead of building a DOM, accumulating child elements (timing, sizing,
    loc, pins, ...) in temporaries until the enclosing parent element closes.

    :param input_file_name: rr_graph XML file to read.
    :param progressbar: optional wrapper around the element iterator.
    :param filter_nodes: when True, keep only SOURCE/SINK/OPIN/IPIN nodes.
    :returns: dict with keys root_attrib, switches, segments, block_types,
        grid, nodes.
    """
    if progressbar is None:
        progressbar = lambda x: x  # noqa: E731

    root_attrib = {}
    switches = []
    segments = []
    block_types = []
    grid = []
    nodes = []

    # Itertate over XML elements
    # State carried between a child element and its parent's closing tag.
    switch_timing = None
    switch_sizing = None
    segment_timing = None
    pins = []
    pin_classes = []
    node_loc = None
    node_timing = None

    for path, element in progressbar(iterate_xml(input_file_name)):

        # Root tag
        if path == "" and element.tag == "rr_graph":
            root_attrib = dict(element.attrib)

        # Switch timing
        if path == "rr_graph/switches/switch" and element.tag == "timing":
            switch_timing = graph2.SwitchTiming(
                r=float(element.attrib['R']),
                c_in=float(element.attrib['Cin']),
                c_out=float(element.attrib['Cout']),
                # Cinternal is optional; default 0.
                c_internal=float(element.attrib.get('Cinternal', 0)),
                t_del=float(element.attrib['Tdel']),
            )

        # Switch sizing
        if path == "rr_graph/switches/switch" and element.tag == "sizing":
            switch_sizing = graph2.SwitchSizing(
                mux_trans_size=float(element.attrib['mux_trans_size']),
                buf_size=float(element.attrib['buf_size']),
            )

        # Switch
        if path == "rr_graph/switches" and element.tag == "switch":
            switches.append(
                graph2.Switch(
                    id=int(element.attrib['id']),
                    type=enum_from_string(graph2.SwitchType,
                                          element.attrib['type']),
                    name=element.attrib['name'],
                    timing=switch_timing,
                    sizing=switch_sizing,
                ))
            # Reset per-switch state for the next <switch>.
            switch_timing = None
            switch_sizing = None

        # Segment timing
        if path == "rr_graph/segments/segment" and element.tag == "timing":
            segment_timing = graph2.SegmentTiming(
                r_per_meter=float(element.attrib['R_per_meter']),
                c_per_meter=float(element.attrib['C_per_meter']),
            )

        # Segment
        if path == "rr_graph/segments" and element.tag == "segment":
            segments.append(
                graph2.Segment(
                    id=int(element.attrib['id']),
                    name=element.attrib['name'],
                    timing=segment_timing,
                ))
            segment_timing = None

        # Block type - pin
        if path == "rr_graph/block_types/block_type/pin_class" and element.tag == "pin":
            pins.append(
                graph2.Pin(
                    ptc=int(element.attrib['ptc']),
                    name=element.text,
                ))

        # Block type - pin_class
        if path == "rr_graph/block_types/block_type" and element.tag == "pin_class":
            pin_classes.append(
                graph2.PinClass(
                    type=enum_from_string(graph2.PinType,
                                          element.attrib['type']),
                    pin=pins,
                ))
            pins = []

        # Block type
        if path == "rr_graph/block_types" and element.tag == "block_type":
            block_types.append(
                graph2.BlockType(
                    id=int(element.attrib['id']),
                    name=element.attrib['name'],
                    width=int(element.attrib['width']),
                    height=int(element.attrib['height']),
                    pin_class=pin_classes,
                ))
            pin_classes = []

        # Grid
        if path == "rr_graph/grid" and element.tag == "grid_loc":
            grid.append(
                graph2.GridLoc(
                    x=int(element.attrib['x']),
                    y=int(element.attrib['y']),
                    block_type_id=int(element.attrib['block_type_id']),
                    width_offset=int(element.attrib['width_offset']),
                    height_offset=int(element.attrib['height_offset']),
                ))

        # Node - loc
        if path == "rr_graph/rr_nodes/node" and element.tag == "loc":
            if 'side' in element.attrib:
                side = enum_from_string(tracks.Direction, element.attrib['side'])
            else:
                side = None
            node_loc = graph2.NodeLoc(x_low=int(element.attrib['xlow']),
                                      y_low=int(element.attrib['ylow']),
                                      x_high=int(element.attrib['xhigh']),
                                      y_high=int(element.attrib['yhigh']),
                                      ptc=int(element.attrib['ptc']),
                                      side=side)

        # Node - timing
        if path == "rr_graph/rr_nodes/node" and element.tag == "timing":
            node_timing = graph2.NodeTiming(
                r=float(element.attrib['R']),
                c=float(element.attrib['C']),
            )

        # Node
        if path == "rr_graph/rr_nodes" and element.tag == "node":
            node_type = enum_from_string(graph2.NodeType, element.attrib['type'])
            if filter_nodes and node_type not in [
                    graph2.NodeType.SOURCE, graph2.NodeType.SINK,
                    graph2.NodeType.OPIN, graph2.NodeType.IPIN
            ]:
                # NOTE(review): a filtered node leaves node_loc/node_timing
                # set; the next node's children overwrite them before its
                # closing tag, so no stale data is appended — verify that
                # every node carries a <loc>/<timing> child.
                continue

            # Dropping metadata for now
            metadata = None

            nodes.append(
                graph2.Node(
                    id=int(element.attrib['id']),
                    type=node_type,
                    direction=graph2.NodeDirection.NO_DIR,
                    capacity=int(element.attrib['capacity']),
                    loc=node_loc,
                    timing=node_timing,
                    metadata=metadata,
                    segment=None,
                    canonical_loc=None,
                    connection_box=None,
                ))
            node_loc = None
            node_timing = None

    return dict(root_attrib=root_attrib,
                switches=switches,
                segments=segments,
                block_types=block_types,
                grid=grid,
                nodes=nodes)
def main():
    """Import fabric routing from the connection database into a VPR
    rr_graph XML file (optionally restricted to an ROI / graph limit).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument('--read_rr_graph', required=True, help='Input rr_graph file')
    parser.add_argument('--write_rr_graph', required=True, help='Output rr_graph file')
    parser.add_argument('--write_rr_node_map', required=True,
                        help='Output map of graph_node_pkey to rr inode file')
    parser.add_argument('--connection_database',
                        help='Database of fabric connectivity', required=True)
    parser.add_argument(
        '--synth_tiles',
        help='If using an ROI, synthetic tile defintion from prjxray-arch-import')
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    # FIX: `grid` was referenced below (add_synthetic_edges) but never
    # defined in this function; fetch it from the database as the sibling
    # variant of this script does.
    grid = db.grid()

    synth_tiles = None

    if args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )
        print('{} generating routing graph for ROI.'.format(now()))
    elif args.graph_limit:
        use_roi = True
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )
    else:
        use_roi = False
        roi = None
        synth_tiles = None

    # Convert input rr graph into graph2.Graph object.
    input_rr_graph = read_xml_file(args.read_rr_graph)

    if synth_tiles is None:
        synth_tiles = find_constant_network(input_rr_graph)

    xml_graph = xml_graph2.Graph(
        input_rr_graph,
        progressbar=progressbar_utils.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = xml_graph.graph

    tool_version = input_rr_graph.getroot().attrib['tool_version']
    tool_comment = input_rr_graph.getroot().attrib['tool_comment']

    with DatabaseCache(args.connection_database, True) as conn:
        cur = conn.cursor()
        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in cur.execute("""
SELECT name, internal_capacitance, drive_resistance, intrinsic_delay, switch_type
FROM switch;"""):
            # Add back missing switchs, which were unused in arch xml, and so
            # were not emitted in rrgraph XML.
            #
            # TODO: This can be removed once
            # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
            # is fixed.
            try:
                graph.get_switch_id(name)
                continue
            except KeyError:
                xml_graph.add_switch(
                    graph2.Switch(
                        id=None,
                        name=name,
                        type=graph2.SwitchType[switch_type.upper()],
                        timing=graph2.SwitchTiming(
                            r=drive_resistance,
                            c_in=0.0,
                            c_out=0.0,
                            c_internal=internal_capacitance,
                            t_del=intrinsic_delay,
                        ),
                        sizing=graph2.SwitchSizing(
                            mux_trans_size=0,
                            buf_size=0,
                        ),
                    ))

        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        print('{} Creating connection box list'.format(now()))
        connection_box_map = create_connection_boxes(conn, graph)

        # Match site pins rr nodes with graph_node's in the connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping, connection_box_map)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(conn, graph, node_mapping, use_roi, roi,
                              synth_tiles, segment_id)

        # Set of (src, sink, switch_id) tuples that pip edges have been sent to
        # VPR.  VPR cannot handle duplicate paths with the same switch id.
        if use_roi:
            print('{} Adding synthetic edges'.format(now()))
            add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)

        x_dim, y_dim = phy_grid_dims(conn)
        connection_box_obj = graph.create_connection_box_object(x_dim=x_dim,
                                                                y_dim=y_dim)

        print('{} Serializing to disk.'.format(now()))
        with xml_graph:
            xml_graph.start_serialize_to_xml(
                tool_version=tool_version,
                tool_comment=tool_comment,
                channels_obj=channels_obj,
                connection_box_obj=connection_box_obj,
            )
            xml_graph.serialize_nodes(yield_nodes(xml_graph.graph.nodes))
            xml_graph.serialize_edges(
                import_graph_edges(conn, graph, node_mapping))

        print('{} Writing node map.'.format(now()))
        with open(args.write_rr_node_map, 'wb') as f:
            pickle.dump(node_mapping, f)
        print('{} Done writing node map.'.format(now()))
def main():
    """Import fabric routing from the connection database into a VPR
    rr_graph capnp file (supports ROI, Overlay and graph-limit modes).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument('--part', required=True, help='FPGA part')
    parser.add_argument('--read_rr_graph', required=True, help='Input rr_graph file')
    parser.add_argument('--write_rr_graph', required=True, help='Output rr_graph file')
    parser.add_argument('--write_rr_node_map', required=True,
                        help='Output map of graph_node_pkey to rr inode file')
    parser.add_argument('--connection_database',
                        help='Database of fabric connectivity', required=True)
    parser.add_argument(
        '--synth_tiles',
        help='If using an ROI, synthetic tile defintion from prjxray-arch-import')
    parser.add_argument('--overlay', action='store_true', required=False,
                        help='Use synth tiles for Overlay instead of ROI')
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )
    parser.add_argument(
        '--vpr_capnp_schema_dir',
        help='Directory container VPR schema files',
    )

    print('{} Starting routing import'.format(now()))
    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root, args.part)
    populate_hclk_cmt_tiles(db)
    # FIX: `grid` was referenced below (add_synthetic_edges) but never
    # defined in this function; fetch it from the database.
    grid = db.grid()

    synth_tiles = None
    if args.overlay:
        assert args.synth_tiles
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        # Build one bounding box per overlay region.
        region_dict = dict()
        for r in synth_tiles['info']:
            bounds = (r['GRID_X_MIN'], r['GRID_X_MAX'],
                      r['GRID_Y_MIN'], r['GRID_Y_MAX'])
            region_dict[r['name']] = bounds

        roi = Overlay(region_dict=region_dict)
        print('{} generating routing graph for Overlay.'.format(now()))
    elif args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )
        print('{} generating routing graph for ROI.'.format(now()))
    elif args.graph_limit:
        use_roi = True
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )
    else:
        use_roi = False
        roi = None
        synth_tiles = None

    capnp_graph = capnp_graph2.Graph(
        rr_graph_schema_fname=os.path.join(args.vpr_capnp_schema_dir,
                                           'rr_graph_uxsdcxx.capnp'),
        input_file_name=args.read_rr_graph,
        progressbar=progressbar_utils.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = capnp_graph.graph

    if synth_tiles is None:
        synth_tiles = find_constant_network(graph)

    if args.overlay:
        # Overlay mode also needs the constant-network tiles merged in.
        synth_tiles_const = find_constant_network(graph)
        synth_tiles['tiles'].update(synth_tiles_const['tiles'])

    # Read-only connection to the connection database.
    with sqlite3.connect("file:{}?mode=ro".format(args.connection_database),
                         uri=True) as conn:
        populate_bufg_rebuf_map(conn)

        cur = conn.cursor()
        for name, internal_capacitance, drive_resistance, intrinsic_delay, penalty_cost, \
                switch_type in cur.execute("""
SELECT name, internal_capacitance, drive_resistance, intrinsic_delay, penalty_cost, switch_type
FROM switch;"""):
            # Add back missing switchs, which were unused in arch xml, and so
            # were not emitted in rrgraph XML.
            #
            # TODO: This can be removed once
            # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
            # is fixed.
            try:
                graph.get_switch_id(name)
                continue
            except KeyError:
                capnp_graph.add_switch(
                    graph2.Switch(
                        id=None,
                        name=name,
                        type=graph2.SwitchType[switch_type.upper()],
                        timing=graph2.SwitchTiming(
                            r=drive_resistance,
                            c_in=0.0,
                            c_out=0.0,
                            c_internal=internal_capacitance,
                            t_del=intrinsic_delay,
                            p_cost=penalty_cost,
                        ),
                        sizing=graph2.SwitchSizing(
                            mux_trans_size=0,
                            buf_size=0,
                        ),
                    ))

        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        print('{} Creating connection box list'.format(now()))
        connection_box_map = create_connection_boxes(conn, graph)

        # Match site pins rr nodes with graph_node's in the connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping, connection_box_map)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(conn, graph, node_mapping, use_roi, roi,
                              synth_tiles, segment_id)

        # Set of (src, sink, switch_id) tuples that pip edges have been sent to
        # VPR.  VPR cannot handle duplicate paths with the same switch id.
        print('{} Adding synthetic edges'.format(now()))
        add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles,
                            args.overlay)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)
        node_remap = create_node_remap(capnp_graph.graph.nodes, channels_obj)

        x_dim, y_dim = phy_grid_dims(conn)
        connection_box_obj = graph.create_connection_box_object(x_dim=x_dim,
                                                                y_dim=y_dim)

        num_edges = get_number_graph_edges(conn, graph, node_mapping)
        print('{} Serializing to disk.'.format(now()))

        capnp_graph.serialize_to_capnp(
            channels_obj=channels_obj,
            connection_box_obj=connection_box_obj,
            num_nodes=len(capnp_graph.graph.nodes),
            nodes_obj=yield_nodes(capnp_graph.graph.nodes),
            num_edges=num_edges,
            edges_obj=import_graph_edges(conn, graph, node_mapping),
            node_remap=node_remap,
        )

        # The serialized node ids were remapped; keep the node map in sync.
        for k in node_mapping:
            node_mapping[k] = node_remap(node_mapping[k])

        print('{} Writing node map.'.format(now()))
        with open(args.write_rr_node_map, 'wb') as f:
            pickle.dump(node_mapping, f)
        print('{} Done writing node map.'.format(now()))
def main():
    """Build the VPR routing graph for the QuickLogic fabric: load the VPR
    database, rebuild switchbox / clock-network models, populate
    connections, sanity-check the result and serialize it to XML.
    """
    # Parse arguments
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("--vpr-db", type=str, required=True,
                        help="VPR database file")
    parser.add_argument("--rr-graph-in", type=str, required=True,
                        help="Input RR graph XML file")
    parser.add_argument("--rr-graph-out", type=str, default="rr_graph.xml",
                        help="Output RR graph XML file (def. rr_graph.xml)")

    args = parser.parse_args()

    # Load data from the database
    print("Loading database...")
    with open(args.vpr_db, "rb") as fp:
        db = pickle.load(fp)
        vpr_quadrants = db["vpr_quadrants"]
        vpr_clock_cells = db["vpr_clock_cells"]
        loc_map = db["loc_map"]
        vpr_tile_types = db["vpr_tile_types"]
        vpr_tile_grid = db["vpr_tile_grid"]
        vpr_switchbox_types = db["vpr_switchbox_types"]
        vpr_switchbox_grid = db["vpr_switchbox_grid"]
        connections = db["connections"]
        switches = db["switches"]

    # Load the routing graph, build SOURCE -> OPIN and IPIN -> SINK edges.
    print("Loading rr graph...")
    xml_graph = rr_xml.Graph(input_file_name=args.rr_graph_in,
                             output_file_name=args.rr_graph_out,
                             progressbar=progressbar_utils.progressbar)

    # Add back the switches that were unused in the arch.xml and got pruned
    # byt VPR.
    for switch in switches:
        try:
            xml_graph.graph.get_switch_id(switch.name)
            continue
        except KeyError:
            xml_graph.add_switch(
                rr.Switch(
                    id=None,
                    name=switch.name,
                    type=rr.SwitchType[switch.type.upper()],
                    timing=rr.SwitchTiming(
                        r=switch.r,
                        c_in=switch.c_in,
                        c_out=switch.c_out,
                        c_internal=switch.c_int,
                        t_del=switch.t_del,
                    ),
                    sizing=rr.SwitchSizing(
                        mux_trans_size=0,
                        buf_size=0,
                    ),
                ))

    print("Building maps...")

    # Add a switch map to the graph
    switch_map = {}
    for switch in xml_graph.graph.switches:
        assert switch.id not in switch_map, switch
        switch_map[switch.id] = switch
    xml_graph.graph.switch_map = switch_map

    # Build node id to node map
    nodes_by_id = {node.id: node for node in xml_graph.graph.nodes}

    # Build tile pin names to rr node ids map
    tile_pin_to_node = build_tile_pin_to_node_map(xml_graph.graph,
                                                  nodes_by_id,
                                                  vpr_tile_types,
                                                  vpr_tile_grid)

    # Add const network
    const_node_map = {}
    for const in ["VCC", "GND"]:
        m = add_tracks_for_const_network(xml_graph.graph, const,
                                         vpr_tile_grid)
        const_node_map[const] = m

    # Connection loc (endpoint) to node map. Map ConnectionLoc objects to VPR
    # rr graph node ids.
    connection_loc_to_node = {}

    # Build a map of connections to/from tiles and rr nodes. The map points
    # to an IPIN/OPIN node for a connection loc that mentions it.
    node_map = build_tile_connection_map(xml_graph.graph, nodes_by_id,
                                         vpr_tile_grid, connections)
    connection_loc_to_node.update(node_map)

    # Build the global clock network
    print("Building the global clock network...")

    # GMUX to QMUX and QMUX to CAND tracks
    node_map = create_quadrant_clock_tracks(xml_graph.graph, connections,
                                            connection_loc_to_node)
    connection_loc_to_node.update(node_map)

    # Clock column tracks
    cand_node_map = create_column_clock_tracks(xml_graph.graph,
                                               vpr_clock_cells,
                                               vpr_quadrants)

    # Add switchbox models.
    print("Building switchbox models...")
    switchbox_models = {}

    # Gather QMUX cells
    qmux_cells = {}
    for cell in vpr_clock_cells.values():
        if cell.type == "QMUX":
            loc = cell.loc
            if loc not in qmux_cells:
                qmux_cells[loc] = {}
            qmux_cells[loc][cell.name] = cell

    # Create the models
    for loc, type in vpr_switchbox_grid.items():
        phy_loc = loc_map.bwd[loc]

        # QMUX switchbox model
        if loc in qmux_cells:
            switchbox_models[loc] = QmuxSwitchboxModel(
                graph=xml_graph.graph,
                loc=loc,
                phy_loc=phy_loc,
                switchbox=vpr_switchbox_types[type],
                qmux_cells=qmux_cells[loc],
                connections=[c for c in connections if is_clock(c)])

        # Regular switchbox model
        else:
            switchbox_models[loc] = SwitchboxModel(
                graph=xml_graph.graph,
                loc=loc,
                phy_loc=phy_loc,
                switchbox=vpr_switchbox_types[type],
            )

    # Build switchbox models
    for switchbox_model in progressbar_utils.progressbar(
            switchbox_models.values()):
        switchbox_model.build()

    # Build the global clock network cell models
    print("Building QMUX and CAND models...")

    # Add QMUX and CAND models
    for cell in progressbar_utils.progressbar(vpr_clock_cells.values()):
        phy_loc = loc_map.bwd[cell.loc]

        if cell.type == "QMUX":
            QmuxModel(graph=xml_graph.graph,
                      cell=cell,
                      phy_loc=phy_loc,
                      switchbox_model=switchbox_models[cell.loc],
                      connections=connections,
                      node_map=connection_loc_to_node)

        if cell.type == "CAND":
            CandModel(graph=xml_graph.graph,
                      cell=cell,
                      phy_loc=phy_loc,
                      connections=connections,
                      node_map=connection_loc_to_node,
                      cand_node_map=cand_node_map)

    # Populate connections to the switchbox models
    print("Populating connections...")
    populate_hop_connections(xml_graph.graph, switchbox_models, connections)
    populate_tile_connections(xml_graph.graph, switchbox_models, connections,
                              connection_loc_to_node)
    populate_direct_connections(xml_graph.graph, connections,
                                connection_loc_to_node)
    populate_cand_connections(xml_graph.graph, switchbox_models,
                              cand_node_map)
    populate_const_connections(xml_graph.graph, switchbox_models,
                               vpr_tile_types, vpr_tile_grid,
                               tile_pin_to_node, const_node_map)

    # Create channels from tracks
    pad_segment_id = xml_graph.graph.get_segment_id_from_name("pad")
    channels_obj = xml_graph.graph.create_channels(pad_segment=pad_segment_id)

    # Remove padding channels
    print("Removing padding nodes...")
    xml_graph.graph.nodes = [
        n for n in xml_graph.graph.nodes if n.capacity > 0
    ]

    # Build node id to node map again since there have been new nodes added.
    nodes_by_id = {node.id: node for node in xml_graph.graph.nodes}

    # Sanity check edges
    print("Sanity checking edges...")
    node_ids = set([n.id for n in xml_graph.graph.nodes])
    for edge in xml_graph.graph.edges:
        assert edge.src_node in node_ids, edge
        assert edge.sink_node in node_ids, edge
        assert edge.src_node != edge.sink_node, edge

    # Sanity check IPIN/OPIN connections. There must be no tile completely
    # disconnected from the routing network
    print("Sanity checking tile connections...")

    connected_locs = set()
    for edge in xml_graph.graph.edges:
        src = nodes_by_id[edge.src_node]
        dst = nodes_by_id[edge.sink_node]

        if src.type == rr.NodeType.OPIN:
            loc = (src.loc.x_low, src.loc.y_low)
            connected_locs.add(loc)

        if dst.type == rr.NodeType.IPIN:
            # BUGFIX: record the location of the IPIN (dst), not of the
            # edge source; the old code re-used src.loc, so tiles reached
            # only through IPINs were falsely reported as unconnected.
            loc = (dst.loc.x_low, dst.loc.y_low)
            connected_locs.add(loc)

    non_empty_locs = set((loc.x, loc.y) for loc in xml_graph.graph.grid
                         if loc.block_type_id > 0)

    unconnected_locs = non_empty_locs - connected_locs
    for loc in unconnected_locs:
        block_type = xml_graph.graph.block_type_at_loc(loc)
        print(" ERROR: Tile '{}' at ({}, {}) is not connected!".format(
            block_type, loc[0], loc[1]))

    # Write the routing graph
    nodes_obj = xml_graph.graph.nodes
    edges_obj = xml_graph.graph.edges

    print("Serializing the rr graph...")
    xml_graph.serialize_to_xml(
        channels_obj=channels_obj,
        nodes_obj=nodes_obj,
        edges_obj=yield_edges(edges_obj),
        node_remap=lambda x: x,
    )
def main():
    """Import fabric routing from the connection database into a VPR
    rr_graph XML file (early variant without connection boxes / node map).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument('--read_rr_graph', required=True, help='Input rr_graph file')
    parser.add_argument('--write_rr_graph', required=True, help='Output rr_graph file')
    parser.add_argument('--connection_database',
                        help='Database of fabric connectivity', required=True)
    parser.add_argument(
        '--synth_tiles',
        help='If using an ROI, synthetic tile defintion from prjxray-arch-import')

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    if args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )
        print('{} generating routing graph for ROI.'.format(now()))
    else:
        use_roi = False
        # FIX: roi and synth_tiles were left undefined on this path, but
        # both are passed unconditionally to create_track_rr_graph below,
        # raising NameError whenever --synth_tiles is not given.  Mirror
        # the sibling variant of this script and use None.
        roi = None
        synth_tiles = None

    # Convert input rr graph into graph2.Graph object.
    input_rr_graph = read_xml_file(args.read_rr_graph)

    xml_graph = xml_graph2.Graph(
        input_rr_graph,
        progressbar=progressbar.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = xml_graph.graph

    # Add back short switch, which is unused in arch xml, so is not emitted in
    # rrgraph XML.
    #
    # TODO: This can be removed once
    # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
    # is fixed.
    try:
        short = graph.get_switch_id('short')
    except KeyError:
        short = xml_graph.add_switch(
            graph2.Switch(
                id=None,
                name='short',
                type=graph2.SwitchType.SHORT,
                timing=None,
                sizing=graph2.SwitchSizing(
                    mux_trans_size=0,
                    buf_size=0,
                ),
            ))

    tool_version = input_rr_graph.getroot().attrib['tool_version']
    tool_comment = input_rr_graph.getroot().attrib['tool_comment']

    with DatabaseCache(args.connection_database, True) as conn:
        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        # Match site pins rr nodes with graph_node's in the connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(conn, graph, node_mapping, use_roi, roi,
                              synth_tiles, segment_id)

        # Set of (src, sink, switch_id) tuples that pip edges have been sent to
        # VPR.  VPR cannot handle duplicate paths with the same switch id.
        if use_roi:
            print('{} Adding synthetic edges'.format(now()))
            add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)

        print('{} Serializing to disk.'.format(now()))
        with xml_graph:
            xml_graph.start_serialize_to_xml(
                tool_version=tool_version,
                tool_comment=tool_comment,
                channels_obj=channels_obj,
            )
            xml_graph.serialize_nodes(yield_nodes(xml_graph.graph.nodes))
            xml_graph.serialize_edges(
                import_graph_edges(conn, graph, node_mapping))