def main():
    """Generate synth_tiles.json describing synthetic IO tiles.

    Supports two modes: a single ROI (``--roi``) or a list of overlay
    regions (``--overlay``).  For each region port a synthetic IOPAD pin
    is recorded; in ROI mode two unused VBRK tiles are additionally
    claimed as synthetic VCC and GND sources.
    """
    parser = argparse.ArgumentParser(description="Generate synth_tiles.json")
    parser.add_argument('--db_root', required=True)
    parser.add_argument('--part', required=True)
    parser.add_argument('--roi', required=False)
    parser.add_argument('--overlay', required=False)
    parser.add_argument(
        '--connection_database', help='Connection database', required=True)
    parser.add_argument('--synth_tiles', required=True)

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root, args.part)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}

    # Map of Roi object -> the JSON region definition it was built from.
    rois = dict()
    if args.roi:
        with open(args.roi) as f:
            j = json.load(f)

        synth_tiles['info'] = j['info']
        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        rois[roi] = j
    elif args.overlay:
        with open(args.overlay) as f:
            j = json.load(f)

        # Overlay files contain a list of region definitions; collect the
        # per-region 'info' records as we process them below.
        synth_tiles['info'] = list()
        for r in j:
            roi = Roi(
                db=db,
                x1=r['info']['GRID_X_MIN'],
                y1=r['info']['GRID_Y_MIN'],
                x2=r['info']['GRID_X_MAX'],
                y2=r['info']['GRID_Y_MAX'],
            )

            rois[roi] = r
    else:
        assert False, 'Synth tiles must be for roi or overlay'

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        tile_in_use = set()
        for roi, j in rois.items():
            if args.overlay:
                synth_tiles['info'].append(j['info'])

            # Per-tile counter used to assign the z location of each pin.
            tile_pin_count = dict()
            num_synth_tiles = 0
            for port in sorted(j['ports'],
                               key=lambda i: (i['type'], i['name'])):
                # Port direction is inverted between ROI and overlay modes:
                # an 'out' port of the region is an input of the harness.
                if port['type'] == 'out':
                    port_type = 'input' if not args.overlay else 'output'
                    is_clock = False
                elif port['type'] == 'in':
                    is_clock = False
                    port_type = 'output' if not args.overlay else 'input'
                elif port['type'] == 'clk':
                    port_type = 'output' if not args.overlay else 'input'
                    is_clock = True
                else:
                    assert False, port

                if 'wire' not in port:
                    tile, wire = find_wire_from_node(
                        conn, g, roi, port['node'],
                        overlay=bool(args.overlay))
                else:
                    tile, wire = port['wire'].split('/')

                tile_in_use.add(tile)

                # Make sure connecting wire is not in ROI!
                loc = g.loc_of_tilename(tile)

                if bool(args.overlay) ^ roi.tile_in_roi(loc):
                    # Or if in the ROI, make sure it has no sites.
                    gridinfo = g.gridinfo_at_tilename(tile)
                    assert len(
                        db.get_tile_type(gridinfo.tile_type).get_sites()
                    ) == 0, "{}/{}".format(tile, wire)

                vpr_loc = map_tile_to_vpr_coord(conn, tile)

                if tile not in synth_tiles['tiles']:
                    # NOTE(review): num_synth_tiles resets per region, so
                    # 'tile_name' values may repeat across regions — confirm
                    # downstream consumers key on the grid tile, not this name.
                    tile_name = 'SYN-IOPAD-{}'.format(num_synth_tiles)
                    synth_tiles['tiles'][tile] = {
                        'pins': [],
                        'loc': vpr_loc,
                        'tile_name': tile_name,
                    }
                    num_synth_tiles += 1
                    tile_pin_count[tile] = 0

                synth_tiles['tiles'][tile]['pins'].append({
                    'roi_name':
                        port['name'].replace('[', '_').replace(']', '_'),
                    'wire': wire,
                    'pad': port['pin'],
                    'port_type': port_type,
                    'is_clock': is_clock,
                    'z_loc': tile_pin_count[tile],
                })

                tile_pin_count[tile] += 1

        if not args.overlay:
            # Find two VBRK's in the corner of the fabric to use as the synthetic VCC/
            # GND source.
            vbrk_loc = None
            vbrk_tile = None
            vbrk2_loc = None
            vbrk2_tile = None
            for tile in g.tiles():
                if tile in tile_in_use:
                    continue

                loc = g.loc_of_tilename(tile)
                if not roi.tile_in_roi(loc):
                    continue

                gridinfo = g.gridinfo_at_tilename(tile)
                if 'VBRK' not in gridinfo.tile_type:
                    continue

                # VBRK tiles must be site-free to be safely repurposed.
                assert len(db.get_tile_type(
                    gridinfo.tile_type).get_sites()) == 0, tile

                if vbrk_loc is None:
                    vbrk2_loc = vbrk_loc
                    vbrk2_tile = vbrk_tile
                    vbrk_loc = loc
                    vbrk_tile = tile
                else:
                    # Prefer the tile closest to the grid corner; demote the
                    # previous best to the second slot.
                    if (loc.grid_x < vbrk_loc.grid_x
                            and loc.grid_y < vbrk_loc.grid_y) \
                            or vbrk2_loc is None:
                        vbrk2_loc = vbrk_loc
                        vbrk2_tile = vbrk_tile
                        vbrk_loc = loc
                        vbrk_tile = tile

            assert vbrk_loc is not None
            assert vbrk_tile is not None
            assert vbrk_tile not in synth_tiles['tiles']

            vbrk_vpr_loc = map_tile_to_vpr_coord(conn, vbrk_tile)
            synth_tiles['tiles'][vbrk_tile] = {
                'loc': vbrk_vpr_loc,
                'pins': [
                    {
                        'wire': 'VCC',
                        'pad': 'VCC',
                        'port_type': 'VCC',
                        'is_clock': False,
                        # NOTE(review): z_loc is a string here but an int for
                        # IOPAD pins above — confirm consumers accept both.
                        'z_loc': '0',
                    },
                ],
            }

            assert vbrk2_loc is not None
            assert vbrk2_tile is not None
            assert vbrk2_tile not in synth_tiles['tiles']

            vbrk2_vpr_loc = map_tile_to_vpr_coord(conn, vbrk2_tile)
            synth_tiles['tiles'][vbrk2_tile] = {
                'loc': vbrk2_vpr_loc,
                'pins': [
                    {
                        'wire': 'GND',
                        'pad': 'GND',
                        'port_type': 'GND',
                        'is_clock': False,
                        'z_loc': '0',
                    },
                ],
            }

    with open(args.synth_tiles, 'w') as f:
        json.dump(synth_tiles, f, indent=2)
def main():
    """Generate a VPR arch.xml for the selected part (legacy flow).

    Builds the architecture XML from per-tile model/pb_type XIncludes, the
    prjxray grid, and the pin assignment JSON.  With ``--use_roi`` only
    tiles inside the ROI are emitted and synthetic IO tiles are written to
    ``--synth_tiles``.
    """
    mydir = os.path.dirname(__file__)
    prjxray_db = os.path.abspath(
        os.path.join(mydir, "..", "..", "third_party", "prjxray-db"))

    db_types = prjxray.db.get_available_databases(prjxray_db)

    parser = argparse.ArgumentParser(description="Generate arch.xml")
    parser.add_argument(
        '--part',
        choices=[os.path.basename(db_type) for db_type in db_types],
        help="""Project X-Ray database to use.""")
    parser.add_argument(
        '--output-arch',
        nargs='?',
        type=argparse.FileType('w'),
        help="""File to output arch.""")
    # NOTE(review): help text says "Semi-colon seperated" but the value is
    # split on ',' below — confirm which separator is intended.
    parser.add_argument('--tile-types', help="Semi-colon seperated tile types.")
    parser.add_argument(
        '--pin_assignments', required=True, type=argparse.FileType('r'))
    parser.add_argument('--use_roi', required=False)
    parser.add_argument('--synth_tiles', required=False)
    parser.add_argument('--device', required=True)

    args = parser.parse_args()

    tile_types = args.tile_types.split(',')

    # XInclude href templates, keyed on the lower-cased tile type.
    tile_model = "../../tiles/{0}/{0}.model.xml"
    tile_pbtype = "../../tiles/{0}/{0}.pb_type.xml"

    xi_url = "http://www.w3.org/2001/XInclude"
    ET.register_namespace('xi', xi_url)
    xi_include = "{%s}include" % xi_url

    arch_xml = ET.Element(
        'architecture',
        {},
        nsmap={'xi': xi_url},
    )

    model_xml = ET.SubElement(arch_xml, 'models')
    for tile_type in tile_types:
        ET.SubElement(model_xml, xi_include, {
            'href': tile_model.format(tile_type.lower()),
            'xpointer': "xpointer(models/child::node())",
        })

    complexblocklist_xml = ET.SubElement(arch_xml, 'complexblocklist')
    for tile_type in tile_types:
        ET.SubElement(complexblocklist_xml, xi_include, {
            'href': tile_pbtype.format(tile_type.lower()),
        })

    layout_xml = ET.SubElement(arch_xml, 'layout')
    db = prjxray.db.Database(os.path.join(prjxray_db, args.part))
    g = db.grid()
    x_min, x_max, y_min, y_max = g.dims()

    # FIXME: There is an issue in the routing phase.
    # (https://github.com/SymbiFlow/symbiflow-arch-defs/issues/353)
    # if a zynq device is selected the grid must be expanded by 1
    if args.device == 'xc7z010':
        x_max += 1
        y_max += 1

    name = '{}-test'.format(args.device)
    fixed_layout_xml = ET.SubElement(layout_xml, 'fixed_layout', {
        'name': name,
        'height': str(y_max + 1),
        'width': str(x_max + 1),
    })

    only_emit_roi = False

    roi_inputs = []
    roi_outputs = []

    synth_tiles = {}
    synth_tiles['tiles'] = {}

    if args.use_roi:
        only_emit_roi = True
        with open(args.use_roi) as f:
            j = json.load(f)

        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        synth_tiles['info'] = j['info']
        for port in j['ports']:
            # ROI port direction is inferred from its name prefix; note the
            # inversion: a ROI output ('dout') becomes a harness input pad.
            if port['name'].startswith('dout['):
                roi_outputs.append(port)
                port_type = 'input'
                is_clock = False
            elif port['name'].startswith('din['):
                roi_inputs.append(port)
                is_clock = False
                port_type = 'output'
            elif port['name'].startswith('clk'):
                roi_inputs.append(port)
                port_type = 'output'
                is_clock = True
            else:
                assert False, port

            tile, wire = port['wire'].split('/')

            # Make sure connecting wire is not in ROI!
            loc = g.loc_of_tilename(tile)

            if roi.tile_in_roi(loc):
                # Or if in the ROI, make sure it has no sites.
                gridinfo = g.gridinfo_at_tilename(tile)
                assert len(db.get_tile_type(
                    gridinfo.tile_type).get_sites()) == 0, tile

            if tile not in synth_tiles['tiles']:
                synth_tiles['tiles'][tile] = {
                    'pins': [],
                    'loc': g.loc_of_tilename(tile),
                }

            synth_tiles['tiles'][tile]['pins'].append({
                'roi_name':
                    port['name'].replace('[', '_').replace(']', '_'),
                'wire': wire,
                'pad': port['pin'],
                'port_type': port_type,
                'is_clock': is_clock,
            })

        with open(args.synth_tiles, 'w') as f:
            json.dump(synth_tiles, f)

        synth_tile_map = add_synthetic_tile(complexblocklist_xml)

    for loc in g.tile_locations():
        gridinfo = g.gridinfo_at_loc(loc)
        tile = g.tilename_at_loc(loc)

        if tile in synth_tiles['tiles']:
            synth_tile = synth_tiles['tiles'][tile]

            assert len(synth_tile['pins']) == 1

            vpr_tile_type = synth_tile_map[synth_tile['pins'][0]['port_type']]
        elif only_emit_roi and not roi.tile_in_roi(loc):
            # This tile is outside the ROI, skip it.
            continue
        elif gridinfo.tile_type in tile_types:
            # We want to import this tile type.
            vpr_tile_type = 'BLK_TI-{}'.format(gridinfo.tile_type)
        else:
            # We don't want this tile
            continue

        is_vbrk = gridinfo.tile_type.find('VBRK') != -1

        # VBRK tiles are known to have no bitstream data.
        if not is_vbrk and not gridinfo.bits:
            # NOTE(review): the message says "Skipping" but the tile is still
            # emitted below (no continue) — confirm which is intended.
            print('*** WARNING *** Skipping tile {} because it lacks bitstream data.'.format(tile), file=sys.stderr)

        single_xml = ET.SubElement(fixed_layout_xml, 'single', {
            'priority': '1',
            'type': vpr_tile_type,
            'x': str(loc[0]),
            'y': str(loc[1]),
        })
        meta = ET.SubElement(single_xml, 'metadata')
        ET.SubElement(meta, 'meta', {
            'name': 'fasm_prefix',
        }).text = tile

    device_xml = ET.SubElement(arch_xml, 'device')
    ET.SubElement(device_xml, 'sizing', {
        "R_minW_nmos": "6065.520020",
        "R_minW_pmos": "18138.500000",
    })
    ET.SubElement(device_xml, 'area', {
        "grid_logic_tile_area": "14813.392",
    })
    ET.SubElement(device_xml, 'connection_block', {
        "input_switch_name": "buffer",
    })
    ET.SubElement(device_xml, 'switch_block', {
        "type": "wilton",
        "fs": "3",
    })
    chan_width_distr_xml = ET.SubElement(device_xml, 'chan_width_distr')
    ET.SubElement(chan_width_distr_xml, 'x', {
        'distr': 'uniform',
        'peak': '1.0',
    })
    ET.SubElement(chan_width_distr_xml, 'y', {
        'distr': 'uniform',
        'peak': '1.0',
    })

    switchlist_xml = ET.SubElement(arch_xml, 'switchlist')
    ET.SubElement(
        switchlist_xml, 'switch', {
            'type': 'mux',
            'name': 'routing',
            "R": "551",
            "Cin": ".77e-15",
            "Cout": "4e-15",
            "Tdel": "6.8e-12",
            "mux_trans_size": "2.630740",
            "buf_size": "27.645901"
        })
    ET.SubElement(
        switchlist_xml, 'switch', {
            'type': 'mux',
            'name': 'buffer',
            "R": "551",
            "Cin": ".77e-15",
            "Cout": "4e-15",
            "Tdel": "6.8e-12",
            "mux_trans_size": "2.630740",
            "buf_size": "27.645901"
        })

    segmentlist_xml = ET.SubElement(arch_xml, 'segmentlist')

    # VPR requires a segment, so add one.
    dummy_xml = ET.SubElement(
        segmentlist_xml, 'segment', {
            'name': 'dummy',
            'length': '12',
            'freq': '1.0',
            'type': 'bidir',
            'Rmetal': '101',
            # NOTE(review): '22.5e15' looks like it should be '22.5e-15'
            # (femtofarad range) — confirm before changing.
            'Cmetal': '22.5e15',
        })
    ET.SubElement(dummy_xml, 'wire_switch', {
        'name': 'routing',
    })
    ET.SubElement(dummy_xml, 'opin_switch', {
        'name': 'routing',
    })
    # Switch/connection block patterns: length 12 segment needs 13 sb
    # points and 12 cb points.
    ET.SubElement(dummy_xml, 'sb', {
        'type': 'pattern',
    }).text = ' '.join('1' for _ in range(13))
    ET.SubElement(dummy_xml, 'cb', {
        'type': 'pattern',
    }).text = ' '.join('1' for _ in range(12))

    directlist_xml = ET.SubElement(arch_xml, 'directlist')
    pin_assignments = json.load(args.pin_assignments)

    # Choose smallest distance for block to block connections with multiple
    # direct_connections. VPR cannot handle multiple block to block connections.
    directs = {}
    for direct in pin_assignments['direct_connections']:
        key = (direct['from_pin'], direct['to_pin'])

        if key not in directs:
            directs[key] = []

        directs[key].append(
            (abs(direct['x_offset']) + abs(direct['y_offset']), direct))

    for direct in directs.values():
        _, direct = min(direct, key=lambda v: v[0])

        if direct['from_pin'].split('.')[0] not in tile_types:
            continue
        if direct['to_pin'].split('.')[0] not in tile_types:
            continue

        # Skip self-connections (zero offset in both axes).
        if direct['x_offset'] == 0 and direct['y_offset'] == 0:
            continue

        ET.SubElement(
            directlist_xml, 'direct', {
                'name': '{}_to_{}_dx_{}_dy_{}'.format(
                    direct['from_pin'], direct['to_pin'],
                    direct['x_offset'], direct['y_offset']),
                'from_pin': 'BLK_TI-' + direct['from_pin'],
                'to_pin': 'BLK_TI-' + direct['to_pin'],
                'x_offset': str(direct['x_offset']),
                'y_offset': str(direct['y_offset']),
                'z_offset': '0',
                'switch_name': direct['switch_name'],
            })

    arch_xml_str = ET.tostring(arch_xml, pretty_print=True).decode('utf-8')
    args.output_arch.write(arch_xml_str)
    args.output_arch.close()
def main():
    """Generate a VPR arch.xml using the connection database flow.

    Newer variant of the arch importer: tile/pb_type XIncludes are split
    (``--tile-types`` vs ``--pb_types``), the grid extent and switch/segment
    definitions come from the connection database, and tile placement is
    delegated to ``get_tiles``.  Supports either ``--use_roi`` (with
    pre-generated ``--synth_tiles``) or a raw ``--graph_limit`` window.
    """
    mydir = os.path.dirname(__file__)
    prjxray_db = os.path.abspath(
        os.path.join(mydir, "..", "..", "third_party", "prjxray-db"))

    db_types = prjxray.db.get_available_databases(prjxray_db)

    parser = argparse.ArgumentParser(description="Generate arch.xml")
    parser.add_argument(
        '--part',
        choices=[os.path.basename(db_type) for db_type in db_types],
        help="""Project X-Ray database to use.""")
    parser.add_argument(
        '--output-arch',
        nargs='?',
        type=argparse.FileType('w'),
        help="""File to output arch.""")
    # NOTE(review): help text says "Semi-colon seperated" but the values are
    # split on ',' below — confirm which separator is intended.
    parser.add_argument(
        '--tile-types', required=True, help="Semi-colon seperated tile types.")
    parser.add_argument(
        '--pb_types',
        required=True,
        help="Semi-colon seperated pb_types types.")
    parser.add_argument(
        '--pin_assignments', required=True, type=argparse.FileType('r'))
    parser.add_argument('--use_roi', required=False)
    parser.add_argument('--device', required=True)
    parser.add_argument('--synth_tiles', required=False)
    parser.add_argument('--connection_database', required=True)
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )

    args = parser.parse_args()

    tile_types = args.tile_types.split(',')
    pb_types = args.pb_types.split(',')

    # XInclude href templates, keyed on the lower-cased type name.
    model_xml_spec = "../../tiles/{0}/{0}.model.xml"
    pbtype_xml_spec = "../../tiles/{0}/{0}.pb_type.xml"
    tile_xml_spec = "../../tiles/{0}/{0}.tile.xml"

    xi_url = "http://www.w3.org/2001/XInclude"
    ET.register_namespace('xi', xi_url)
    xi_include = "{%s}include" % xi_url

    arch_xml = ET.Element(
        'architecture',
        {},
        nsmap={'xi': xi_url},
    )

    model_xml = ET.SubElement(arch_xml, 'models')
    for pb_type in pb_types:
        ET.SubElement(
            model_xml, xi_include, {
                'href': model_xml_spec.format(pb_type.lower()),
                'xpointer': "xpointer(models/child::node())",
            })

    tiles_xml = ET.SubElement(arch_xml, 'tiles')
    for tile_type in tile_types:
        ET.SubElement(tiles_xml, xi_include, {
            'href': tile_xml_spec.format(tile_type.lower()),
        })

    complexblocklist_xml = ET.SubElement(arch_xml, 'complexblocklist')
    for pb_type in pb_types:
        ET.SubElement(complexblocklist_xml, xi_include, {
            'href': pbtype_xml_spec.format(pb_type.lower()),
        })

    layout_xml = ET.SubElement(arch_xml, 'layout')
    db = prjxray.db.Database(os.path.join(prjxray_db, args.part))
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}
    # Maps of VPR (x, y) -> synthetic tile type, and port_type -> tile type.
    synth_loc_map = {}
    synth_tile_map = {}
    roi = None
    if args.use_roi:
        with open(args.use_roi) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        synth_tile_map = add_synthetic_tiles(
            model_xml, complexblocklist_xml, tiles_xml, need_io=True)

        for _, tile_info in synth_tiles['tiles'].items():
            assert tuple(tile_info['loc']) not in synth_loc_map

            assert len(tile_info['pins']) == 1
            vpr_tile_type = synth_tile_map[tile_info['pins'][0]['port_type']]

            synth_loc_map[tuple(tile_info['loc'])] = vpr_tile_type

    elif args.graph_limit:
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        c = conn.cursor()

        # If the ROI path did not populate synthetic tiles, fall back to
        # inserting constant (VCC/GND) tiles from the connection database.
        if 'GND' not in synth_tile_map:
            synth_tile_map, synth_loc_map = insert_constant_tiles(
                conn, model_xml, complexblocklist_xml, tiles_xml)

        # Find the grid extent.
        y_max = 0
        x_max = 0
        for grid_x, grid_y in c.execute("SELECT grid_x, grid_y FROM tile"):
            x_max = max(grid_x + 2, x_max)
            y_max = max(grid_y + 2, y_max)

        name = '{}-test'.format(args.device)
        fixed_layout_xml = ET.SubElement(
            layout_xml, 'fixed_layout', {
                'name': name,
                'height': str(y_max),
                'width': str(x_max),
            })

        for vpr_tile_type, grid_x, grid_y, metadata_function in get_tiles(
                conn=conn,
                g=g,
                roi=roi,
                synth_loc_map=synth_loc_map,
                synth_tile_map=synth_tile_map,
                tile_types=tile_types,
        ):
            single_xml = ET.SubElement(
                fixed_layout_xml, 'single', {
                    'priority': '1',
                    'type': vpr_tile_type,
                    'x': str(grid_x),
                    'y': str(grid_y),
                })
            metadata_function(single_xml)

        switchlist_xml = ET.SubElement(arch_xml, 'switchlist')

        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in c.execute("""
SELECT name, internal_capacitance, drive_resistance, intrinsic_delay,
    switch_type
FROM switch;"""):
            attrib = {
                'type': switch_type,
                'name': name,
                "R": str(drive_resistance),
                "Cin": str(0),
                "Cout": str(0),
                "Tdel": str(intrinsic_delay),
            }

            if internal_capacitance != 0:
                attrib["Cinternal"] = str(internal_capacitance)

            # NOTE(review): dead branch — mux_trans_size/buf_size are never
            # emitted; confirm whether this was meant to be conditional on
            # switch_type == 'mux'.
            if False:
                attrib["mux_trans_size"] = str(0)
                attrib["buf_size"] = str(0)

            ET.SubElement(switchlist_xml, 'switch', attrib)

        segmentlist_xml = ET.SubElement(arch_xml, 'segmentlist')

        # VPR requires a segment, so add one.
        dummy_xml = ET.SubElement(
            segmentlist_xml, 'segment', {
                'name': 'dummy',
                'length': '2',
                'freq': '1.0',
                'type': 'bidir',
                'Rmetal': '0',
                'Cmetal': '0',
            })
        ET.SubElement(dummy_xml, 'wire_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'opin_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'sb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(3))
        ET.SubElement(dummy_xml, 'cb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(2))

        # Emit one segment per database segment; a length-N segment needs
        # N+1 sb points and N cb points.
        for (name, length) in c.execute("SELECT name, length FROM segment"):
            if length is None:
                length = 1

            segment_xml = ET.SubElement(
                segmentlist_xml, 'segment', {
                    'name': name,
                    'length': str(length),
                    'freq': '1.0',
                    'type': 'bidir',
                    'Rmetal': '0',
                    'Cmetal': '0',
                })
            ET.SubElement(segment_xml, 'wire_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'opin_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'sb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length + 1))
            ET.SubElement(segment_xml, 'cb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length))

    ET.SubElement(
        switchlist_xml, 'switch', {
            'type': 'mux',
            'name': 'buffer',
            "R": "551",
            "Cin": ".77e-15",
            "Cout": "4e-15",
            # TODO: This value should be the "typical" pip switch delay from
            # This value is the dominate term in the inter-cluster delay
            # estimate.
            "Tdel": "0.178e-9",
            "mux_trans_size": "2.630740",
            "buf_size": "27.645901"
        })

    device_xml = ET.SubElement(arch_xml, 'device')
    ET.SubElement(device_xml, 'sizing', {
        "R_minW_nmos": "6065.520020",
        "R_minW_pmos": "18138.500000",
    })
    ET.SubElement(device_xml, 'area', {
        "grid_logic_tile_area": "14813.392",
    })
    ET.SubElement(device_xml, 'connection_block', {
        "input_switch_name": "buffer",
    })
    ET.SubElement(device_xml, 'switch_block', {
        "type": "wilton",
        "fs": "3",
    })
    chan_width_distr_xml = ET.SubElement(device_xml, 'chan_width_distr')
    ET.SubElement(chan_width_distr_xml, 'x', {
        'distr': 'uniform',
        'peak': '1.0',
    })
    ET.SubElement(chan_width_distr_xml, 'y', {
        'distr': 'uniform',
        'peak': '1.0',
    })

    directlist_xml = ET.SubElement(arch_xml, 'directlist')
    pin_assignments = json.load(args.pin_assignments)

    # Choose smallest distance for block to block connections with multiple
    # direct_connections. VPR cannot handle multiple block to block connections.
    directs = {}
    for direct in pin_assignments['direct_connections']:
        key = (direct['from_pin'], direct['to_pin'])

        if key not in directs:
            directs[key] = []

        directs[key].append(
            (abs(direct['x_offset']) + abs(direct['y_offset']), direct))

    for direct in directs.values():
        _, direct = min(direct, key=lambda v: v[0])

        if direct['from_pin'].split('.')[0] not in tile_types:
            continue
        if direct['to_pin'].split('.')[0] not in tile_types:
            continue

        # Skip self-connections (zero offset in both axes).
        if direct['x_offset'] == 0 and direct['y_offset'] == 0:
            continue

        ET.SubElement(
            directlist_xml, 'direct', {
                'name': '{}_to_{}_dx_{}_dy_{}'.format(
                    direct['from_pin'], direct['to_pin'],
                    direct['x_offset'], direct['y_offset']),
                'from_pin': add_vpr_tile_prefix(direct['from_pin']),
                'to_pin': add_vpr_tile_prefix(direct['to_pin']),
                'x_offset': str(direct['x_offset']),
                'y_offset': str(direct['y_offset']),
                'z_offset': '0',
                'switch_name': direct['switch_name'],
            })

    arch_xml_str = ET.tostring(arch_xml, pretty_print=True).decode('utf-8')
    args.output_arch.write(arch_xml_str)
    args.output_arch.close()
def main():
    """Import channels and pip edges into a VPR rr_graph (legacy flow).

    Reads the channel definitions produced by prjxray_form_channels, adds a
    track node per channel, wires synthetic IO pads (ROI mode), converts
    pips into graph edges, writes the pip/track node mapping to
    node_mapping.pickle, and serializes the resulting rr_graph XML.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument(
        '--read_rr_graph', required=True, help='Input rr_graph file')
    parser.add_argument(
        '--write_rr_graph', required=True, help='Output rr_graph file')
    parser.add_argument(
        '--channels',
        required=True,
        help='Channel definitions from prjxray_form_channels')
    parser.add_argument(
        '--synth_tiles',
        help='If using an ROI, synthetic tile defintion from prjxray-arch-import')

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    if args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )

        print('{} generating routing graph for ROI.'.format(now()))
    else:
        use_roi = False

    # Convert input rr graph into graph2.Graph object.
    input_rr_graph = read_xml_file(args.read_rr_graph)

    xml_graph = xml_graph2.Graph(
        input_rr_graph, progressbar=progressbar.progressbar)

    graph = xml_graph.graph

    tool_version = input_rr_graph.getroot().attrib['tool_version']
    tool_comment = input_rr_graph.getroot().attrib['tool_comment']

    delayless_switch = graph.get_delayless_switch_id()

    print('{} reading channels definitions.'.format(now()))
    with open(args.channels) as f:
        channels = json.load(f)

    segment_id = graph.get_segment_id_from_name('dummy')

    # (tile, wire) -> (tracks.Tracks model, list of rr node ids).
    track_wire_map = {}

    print('{} add nodes for all channels.'.format(now()))
    used_channels = 0
    for idx, channel in progressbar.progressbar(
            enumerate(channels['channels'])):
        # Don't use dead channels if using an ROI.
        # Consider a channel alive if at least 1 wire in the node is part of a
        # live tile.
        if use_roi:
            alive = False
            for tile, wire in channel['wires']:
                loc = grid.loc_of_tilename(tile)
                if roi.tile_in_roi(loc) or tile in synth_tiles['tiles']:
                    alive = True
                    break

            if not alive:
                continue

        used_channels += 1
        nodes = []
        track_list = []
        for idx2, track_dict in enumerate(channel['tracks']):
            # VPR tracks cannot start at coordinate 0; clamp to 1 along the
            # track's axis.
            if track_dict['direction'] == 'X':
                track_dict['x_low'] = max(track_dict['x_low'], 1)
            elif track_dict['direction'] == 'Y':
                track_dict['y_low'] = max(track_dict['y_low'], 1)
            track = tracks.Track(**track_dict)
            track_list.append(track)

            nodes.append(
                graph.add_track(
                    track=track,
                    segment_id=segment_id,
                    name='track_{}_{}'.format(idx, idx2)))

        # Connect the tracks of this channel in both directions with the
        # delayless switch.
        for a_idx, b_idx in channel['track_connections']:
            graph.add_edge(
                nodes[a_idx], nodes[b_idx], delayless_switch,
                'track_{}_to_{}'.format(a_idx, b_idx))
            graph.add_edge(
                nodes[b_idx], nodes[a_idx], delayless_switch,
                'track_{}_to_{}'.format(b_idx, a_idx))

        tracks_model = tracks.Tracks(track_list, channel['track_connections'])

        for tile, wire in channel['wires']:
            track_wire_map[(tile, wire)] = (tracks_model, nodes)

    print('original {} final {}'.format(
        len(channels['channels']), used_channels))

    routing_switch = graph.get_switch_id('routing')

    # (tile, pip name) -> edge node, persisted at the end for FASM output.
    pip_map = {}

    # pip name -> {source node tuple -> destination node}.
    edges_with_mux = {}
    for idx, edge_with_mux in progressbar.progressbar(
            enumerate(channels['edges_with_mux'])):
        if edge_with_mux['pip'] not in edges_with_mux:
            edges_with_mux[edge_with_mux['pip']] = {}

        assert len(edge_with_mux['source_node']) == 1
        edges_with_mux[edge_with_mux['pip']][tuple(
            edge_with_mux['source_node'][0]
        )] = edge_with_mux['destination_node']

    # Set of (src, sink, switch_id) tuples that pip edges have been sent to
    # VPR. VPR cannot handle duplicate paths with the same switch id.
    pip_set = set()

    print('{} Adding edges'.format(now()))
    for loc in progressbar.progressbar(grid.tile_locations()):
        gridinfo = grid.gridinfo_at_loc(loc)
        tile_name = grid.tilename_at_loc(loc)

        if use_roi:
            if tile_name in synth_tiles['tiles']:
                # Wire each synthetic IO pad pin to one of the tracks that
                # passes over its location.
                assert len(synth_tiles['tiles'][tile_name]['pins']) == 1
                for pin in synth_tiles['tiles'][tile_name]['pins']:
                    tracks_model, track_nodes = track_wire_map[
                        (tile_name, pin['wire'])]

                    option = list(
                        tracks_model.get_tracks_for_wire_at_coord(loc))
                    assert len(option) > 0

                    if pin['port_type'] == 'input':
                        tile_type = 'BLK_SY-OUTPAD'
                        wire = 'outpad'
                    elif pin['port_type'] == 'output':
                        tile_type = 'BLK_SY-INPAD'
                        wire = 'inpad'
                    else:
                        assert False, pin

                    track_node = track_nodes[option[0][0]]
                    pin_name = graph.create_pin_name_from_tile_type_and_pin(
                        tile_type, wire)

                    pin_node = graph.get_nodes_for_pin(loc, pin_name)

                    if pin['port_type'] == 'input':
                        graph.add_edge(
                            src_node=track_node,
                            sink_node=pin_node[0][0],
                            switch_id=routing_switch,
                            name='synth_{}_{}'.format(tile_name, pin['wire']),
                        )
                    elif pin['port_type'] == 'output':
                        graph.add_edge(
                            src_node=pin_node[0][0],
                            sink_node=track_node,
                            switch_id=routing_switch,
                            name='synth_{}_{}'.format(tile_name, pin['wire']),
                        )
                    else:
                        assert False, pin
            else:
                # Not a synth node, check if in ROI.
                if not roi.tile_in_roi(loc):
                    continue

        tile_type = db.get_tile_type(gridinfo.tile_type)

        for pip in tile_type.get_pips():
            if pip.is_pseudo:
                continue

            if not pip.is_directional:
                # TODO: Handle bidirectional pips?
                continue

            edge_node = make_connection(
                graph, track_wire_map, loc, tile_name, gridinfo.tile_type,
                pip, routing_switch, edges_with_mux, grid, pip_set)

            if edge_node is not None:
                pip_map[(tile_name, pip.name)] = edge_node

    print('{} Writing node mapping.'.format(now()))
    node_mapping = {
        'pips': [],
        'tracks': []
    }

    for (tile, pip_name), edge in pip_map.items():
        node_mapping['pips'].append({
            'tile': tile,
            'pip': pip_name,
            'edge': edge
        })

    for (tile, wire), (_, nodes) in track_wire_map.items():
        node_mapping['tracks'].append({
            'tile': tile,
            'wire': wire,
            'nodes': nodes,
        })

    with open('node_mapping.pickle', 'wb') as f:
        pickle.dump(node_mapping, f)

    print('{} Create channels and serializing.'.format(now()))
    pool = multiprocessing.Pool(10)
    serialized_rr_graph = xml_graph.serialize_to_xml(
        tool_version=tool_version,
        tool_comment=tool_comment,
        pad_segment=segment_id,
        pool=pool,
    )

    print('{} Writing to disk.'.format(now()))
    with open(args.write_rr_graph, "wb") as f:
        f.write(serialized_rr_graph)
    print('{} Done.'.format(now()))
def main():
    """Populate graph nodes and edges in the connection database.

    Adds graph nodes for every (tile type, wire) pin assignment, converts
    pips into graph_edge rows (deduplicated and bulk-inserted in one
    exclusive transaction), builds indices, marks track liveness, and
    finally re-opens the database read-only to verify the channels.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument(
        '--connection_database',
        help='Database of fabric connectivity',
        required=True)
    parser.add_argument(
        '--pin_assignments', help='Pin assignments JSON', required=True)
    parser.add_argument(
        '--synth_tiles',
        help=
        'If using an ROI, synthetic tile defintion from prjxray-arch-import')

    args = parser.parse_args()

    pool = multiprocessing.Pool(20)

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    with DatabaseCache(args.connection_database) as conn:
        with open(args.pin_assignments) as f:
            pin_assignments = json.load(f)

        tile_wires = []
        for tile_type, wire_map in pin_assignments['pin_directions'].items():
            for wire in wire_map.keys():
                tile_wires.append((tile_type, wire))

        for tile_type, wire in progressbar_utils.progressbar(tile_wires):
            pins = [
                direction_to_enum(pin)
                for pin in pin_assignments['pin_directions'][tile_type][wire]
            ]
            add_graph_nodes_for_pins(conn, tile_type, wire, pins)

        if args.synth_tiles:
            use_roi = True
            with open(args.synth_tiles) as f:
                synth_tiles = json.load(f)

            roi = Roi(
                db=db,
                x1=synth_tiles['info']['GRID_X_MIN'],
                y1=synth_tiles['info']['GRID_Y_MIN'],
                x2=synth_tiles['info']['GRID_X_MAX'],
                y2=synth_tiles['info']['GRID_Y_MAX'],
            )

            print('{} generating routing graph for ROI.'.format(now()))
        else:
            use_roi = False

        # Nodes attached to synthetic pads are direction-restricted so the
        # router cannot drive a pad input or read a pad output.
        output_only_nodes = set()
        input_only_nodes = set()

        find_pip = create_find_pip(conn)
        find_wire = create_find_wire(conn)
        find_connector = create_find_connector(conn)

        const_connectors = create_const_connectors(conn)

        print('{} Finding nodes belonging to ROI'.format(now()))
        if use_roi:
            for loc in progressbar_utils.progressbar(grid.tile_locations()):
                gridinfo = grid.gridinfo_at_loc(loc)
                tile_name = grid.tilename_at_loc(loc)

                if tile_name in synth_tiles['tiles']:
                    assert len(synth_tiles['tiles'][tile_name]['pins']) == 1
                    for pin in synth_tiles['tiles'][tile_name]['pins']:
                        # Skip VCC/GND pseudo pins.
                        if pin['port_type'] not in ['input', 'output']:
                            continue

                        _, _, _, node_pkey = find_wire(
                            tile_name, gridinfo.tile_type, pin['wire'])

                        if pin['port_type'] == 'input':
                            # This track can only be used as a sink.
                            input_only_nodes |= set((node_pkey, ))
                        elif pin['port_type'] == 'output':
                            # This track can only be used as a src.
                            output_only_nodes |= set((node_pkey, ))
                        else:
                            assert False, pin

        write_cur = conn.cursor()
        write_cur.execute(
            'SELECT pkey FROM switch WHERE name = ?;',
            ('__vpr_delayless_switch__', ))
        delayless_switch_pkey = write_cur.fetchone()[0]

        edges = []
        # Deduplicate on (src, dest, switch) — VPR cannot handle duplicate
        # paths with the same switch id.
        edge_set = set()

        for loc in progressbar_utils.progressbar(grid.tile_locations()):
            gridinfo = grid.gridinfo_at_loc(loc)
            tile_name = grid.tilename_at_loc(loc)

            # Not a synth node, check if in ROI.
            if use_roi and not roi.tile_in_roi(loc):
                continue

            tile_type = db.get_tile_type(gridinfo.tile_type)

            for pip in tile_type.get_pips():
                if pip.is_pseudo:
                    continue

                connections = make_connection(
                    conn=conn,
                    input_only_nodes=input_only_nodes,
                    output_only_nodes=output_only_nodes,
                    find_pip=find_pip,
                    find_wire=find_wire,
                    find_connector=find_connector,
                    tile_name=tile_name,
                    tile_type=gridinfo.tile_type,
                    pip=pip,
                    delayless_switch_pkey=delayless_switch_pkey,
                    const_connectors=const_connectors)

                if connections:
                    for connection in connections:
                        key = tuple(connection[0:3])
                        if key in edge_set:
                            continue

                        edge_set.add(key)
                        edges.append(connection)

        print('{} Created {} edges, inserting'.format(now(), len(edges)))

        # Bulk insert inside one exclusive transaction for speed.
        write_cur.execute("""BEGIN EXCLUSIVE TRANSACTION;""")

        for edge in progressbar_utils.progressbar(edges):
            write_cur.execute(
                """
                INSERT INTO graph_edge(
                    src_graph_node_pkey, dest_graph_node_pkey, switch_pkey,
                    phy_tile_pkey, pip_in_tile_pkey, backward)
                VALUES (?, ?, ?, ?, ?, ?)""", edge)

        write_cur.execute("""COMMIT TRANSACTION;""")

        print('{} Inserted edges'.format(now()))

        # Indices are created after the bulk insert so the insert itself is
        # not slowed down by index maintenance.
        write_cur.execute(
            """CREATE INDEX src_node_index ON graph_edge(src_graph_node_pkey);"""
        )
        write_cur.execute(
            """CREATE INDEX dest_node_index ON graph_edge(dest_graph_node_pkey);"""
        )
        write_cur.connection.commit()

        print('{} Indices created, marking track liveness'.format(now()))

        alive_tracks = set()
        mark_track_liveness(
            conn, pool, input_only_nodes, output_only_nodes, alive_tracks)

        print('{} Flushing database back to file "{}"'.format(
            now(), args.connection_database))

    # Re-open read-only to verify against the flushed database file.
    with DatabaseCache(args.connection_database, read_only=True) as conn:
        verify_channels(conn, alive_tracks)
        print("{}: Channels verified".format(datetime.datetime.now()))
def main():
    """Generate synth_tiles.json for a single ROI (older variant).

    Classifies ROI ports by name prefix (dout/din/clk) into synthetic IO
    pad pins, then claims two unused, site-free VBRK tiles inside the ROI
    as synthetic VCC and GND sources.
    """
    parser = argparse.ArgumentParser(description="Generate synth_tiles.json")
    parser.add_argument('--db_root', required=True)
    parser.add_argument('--part', required=True)
    parser.add_argument('--roi', required=True)
    parser.add_argument(
        '--connection_database', help='Connection database', required=True)
    parser.add_argument('--synth_tiles', required=True)

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root, args.part)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}

    with open(args.roi) as f:
        j = json.load(f)

    roi = Roi(
        db=db,
        x1=j['info']['GRID_X_MIN'],
        y1=j['info']['GRID_Y_MIN'],
        x2=j['info']['GRID_X_MAX'],
        y2=j['info']['GRID_Y_MAX'],
    )

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        synth_tiles['info'] = j['info']
        vbrk_in_use = set()
        for port in j['ports']:
            # Port direction is inverted: a ROI output ('dout') becomes an
            # input pad of the harness, and vice versa.
            if port['name'].startswith('dout['):
                port_type = 'input'
                is_clock = False
            elif port['name'].startswith('din['):
                is_clock = False
                port_type = 'output'
            elif port['name'].startswith('clk'):
                port_type = 'output'
                is_clock = True
            else:
                assert False, port

            tile, wire = port['wire'].split('/')

            vbrk_in_use.add(tile)

            # Make sure connecting wire is not in ROI!
            loc = g.loc_of_tilename(tile)

            if roi.tile_in_roi(loc):
                # Or if in the ROI, make sure it has no sites.
                gridinfo = g.gridinfo_at_tilename(tile)
                assert len(db.get_tile_type(
                    gridinfo.tile_type).get_sites()) == 0, tile

            vpr_loc = map_tile_to_vpr_coord(conn, tile)

            if tile not in synth_tiles['tiles']:
                synth_tiles['tiles'][tile] = {
                    'pins': [],
                    'loc': vpr_loc,
                }

            synth_tiles['tiles'][tile]['pins'].append({
                'roi_name':
                    port['name'].replace('[', '_').replace(']', '_'),
                'wire': wire,
                'pad': port['pin'],
                'port_type': port_type,
                'is_clock': is_clock,
            })

        # Find two VBRK's in the corner of the fabric to use as the synthetic VCC/
        # GND source.
        vbrk_loc = None
        vbrk_tile = None
        vbrk2_loc = None
        vbrk2_tile = None
        for tile in g.tiles():
            if tile in vbrk_in_use:
                continue

            loc = g.loc_of_tilename(tile)
            if not roi.tile_in_roi(loc):
                continue

            gridinfo = g.gridinfo_at_tilename(tile)
            if 'VBRK' not in gridinfo.tile_type:
                continue

            # VBRK tiles must be site-free to be safely repurposed.
            assert len(db.get_tile_type(
                gridinfo.tile_type).get_sites()) == 0, tile

            if vbrk_loc is None:
                vbrk2_loc = vbrk_loc
                vbrk2_tile = vbrk_tile
                vbrk_loc = loc
                vbrk_tile = tile
            else:
                # Prefer the tile closest to the grid corner; demote the
                # previous best to the second slot.
                if (loc.grid_x < vbrk_loc.grid_x
                        and loc.grid_y < vbrk_loc.grid_y) \
                        or vbrk2_loc is None:
                    vbrk2_loc = vbrk_loc
                    vbrk2_tile = vbrk_tile
                    vbrk_loc = loc
                    vbrk_tile = tile

        assert vbrk_loc is not None
        assert vbrk_tile is not None
        assert vbrk_tile not in synth_tiles['tiles']

        vbrk_vpr_loc = map_tile_to_vpr_coord(conn, vbrk_tile)
        synth_tiles['tiles'][vbrk_tile] = {
            'loc': vbrk_vpr_loc,
            'pins': [
                {
                    'wire': 'VCC',
                    'pad': 'VCC',
                    'port_type': 'VCC',
                    'is_clock': False,
                },
            ],
        }

        assert vbrk2_loc is not None
        assert vbrk2_tile is not None
        assert vbrk2_tile not in synth_tiles['tiles']

        vbrk2_vpr_loc = map_tile_to_vpr_coord(conn, vbrk2_tile)
        synth_tiles['tiles'][vbrk2_tile] = {
            'loc': vbrk2_vpr_loc,
            'pins': [
                {
                    'wire': 'GND',
                    'pad': 'GND',
                    'port_type': 'GND',
                    'is_clock': False,
                },
            ],
        }

    with open(args.synth_tiles, 'w') as f:
        json.dump(synth_tiles, f, indent=2)
def main():
    """Import fabric routing into a VPR rr_graph.

    Reads a virtual rr_graph XML produced by VPR, populates it with real
    tracks, edges and switches from the connection database, optionally
    restricts the graph to an ROI (when --synth_tiles is given), then
    serializes the rewritten rr_graph and pickles the graph_node_pkey ->
    rr inode map for later use.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', required=True, help='Project X-Ray Database'
    )
    parser.add_argument(
        '--read_rr_graph', required=True, help='Input rr_graph file'
    )
    parser.add_argument(
        '--write_rr_graph', required=True, help='Output rr_graph file'
    )
    parser.add_argument(
        '--write_rr_node_map',
        required=True,
        help='Output map of graph_node_pkey to rr inode file'
    )
    parser.add_argument(
        '--connection_database',
        help='Database of fabric connectivity',
        required=True
    )
    parser.add_argument(
        '--synth_tiles',
        help='If using an ROI, synthetic tile defintion from prjxray-arch-import'
    )

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    if args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )

        print('{} generating routing graph for ROI.'.format(now()))
    else:
        use_roi = False
        # Bug fix: roi and synth_tiles were previously left unbound on this
        # path, causing a NameError at the unconditional
        # create_track_rr_graph(...) call below when no ROI is in use.
        roi = None
        synth_tiles = None

    # Convert input rr graph into graph2.Graph object.
    input_rr_graph = read_xml_file(args.read_rr_graph)

    xml_graph = xml_graph2.Graph(
        input_rr_graph,
        progressbar=progressbar_utils.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = xml_graph.graph

    tool_version = input_rr_graph.getroot().attrib['tool_version']
    tool_comment = input_rr_graph.getroot().attrib['tool_comment']

    with DatabaseCache(args.connection_database, True) as conn:
        cur = conn.cursor()
        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in cur.execute("""
SELECT name, internal_capacitance, drive_resistance, intrinsic_delay,
    switch_type
FROM switch;"""):
            # Add back missing switchs, which were unused in arch xml, and so
            # were not emitted in rrgraph XML.
            #
            # TODO: This can be removed once
            # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
            # is fixed.
            try:
                graph.get_switch_id(name)
                continue
            except KeyError:
                xml_graph.add_switch(
                    graph2.Switch(
                        id=None,
                        name=name,
                        type=graph2.SwitchType[switch_type.upper()],
                        timing=graph2.SwitchTiming(
                            r=drive_resistance,
                            c_in=0.0,
                            c_out=0.0,
                            c_internal=internal_capacitance,
                            t_del=intrinsic_delay,
                        ),
                        sizing=graph2.SwitchSizing(
                            mux_trans_size=0,
                            buf_size=0,
                        ),
                    )
                )

        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        print('{} Creating connection box list'.format(now()))
        connection_box_map = create_connection_boxes(conn, graph)

        # Match site pins rr nodes with graph_node's in the connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping, connection_box_map)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(
            conn, graph, node_mapping, use_roi, roi, synth_tiles, segment_id
        )

        # Set of (src, sink, switch_id) tuples that pip edges have been sent to
        # VPR. VPR cannot handle duplicate paths with the same switch id.
        if use_roi:
            print('{} Adding synthetic edges'.format(now()))
            add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)

        x_dim, y_dim = phy_grid_dims(conn)
        connection_box_obj = graph.create_connection_box_object(
            x_dim=x_dim, y_dim=y_dim
        )

        print('{} Serializing to disk.'.format(now()))
        with xml_graph:
            xml_graph.start_serialize_to_xml(
                tool_version=tool_version,
                tool_comment=tool_comment,
                channels_obj=channels_obj,
                connection_box_obj=connection_box_obj,
            )

            xml_graph.serialize_nodes(yield_nodes(xml_graph.graph.nodes))
            xml_graph.serialize_edges(
                import_graph_edges(conn, graph, node_mapping)
            )

    print('{} Writing node map.'.format(now()))
    with open(args.write_rr_node_map, 'wb') as f:
        pickle.dump(node_mapping, f)
    print('{} Done writing node map.'.format(now()))
def main():
    """Convert a FASM file (optionally derived from a bitstream) into
    Verilog plus a Tcl constraints script.

    Pipeline: optionally run bit2fasm, build a Module model of the design
    from FASM features and the connection database, insert the PS7 hard
    block when present, compute routes, then emit Verilog and Tcl outputs.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument('--allow_orphan_sinks',
                        action='store_true',
                        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument('--fasm_file',
                        help="FASM file to convert BELs and routes.",
                        required=True)
    parser.add_argument('--bit_file',
                        help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument(
        '--allow-non-dedicated-clk-routes',
        action='store_true',
        help="Effectively sets CLOCK_DEDICATED_ROUTE to FALSE on all nets.")
    parser.add_argument('--iostandard',
                        default=None,
                        help="Default IOSTANDARD to use for IO buffers.")
    parser.add_argument('--drive',
                        type=int,
                        default=None,
                        help="Default DRIVE to use for IO buffers.")
    parser.add_argument('--top',
                        default="top",
                        help="Root level module name.")
    parser.add_argument('--pcf', help="Mapping of top-level pins to pads.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument(
        '--vpr_capnp_schema_dir',
        help='Directory container VPR schema files',
    )
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument('verilog_file', help="Filename of output verilog file")
    parser.add_argument('tcl_file', help="Filename of output tcl script.")

    args = parser.parse_args()

    # Read-only connection to the fabric connectivity database.
    conn = sqlite3.connect('file:{}?mode=ro'.format(args.connection_database),
                           uri=True)

    db = prjxray.db.Database(args.db_root, args.part)
    grid = db.grid()

    # If a bitstream was supplied, first disassemble it into the FASM file
    # that the rest of this function consumes.
    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    # tile name -> list of set FASM features that belong to it.
    tiles = {}

    maybe_get_wire = create_maybe_get_wire(conn)

    top = Module(db, grid, conn, name=args.top)
    if args.pcf:
        top.set_site_to_signal(load_io_sites(args.db_root, args.part,
                                             args.pcf))

    if args.route_file:
        assert args.rr_graph
        assert args.vpr_capnp_schema_dir
        net_map = load_net_list(conn, args.vpr_capnp_schema_dir,
                                args.rr_graph, args.route_file)
        top.set_net_map(net_map)

    if args.part:
        with open(os.path.join(args.db_root, args.part, 'part.json')) as f:
            part_data = json.load(f)
            top.set_io_banks(part_data['iobanks'])

    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)

        top.add_to_cname_map(parsed_eblif)
        top.make_iosettings_map(parsed_eblif)

    top.set_default_iostandard(args.iostandard, args.drive)

    # Group FASM features per tile; 3-part features (tile.dst.src) describe
    # pips, which are registered with the router model as we go.
    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        # NOTE(review): process_set_feature presumably canonicalizes the
        # feature record — confirm against its definition.
        set_feature = process_set_feature(fasm_line.set_feature)

        parts = set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(set_feature)

        if len(parts) == 3:
            maybe_add_pip(top, maybe_get_wire, set_feature)

    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    # Check if the PS7 is present in the tilegrid. If so then insert it.
    pss_tile, ps7_site = get_ps7_site(db)
    if pss_tile is not None and ps7_site is not None:
        # First load the PS7 ports
        fname = os.path.join(args.db_root, "ps7_ports.json")
        with open(fname, "r") as fp:
            ps7_ports = json.load(fp)

        # Insert the PS7
        insert_ps7(top, pss_tile, ps7_site, ps7_ports)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    if args.allow_non_dedicated_clk_routes:
        top.add_extra_tcl_line(
            "set_property CLOCK_DEDICATED_ROUTE FALSE [get_nets]")

    with open(args.verilog_file, 'w') as f:
        for line in top.output_verilog():
            print(line, file=f)

    # Tcl output: BEL locations, nets, disabled DRCs, then extra lines.
    with open(args.tcl_file, 'w') as f:
        for line in top.output_bel_locations():
            print(line, file=f)

        for line in top.output_nets():
            print(line, file=f)

        for line in top.output_disabled_drcs():
            print(line, file=f)

        for line in top.output_extra_tcl():
            print(line, file=f)
def main():
    """Convert a FASM file (optionally derived from a bitstream) into
    Verilog plus a Tcl constraints script (older variant without PS7 or
    IOSTANDARD inference; IO settings come from --iostandard_defs).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument('--allow_orphan_sinks',
                        action='store_true',
                        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument(
        '--iostandard_defs',
        help=
        "Specify a JSON file defining IOSTANDARD and DRIVE parameters for each IOB site"
    )
    parser.add_argument('--fasm_file',
                        help="FASM file to convert BELs and routes.",
                        required=True)
    parser.add_argument('--bit_file',
                        help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument('--top',
                        default="top",
                        help="Root level module name.")
    parser.add_argument('--pcf', help="Mapping of top-level pins to pads.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument('verilog_file', help="Filename of output verilog file")
    parser.add_argument('tcl_file', help="Filename of output tcl script.")

    args = parser.parse_args()

    # Read-only connection to the fabric connectivity database.
    conn = sqlite3.connect('file:{}?mode=ro'.format(args.connection_database),
                           uri=True)

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    # If a bitstream was supplied, first disassemble it into the FASM file
    # that the rest of this function consumes.
    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    # tile name -> list of set FASM features that belong to it.
    tiles = {}

    maybe_get_wire = create_maybe_get_wire(conn)

    top = Module(db, grid, conn, name=args.top)
    if args.pcf:
        top.set_site_to_signal(load_io_sites(args.db_root, args.part,
                                             args.pcf))

    if args.route_file:
        assert args.rr_graph
        net_map = load_net_list(conn, args.rr_graph, args.route_file)
        top.set_net_map(net_map)

    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)

        top.add_to_cname_map(parsed_eblif)

    # Group FASM features per tile; 3-part features (tile.dst.src) describe
    # pips, which are registered with the router model as we go.
    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        parts = fasm_line.set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(fasm_line.set_feature)

        if len(parts) == 3:
            maybe_add_pip(top, maybe_get_wire, fasm_line.set_feature)

    if args.iostandard_defs:
        with open(args.iostandard_defs) as fp:
            defs = json.load(fp)
            top.set_iostandard_defs(defs)

    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    with open(args.verilog_file, 'w') as f:
        for l in top.output_verilog():
            print(l, file=f)

    # Tcl output: BEL locations followed by net routing commands.
    with open(args.tcl_file, 'w') as f:
        for l in top.output_bel_locations():
            print(l, file=f)

        for l in top.output_nets():
            print(l, file=f)
def main():
    """Convert a PCF-derived constraints file into a VPR io.place file.

    Copies the incoming IO placement through to the output, then adds
    placement constraints for LOCed blocks, IO-adjacent specials, clock
    resources and IDELAYCTRL instances, and finally writes the combined
    place constraints.
    """
    parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.')
    parser.add_argument(
        "--input",
        '-i',
        "-I",
        type=argparse.FileType('r'),
        # Bug fix: this is an *input* stream; it previously defaulted to
        # sys.stdout, which cannot be read from.
        default=sys.stdin,
        help='The input constraints place file')
    parser.add_argument("--output",
                        '-o',
                        "-O",
                        type=argparse.FileType('w'),
                        default=sys.stdout,
                        help='The output constraints place file')
    parser.add_argument("--net",
                        '-n',
                        type=argparse.FileType('r'),
                        required=True,
                        help='top.net file')
    parser.add_argument('--vpr_grid_map',
                        help='Map of canonical to VPR grid locations',
                        required=True)
    parser.add_argument('--arch', help='Arch XML', required=True)
    parser.add_argument('--db_root', required=True)
    parser.add_argument('--part', required=True)
    parser.add_argument("--blif",
                        '-b',
                        type=argparse.FileType('r'),
                        required=True,
                        help='BLIF / eBLIF file')
    parser.add_argument('--roi', action='store_true', help='Using ROI')
    parser.add_argument("--allow-bufg-logic-sources",
                        action="store_true",
                        help="When set allows BUFGs to be driven by logic")
    parser.add_argument('--graph_limit', help='Graph limit parameters')

    args = parser.parse_args()

    # Derive the prjxray device family from the part name prefix.
    part = args.part
    device_families = {
        "xc7a": "artix7",
        "xc7k": "kintex7",
        "xc7z": "zynq7",
    }

    device_family = None
    for device in device_families:
        if part.startswith(device):
            device_family = device_families[device]
            break

    assert device_family

    db_root = os.path.join(args.db_root, device_family)
    db = prjxray.db.Database(db_root, args.part)
    canon_grid = db.grid()

    # Pass the incoming IO placement straight through while recording each
    # block's (x, y, z) so later constraints avoid the same locations.
    io_blocks = {}
    loc_in_use = set()
    for line in args.input:
        args.output.write(line)

        if line[0] == '#':
            continue
        block, x, y, z = line.strip().split()[0:4]

        io_blocks[block] = (int(x), int(y), int(z))
        loc_in_use.add(io_blocks[block])

    place_constraints = vpr_place_constraints.PlaceConstraints(args.net)
    place_constraints.load_loc_sites_from_net_file()

    grid_capacities = get_tile_capacities(args.arch)

    eblif_data = eblif.parse_blif(args.blif)

    vpr_grid = VprGrid(args.vpr_grid_map, args.graph_limit)

    # Constrain IO blocks and LOCed resources
    blocks = {}
    block_locs = {}
    for block, loc in place_constraints.get_loc_sites():
        vpr_loc = get_vpr_coords_from_site_name(canon_grid, vpr_grid, loc,
                                                grid_capacities)
        loc_in_use.add(vpr_loc)

        if block in io_blocks:
            assert io_blocks[block] == vpr_loc, (block, vpr_loc,
                                                 io_blocks[block])

        blocks[block] = vpr_loc
        block_locs[block] = loc

        place_constraints.constrain_block(
            block, vpr_loc, "Constraining block {}".format(block))

    # Constrain blocks directly connected to IO in the same x, y location
    constrain_special_ios(canon_grid, vpr_grid, io_blocks, eblif_data, blocks,
                          place_constraints)

    # Constrain clock resources
    clock_placer = ClockPlacer(vpr_grid, io_blocks, eblif_data, args.roi,
                               args.graph_limit,
                               args.allow_bufg_logic_sources)
    if clock_placer.has_clock_nets():
        for block, loc in clock_placer.place_clocks(canon_grid, vpr_grid,
                                                    loc_in_use, block_locs,
                                                    blocks, grid_capacities):
            vpr_loc = get_vpr_coords_from_site_name(canon_grid, vpr_grid, loc,
                                                    grid_capacities)
            place_constraints.constrain_block(
                block, vpr_loc, "Constraining clock block {}".format(block))
    """ Constrain IDELAYCTRL sites

    Prior to the invocation of this script, the IDELAYCTRL sites must have been
    replicated accordingly to the IDELAY specifications.
    There can be three different usage combinations of IDELAYCTRL and IDELAYs
    in a design:
        1. IODELAYs and IDELAYCTRLs can be constrained to banks as needed,
           through an in-design LOC constraint.
           Manual replication of the constrained IDELAYCTRLs is necessary to
           provide a controller for each bank.
        2. IODELAYs and a single IDELAYCTRL can be left entirely unconstrained,
           becoming a default group. The IDELAYCTRLis replicated depending on
           bank usage. Replication must have happened prior to this step
        3. One or more IODELAY_GROUPs can be defined that contain IODELAYs and
           a single IDELAYCTRL each. These components can be otherwise
           unconstrained and the IDELAYCTRL for each group has to be
           replicated as needed (depending on bank usage).

    NOTE: IODELAY_GROUPS are not enabled at the moment.
    """
    # Collect the clock regions (banks) that contain used IDELAYs; each one
    # needs exactly one IDELAYCTRL.
    idelayctrl_cmts = set()
    idelay_instances = place_constraints.get_used_instances("IDELAYE2")
    for inst in idelay_instances:
        x, y, z = io_blocks[inst]
        idelayctrl_cmt = vpr_grid.get_vpr_loc_cmt()[(x, y)]
        idelayctrl_cmts.add(idelayctrl_cmt)

    idelayctrl_instances = place_constraints.get_used_instances("IDELAYCTRL")

    assert len(idelayctrl_cmts) == len(
        idelayctrl_instances
    ), "The number of IDELAYCTRL blocks and IO banks with IDELAYs used do not match."

    idelayctrl_sites = dict()
    for site_name, _, clk_region in vpr_grid.get_site_type_dict(
    )['IDELAYCTRL']:
        if clk_region in idelayctrl_cmts:
            idelayctrl_sites[clk_region] = site_name

    # Check and remove user constrained IDELAYCTRLs.
    # Bug fix: iterate over a copy, since matching instances are removed
    # from idelayctrl_instances inside the loop; mutating a list while
    # iterating it skips elements.
    for idelayctrl_block in list(idelayctrl_instances):
        if idelayctrl_block in blocks.keys():
            x, y, _ = blocks[idelayctrl_block]
            idelayctrl_cmt = vpr_grid.get_vpr_loc_cmt()[(x, y)]

            assert idelayctrl_cmt in idelayctrl_cmts

            idelayctrl_cmts.remove(idelayctrl_cmt)
            idelayctrl_instances.remove(idelayctrl_block)

    # TODO: Add possibility to bind IDELAY banks to IDELAYCTRL sites using
    # the IDELAY_GROUP attribute.
    # NOTE(review): zip over a set gives an arbitrary pairing of banks to
    # remaining instances — presumably acceptable because any free
    # IDELAYCTRL can serve any bank; confirm.
    for cmt, idelayctrl_block in zip(idelayctrl_cmts, idelayctrl_instances):
        x, y = vpr_grid.get_site_dict()[idelayctrl_sites[cmt]]['vpr_loc']
        vpr_loc = (x, y, 0)

        place_constraints.constrain_block(
            idelayctrl_block, vpr_loc,
            "Constraining idelayctrl block {}".format(idelayctrl_block))

    if len(idelayctrl_instances) > 0:
        print("Warning: IDELAY_GROUPS parameters are currently being ignored!",
              file=sys.stderr)

    place_constraints.output_place_constraints(args.output)
def main():
    """Generate the VPR arch.xml for a prjxray part.

    Assembles the architecture file from per-tile XIncluded model, pb_type
    and tile XMLs, adds synthetic IO/constant tiles (for ROI or overlay
    flows), emits the fixed layout from the connection database, and writes
    the switch, segment, device and direct-connection sections.
    """
    parser = argparse.ArgumentParser(description="Generate arch.xml")
    parser.add_argument(
        '--db_root', required=True, help="Project X-Ray database to use."
    )
    parser.add_argument('--part', required=True, help="FPGA part")
    parser.add_argument(
        '--output-arch',
        nargs='?',
        type=argparse.FileType('w'),
        help="""File to output arch."""
    )
    parser.add_argument(
        '--tile-types', required=True, help="Semi-colon seperated tile types."
    )
    parser.add_argument(
        '--pb_types',
        required=True,
        help="Semi-colon seperated pb_types types."
    )
    parser.add_argument(
        '--pin_assignments', required=True, type=argparse.FileType('r')
    )
    parser.add_argument('--use_roi', required=False)
    parser.add_argument('--use_overlay', required=False)
    parser.add_argument('--device', required=True)
    parser.add_argument('--synth_tiles', required=False)
    parser.add_argument('--connection_database', required=True)
    parser.add_argument(
        '--graph_limit',
        help='Limit grid to specified dimensions in x_min,y_min,x_max,y_max',
    )

    args = parser.parse_args()

    # NOTE(review): despite the help text saying semi-colon, both lists are
    # split on commas.
    tile_types = args.tile_types.split(',')
    pb_types = args.pb_types.split(',')

    # Relative paths of the per-tile XML fragments pulled in via XInclude.
    model_xml_spec = "../../tiles/{0}/{0}.model.xml"
    pbtype_xml_spec = "../../tiles/{0}/{0}.pb_type.xml"
    tile_xml_spec = "../../tiles/{0}/{0}.tile.xml"

    xi_url = "http://www.w3.org/2001/XInclude"
    ET.register_namespace('xi', xi_url)
    xi_include = "{%s}include" % xi_url

    arch_xml = ET.Element(
        'architecture',
        {},
        nsmap={'xi': xi_url},
    )

    model_xml = ET.SubElement(arch_xml, 'models')
    for pb_type in pb_types:
        ET.SubElement(
            model_xml, xi_include, {
                'href': model_xml_spec.format(pb_type.lower()),
                'xpointer': "xpointer(models/child::node())",
            }
        )

    tiles_xml = ET.SubElement(arch_xml, 'tiles')
    tile_capacity = {}
    for tile_type in tile_types:
        uri = tile_xml_spec.format(tile_type.lower())
        ET.SubElement(tiles_xml, xi_include, {
            'href': uri,
        })

        # Parse each tile XML to total up its capacity (sum of sub_tile
        # capacities, defaulting to 1 per sub_tile).
        with open(uri) as f:
            tile_xml = ET.parse(f, ET.XMLParser())

            tile_root = tile_xml.getroot()
            assert tile_root.tag == 'tile'
            tile_capacity[tile_type] = 0
            for sub_tile in tile_root.iter('sub_tile'):
                if 'capacity' in sub_tile.attrib:
                    tile_capacity[tile_type] += int(
                        sub_tile.attrib['capacity']
                    )
                else:
                    tile_capacity[tile_type] += 1

    complexblocklist_xml = ET.SubElement(arch_xml, 'complexblocklist')
    for pb_type in pb_types:
        ET.SubElement(
            complexblocklist_xml, xi_include, {
                'href': pbtype_xml_spec.format(pb_type.lower()),
            }
        )

    layout_xml = ET.SubElement(arch_xml, 'layout')

    db = prjxray.db.Database(args.db_root, args.part)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}
    synth_loc_map = {}
    synth_tile_map = {}
    roi = None
    if args.use_roi:
        # ROI flow: restrict the grid and create synthetic IO tiles for the
        # harness ports described in --synth_tiles.
        with open(args.use_roi) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=j['info']['GRID_X_MIN'],
            y1=j['info']['GRID_Y_MIN'],
            x2=j['info']['GRID_X_MAX'],
            y2=j['info']['GRID_Y_MAX'],
        )

        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map

            tile_name = tile_info['tile_name']

            num_input = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'output',
                        tile_info['pins']
                    )
                )
            )
            num_output = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'input', tile_info['pins']
                    )
                )
            )

            create_synth_io_tile(
                complexblocklist_xml, tiles_xml, tile_name, num_input,
                num_output
            )

            synth_loc_map[tuple(tile_info['loc'])] = tile_name

        create_synth_pb_types(model_xml, complexblocklist_xml)

        synth_tile_map = add_constant_synthetic_tiles(
            model_xml, complexblocklist_xml, tiles_xml
        )

        # Second pass: map the constant (VCC/GND) synthetic tiles.
        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] not in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map

            vpr_tile_type = synth_tile_map[tile_info['pins'][0]['port_type']]

            synth_loc_map[tuple(tile_info['loc'])] = vpr_tile_type

    elif args.graph_limit:
        # Plain rectangular grid restriction, no synthetic tiles.
        x_min, y_min, x_max, y_max = map(int, args.graph_limit.split(','))
        roi = Roi(
            db=db,
            x1=x_min,
            y1=y_min,
            x2=x_max,
            y2=y_max,
        )
    elif args.use_overlay:
        # Overlay flow: multiple named regions instead of a single ROI.
        with open(args.use_overlay) as f:
            j = json.load(f)

        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        region_dict = dict()
        for r in synth_tiles['info']:
            bounds = (
                r['GRID_X_MIN'], r['GRID_X_MAX'], r['GRID_Y_MIN'],
                r['GRID_Y_MAX']
            )
            region_dict[r['name']] = bounds

        roi = Overlay(region_dict=region_dict)

        for _, tile_info in synth_tiles['tiles'].items():
            if tile_info['pins'][0]['port_type'] in ['GND', 'VCC']:
                continue

            assert tuple(tile_info['loc']) not in synth_loc_map

            tile_name = tile_info['tile_name']

            num_input = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'output',
                        tile_info['pins']
                    )
                )
            )
            num_output = len(
                list(
                    filter(
                        lambda t: t['port_type'] == 'input', tile_info['pins']
                    )
                )
            )

            create_synth_io_tile(
                complexblocklist_xml, tiles_xml, tile_name, num_input,
                num_output
            )

            synth_loc_map[tuple(tile_info['loc'])] = tile_name

        create_synth_pb_types(model_xml, complexblocklist_xml, True)

    with DatabaseCache(args.connection_database, read_only=True) as conn:
        c = conn.cursor()

        # ROI flow already created the constant tiles; otherwise insert
        # them from the database now.
        if 'GND' not in synth_tile_map:
            synth_tile_map, synth_loc_map_const = insert_constant_tiles(
                conn, model_xml, complexblocklist_xml, tiles_xml
            )

            synth_loc_map.update(synth_loc_map_const)

        # Find the grid extent.
        y_max = 0
        x_max = 0
        for grid_x, grid_y in c.execute("SELECT grid_x, grid_y FROM tile"):
            x_max = max(grid_x + 2, x_max)
            y_max = max(grid_y + 2, y_max)

        name = '{}-test'.format(args.device)
        fixed_layout_xml = ET.SubElement(
            layout_xml, 'fixed_layout', {
                'name': name,
                'height': str(y_max),
                'width': str(x_max),
            }
        )

        # Emit one <single> per placed tile from the database/ROI view.
        for vpr_tile_type, grid_x, grid_y, metadata_function in get_tiles(
                conn=conn,
                g=g,
                roi=roi,
                synth_loc_map=synth_loc_map,
                synth_tile_map=synth_tile_map,
                tile_types=tile_types,
                tile_capacity=tile_capacity,
        ):
            single_xml = ET.SubElement(
                fixed_layout_xml, 'single', {
                    'priority': '1',
                    'type': vpr_tile_type,
                    'x': str(grid_x),
                    'y': str(grid_y),
                }
            )
            metadata_function(single_xml)

        switchlist_xml = ET.SubElement(arch_xml, 'switchlist')

        for name, internal_capacitance, drive_resistance, intrinsic_delay, \
                switch_type in c.execute("""
SELECT name, internal_capacitance, drive_resistance, intrinsic_delay,
    switch_type
FROM switch WHERE name != "__vpr_delayless_switch__";"""):
            attrib = {
                'type': switch_type,
                'name': name,
                "R": str(drive_resistance),
                "Cin": str(0),
                "Cout": str(0),
                "Tdel": str(intrinsic_delay),
            }

            if internal_capacitance != 0:
                attrib["Cinternal"] = str(internal_capacitance)

            # NOTE(review): dead branch — presumably a disabled toggle for
            # emitting sizing attributes; confirm before removing.
            if False:
                attrib["mux_trans_size"] = str(0)
                attrib["buf_size"] = str(0)

            ET.SubElement(switchlist_xml, 'switch', attrib)

        segmentlist_xml = ET.SubElement(arch_xml, 'segmentlist')

        # VPR requires a segment, so add one.
        dummy_xml = ET.SubElement(
            segmentlist_xml, 'segment', {
                'name': 'dummy',
                'length': '2',
                'freq': '1.0',
                'type': 'bidir',
                'Rmetal': '0',
                'Cmetal': '0',
            }
        )
        ET.SubElement(dummy_xml, 'wire_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'opin_switch', {
            'name': 'buffer',
        })
        ET.SubElement(dummy_xml, 'sb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(3))
        ET.SubElement(dummy_xml, 'cb', {
            'type': 'pattern',
        }).text = ' '.join('1' for _ in range(2))

        # One <segment> per database segment; sb/cb patterns sized from the
        # segment length.
        for (name, length) in c.execute("SELECT name, length FROM segment"):
            if length is None:
                length = 1

            segment_xml = ET.SubElement(
                segmentlist_xml, 'segment', {
                    'name': name,
                    'length': str(length),
                    'freq': '1.0',
                    'type': 'bidir',
                    'Rmetal': '0',
                    'Cmetal': '0',
                }
            )
            ET.SubElement(segment_xml, 'wire_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'opin_switch', {
                'name': 'buffer',
            })
            ET.SubElement(segment_xml, 'sb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length + 1))
            ET.SubElement(segment_xml, 'cb', {
                'type': 'pattern',
            }).text = ' '.join('1' for _ in range(length))

        ET.SubElement(
            switchlist_xml,
            'switch',
            {
                'type': 'mux',
                'name': 'buffer',
                "R": "551",
                "Cin": ".77e-15",
                "Cout": "4e-15",
                # TODO: This value should be the "typical" pip switch delay from
                # This value is the dominate term in the inter-cluster delay
                # estimate.
                "Tdel": "0.178e-9",
                "mux_trans_size": "2.630740",
                "buf_size": "27.645901"
            }
        )

        device_xml = ET.SubElement(arch_xml, 'device')
        ET.SubElement(
            device_xml, 'sizing', {
                "R_minW_nmos": "6065.520020",
                "R_minW_pmos": "18138.500000",
            }
        )
        ET.SubElement(device_xml, 'area', {
            "grid_logic_tile_area": "14813.392",
        })
        ET.SubElement(
            device_xml, 'connection_block', {
                "input_switch_name": "buffer",
            }
        )
        ET.SubElement(device_xml, 'switch_block', {
            "type": "wilton",
            "fs": "3",
        })
        chan_width_distr_xml = ET.SubElement(device_xml, 'chan_width_distr')

        ET.SubElement(
            chan_width_distr_xml, 'x', {
                'distr': 'uniform',
                'peak': '1.0',
            }
        )
        ET.SubElement(
            chan_width_distr_xml, 'y', {
                'distr': 'uniform',
                'peak': '1.0',
            }
        )

        directlist_xml = ET.SubElement(arch_xml, 'directlist')

        pin_assignments = json.load(args.pin_assignments)

        # Choose smallest distance for block to block connections with multiple
        # direct_connections.  VPR cannot handle multiple block to block connections.
        directs = {}
        for direct in pin_assignments['direct_connections']:
            key = (direct['from_pin'], direct['to_pin'])

            if key not in directs:
                directs[key] = []

            directs[key].append(
                (abs(direct['x_offset']) + abs(direct['y_offset']), direct)
            )

        # Tile types allowed to have same-(x, y) directs between sub_tiles.
        ALLOWED_ZERO_OFFSET_DIRECT = [
            "GTP_CHANNEL_0",
            "GTP_CHANNEL_1",
            "GTP_CHANNEL_2",
            "GTP_CHANNEL_3",
            "GTP_CHANNEL_0_MID_LEFT",
            "GTP_CHANNEL_1_MID_LEFT",
            "GTP_CHANNEL_2_MID_LEFT",
            "GTP_CHANNEL_3_MID_LEFT",
            "GTP_CHANNEL_0_MID_RIGHT",
            "GTP_CHANNEL_1_MID_RIGHT",
            "GTP_CHANNEL_2_MID_RIGHT",
            "GTP_CHANNEL_3_MID_RIGHT",
            "GTP_COMMON_MID_LEFT",
            "GTP_COMMON_MID_RIGHT",
        ]

        zero_offset_directs = dict()
        for direct in directs.values():
            # Keep only the shortest-offset candidate per pin pair.
            _, direct = min(direct, key=lambda v: v[0])

            from_tile = direct['from_pin'].split('.')[0]
            to_tile = direct['to_pin'].split('.')[0]

            if from_tile not in tile_types:
                continue
            if to_tile not in tile_types:
                continue

            # In general, the Z offset is 0, except for special cases
            # such as for the GTP tiles, where there are direct connections
            # within the same (x, y) cooredinates, but between different sub_tiles
            direct['z_offset'] = 0

            if direct['x_offset'] == 0 and direct['y_offset'] == 0:
                if from_tile == to_tile and from_tile in ALLOWED_ZERO_OFFSET_DIRECT:
                    if from_tile not in zero_offset_directs:
                        zero_offset_directs[from_tile] = list()

                    zero_offset_directs[from_tile].append(direct)

                continue

            add_direct(directlist_xml, direct)

        # Resolve the z_offset for same-location directs from the sub_tile
        # index of each port in the tile XML.
        for tile, directs in zero_offset_directs.items():
            uri = tile_xml_spec.format(tile.lower())
            ports = list()

            with open(uri) as f:
                tile_xml = ET.parse(f, ET.XMLParser())

                tile_root = tile_xml.getroot()

                for capacity, sub_tile in enumerate(tile_root.iter('sub_tile')
                                                    ):
                    for in_port in sub_tile.iter('input'):
                        ports.append((in_port.attrib["name"], capacity))

                    for out_port in sub_tile.iter('output'):
                        ports.append((out_port.attrib["name"], capacity))

                    for clk_port in sub_tile.iter('clock'):
                        ports.append((clk_port.attrib["name"], capacity))

            for direct in directs:
                tile_type, from_port = direct['from_pin'].split('.')
                _, to_port = direct['to_pin'].split('.')

                if tile != tile_type:
                    continue

                from_port_capacity = None
                to_port_capacity = None
                for port, capacity in ports:
                    if port == from_port:
                        from_port_capacity = capacity

                    if port == to_port:
                        to_port_capacity = capacity

                assert from_port_capacity is not None and to_port_capacity is not None, (
                    tile, from_port, to_port
                )
                direct["z_offset"] = to_port_capacity - from_port_capacity

                add_direct(directlist_xml, direct)

        arch_xml_str = ET.tostring(arch_xml, pretty_print=True).decode('utf-8')
        args.output_arch.write(arch_xml_str)
        args.output_arch.close()
def main():
    """Emit the .tile.xml for a heterogeneous tile type.

    Gathers every site of the requested tile type (with its input/output
    wires, minus any explicitly unused ones), sorts them into a stable
    order, builds the tile XML and writes it under the output directory.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    # All of these flags are mandatory and take plain string values.
    for flag in ('--db_root', '--part', '--output_directory',
                 '--site_directory', '--tile_type', '--pb_types',
                 '--pin_assignments'):
        parser.add_argument(flag, required=True)
    parser.add_argument(
        '--unused_wires',
        help="Comma seperated list of site wires to exclude in this tile.")
    args = parser.parse_args()

    with open(args.pin_assignments) as fp:
        pin_assignments = json.load(fp)

    db = prjxray.db.Database(args.db_root, args.part)
    grid = db.grid()
    tile_type = db.get_tile_type(args.tile_type)

    # Sanity check: the requested tile type must occur somewhere in the grid.
    gridinfo = None
    for tilename in grid.tiles():
        if args.tile_type in tilename:
            gridinfo = grid.gridinfo_at_tilename(tilename)
            break
    assert gridinfo

    # Each pb_type spec is either "SITE" or "SITE/EQUIV1:EQUIV2:...".
    equivalent_sites_dict = dict()
    for spec in args.pb_types.split(','):
        try:
            site_name, equivalents = spec.split("/")
        except ValueError:
            site_name, equivalents = spec, None
        equivalent_sites_dict[site_name] = (
            equivalents.split(':') if equivalents else [])

    # Collect (site_type, site, inputs, outputs) for every site of the tile.
    site_records = []
    for site in tile_type.get_sites():
        site_type = db.get_site_type(site.type)
        in_wires, out_wires = get_wires(site, site_type, args.unused_wires)
        site_records.append((site_type, site, in_wires, out_wires))

    # Stable ordering: by site type name, then grid x, then grid y.
    site_records.sort(
        key=lambda record: (record[1].type, int(record[1].x),
                            int(record[1].y)))

    tile_xml = start_heterogeneous_tile(
        args.tile_type,
        pin_assignments,
        site_records,
        equivalent_sites_dict,
    )

    add_switchblock_locations(tile_xml)

    destination = '{}/{}.tile.xml'.format(args.output_directory,
                                          args.tile_type.lower())
    with open(destination, 'w') as fp:
        fp.write(ET.tostring(tile_xml, pretty_print=True).decode('utf-8'))
def main():
    """Generate a minimal synth_tiles.json for an ROI design.

    Reads the ROI definition JSON, classifies each harness port by its name
    prefix (dout/din/clk), records a synthetic pin for the tile each port
    lands on, and writes the result to --synth_tiles.
    """
    parser = argparse.ArgumentParser(description="Generate synth_tiles.json")
    parser.add_argument(
        '--db_root', required=True)
    parser.add_argument(
        '--roi', required=True)
    # Bug fix: the output path is unconditionally opened for writing below,
    # so it must be required; previously required=False allowed a crash on
    # open(None).
    parser.add_argument(
        '--synth_tiles', required=True)

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    g = db.grid()

    synth_tiles = {}
    synth_tiles['tiles'] = {}

    with open(args.roi) as f:
        j = json.load(f)

    roi = Roi(
        db=db,
        x1=j['info']['GRID_X_MIN'],
        y1=j['info']['GRID_Y_MIN'],
        x2=j['info']['GRID_X_MAX'],
        y2=j['info']['GRID_Y_MAX'],
    )

    synth_tiles['info'] = j['info']

    for port in j['ports']:
        # Port direction is inferred from the harness naming convention.
        if port['name'].startswith('dout['):
            port_type = 'input'
            is_clock = False
        elif port['name'].startswith('din['):
            is_clock = False
            port_type = 'output'
        elif port['name'].startswith('clk'):
            port_type = 'output'
            is_clock = True
        else:
            assert False, port

        tile, wire = port['wire'].split('/')

        # Make sure connecting wire is not in ROI!
        loc = g.loc_of_tilename(tile)
        if roi.tile_in_roi(loc):
            # Or if in the ROI, make sure it has no sites.
            gridinfo = g.gridinfo_at_tilename(tile)
            assert len(
                db.get_tile_type(gridinfo.tile_type).get_sites()) == 0, tile

        if tile not in synth_tiles['tiles']:
            synth_tiles['tiles'][tile] = {
                'pins': [],
                # Reuse the location computed above instead of performing a
                # second, identical grid lookup.
                'loc': loc,
            }

        synth_tiles['tiles'][tile]['pins'].append({
            'roi_name':
                port['name'].replace('[', '_').replace(']', '_'),
            'wire': wire,
            'pad': port['pin'],
            'port_type': port_type,
            'is_clock': is_clock,
        })

    with open(args.synth_tiles, 'w') as f:
        json.dump(synth_tiles, f)
def main():
    """Convert a FASM file (optionally derived from a bitstream) back into
    Verilog + XDC constraints and/or FPGA interchange netlists.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument(
        '--allow_orphan_sinks',
        action='store_true',
        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument(
        '--fasm_file',
        help="FASM file to convert BELs and routes.",
        required=True)
    parser.add_argument(
        '--bit_file', help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument(
        '--allow-non-dedicated-clk-routes',
        action='store_true',
        help="Effectively sets CLOCK_DEDICATED_ROUTE to FALSE on all nets.")
    parser.add_argument(
        '--iostandard',
        default=None,
        help="Default IOSTANDARD to use for IO buffers.")
    parser.add_argument(
        '--drive',
        type=int,
        default=None,
        help="Default DRIVE to use for IO buffers.")
    parser.add_argument(
        '--top', default="top", help="Root level module name.")
    parser.add_argument(
        '--pcf', help="Mapping of top-level pins to pads, PCF format.")
    parser.add_argument(
        '--input_xdc', help="Mapping of top-level pints to pads, XDC format.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument(
        '--vpr_capnp_schema_dir', help="VPR capnp schemas directory.")
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument(
        '--vpr_grid_map', help="VPR grid to Canonical grid map")
    parser.add_argument(
        '--verilog_file', help="Filename of output verilog file")
    parser.add_argument(
        '--xdc_file', help="Filename of output xdc constraints file.")
    parser.add_argument(
        '--logical_netlist',
        help="Filename of output interchange logical netlist capnp.")
    parser.add_argument(
        '--physical_netlist',
        help="Filename of output interchange physical netlist capnp.")
    parser.add_argument(
        '--interchange_xdc', help="Filename of output interchange XDC.")
    parser.add_argument(
        '--interchange_capnp_schema_dir',
        help="Folder containing interchange capnp definitions.")

    args = parser.parse_args()

    # Regenerate the connection database only when it does not exist yet.
    #
    # BUGFIX: the check previously joined os.path.realpath(__file__) (the
    # path of this *file*, not a directory) with the database path, so any
    # relative --connection_database path was never found and the database
    # was needlessly regenerated on every run.  Check the given path
    # directly instead.
    if not os.path.exists(args.connection_database):
        create_channels(args.db_root, args.part, args.connection_database)

    # Open the connection database read-only.
    conn = sqlite3.connect(
        'file:{}?mode=ro'.format(args.connection_database), uri=True)

    db = prjxray.db.Database(args.db_root, args.part)
    grid = db.grid()

    # Optionally convert an input bitstream to FASM first.
    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    # tile name -> list of FASM set features seen for that tile.
    tiles = {}

    top = Module(db, grid, conn, name=args.top)

    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)
    else:
        parsed_eblif = None

    if args.eblif or args.pcf or args.input_xdc:
        top.set_site_to_signal(
            load_io_sites(args.db_root, args.part, args.pcf, args.input_xdc,
                          parsed_eblif, top))

    if args.route_file:
        assert args.rr_graph, "RR graph file required."
        assert args.vpr_grid_map, "VPR grid map required."
        assert args.vpr_capnp_schema_dir, "VPR capnp schemas dir path required."

        # (vpr_x, vpr_y) -> list of (canon_x, canon_y): one VPR grid location
        # may map to several canonical grid locations.
        grid_map = dict()
        with open(args.vpr_grid_map, 'r') as csv_grid_map:
            csv_reader = csv.DictReader(csv_grid_map)
            for row in csv_reader:
                vpr_x = int(row['vpr_x'])
                vpr_y = int(row['vpr_y'])
                can_x = int(row['canon_x'])
                can_y = int(row['canon_y'])
                # setdefault replaces the manual "if key in dict" grouping.
                grid_map.setdefault((vpr_x, vpr_y), []).append((can_x, can_y))

        net_map = load_net_list(conn, args.vpr_capnp_schema_dir,
                                args.rr_graph, args.route_file, grid_map)
        top.set_net_map(net_map)

    if args.part:
        with open(os.path.join(args.db_root, args.part, 'part.json')) as f:
            part_data = json.load(f)
        top.set_io_banks(part_data['iobanks'])

    if args.eblif:
        top.add_to_cname_map(parsed_eblif)
        top.make_iosettings_map(parsed_eblif)

    top.set_default_iostandard(args.iostandard, args.drive)

    # Group FASM features by tile, and register PIP features as they are
    # seen.
    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        set_feature = process_set_feature(fasm_line.set_feature)

        parts = set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(set_feature)

        # A three-part feature (tile.wire.wire) with value 1 marks a PIP.
        if len(parts) == 3 and set_feature.value == 1:
            top.maybe_add_pip(set_feature.feature)

    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    # Check if the PS7 is present in the tilegrid. If so then insert it.
    pss_tile, ps7_site = get_ps7_site(db)
    if pss_tile is not None and ps7_site is not None:

        # First load the PS7 ports
        fname = os.path.join(args.db_root, "ps7_ports.json")
        with open(fname, "r") as fp:
            ps7_ports = json.load(fp)

        # Insert the PS7
        insert_ps7(top, pss_tile, ps7_site, ps7_ports)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    # IBUF IOSTANDARDS are checked here, after routing and pruning,
    # as we don't need to issue IOSTANDARD warnings/errors for
    # removed IBUFs (eg the PUDC pin)
    ibufs_append_iostandard_params(top)

    if args.allow_non_dedicated_clk_routes:
        top.add_extra_tcl_line(
            "set_property CLOCK_DEDICATED_ROUTE FALSE [get_nets]")

    if args.verilog_file:
        assert args.xdc_file
        with open(args.verilog_file, 'w') as f:
            for line in top.output_verilog():
                print(line, file=f)

        with open(args.xdc_file, 'w') as f:
            for line in top.output_bel_locations():
                print(line, file=f)

            for line in top.output_nets():
                print(line, file=f)

            for line in top.output_disabled_drcs():
                print(line, file=f)

            for line in top.output_extra_tcl():
                print(line, file=f)

    if args.logical_netlist:
        assert args.physical_netlist
        assert args.interchange_capnp_schema_dir
        assert args.part

        with open(args.logical_netlist, 'wb') as f_log, open(
                args.physical_netlist,
                'wb') as f_phys, open(args.interchange_xdc, 'w') as f_xdc:
            output_interchange(top, args.interchange_capnp_schema_dir,
                               args.part, f_log, f_phys, f_xdc)
def main():
    """Import fabric connectivity into a VPR routing-resource graph.

    Reads the virtual rr_graph emitted by VPR, adds nodes/tracks/edges from
    the connection database (optionally restricted to an ROI defined by the
    synthetic tile JSON), and serializes the routable graph to
    --write_rr_graph.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', required=True, help='Project X-Ray Database')
    parser.add_argument(
        '--read_rr_graph', required=True, help='Input rr_graph file')
    parser.add_argument(
        '--write_rr_graph', required=True, help='Output rr_graph file')
    parser.add_argument(
        '--connection_database',
        help='Database of fabric connectivity',
        required=True)
    parser.add_argument(
        '--synth_tiles',
        help=
        'If using an ROI, synthetic tile defintion from prjxray-arch-import')

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    if args.synth_tiles:
        use_roi = True
        with open(args.synth_tiles) as f:
            synth_tiles = json.load(f)

        roi = Roi(
            db=db,
            x1=synth_tiles['info']['GRID_X_MIN'],
            y1=synth_tiles['info']['GRID_Y_MIN'],
            x2=synth_tiles['info']['GRID_X_MAX'],
            y2=synth_tiles['info']['GRID_Y_MAX'],
        )

        print('{} generating routing graph for ROI.'.format(now()))
    else:
        use_roi = False
        # BUGFIX: roi and synth_tiles were previously left unbound on the
        # non-ROI path, causing a NameError when they were passed to
        # create_track_rr_graph() below.
        roi = None
        synth_tiles = None

    # Convert input rr graph into graph2.Graph object.
    input_rr_graph = read_xml_file(args.read_rr_graph)

    xml_graph = xml_graph2.Graph(
        input_rr_graph,
        progressbar=progressbar.progressbar,
        output_file_name=args.write_rr_graph,
    )

    graph = xml_graph.graph

    # Add back short switch, which is unused in arch xml, so is not emitted in
    # rrgraph XML.
    #
    # TODO: This can be removed once
    # https://github.com/verilog-to-routing/vtr-verilog-to-routing/issues/354
    # is fixed.
    #
    # Only the registration side effect matters; the returned id is unused.
    try:
        short = graph.get_switch_id('short')
    except KeyError:
        short = xml_graph.add_switch(
            graph2.Switch(
                id=None,
                name='short',
                type=graph2.SwitchType.SHORT,
                timing=None,
                sizing=graph2.SwitchSizing(
                    mux_trans_size=0,
                    buf_size=0,
                ),
            ))

    tool_version = input_rr_graph.getroot().attrib['tool_version']
    tool_comment = input_rr_graph.getroot().attrib['tool_comment']

    with DatabaseCache(args.connection_database, True) as conn:
        # Mapping of graph_node.pkey to rr node id.
        node_mapping = {}

        # Match site pins rr nodes with graph_node's in the
        # connection_database.
        print('{} Importing graph nodes'.format(now()))
        import_graph_nodes(conn, graph, node_mapping)

        # Walk all track graph nodes and add them.
        print('{} Creating tracks'.format(now()))
        segment_id = graph.get_segment_id_from_name('dummy')
        create_track_rr_graph(conn, graph, node_mapping, use_roi, roi,
                              synth_tiles, segment_id)

        # Set of (src, sink, switch_id) tuples that pip edges have been sent
        # to VPR. VPR cannot handle duplicate paths with the same switch id.
        if use_roi:
            print('{} Adding synthetic edges'.format(now()))
            add_synthetic_edges(conn, graph, node_mapping, grid, synth_tiles)

        print('{} Creating channels.'.format(now()))
        channels_obj = create_channels(conn)

        print('{} Serializing to disk.'.format(now()))
        with xml_graph:
            xml_graph.start_serialize_to_xml(
                tool_version=tool_version,
                tool_comment=tool_comment,
                channels_obj=channels_obj,
            )
            xml_graph.serialize_nodes(yield_nodes(xml_graph.graph.nodes))
            xml_graph.serialize_edges(
                import_graph_edges(conn, graph, node_mapping))
def main():
    """Assign a tile-edge direction to every tile wire and emit pin
    assignments.

    Reads the channel-classification JSON produced by channel formation and,
    for every (tile_type, wire) pair reaching a site pin, picks the tile
    edge(s) (TOP/BOTTOM/LEFT/RIGHT) through which the wire can connect to
    routing.  Direct tile-to-tile (edge-with-mux) connections are collected
    separately.  Results are written as JSON to --pin_assignments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--db_root', help='Project X-Ray Database', required=True)
    parser.add_argument(
        '--channels',
        help='Input JSON defining channel assignments',
        required=True)
    parser.add_argument(
        '--pin_assignments',
        help=
        'Output JSON assigning pins to tile types and direction connections',
        required=True)

    args = parser.parse_args()

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    # (tile_type, wire) -> list of direction constraints; each entry is an
    # iterable of feasible tracks.Direction values and the final choice must
    # intersect every entry.  Only wires attached to a site pin get a key.
    edge_assignments = {}

    # Every (tile_type, wire) pair in the database.  Pairs are removed as
    # they are accounted for; the set must be empty once all channel data is
    # processed (asserted below).
    wires_in_tile_types = set()

    for tile_type in db.get_tile_types():
        type_obj = db.get_tile_type(tile_type)

        for wire in type_obj.get_wires():
            wires_in_tile_types.add((tile_type, wire))

        for site in type_obj.get_sites():
            for site_pin in site.site_pins:
                if site_pin.wire is None:
                    continue

                key = (tile_type, site_pin.wire)
                assert key not in edge_assignments, key
                edge_assignments[key] = []

    print('{} Reading channel data'.format(datetime.datetime.now()))
    with open(args.channels) as f:
        channels = json.load(f)
    print('{} Done reading channel data'.format(datetime.datetime.now()))

    direct_connections = set()

    # Edges with mux should have one source tile and one destination_tile.
    # The pin from the source_tile should face the destination_tile.
    #
    # It is expected that all edges_with_mux will lie in a line (e.g. X only
    # or Y only).
    for edge_with_mux in progressbar.progressbar(channels['edges_with_mux']):
        source_tile = None
        source_tile_type = None
        source_wire = None
        destination_tile = None
        destination_tile_type = None
        destination_wire = None

        # The endpoint of each node is the (unique) wire attached to exactly
        # one site.
        for tile, wire in edge_with_mux['source_node']:
            tileinfo = grid.gridinfo_at_tilename(tile)
            tile_type = db.get_tile_type(tileinfo.tile_type)
            wire_info = tile_type.get_wire_info(wire)
            if len(wire_info.sites) == 1:
                assert source_tile is None, (tile, wire, source_tile)
                source_tile = tile
                source_tile_type = tileinfo.tile_type
                source_wire = wire

        for tile, wire in edge_with_mux['destination_node']:
            tileinfo = grid.gridinfo_at_tilename(tile)
            tile_type = db.get_tile_type(tileinfo.tile_type)
            wire_info = tile_type.get_wire_info(wire)
            if len(wire_info.sites) == 1:
                assert destination_tile is None, (tile, wire,
                                                  destination_tile, wire_info)
                destination_tile = tile
                destination_tile_type = tileinfo.tile_type
                destination_wire = wire

        assert source_tile is not None
        assert destination_tile is not None

        source_loc = grid.loc_of_tilename(source_tile)
        destination_loc = grid.loc_of_tilename(destination_tile)

        # Source and destination must be colinear (same row or same column).
        assert source_loc.grid_x == destination_loc.grid_x or source_loc.grid_y == destination_loc.grid_y, (
            source_tile, destination_tile, edge_with_mux['pip'])

        direct_connections.add(
            DirectConnection(
                from_pin='{}.{}'.format(source_tile_type, source_wire),
                to_pin='{}.{}'.format(destination_tile_type,
                                      destination_wire),
                switch_name='routing',
                x_offset=destination_loc.grid_x - source_loc.grid_x,
                y_offset=destination_loc.grid_y - source_loc.grid_y,
            ))

        # The source pin faces the destination tile, and vice versa.
        if destination_loc.grid_x == source_loc.grid_x:
            if destination_loc.grid_y > source_loc.grid_y:
                source_dir = tracks.Direction.TOP
                destination_dir = tracks.Direction.BOTTOM
            else:
                source_dir = tracks.Direction.BOTTOM
                destination_dir = tracks.Direction.TOP
        else:
            if destination_loc.grid_x > source_loc.grid_x:
                source_dir = tracks.Direction.RIGHT
                destination_dir = tracks.Direction.LEFT
            else:
                source_dir = tracks.Direction.LEFT
                destination_dir = tracks.Direction.RIGHT

        # Single-direction constraints: the edge fixes the pin side.
        edge_assignments[(source_tile_type, source_wire)].append(
            (source_dir, ))
        edge_assignments[(destination_tile_type, destination_wire)].append(
            (destination_dir, ))

    # (tile_type, wire) -> classification reason for wires that are not in
    # any channel.  Used to cross-check the channel data for consistency.
    wires_not_in_channels = {}
    for node in progressbar.progressbar(channels['node_not_in_channels']):
        reason = node['classification']

        for tile, wire in node['wires']:
            tileinfo = grid.gridinfo_at_tilename(tile)
            key = (tileinfo.tile_type, wire)

            # Sometimes nodes in particular tile instances are disconnected,
            # disregard classification changes if this is the case.
            if reason != 'NULL':
                if key not in wires_not_in_channels:
                    wires_not_in_channels[key] = reason
                else:
                    other_reason = wires_not_in_channels[key]
                    assert reason == other_reason, (tile, wire, reason,
                                                    other_reason)

            if key in wires_in_tile_types:
                wires_in_tile_types.remove(key)

    # List of nodes that are channels.
    channel_nodes = []

    # Map of (tile, wire) to track. This will be used to find channels for
    # pips that come from EDGES_TO_CHANNEL.
    channel_wires_to_tracks = {}

    # Generate track models and verify that wires are either in a channel
    # or not in a channel.
    for channel in progressbar.progressbar(channels['channels']):
        track_list = []
        for track in channel['tracks']:
            track_list.append(tracks.Track(**track))

        tracks_model = tracks.Tracks(track_list, channel['track_connections'])
        channel_nodes.append(tracks_model)

        for tile, wire in channel['wires']:
            tileinfo = grid.gridinfo_at_tilename(tile)
            key = (tileinfo.tile_type, wire)
            # Make sure all wires in channels always are in channels
            assert key not in wires_not_in_channels

            if key in wires_in_tile_types:
                wires_in_tile_types.remove(key)

            channel_wires_to_tracks[(tile, wire)] = tracks_model

    # Make sure all wires appear to have been assigned.
    assert len(wires_in_tile_types) == 0

    # Verify that all tracks are sane.
    for node in channel_nodes:
        node.verify_tracks()

    null_tile_wires = set()

    # Verify that all nodes that are classified as edges to channels have at
    # least one site, and at least one live connection to a channel.
    #
    # If no live connections from the node are present, this node should've
    # been marked as NULL during channel formation.
    for node in progressbar.progressbar(channels['node_not_in_channels']):
        reason = node['classification']
        assert reason != 'EDGE_WITH_SHORT'

        if reason == 'NULL':
            for tile, wire in node['wires']:
                tileinfo = grid.gridinfo_at_tilename(tile)
                tile_type = db.get_tile_type(tileinfo.tile_type)
                null_tile_wires.add((tileinfo.tile_type, wire))

        if reason == 'EDGES_TO_CHANNEL':
            # NOTE(review): num_sites is accumulated but never checked; the
            # comment above suggests an assert on it was intended — confirm.
            num_sites = 0
            for tile, wire in node['wires']:
                tileinfo = grid.gridinfo_at_tilename(tile)
                loc = grid.loc_of_tilename(tile)
                tile_type = db.get_tile_type(tileinfo.tile_type)

                wire_info = tile_type.get_wire_info(wire)
                num_sites += len(wire_info.sites)

                # Follow each pip off this wire; if the wire on the other
                # side is in a channel, record which pin directions that
                # channel offers at this tile's coordinates.
                for pip in wire_info.pips:
                    other_wire = prjxray.tile.get_other_wire_from_pip(
                        tile_type.get_pip_by_name(pip), wire)

                    key = (tile, other_wire)
                    if key in channel_wires_to_tracks:
                        tracks_model = channel_wires_to_tracks[key]

                        if len(wire_info.sites) > 0:
                            available_pins = set(
                                pin_dir for _, pin_dir in
                                tracks_model.get_tracks_for_wire_at_coord(
                                    (loc.grid_x, loc.grid_y)))
                            edge_assignments[(tileinfo.tile_type,
                                              wire)].append(available_pins)

    # Greedily reduce each wire's direction constraints to a small pin set.
    final_edge_assignments = {}
    for (tile_type, wire), available_pins in progressbar.progressbar(
            edge_assignments.items()):
        if len(available_pins) == 0:
            if (tile_type, wire) not in null_tile_wires:
                # TODO: Figure out what is going on with these wires. Appear
                # to be tile internal connections sometimes?
                print((tile_type, wire))

            # No constraints: pick an arbitrary default edge.
            final_edge_assignments[(tile_type, wire)] = [
                tracks.Direction.RIGHT
            ]
            continue

        # Intersect all constraints; if one direction satisfies every
        # constraint, a single pin suffices.
        pins = set(available_pins[0])
        for p in available_pins[1:]:
            pins &= set(p)

        if len(pins) > 0:
            final_edge_assignments[(tile_type, wire)] = [list(pins)[0]]
        else:
            # More than 2 pins are required, find the minimal number of pins
            pins = set()
            for p in available_pins:
                pins |= set(p)

            # Greedy shrink: drop one pin at a time as long as every
            # constraint still intersects the remaining set.
            while len(pins) > 2:
                pins = list(pins)
                prev_len = len(pins)

                for idx in range(len(pins)):
                    pins_subset = list(pins)
                    del pins_subset[idx]

                    pins_subset = set(pins_subset)

                    bad_subset = False
                    for p in available_pins:
                        if len(pins_subset & set(p)) == 0:
                            bad_subset = True
                            break

                    if not bad_subset:
                        pins = list(pins_subset)
                        break

                # Failed to remove any pins, stop.
                if len(pins) == prev_len:
                    break

            final_edge_assignments[(tile_type, wire)] = pins

    # Final consistency check: every constraint must be satisfiable with the
    # chosen pin set.
    for (tile_type, wire), available_pins in edge_assignments.items():
        pins = set(final_edge_assignments[(tile_type, wire)])

        for required_pins in available_pins:
            assert len(pins & set(required_pins)) > 0, (tile_type, wire,
                                                        pins, required_pins)

    # Flatten to tile_type -> wire -> [direction name, ...] for JSON output.
    pin_directions = {}
    for (tile_type, wire), pins in final_edge_assignments.items():
        if tile_type not in pin_directions:
            pin_directions[tile_type] = {}

        # pin._name_ is the enum member name (equivalent to pin.name).
        pin_directions[tile_type][wire] = [pin._name_ for pin in pins]

    with open(args.pin_assignments, 'w') as f:
        json.dump(
            {
                'pin_directions': pin_directions,
                'direct_connections':
                [d._asdict() for d in direct_connections],
            },
            f,
            indent=2)