def main():
    """Generate the test architecture FPGA and serialize it to a device file.

    Reads the interchange capnp schemas from --schema_dir, runs the test
    architecture generator, and writes the serialized device resources to
    "device_resources.device.gz" in the current directory.
    """
    parser = argparse.ArgumentParser(description="Generates testarch FPGA")
    parser.add_argument(
        "--schema_dir",
        required=True,
        help="Path to FPGA interchange capnp schema files")
    args = parser.parse_args()

    # Run the test architecture generator
    gen = TestArchGenerator()
    gen.generate()

    # Initialize the writer (or "serializer")
    interchange = Interchange(args.schema_dir)
    writer = DeviceResourcesCapnp(
        gen.device,
        interchange.device_resources_schema,
        interchange.logical_netlist_schema,
    )

    # Serialize. write_capnp_file defaults to its standard compression;
    # removed stale commented-out compression_format override.
    device_resources = writer.to_capnp()
    with open("device_resources.device.gz", "wb") as fp:
        write_capnp_file(device_resources, fp)
def main():
    """Emit an example logical netlist, physical netlist and XDC file."""
    parser = argparse.ArgumentParser(
        description=
        "Create an example netlist, suitable for use with Vivado 2019.2")
    for option in ['--schema_dir', '--logical_netlist', '--physical_netlist',
                   '--xdc']:
        parser.add_argument(option, required=True)
    args = parser.parse_args()

    interchange = Interchange(args.schema_dir)

    # Build the in-memory examples and convert each to a capnp message.
    logical_message = example_logical_netlist().convert_to_capnp(interchange)
    physical_message = example_physical_netlist().convert_to_capnp(interchange)

    with open(args.logical_netlist, 'wb') as out:
        write_capnp_file(logical_message, out)

    with open(args.physical_netlist, 'wb') as out:
        write_capnp_file(physical_message, out)

    with open(args.xdc, 'w') as out:
        out.write(example_xdc())
def write_format(message, output_format, out_f):
    """ Write capnp message to a serialized output format.

    message: Capnp Builder object to be serialized into output file.
    output_format (str): Output format type, one of capnp, json, yaml, pyyaml.
    out_f (file-like): Binary file to write the serialized format to.

    Raises ValueError for an unrecognized output_format.
    """
    if output_format == 'capnp':
        write_capnp_file(message, out_f)
    elif output_format == 'json':
        message = message.as_reader()
        json_data = to_json(message)
        json_string = json.dumps(json_data, indent=2)
        out_f.write(json_string.encode('utf-8'))
    elif output_format == 'yaml':
        ryml = get_ryml()
        message = message.as_reader()
        # "strings" presumably keeps the backing string storage alive while
        # the tree is emitted — TODO confirm against to_rapidyaml.
        strings, yaml_tree = to_rapidyaml(message)
        yaml_string = ryml.emit(yaml_tree)
        out_f.write(yaml_string.encode('utf-8'))
    elif output_format == 'pyyaml':
        yaml, _, Dumper = get_pyyaml()
        message = message.as_reader()
        yaml_data = to_yaml(message)
        yaml_string = yaml.dump(yaml_data, sort_keys=False, Dumper=Dumper)
        out_f.write(yaml_string.encode('utf-8'))
    else:
        # Raise instead of "assert False": asserts are stripped under -O.
        raise ValueError('Invalid output format {}'.format(output_format))
def main():
    """Apply a serialized patch to a field of a capnp message and save it."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--schema_dir', required=True)
    parser.add_argument('--schema', required=True, choices=SCHEMAS)
    parser.add_argument('--root_schema_path', default=None)
    parser.add_argument('--patch_path', required=True)
    parser.add_argument('--patch_format', required=True, choices=FORMATS)
    parser.add_argument('root')
    parser.add_argument('patch')
    parser.add_argument('output')
    args = parser.parse_args()

    # The patch location is a dotted path into the root message.
    field_path = args.patch_path.split('.')

    schema = get_schema(args.schema_dir, args.schema, args.root_schema_path)

    with open(args.root, 'rb') as in_f:
        message = read_capnp_file(schema, in_f)
        # Readers are immutable; a builder is needed to patch in place.
        message = message.as_builder()

    with open(args.patch, 'rb') as patch_f:
        patch_capnp(message, field_path, args.patch_format, patch_f)

    with open(args.output, 'wb') as out_f:
        write_capnp_file(message, out_f)
def test_logical_netlist(self):
    """Round-trip the example logical netlist through capnp serialization."""
    original = example_logical_netlist()
    interchange = Interchange(
        schema_directory=os.environ['INTERCHANGE_SCHEMA_PATH'])

    with tempfile.NamedTemporaryFile('w+b') as f:
        write_capnp_file(original.convert_to_capnp(interchange), f)
        f.seek(0)
        restored = LogicalNetlist.read_from_capnp(f, interchange)

        # Top-level identity must survive the round trip.
        self.assertEqual(restored.name, original.name)
        self.assertEqual(restored.top_instance, original.top_instance)
        self.assertEqual(restored.libraries.keys(), original.libraries.keys())

        # Compare every library, cell by cell.
        for library_name, library in original.libraries.items():
            restored_library = restored.libraries[library_name]
            self.assertEqual(library.cells.keys(),
                             restored_library.cells.keys())

            for cell_name, cell in library.cells.items():
                restored_cell = restored_library.cells[cell_name]
                self.assertEqual(cell.name, restored_cell.name)
                self.assertEqual(cell.property_map, restored_cell.property_map)
                self.assertEqual(cell.view, restored_cell.view)
                self.assertEqual(cell.nets.keys(), restored_cell.nets.keys())
                self.assertEqual(cell.ports.keys(), restored_cell.ports.keys())
                self.assertEqual(cell.cell_instances.keys(),
                                 restored_cell.cell_instances.keys())
def test_physical_netlist(self):
    """Round-trip the example physical netlist and check placement count."""
    original = example_physical_netlist()
    interchange = Interchange(
        schema_directory=os.environ['INTERCHANGE_SCHEMA_PATH'])

    with tempfile.NamedTemporaryFile('w+b') as f:
        write_capnp_file(original.convert_to_capnp(interchange), f)
        f.seek(0)
        restored = PhysicalNetlist.read_from_capnp(f, interchange)

        self.assertEqual(
            len(original.placements), len(restored.placements))
def test_capnp_modes(self):
    """Exercise every compression x packing combination of the capnp I/O."""
    logical_netlist = example_logical_netlist()
    interchange = Interchange(
        schema_directory=os.environ['INTERCHANGE_SCHEMA_PATH'])

    for compression_format in (CompressionFormat.UNCOMPRESSED,
                               CompressionFormat.GZIP):
        for packed in (True, False):
            with tempfile.NamedTemporaryFile('w+b') as f:
                message = logical_netlist.convert_to_capnp(interchange)
                write_capnp_file(
                    message,
                    f,
                    compression_format=compression_format,
                    is_packed=packed)
                f.seek(0)
                # Reading back must succeed for every mode; the contents
                # are checked by the other round-trip tests.
                _ = LogicalNetlist.read_from_capnp(
                    f,
                    interchange,
                    compression_format=compression_format,
                    is_packed=packed)
def main():
    """Convert a Yosys JSON design into an interchange logical netlist."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--schema_dir', required=True)
    parser.add_argument('--device', required=True)
    parser.add_argument('--top', required=True)
    parser.add_argument('--verbose', action='store_true')
    parser.add_argument(
        '--library',
        default='work',
        help='Library to put non-primitive elements')
    parser.add_argument('yosys_json')
    parser.add_argument('netlist')
    args = parser.parse_args()

    with open(args.yosys_json) as json_file:
        design = json.load(json_file)

    # Any valid Yosys export carries a "modules" section.
    assert 'modules' in design, design.keys()
    modules = design['modules']
    if args.top not in modules:
        raise RuntimeError(
            'Could not find top module in yosys modules: {}'.format(', '.join(
                modules.keys())))

    interchange = Interchange(args.schema_dir)

    with open(args.device, 'rb') as device_file:
        device = interchange.read_device_resources(device_file)

    logical_netlist = convert_yosys_json(device, design, args.top,
                                         args.library, args.verbose)
    message = logical_netlist.convert_to_capnp(interchange)
    with open(args.netlist, 'wb') as out_file:
        write_capnp_file(message, out_file)
def main():
    """Generate the test architecture FPGA and write it to --out-file.

    Command line options control the package name and whether the FF input
    mux is generated; all options are forwarded to TestArchGenerator.
    """
    parser = argparse.ArgumentParser(description="Generates testarch FPGA")
    parser.add_argument(
        "--schema-dir",
        required=True,
        help="Path to FPGA interchange capnp schema files")
    parser.add_argument(
        "--out-file", default="test_arch.device", help="Output file name")
    parser.add_argument("--package", default="TESTPKG", help="Package name")
    parser.add_argument(
        "--no-ffmux",
        action="store_true",
        help=
        "Do not add the mux that selects FF input forcing it to require LUT-thru"
    )

    args = parser.parse_args()

    # Run the test architecture generator
    gen = TestArchGenerator(args)
    gen.generate()

    # Initialize the writer (or "serializer")
    interchange = Interchange(args.schema_dir)
    writer = DeviceResourcesCapnp(
        gen.device,
        interchange.device_resources_schema,
        interchange.logical_netlist_schema,
    )

    # Serialize. write_capnp_file defaults to its standard compression;
    # removed stale commented-out compression_format override.
    device_resources = writer.to_capnp()
    with open(args.out_file, "wb") as fp:
        write_capnp_file(device_resources, fp)
def output_interchange(top, capnp_folder, part, f_logical, f_physical, f_xdc):
    """ Output FPGA interchange from top level Module class object.

    Builds the logical netlist (cells, library primitives, nets) and the
    physical netlist (placements, site routing, stitched nets) for the design
    and serializes both; extra TCL commands are written to the XDC output.

    top (Module) - Top level module.
    capnp_folder (str) - Path to the interchange capnp folder
    part (str) - Part for physical netlist.
    f_logical (file-like) - File to output logical_netlist.Netlist.
    f_physical (file-like) - File to output physical_netlist.PhysNetlist.
    f_xdc (file-like) - Text file to output extra TCL/XDC commands.
    """
    interchange = Interchange(capnp_folder)

    hdi_primitives = Library('hdi_primitives')
    work = Library('work')
    libraries = {hdi_primitives.name: hdi_primitives, work.name: work}

    top_cell = Cell(top.name)

    # Create source cells for constant nets.  They are required to have some
    # name, so give them one.
    #
    # TODO: Iterate net names on this?  This feels wrong/weird.  Need to
    # handle net name collisions?
    constant_nets = {
        0: "GLOBAL_LOGIC0",
        1: "GLOBAL_LOGIC1",
    }

    # VCC cell drives the logic-1 net via its "P" port.
    top_cell.add_cell_instance(name='VCC', cell_name="VCC")
    top_cell.add_net(constant_nets[1])
    top_cell.connect_net_to_instance(
        net_name=constant_nets[1], instance_name='VCC', port="P")

    # GND cell drives the logic-0 net via its "G" port.
    top_cell.add_cell_instance(name='GND', cell_name="GND")
    top_cell.add_net(constant_nets[0])
    top_cell.connect_net_to_instance(
        net_name=constant_nets[0], instance_name='GND', port="G")

    # Parse top level port names, and convert to bussed ports as needed.
    create_top_level_ports(top_cell, top, top.root_in, Direction.Input)
    create_top_level_ports(top_cell, top, top.root_out, Direction.Output)
    create_top_level_ports(top_cell, top, top.root_inout, Direction.Inout)

    # Declare one net per scalar wire, or one per bit of a bussed wire.
    for wire, width in make_bus(top.wires):
        wire = unescape_verilog_name(wire)
        if width is None:
            top_cell.add_net(name=wire)
        else:
            # make_bus reports the highest index, hence width + 1 bits.
            for idx in range(width + 1):
                top_cell.add_net(name='{}[{}]'.format(wire, idx))

    # Update/create wire_name_net_map from the BELs.
    for site in top.sites:
        for bel in site.bels:
            bel.make_net_map(top=top, net_map=top.wire_name_net_map)

    # Resolve assign chains so every sink wire maps to its root net name.
    for sink_wire, source_wire in top.wire_assigns.yield_wires():
        net_name = flatten_wires(source_wire, top.wire_assigns,
                                 top.wire_name_net_map)
        if sink_wire in top.wire_name_net_map:
            # An existing mapping must agree with the flattened result.
            assert top.wire_name_net_map[sink_wire] == net_name
        else:
            top.wire_name_net_map[sink_wire] = net_name

    # Create a list of each primitive's instances to later build up a
    # primitive model library.
    hdi_primitives_cells = {}

    # Create cell instances from each bel in the design.
    for site in top.sites:
        # Priority ordering matters for bel output — TODO confirm why.
        for bel in sorted(site.bels, key=lambda bel: bel.priority):
            bel.output_interchange(
                top_cell=top_cell,
                top=top,
                net_map=top.wire_name_net_map,
                constant_nets=constant_nets,
            )

            # Child bels are represented through their parent cell.
            if bel.parent_cell is not None:
                continue

            if bel.module not in hdi_primitives_cells:
                hdi_primitives_cells[bel.module] = []

            hdi_primitives_cells[bel.module].append(bel)

    # Add top level cell to the work cell library.
    work.add_cell(top_cell)

    # Construct library cells based on data from top module.  Port widths
    # are widened to the maximum seen across all instances of a primitive.
    for cellname in hdi_primitives_cells:
        instances = hdi_primitives_cells[cellname]

        cell = Cell(cellname)

        # Maps port name -> (Direction, width or None for scalar ports).
        ports = {}
        for instance in instances:
            _, connections, port_is_output = instance.create_connections(top)

            for port in connections:
                if port_is_output[port]:
                    # The current model doesn't handle IO at all, so add
                    # special cases for IO ports in the library.
                    if cellname.startswith('IOBUF') and port == "IO":
                        direction = Direction.Inout
                    else:
                        direction = Direction.Output
                else:
                    direction = Direction.Input

                width = connections[port].bus_width()

                # A declared port width overrides the observed bus width.
                if port in instance.port_width:
                    if width is not None:
                        assert width <= instance.port_width[port], port
                    width = instance.port_width[port]

                if port in ports:
                    port_dir, port_width = ports[port]
                    # Direction must be consistent across instances.
                    assert port_dir == direction, (port, direction, port_dir,
                                                   port_width)

                    if width is not None:
                        assert port_width <= width
                        # Keep the widest width observed so far.
                        if width > port_width:
                            ports[port] = (direction, width)
                    else:
                        assert port_width is None
                else:
                    ports[port] = (direction, width)

            # Add instances of unconnected ports (as needed).
            for port, direction in instance.port_direction.items():
                width = instance.port_width[port]

                if direction == "output":
                    direction = Direction.Output
                elif direction == "inout":
                    direction = Direction.Inout
                else:
                    assert direction == "input", direction
                    direction = Direction.Input

                if port in ports:
                    assert (direction, width) == ports[port]
                else:
                    ports[port] = (direction, width)

        # Emit the collected ports onto the library cell.
        for port, (direction, width) in ports.items():
            if width is not None:
                cell.add_bus_port(port, direction, start=width - 1, end=0)
            else:
                cell.add_port(port, direction)

        hdi_primitives.add_cell(cell)

    # Make sure VCC and GND primitives are in the library.
    if "VCC" not in hdi_primitives.cells:
        cell = Cell("VCC")
        cell.add_port("P", Direction.Output)
        hdi_primitives.add_cell(cell)

    if "GND" not in hdi_primitives.cells:
        cell = Cell("GND")
        cell.add_port("G", Direction.Output)
        hdi_primitives.add_cell(cell)

    # Logical netlist is complete, output to file now!
    logical_netlist = LogicalNetlist(
        name=top.name,
        property_map={},
        top_instance_name=top.name,
        top_instance=CellInstance(
            cell_name=top.name, view='netlist', property_map={}),
        libraries=libraries,
    ).convert_to_capnp(interchange)
    write_capnp_file(logical_netlist, f_logical)

    physical_netlist = PhysicalNetlist(part=part)

    # Maps (site name, site pin) -> site type pin, used during stitching.
    site_type_pins = {}

    # Convert sites and bels into placement directives and physical nets.
    net_stubs = {}
    sub_cell_nets = {}
    for site in top.sites:
        physical_netlist.add_site_instance(site.site.name, site.site_type())

        for bel in site.bels:
            # Skip bels with no physical placement information.
            if bel.site is None or (bel.bel is None
                                    and len(bel.physical_bels) == 0):
                continue

            cell_instance = unescape_verilog_name(bel.get_cell(top))

            # bel.physical_bels is used to represent a transformation that
            # happens from the library cell (e.g. LUT6_2) into lower
            # primitives (LUT6_2 -> (LUT6, LUT5)).
            #
            # Rather than implement generic transformation support, for now
            # models implement the transformation by adding physical bels to
            # generate the correct placement constraints.
            #
            # TODO: Revisit this in the future?
            if len(bel.physical_bels) == 0:
                # Straightforward case, 1 logical Cell -> 1 physical Bel
                placement = Placement(
                    cell_type=bel.module,
                    cell_name=cell_instance,
                    site=bel.site,
                    bel=bel.bel,
                )

                for (bel_name,
                     bel_pin), cell_pin in bel.bel_pins_to_cell_pins.items():
                    placement.add_bel_pin_to_cell_pin(
                        bel_pin=bel_pin,
                        cell_pin=cell_pin,
                        bel=bel_name,
                    )

                physical_netlist.placements.append(placement)
            else:
                # Transformation cases, create a placement constraint for
                # each bel in the physical_bels list.
                #
                # These represent a cell within the primitive, hence the "/"
                # when constructing the cell name.
                for phys_bel in bel.physical_bels:
                    placement = Placement(
                        cell_type=phys_bel.module,
                        cell_name=cell_instance + '/' + phys_bel.name,
                        site=bel.site,
                        bel=phys_bel.bel,
                    )

                    for (bel_name, bel_pin
                         ), cell_pin in phys_bel.bel_pins_to_cell_pins.items():
                        placement.add_bel_pin_to_cell_pin(
                            bel_pin=bel_pin,
                            cell_pin=cell_pin,
                            bel=bel_name,
                        )

                    physical_netlist.placements.append(placement)

        # Convert site routing to PhysicalNetlist objects (PhysicalBelPin,
        # PhysicalSitePin, PhysicalSitePip).
        #
        # Note: Calling output_site_routing must be done before
        # output_interchange_nets to ensure that Bel.final_net_names gets
        # populated, as that is computed during Site.output_site_routing.
        new_nets = site.output_site_routing(
            top=top,
            parent_cell=top_cell,
            net_map=top.wire_name_net_map,
            constant_nets=constant_nets,
            sub_cell_nets=sub_cell_nets)

        # Record the site pin -> site type pin mapping for stitching later.
        for site_pin, site_type_pin in site.site_type_pins.items():
            site_type_pins[site.site.name, site_pin] = site_type_pin

        # Extend net stubs with the site routing.
        for net_name in new_nets:
            if net_name not in net_stubs:
                net_stubs[net_name] = []

            net_stubs[net_name].extend(new_nets[net_name])

    # Convert top level routing nets to pip lists and to relevant nets
    for net_name, pips in top.output_interchange_nets(
            constant_nets=constant_nets):
        if net_name not in net_stubs:
            net_stubs[net_name] = []

        for tile, wire0, wire1 in pips:
            # TODO: Better handling of bipips?
            net_stubs[net_name].append(
                PhysicalPipForStitching(
                    tile=tile, wire0=wire0, wire1=wire1, forward=False))

    # Constant nets are tagged with their special physical net type.
    net_to_type = {}
    for val, net_name in constant_nets.items():
        if val == 0:
            net_to_type[net_name] = PhysicalNetType.Gnd
        else:
            assert val == 1
            net_to_type[net_name] = PhysicalNetType.Vcc

    cursor = top.conn.cursor()
    for net_name in net_stubs:
        # NOTE(review): these two initial assignments are immediately
        # overwritten by stitch_stubs below and appear to be dead code.
        sources = []
        stubs = net_stubs[net_name]

        sources, stubs = stitch_stubs(net_stubs[net_name], cursor,
                                      site_type_pins)

        physical_netlist.add_physical_net(
            net_name=sub_cell_nets.get(net_name, net_name),
            sources=sources,
            stubs=stubs,
            net_type=net_to_type.get(net_name, PhysicalNetType.Signal))

    phys_netlist_capnp = physical_netlist.convert_to_capnp(interchange)
    write_capnp_file(phys_netlist_capnp, f_physical)

    # Any extra TCL commands from the design go into the XDC output.
    for l in top.output_extra_tcl():
        print(l, file=f_xdc)
def main():
    """Build a primitive library from Yosys JSON and patch it into a device.

    Reads a device resources file, constructs a logical netlist library from
    the blackbox/whitebox modules in a Yosys JSON export, and writes the
    device back out with its primLibs (and string list) updated.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--schema_dir', required=True)
    parser.add_argument('--library', default="primitives")
    parser.add_argument('device_in')
    parser.add_argument('yosys_json')
    parser.add_argument('device_out')
    args = parser.parse_args()

    interchange = Interchange(args.schema_dir)

    with open(args.device_in, 'rb') as f:
        device = read_capnp_file(interchange.device_resources_schema.Device, f)
        # A builder is needed to patch the message in place.
        device = device.as_builder()

    with open(args.yosys_json) as f:
        yosys_json = json.load(f)

    prim_lib = Library(args.library)

    assert 'modules' in yosys_json, yosys_json.keys()
    # Sort by module name so the output library is deterministic.
    for module_name, module_data in sorted(yosys_json['modules'].items(),
                                           key=lambda x: x[0]):
        # Library should only contain blackboxes
        assert module_data['attributes'].get('blackbox', 0) or \
            module_data['attributes'].get('whitebox', 0), module_name

        # Cell properties come from both attributes and parameters.
        property_map = {}
        if 'attributes' in module_data:
            property_map.update(module_data['attributes'])
        if 'parameters' in module_data:
            property_map.update(module_data['parameters'])

        cell = Cell(module_name, property_map)

        for port_name, port_data in module_data['ports'].items():
            if port_data['direction'] == 'input':
                direction = Direction.Input
            elif port_data['direction'] == 'output':
                direction = Direction.Output
            else:
                assert port_data['direction'] == 'inout'
                direction = Direction.Inout

            # Per-port properties (reuses the name, shadowing the cell map).
            property_map = {}
            if 'attributes' in port_data:
                property_map = port_data['attributes']

            # Yosys bus conventions: "offset" is the low index, "upto"
            # flips the bit ordering ([end:start] instead of [start:end]).
            offset = port_data.get('offset', 0)
            upto = port_data.get('upto', False)

            if is_bus(port_data['bits'], offset, upto):
                end = offset
                start = offset + len(port_data['bits']) - 1

                if upto:
                    start, end = end, start

                cell.add_bus_port(name=port_name,
                                  direction=direction,
                                  start=start,
                                  end=end,
                                  property_map=property_map)
            else:
                cell.add_port(name=port_name,
                              direction=direction,
                              property_map=property_map)

        prim_lib.add_cell(cell)

    libraries = {}
    libraries[args.library] = prim_lib

    # Create the netlist (library only: no top instance).
    netlist = LogicalNetlist(name=args.library,
                             property_map={},
                             top_instance_name=None,
                             top_instance=None,
                             libraries=libraries)

    # Convert while sharing the device's string table; convert_to_capnp
    # appends any new strings to str_list.
    str_list = [s for s in device.strList]
    netlist_capnp = netlist.convert_to_capnp(interchange,
                                             indexed_strings=str_list)

    # Patch device
    device.primLibs = netlist_capnp

    if len(device.strList) != len(str_list):
        # At least 1 string was added to the list, update the strList.
        device.init('strList', len(str_list))

        for idx, s in enumerate(str_list):
            device.strList[idx] = s

    # Save patched device
    with open(args.device_out, 'wb') as f:
        write_capnp_file(device, f)
def main():
    """Patch a device resources file with timing data from a family database.

    Reads the device, loads timing data via the family-specific reader
    (currently only "xc7" / prjxray), accumulates wire RC models onto nodes,
    attaches pip and site pin timing models, deduplicates the models into
    the device's nodeTimings/pipTimings tables, and writes the patched
    device back out.
    """
    parser = argparse.ArgumentParser(
        description="Add timing information to Device")
    parser.add_argument("--schema_dir", required=True)
    parser.add_argument("--timing_dir", required=True)
    parser.add_argument("--family", required=True)
    parser.add_argument("device")
    parser.add_argument("patched_device")
    args = parser.parse_args()

    device_schema = get_schema(args.schema_dir, "device")
    with open(args.device, 'rb') as f:
        dev = read_capnp_file(device_schema, f)
        # A builder is needed to patch the message in place.
        dev = dev.as_builder()

    # Build the lookup tables used to translate timing-database names into
    # device objects.
    node_model_map, wire_node_map = create_wire_to_node_map(dev)
    tileType_wire_name_wire_list_map = create_tile_type_wire_name_to_wire_list(
        dev)
    string_map = create_string_to_dev_string_map(dev)
    tile_name_tileType_map = create_tile_type_name_to_tile_type(dev)
    tileType_wires_pip_map = create_tile_type_wire0_wire1_pip_map(dev)
    siteName_siteType_map = create_site_name_to_site_type_map(dev)
    siteType_name_sitePin_map = create_site_type_name_to_site_pin_map(dev)

    # NOTE(review): this map is built but never read below.
    tile_type_name_to_number = {}
    for i, tileType in enumerate(dev.tileTypeList):
        name = dev.strList[tileType.name]
        tile_type_name_to_number[name] = i

    pip_models = {}

    # Dispatch to the family-specific timing database reader.
    family_map = {"xc7": prjxray_db_reader}
    timing_dir = args.timing_dir
    timing_reader = family_map[args.family](timing_dir)
    timing_data = timing_reader.extract_data()

    for tile, _data in timing_data.items():
        # Skip tiles not present in this device's string table.
        if tile not in string_map:
            continue
        tile_name = string_map[tile]
        tileType = tile_name_tileType_map[tile_name]

        # Accumulate per-wire (resistance, capacitance) corner values onto
        # the node each wire belongs to.
        for name, data in _data['wires'].items():
            wire_name = string_map[name]
            for wire in tileType_wire_name_wire_list_map[(tileType,
                                                          wire_name)]:
                if wire not in wire_node_map:
                    continue
                node = wire_node_map[wire]
                model = node_model_map[node]
                # model is (resistance corners, capacitance corners);
                # add this wire's contribution element-wise.
                res = list(model[0])
                cap = list(model[1])
                for i in range(len(res)):
                    res[i] += data[0][i]
                for i in range(len(cap)):
                    cap[i] += data[1][i]
                model = (tuple(res), tuple(cap))
                node_model_map[node] = model

        # Attach pip timing data, keyed by (tile type, wire0, wire1).
        for old_key, data in _data['pips'].items():
            wire0 = string_map[old_key[0]]
            wire1 = string_map[old_key[1]]
            key = (tileType, wire0, wire1)
            if key not in tileType_wires_pip_map:
                continue
            pip = tileType_wires_pip_map[key]
            pip_models[pip] = data

        # Populate site pin models: model[0] is an optional
        # ('r'|'c', corners) pin model, model[1] is the pin delay.
        for site, data in _data['sites'].items():
            siteType = siteName_siteType_map[string_map[site]]
            for sitePin, model in data.items():
                sitePin_obj = siteType_name_sitePin_map[(siteType,
                                                         string_map[sitePin])]

                if model[0][0] is not None and model[0][0] == 'r':
                    sitePin_obj.model.init('resistance')
                    corner_model = sitePin_obj.model.resistance
                    populate_corner_model(corner_model, *model[0][1])
                elif model[0][0] is not None and model[0][0] == 'c':
                    sitePin_obj.model.init('capacitance')
                    corner_model = sitePin_obj.model.capacitance
                    populate_corner_model(corner_model, *model[0][1])

                sitePin_obj.init('delay')
                corner_model = sitePin_obj.delay
                populate_corner_model(corner_model, *model[1])

    # Deduplicate node timing models and write them into dev.nodeTimings,
    # then point each node at its entry.
    timing_set = set()
    for timing in node_model_map.values():
        timing_set.add(timing)
    timing_dict = {timing: i for i, timing in enumerate(timing_set)}
    dev.init("nodeTimings", len(timing_dict))
    for model, i in timing_dict.items():
        corner_model = dev.nodeTimings[i].resistance
        populate_corner_model(corner_model, *model[0])
        corner_model = dev.nodeTimings[i].capacitance
        populate_corner_model(corner_model, *model[1])
    for node, timing in node_model_map.items():
        node.nodeTiming = timing_dict[timing]

    # Deduplicate pip timing models likewise into dev.pipTimings.
    timing_set = set()
    for model in pip_models.values():
        timing_set.add(model)
    timing_dict = {timing: i for i, timing in enumerate(timing_set)}
    dev.init("pipTimings", len(timing_dict))
    for model, i in timing_dict.items():
        pipTiming = dev.pipTimings[i]
        corner_model = pipTiming.inputCapacitance
        populate_corner_model(corner_model, *model[0])
        corner_model = pipTiming.internalCapacitance
        populate_corner_model(corner_model, *model[1])
        corner_model = pipTiming.internalDelay
        populate_corner_model(corner_model, *model[2])
        corner_model = pipTiming.outputResistance
        populate_corner_model(corner_model, *model[3])
        corner_model = pipTiming.outputCapacitance
        populate_corner_model(corner_model, *model[4])
    for pip, timing in pip_models.items():
        pip.timing = timing_dict[timing]

    with open(args.patched_device, "wb") as fp:
        write_capnp_file(dev, fp)