def main():
    """Print a Vivado ``highlight_objects`` command for every PIP set in a FASM file.

    Reads the FASM file, resolves each set feature to a tile PIP via the
    prjxray database, and emits a single Tcl command on stdout that can be
    pasted into Vivado to highlight all used PIPs.
    """
    import argparse
    parser = argparse.ArgumentParser(
        # Fixed typo: was "Vivavo".
        description='Outputs a Vivado highlight_objects command from a FASM file.')
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('fn_in', help='Input FPGA assembly (.fasm) file')
    args = parser.parse_args()

    database = db.Database(args.db_root, args.part)
    grid = database.grid()

    def inner():
        # Yields "<tile>/<pip name>" for each routed PIP in the FASM file.
        for line in fasm.parse_fasm_filename(args.fn_in):
            if not line.set_feature:
                continue

            # Feature is "<tile>.<net_to>.<net_from>", per the comparison below.
            parts = line.set_feature.feature.split('.')
            tile = parts[0]

            gridinfo = grid.gridinfo_at_tilename(tile)
            tile_type = database.get_tile_type(gridinfo.tile_type)

            for pip in tile_type.pips:
                if pip.net_from == parts[2] and pip.net_to == parts[1]:
                    yield '{}/{}'.format(tile, pip.name)

    print(
        'highlight_objects [concat {}]'.format(
            ' '.join('[get_pips {}]'.format(pip) for pip in inner())))
def main():
    """CLI entry point: build multi-bit database entries from segment data."""
    import argparse

    ap = argparse.ArgumentParser(description='Create multi-bit entries')
    util.db_root_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')
    ap.add_argument(
        '--clb-int', action='store_true', help='Fixup CLB interconnect')
    ap.add_argument('--zero-db', help='Apply custom patches')
    ap.add_argument('--seg-fn-in', help='')
    ap.add_argument('--seg-fn-out', help='')
    util.add_bool_arg(ap, "--strict", default=False)
    ap.add_argument(
        "-g",
        "--groups",
        type=str,
        default=None,
        help="Input tag group definition file")
    opts = ap.parse_args()

    # Hand everything straight through to the library implementation.
    run(
        opts.db_root,
        opts.clb_int,
        opts.zero_db,
        opts.seg_fn_in,
        opts.seg_fn_out,
        opts.groups,
        strict=opts.strict,
        verbose=opts.verbose)
def main():
    """CLI entry point: decode and print bits within a tile's address space."""
    import argparse

    ap = argparse.ArgumentParser(
        description="Decode bits within a tile's address space")
    util.db_root_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')
    ap.add_argument(
        '-z',
        action='store_true',
        help="do not print a 'seg' header for empty segments")
    ap.add_argument(
        '-b', action='store_true', help='print bits outside of known segments')
    ap.add_argument(
        '-d',
        action='store_true',
        help='decode known segment bits and write them as tags')
    # XXX: possibly broken, or we have missing DB data
    ap.add_argument(
        '-D',
        action='store_true',
        help='decode known segment bits and omit them in the output')
    ap.add_argument('bits_file', help='')
    ap.add_argument(
        'segnames', nargs='*', help='List of tile or tile:block to print')
    opts = ap.parse_args()

    run(
        opts.db_root, opts.bits_file, opts.segnames, opts.z, opts.b, opts.d,
        opts.D, opts.verbose)
def main():
    """CLI entry point: run a quick consistency test on a prjxray database."""
    ap = argparse.ArgumentParser(
        description="Runs a sanity check on a prjxray database.")
    util.db_root_arg(ap)
    util.part_arg(ap)
    opts = ap.parse_args()
    quick_test(opts.db_root, opts.part)
def main():
    """CLI entry point: verify the consistency of an entire db repository."""
    import argparse

    ap = argparse.ArgumentParser(
        description="Parse a db repository, checking for consistency")
    util.db_root_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')
    opts = ap.parse_args()
    run(opts.db_root, verbose=opts.verbose)
def main():
    """Update the mapping file of all supported, available parts for a family.

    Queries Vivado for every part, filters by the requested family, and keeps
    only parts whose device is present in the database.

    Example:
        prjxray$ ./utils/update_parts.py artix7 --db-root database/artix7/
    """
    ap = argparse.ArgumentParser(
        description="Saves all supported parts for a family.")
    ap.add_argument(
        'family',
        help="Name of the device family.",
        choices=['artix7', 'kintex7', 'zynq7'])
    util.db_root_arg(ap)
    opts = ap.parse_args()

    env = os.environ.copy()
    # Vivado does not use the suffix 7 for zynq
    env['FILTER'] = "zynq" if opts.family == "zynq7" else opts.family

    cwd = os.path.dirname(os.path.abspath(__file__))
    part_info = {}

    # Devices the database actually knows about.
    supported_devices = util.get_devices(opts.db_root).keys()

    # Ask Vivado for every part matching FILTER (consumed by the Tcl script).
    cmd = "{} -mode batch -source update_parts.tcl".format(env['XRAY_VIVADO'])
    proc = subprocess.run(
        cmd.split(' '), check=True, env=env, cwd=cwd, stdout=subprocess.PIPE)

    # The Tcl output of interest follows the "# }" marker; drop the trailer.
    part_lines = proc.stdout.decode('utf-8').split('# }\n')[1].splitlines()[:-1]

    # Each line is "part,device,package,speed"; keep only supported devices.
    for line in part_lines:
        part, device, package, speed = line.split(',')
        if device not in supported_devices:
            print("Part {} has an unsupported device {}".format(part, device))
            continue
        part_info[part] = {
            'device': device,
            'package': package,
            # Strip the leading '-' from the speedgrade field.
            'speedgrade': speed[1:]
        }

    # Completely overwrites the <family>/parts.yaml file with new data.
    util.set_part_information(opts.db_root, part_info)
def main():
    """Convert a FASM file into frames, then pack them into a bitstream.

    Runs ``fasm2frames`` to produce a frame (.frm) file, then invokes the
    external ``xc7frames2bit`` tool to generate the final .bit file.
    """
    import os

    parser = argparse.ArgumentParser(
        description=
        'Convert FPGA configuration description ("FPGA assembly") into binary frame equivalent'
    )
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--part_file', required=True, help="Part YAML file.")
    parser.add_argument(
        '--sparse', action='store_true', help="Don't zero fill all frames")
    parser.add_argument(
        '--roi',
        help="ROI design.json file defining which tiles are within the ROI.")
    parser.add_argument(
        '--emit_pudc_b_pullup',
        help="Emit an IBUF and PULLUP on the PUDC_B pin if unused",
        action='store_true')
    parser.add_argument(
        '--debug', action='store_true', help="Print debug dump")
    parser.add_argument(
        '--frm2bit', default="xc7frames2bit", help="xc7frames2bit tool.")
    parser.add_argument('--fn_in', help='Input FPGA assembly (.fasm) file')
    parser.add_argument('--bit_out', help='Output FPGA bitstream (.bit) file')
    parser.add_argument(
        '--frm_out', default=None, help='Output FPGA frame (.frm) file')
    args = parser.parse_args()

    frm_out = args.frm_out
    if frm_out is None:
        # mkstemp returns an already-open descriptor; the original discarded
        # it (fd leak). Wrap it in a file object instead.
        frm_fd, frm_out = tempfile.mkstemp()
        f_out = os.fdopen(frm_fd, 'w')
    else:
        f_out = open(frm_out, 'w')

    # Ensure the frame file is flushed/closed even if fasm2frames raises,
    # so xc7frames2bit never sees a truncated file.
    with f_out:
        fasm2frames(
            db_root=args.db_root,
            part=args.part,
            filename_in=args.fn_in,
            f_out=f_out,
            sparse=args.sparse,
            roi=args.roi,
            debug=args.debug,
            emit_pudc_b_pullup=args.emit_pudc_b_pullup)

    # Argument list form (no shell) is robust against paths with spaces;
    # check_output still raises CalledProcessError on failure.
    subprocess.check_output(
        [
            args.frm2bit,
            '--frm_file', frm_out,
            '--output_file', args.bit_out,
            '--part_name', args.part,
            '--part_file', args.part_file,
        ])
def main():
    """CLI entry point: merge several .db files into one output file."""
    import argparse

    ap = argparse.ArgumentParser(description="Combine multiple .db files")
    util.db_root_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')
    ap.add_argument('--out', help='')
    ap.add_argument('ins', nargs='+', help='Last takes precedence')
    opts = ap.parse_args()

    # Strictness is driven by the MERGEDB_STRICT env var (default "1").
    strict_level = int(os.getenv("MERGEDB_STRICT", "1"))
    run(opts.ins, opts.out, strict=strict_level, verbose=opts.verbose)
def main():
    """CLI entry point: check a single db file for consistency."""
    import argparse

    ap = argparse.ArgumentParser(
        description="Parse a db file, checking for consistency")
    util.db_root_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')
    ap.add_argument(
        '--strict',
        action='store_true',
        help='Complain on unresolved entries (ex: <0 candidates>, <const0>)')
    ap.add_argument('fin', help='')
    ap.add_argument('fout', nargs='?', help='')
    opts = ap.parse_args()

    run(opts.fin, opts.fout, strict=opts.strict, verbose=opts.verbose)
def main():
    """CLI entry point: decode bits within a tile's address space (part-aware)."""
    import argparse

    ap = argparse.ArgumentParser(
        description="Decode bits within a tile's address space")
    util.db_root_arg(ap)
    util.part_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')
    ap.add_argument(
        '-z',
        action='store_true',
        help="do not print a 'seg' header for empty segments")
    ap.add_argument(
        '-b', action='store_true', help='print bits outside of known segments')
    ap.add_argument(
        '-d',
        action='store_true',
        help='decode known segment bits and write them as tags')
    ap.add_argument(
        '-D',
        action='store_true',
        help='decode known segment bits and omit them in the output')
    ap.add_argument(
        '--bit-only',
        action='store_true',
        help='only decode real bitstream directives')
    ap.add_argument('bits_file', help='')
    ap.add_argument(
        'segnames', nargs='*', help='List of tile or tile:block to print')
    opts = ap.parse_args()

    run(
        opts.db_root,
        opts.part,
        opts.bits_file,
        opts.segnames,
        opts.z,
        opts.b,
        opts.d,
        opts.D,
        bit_only=opts.bit_only,
        verbose=opts.verbose)
def main():
    """Convert a FASM file into its binary frame (.frm) equivalent.

    Parses command-line options and delegates the actual conversion to
    ``fasm2frames``, writing the frames to ``fn_out`` (stdout by default).
    """
    parser = argparse.ArgumentParser(
        description=
        'Convert FPGA configuration description ("FPGA assembly") into binary frame equivalent'
    )
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument(
        '--sparse', action='store_true', help="Don't zero fill all frames")
    parser.add_argument(
        '--roi',
        help="ROI design.json file defining which tiles are within the ROI.")
    parser.add_argument(
        '--emit_pudc_b_pullup',
        help="Emit an IBUF and PULLUP on the PUDC_B pin if unused",
        action='store_true')
    parser.add_argument('--debug', action='store_true', help="Print debug dump")
    parser.add_argument('fn_in', help='Input FPGA assembly (.fasm) file')
    parser.add_argument(
        'fn_out',
        default='/dev/stdout',
        nargs='?',
        help='Output FPGA frame (.frm) file')
    args = parser.parse_args()

    # Context manager guarantees the output file is flushed and closed even
    # if fasm2frames raises (the original opened it inline and never closed).
    with open(args.fn_out, 'w') as f_out:
        fasm2frames(
            db_root=args.db_root,
            part=args.part,
            filename_in=args.fn_in,
            f_out=f_out,
            sparse=args.sparse,
            roi=args.roi,
            debug=args.debug,
            emit_pudc_b_pullup=args.emit_pudc_b_pullup)
def main():
    """Update the fuzzer resource (pin) information for each available part.

    For every part (deduplicated by speedgrade) a Vivado Tcl script queries
    package pins with specific properties; four representative pins are
    stored per part.

    Example:
        prjxray$ ./utils/update_resources.py artix7 --db-root database/artix7/
    """
    parser = argparse.ArgumentParser(
        description="Saves all resource information for a family.")
    parser.add_argument(
        'family',
        help="Name of the device family.",
        choices=['artix7', 'kintex7', 'zynq7', 'spartan7'])
    db_root_arg(parser)
    args = parser.parse_args()

    env = os.environ.copy()
    cwd = os.path.dirname(os.path.abspath(__file__))
    resource_path = os.path.join(
        os.getenv('XRAY_DIR'), 'settings', args.family)
    information = {}
    parts = get_parts(args.db_root)
    processed_parts = dict()
    for part in parts.keys():
        # Skip parts which differ only in the speedgrade, as they have the
        # same pins.
        fields = part.split("-")
        common_part = fields[0]
        if common_part in processed_parts:
            information[part] = processed_parts[common_part]
            continue

        print("Find pins for {}".format(part))
        env['XRAY_PART'] = part

        # mkstemp returns an open descriptor; close it immediately so we do
        # not leak one fd per part (the original discarded it unclosed).
        tmp_fd, tmp_file = tempfile.mkstemp()
        os.close(tmp_fd)

        # Asks with get_package_pins and different filters for pins with
        # specific properties.
        command = "env TMP_FILE={} {} -mode batch -source update_resources.tcl".format(
            tmp_file, env['XRAY_VIVADO'])
        # check=True raises on a non-zero Vivado exit; the output file is
        # read back instead of stdout.
        subprocess.run(
            command.split(' '),
            check=True,
            env=env,
            cwd=cwd,
            stdout=subprocess.PIPE)

        with OpenSafeFile(tmp_file, "r") as fp:
            pins_json = json.load(fp)
        os.remove(tmp_file)

        clk_pins = pins_json["clk_pins"].split()
        data_pins = pins_json["data_pins"].split()

        # One clock pin plus first / middle / last data pins.
        pins = {
            0: clk_pins[0],
            1: data_pins[0],
            2: data_pins[int(len(data_pins) / 2)],
            3: data_pins[-1]
        }

        information[part] = {'pins': pins}
        processed_parts[common_part] = {'pins': pins}

    # Completely overwrites the <family>/resources.yaml file with new data.
    set_part_resources(resource_path, information)
def main():
    """Create an .xlsx timing worksheet for 7-series timing analysis.

    Reads a timing JSON dump, optionally filters nets by a wire list, and
    writes a workbook whose Summary sheet compares ground-truth vs computed
    delays (FAST/SLOW x MIN/MAX) with error and relative-error formulas.
    """
    parser = argparse.ArgumentParser(
        description="Create timing worksheet for 7-series timing analysis.")
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--timing_json', required=True)
    parser.add_argument('--output_xlsx', required=True)
    parser.add_argument(
        '--wire_filter',
        help='List of wires that must be present in a net to be output')
    args = parser.parse_args()

    with open(args.timing_json) as f:
        timing = json.load(f)

    db = Database(args.db_root, args.part)

    # Index every node from every net by name for fast lookup.
    nodes = {}
    for net in timing:
        for node in net['nodes']:
            nodes[node['name']] = node

    timing_lookup = TimingLookup(db, nodes)

    wb = Workbook()
    summary_ws = wb[wb.sheetnames[0]]
    summary_ws.title = 'Summary'
    summary_ws['A1'] = 'Name'
    cols = ['FAST_MAX', 'FAST_MIN', 'SLOW_MAX', 'SLOW_MIN']
    cur_col = 'B'
    # Each corner occupies 4 columns: truth, computed, error, error %.
    # Only the first two get headers here.
    for col in cols:
        summary_ws['{}1'.format(cur_col)] = col
        cur_col = chr(ord(cur_col) + 1)
        summary_ws['{}1'.format(cur_col)] = 'Computed ' + col
        cur_col = chr(ord(cur_col) + 3)

    if args.wire_filter:
        wire_filter = build_wire_filter(args.wire_filter)
    else:
        # No filter given: accept every net.
        wire_filter = lambda x: True

    summary_row = 2

    timing = [net for net in timing if wire_filter(net)]

    for idx, net in enumerate(timing):
        # '<' in the route string marks a route too complex to process.
        if '<' in net['route']:
            print(
                "WARNING: Skipping net {} because it has complicated route description."
                .format(net['net']))
            continue

        print('Process net {} ({} / {})'.format(net['net'], idx, len(timing)))
        for summary_cells in add_net(wb, net, timing_lookup):
            summary_ws['A{}'.format(summary_row)] = summary_cells['Name']

            cur_col = 'B'
            for col in cols:
                # 4-column group layout per corner (see header loop above).
                truth_col = chr(ord(cur_col) + 0)
                computed_col = chr(ord(cur_col) + 1)
                error_col = chr(ord(cur_col) + 2)
                error_per_col = chr(ord(cur_col) + 3)

                # Truth and computed values are spreadsheet formulas
                # referencing cells written by add_net.
                summary_ws['{}{}'.format(
                    truth_col,
                    summary_row)] = '=' + summary_cells['truth'][col]
                summary_ws['{}{}'.format(
                    computed_col,
                    summary_row)] = '=' + summary_cells['computed'][col]
                # Absolute error: truth - computed.
                summary_ws['{}{}'.format(
                    error_col,
                    summary_row)] = '={truth}{row}-{comp}{row}'.format(
                        truth=truth_col, comp=computed_col, row=summary_row)
                # Relative error: error / truth.
                summary_ws['{}{}'.format(
                    error_per_col,
                    summary_row)] = '={error}{row}/{truth}{row}'.format(
                        error=error_col, truth=truth_col, row=summary_row)

                cur_col = chr(ord(cur_col) + 4)

            summary_row += 1

    wb.save(filename=args.output_xlsx)
def main():
    """Report, per tile type, how many tiles have CLB_IO_CLK bits defined.

    Prints a per-type coverage line plus an overall summary; with --verbose
    also lists each tile missing CLB_IO_CLK bits.
    """
    parser = argparse.ArgumentParser(
        description="Tool for checking which tiles have bits defined.")
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--show-only-missing', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()

    db = Database(args.db_root, args.part)
    grid = db.grid()

    # Group (tile name, gridinfo) pairs by tile type.
    tile_types = {}
    for tile in grid.tiles():
        gridinfo = grid.gridinfo_at_tilename(tile)
        if gridinfo.tile_type not in tile_types:
            tile_types[gridinfo.tile_type] = []
        tile_types[gridinfo.tile_type].append((tile, gridinfo))

    total_tile_count = 0
    total_have_bits = 0
    for tile_type, tiles in sorted(tile_types.items()):
        try:
            tile_type_info = db.get_tile_type(tile_type)

            # Skip empty tiles, as no base address is required.
            if len(tile_type_info.get_pips()) == 0 and len(
                    tile_type_info.get_sites()) == 0:
                continue
        except KeyError:
            # Unknown tile type: still count it below.
            pass

        # INT_INTERFACE tiles likely don't contain configuration?  Remove
        # this if this ends up false.
        if 'INT_INTERFACE' in tile_type:
            continue

        if 'BRKH' in tile_type:
            continue

        have_bits = 0
        for tile_name, gridinfo in tiles:
            total_tile_count += 1
            if BlockType.CLB_IO_CLK in gridinfo.bits:
                have_bits += 1
                total_have_bits += 1

        if args.show_only_missing and have_bits == len(tiles):
            continue

        print(
            '{}: {}/{} ({:.2f} %)'.format(
                tile_type, have_bits, len(tiles),
                100. * float(have_bits) / len(tiles)))

        if args.verbose:
            tiles_with_missing_bits = []
            # BUG FIX: the original incremented total_tile_count again in
            # this loop, double-counting every tile of a reported type when
            # --verbose was set and skewing the final summary percentage.
            for tile_name, gridinfo in tiles:
                if BlockType.CLB_IO_CLK not in gridinfo.bits:
                    tiles_with_missing_bits.append(tile_name)

            for tile_name in sorted(tiles_with_missing_bits):
                print('{} is missing CLB_IO_CLK'.format(tile_name))

            print('')

    print(
        'Summary: {}/{} ({:.2f} %)'.format(
            total_have_bits, total_tile_count,
            100. * float(total_have_bits) / total_tile_count))
def main():
    """Verify database-generated node connections against the raw node dump.

    Reads the raw node JSON5 files in parallel, regenerates connections from
    the database, compares the two, and writes any mismatches to an error
    file. Exits non-zero unless every error is covered by --ignored_wires.
    """
    parser = argparse.ArgumentParser(
        description="Tests database against raw node list.")
    db_root_arg(parser)
    part_arg(parser)
    parser.add_argument('--raw_node_root', required=True)
    parser.add_argument('--error_nodes', default="error_nodes.json")
    parser.add_argument('--ignored_wires')
    args = parser.parse_args()

    processes = min(multiprocessing.cpu_count(), 10)
    print('{} Running {} processes'.format(datetime.datetime.now(), processes))

    # (Dropped a stray unused second argument from the original format call.)
    print('{} Reading raw data index'.format(datetime.datetime.now()))
    _, nodes = prjxray.lib.read_root_csv(args.raw_node_root)

    print('{} Reading raw_node_data'.format(datetime.datetime.now()))
    raw_node_data = []
    # Context manager ensures worker processes are terminated/reaped; the
    # original never closed the pool.
    with multiprocessing.Pool(processes=processes) as pool:
        with progressbar.ProgressBar(max_value=len(nodes)) as bar:
            for idx, node in enumerate(pool.imap_unordered(
                    read_json5,
                    nodes,
                    chunksize=20,
            )):
                bar.update(idx)
                raw_node_data.append(
                    (
                        node['node'],
                        tuple(wire['wire'] for wire in node['wires'])))
            bar.update(idx + 1)

    print('{} Creating connections'.format(datetime.datetime.now()))
    generated_nodes = make_connections(args.db_root, args.part)

    print('{} Verifying connections'.format(datetime.datetime.now()))
    error_nodes = []
    prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes)

    if len(error_nodes) > 0:
        if args.ignored_wires:
            with OpenSafeFile(args.ignored_wires, 'r') as f:
                ignored_wires = [l.strip() for l in f.readlines()]

        print(
            '{} Found {} errors, writing errors to {}'.format(
                datetime.datetime.now(),
                len(error_nodes),
                args.error_nodes,
            ))
        with OpenSafeFile(args.error_nodes, 'w') as f:
            json.dump(error_nodes, f, indent=2)

        # Without an ignore list any error is fatal.
        if not args.ignored_wires:
            sys.exit(1)

        if not prjxray.lib.check_errors(error_nodes, ignored_wires):
            print(
                '{} Errors were not ignored via ignored_wires {}'.format(
                    datetime.datetime.now(),
                    args.ignored_wires,
                ))
            sys.exit(1)
        else:
            print(
                '{} All errors were via ignored_wires {}'.format(
                    datetime.datetime.now(),
                    args.ignored_wires,
                ))
def main():
    """Create an .xlsx timing worksheet for 7-series timing analysis.

    Reads a timing JSON dump and writes a workbook whose Summary sheet
    compares ground-truth vs computed delays (FAST/SLOW x MIN/MAX) with
    error and relative-error formulas.
    """
    parser = argparse.ArgumentParser(
        description="Create timing worksheet for 7-series timing analysis.")
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--timing_json', required=True)
    parser.add_argument('--output_xlsx', required=True)
    args = parser.parse_args()

    with open(args.timing_json) as f:
        timing = json.load(f)

    db = Database(args.db_root, args.part)

    # Index every node from every net by name for fast lookup.
    nodes = {}
    for net in timing:
        for node in net['nodes']:
            nodes[node['name']] = node

    timing_lookup = TimingLookup(db, nodes)

    wb = Workbook()
    # wb[name] indexing replaces the deprecated Workbook.get_sheet_by_name(),
    # matching the sibling worksheet tool in this codebase.
    summary_ws = wb[wb.sheetnames[0]]
    summary_ws.title = 'Summary'
    summary_ws['A1'] = 'Name'
    cols = ['FAST_MAX', 'FAST_MIN', 'SLOW_MAX', 'SLOW_MIN']
    cur_col = 'B'
    # Each corner occupies 4 columns: truth, computed, error, error %.
    # Only the first two get headers here.
    for col in cols:
        summary_ws['{}1'.format(cur_col)] = col
        cur_col = chr(ord(cur_col) + 1)
        summary_ws['{}1'.format(cur_col)] = 'Computed ' + col
        cur_col = chr(ord(cur_col) + 3)

    summary_row = 2
    for net in timing:
        # '<' in the route string marks a route too complex to process.
        if '<' in net['route']:
            print(
                "WARNING: Skipping net {} because it has complicated route description."
                .format(net['net']))
            continue

        for summary_cells in add_net(wb, net, timing_lookup):
            summary_ws['A{}'.format(summary_row)] = summary_cells['Name']

            cur_col = 'B'
            for col in cols:
                # 4-column group layout per corner (see header loop above).
                truth_col = chr(ord(cur_col) + 0)
                computed_col = chr(ord(cur_col) + 1)
                error_col = chr(ord(cur_col) + 2)
                error_per_col = chr(ord(cur_col) + 3)

                summary_ws['{}{}'.format(
                    truth_col,
                    summary_row)] = '=' + summary_cells['truth'][col]
                summary_ws['{}{}'.format(
                    computed_col,
                    summary_row)] = '=' + summary_cells['computed'][col]
                # Absolute error: truth - computed.
                summary_ws['{}{}'.format(
                    error_col,
                    summary_row)] = '={truth}{row}-{comp}{row}'.format(
                        truth=truth_col, comp=computed_col, row=summary_row)
                # Relative error: error / truth.
                summary_ws['{}{}'.format(
                    error_per_col,
                    summary_row)] = '={error}{row}/{truth}{row}'.format(
                        error=error_col, truth=truth_col, row=summary_row)

                cur_col = chr(ord(cur_col) + 4)

            summary_row += 1

    wb.save(filename=args.output_xlsx)