def main():
    """Print a Vivado ``highlight_objects`` command for every pip set in a FASM file.

    FIX: the argparse description said "Vivavo"; corrected to "Vivado".
    """
    import argparse

    parser = argparse.ArgumentParser(
        description=
        'Outputs a Vivado highlight_objects command from a FASM file.')
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('fn_in', help='Input FPGA assembly (.fasm) file')
    args = parser.parse_args()

    database = db.Database(args.db_root, args.part)
    grid = database.grid()

    def inner():
        # Yield 'TILE/PIP' strings for each FASM pip feature
        # (feature format: TILE.NET_TO.NET_FROM).
        for line in fasm.parse_fasm_filename(args.fn_in):
            if not line.set_feature:
                continue

            parts = line.set_feature.feature.split('.')
            tile = parts[0]

            gridinfo = grid.gridinfo_at_tilename(tile)
            tile_type = database.get_tile_type(gridinfo.tile_type)

            for pip in tile_type.pips:
                if pip.net_from == parts[2] and pip.net_to == parts[1]:
                    yield '{}/{}'.format(tile, pip.name)

    print(
        'highlight_objects [concat {}]'.format(
            ' '.join('[get_pips {}]'.format(pip) for pip in inner())))
def main():
    """Command-line entry point for the prjxray database sanity check."""
    arg_parser = argparse.ArgumentParser(
        description="Runs a sanity check on a prjxray database.")

    # Shared --db_root / --part options used by all prjxray tools.
    util.db_root_arg(arg_parser)
    util.part_arg(arg_parser)

    opts = arg_parser.parse_args()
    quick_test(opts.db_root, opts.part)
def main():
    """Entry point: parse CLI options and run the db consistency check."""
    import argparse

    ap = argparse.ArgumentParser(
        description="Parse a db repository, checking for consistency")
    util.db_root_arg(ap)
    util.part_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')

    opts = ap.parse_args()
    run(opts.db_root, opts.part, verbose=opts.verbose)
def main():
    """Assemble a FASM file into frames, then invoke xc7frames2bit to make a .bit.

    Fixes over the original:
    - ``tempfile.mkstemp()`` returns an already-open OS-level fd which was
      leaked; it is now closed before the path is re-opened by name.
    - The frame file is opened via a context manager so it is flushed and
      closed before the external converter reads it, even on error.
    - Dropped the unused ``result`` local.
    """
    parser = argparse.ArgumentParser(
        description=
        'Convert FPGA configuration description ("FPGA assembly") into binary frame equivalent'
    )
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--part_file', required=True, help="Part YAML file.")
    parser.add_argument(
        '--sparse', action='store_true', help="Don't zero fill all frames")
    parser.add_argument(
        '--roi',
        help="ROI design.json file defining which tiles are within the ROI.")
    parser.add_argument(
        '--emit_pudc_b_pullup',
        help="Emit an IBUF and PULLUP on the PUDC_B pin if unused",
        action='store_true')
    parser.add_argument(
        '--debug', action='store_true', help="Print debug dump")
    parser.add_argument(
        '--frm2bit', default="xc7frames2bit", help="xc7frames2bit tool.")
    parser.add_argument('--fn_in', help='Input FPGA assembly (.fasm) file')
    parser.add_argument('--bit_out', help='Output FPGA bitstream (.bit) file')
    parser.add_argument(
        '--frm_out', default=None, help='Output FPGA frame (.frm) file')
    args = parser.parse_args()

    frm_out = args.frm_out
    if frm_out is None:
        # mkstemp hands back an open fd; close it so it is not leaked
        # (the path is re-opened by name just below).
        import os
        fd, frm_out = tempfile.mkstemp()
        os.close(fd)

    with open(frm_out, 'w') as f_out:
        fasm2frames(
            db_root=args.db_root,
            part=args.part,
            filename_in=args.fn_in,
            f_out=f_out,
            sparse=args.sparse,
            roi=args.roi,
            debug=args.debug,
            emit_pudc_b_pullup=args.emit_pudc_b_pullup)

    # NOTE(review): shell=True with interpolated paths; the inputs come from
    # the local command line, but a list argv with shell=False would be safer.
    subprocess.check_output(
        "{} --frm_file {} --output_file {} --part_name {} --part_file {}".
        format(args.frm2bit, frm_out, args.bit_out, args.part, args.part_file),
        shell=True)
def main():
    """CLI front end for decoding bits within a tile's address space."""
    import argparse

    ap = argparse.ArgumentParser(
        description="Decode bits within a tile's address space")
    util.db_root_arg(ap)
    util.part_arg(ap)
    ap.add_argument('--verbose', action='store_true', help='')
    ap.add_argument(
        '-z',
        action='store_true',
        help="do not print a 'seg' header for empty segments")
    ap.add_argument(
        '-b',
        action='store_true',
        help='print bits outside of known segments')
    ap.add_argument(
        '-d',
        action='store_true',
        help='decode known segment bits and write them as tags')
    ap.add_argument(
        '-D',
        action='store_true',
        help='decode known segment bits and omit them in the output')
    ap.add_argument(
        '--bit-only',
        action='store_true',
        help='only decode real bitstream directives')
    ap.add_argument('bits_file', help='')
    ap.add_argument(
        'segnames', nargs='*', help='List of tile or tile:block to print')

    opts = ap.parse_args()

    run(
        opts.db_root,
        opts.part,
        opts.bits_file,
        opts.segnames,
        opts.z,
        opts.b,
        opts.d,
        opts.D,
        bit_only=opts.bit_only,
        verbose=opts.verbose)
def main():
    """Convert a FASM file into its binary frame (.frm) equivalent.

    FIX: the output handle from ``open(args.fn_out, 'w')`` was never closed;
    a context manager now guarantees it is flushed and closed even if
    ``fasm2frames`` raises.
    """
    parser = argparse.ArgumentParser(
        description=
        'Convert FPGA configuration description ("FPGA assembly") into binary frame equivalent'
    )
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument(
        '--sparse', action='store_true', help="Don't zero fill all frames")
    parser.add_argument(
        '--roi',
        help="ROI design.json file defining which tiles are within the ROI.")
    parser.add_argument(
        '--emit_pudc_b_pullup',
        help="Emit an IBUF and PULLUP on the PUDC_B pin if unused",
        action='store_true')
    parser.add_argument(
        '--debug', action='store_true', help="Print debug dump")
    parser.add_argument('fn_in', help='Input FPGA assembly (.fasm) file')
    parser.add_argument(
        'fn_out',
        default='/dev/stdout',
        nargs='?',
        help='Output FPGA frame (.frm) file')
    args = parser.parse_args()

    with open(args.fn_out, 'w') as f_out:
        fasm2frames(
            db_root=args.db_root,
            part=args.part,
            filename_in=args.fn_in,
            f_out=f_out,
            sparse=args.sparse,
            roi=args.roi,
            debug=args.debug,
            emit_pudc_b_pullup=args.emit_pudc_b_pullup)
def main():
    """Build the timing-comparison workbook, optionally filtering nets by wire."""
    parser = argparse.ArgumentParser(
        description="Create timing worksheet for 7-series timing analysis.")
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--timing_json', required=True)
    parser.add_argument('--output_xlsx', required=True)
    parser.add_argument(
        '--wire_filter',
        help='List of wires that must be present in a net to be output')
    args = parser.parse_args()

    with open(args.timing_json) as f:
        timing = json.load(f)

    db = Database(args.db_root, args.part)

    # Index every node of every net by name for TimingLookup.
    nodes = {}
    for net in timing:
        for node in net['nodes']:
            nodes[node['name']] = node

    timing_lookup = TimingLookup(db, nodes)

    def shift(letter, offset):
        # Advance a single spreadsheet column letter by `offset` columns.
        return chr(ord(letter) + offset)

    wb = Workbook()
    summary_ws = wb[wb.sheetnames[0]]
    summary_ws.title = 'Summary'
    summary_ws['A1'] = 'Name'

    cols = ['FAST_MAX', 'FAST_MIN', 'SLOW_MAX', 'SLOW_MIN']

    # Header row: each timing corner occupies 4 columns (truth, computed,
    # error, error fraction); only the first two carry header text.
    col_letter = 'B'
    for col in cols:
        summary_ws['{}1'.format(col_letter)] = col
        col_letter = shift(col_letter, 1)
        summary_ws['{}1'.format(col_letter)] = 'Computed ' + col
        col_letter = shift(col_letter, 3)

    if args.wire_filter:
        wire_filter = build_wire_filter(args.wire_filter)
    else:

        def wire_filter(net):
            # No filter requested: accept every net.
            return True

    summary_row = 2
    timing = [net for net in timing if wire_filter(net)]
    for idx, net in enumerate(timing):
        if '<' in net['route']:
            print(
                "WARNING: Skipping net {} because it has complicated route description."
                .format(net['net']))
            continue

        print('Process net {} ({} / {})'.format(net['net'], idx, len(timing)))

        for summary_cells in add_net(wb, net, timing_lookup):
            summary_ws['A{}'.format(summary_row)] = summary_cells['Name']
            col_letter = 'B'
            for col in cols:
                truth_col = shift(col_letter, 0)
                computed_col = shift(col_letter, 1)
                error_col = shift(col_letter, 2)
                error_per_col = shift(col_letter, 3)

                summary_ws['{}{}'.format(
                    truth_col,
                    summary_row)] = '=' + summary_cells['truth'][col]
                summary_ws['{}{}'.format(
                    computed_col,
                    summary_row)] = '=' + summary_cells['computed'][col]
                summary_ws['{}{}'.format(
                    error_col,
                    summary_row)] = '={truth}{row}-{comp}{row}'.format(
                        truth=truth_col, comp=computed_col, row=summary_row)
                summary_ws['{}{}'.format(
                    error_per_col,
                    summary_row)] = '={error}{row}/{truth}{row}'.format(
                        error=error_col, truth=truth_col, row=summary_row)

                col_letter = shift(col_letter, 4)

            summary_row += 1

    wb.save(filename=args.output_xlsx)
def main():
    """Report, per tile type, how many tiles have CLB_IO_CLK bits defined.

    FIX: the ``--verbose`` branch incremented ``total_tile_count`` a second
    time for every tile, double-counting tiles of verbose-printed types and
    skewing the summary percentage. The extra increment is removed.
    """
    parser = argparse.ArgumentParser(
        description="Tool for checking which tiles have bits defined.")
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--show-only-missing', action='store_true')
    parser.add_argument('--verbose', action='store_true')
    args = parser.parse_args()

    db = Database(args.db_root, args.part)
    grid = db.grid()

    # Group tiles by tile type.
    tile_types = {}
    for tile in grid.tiles():
        gridinfo = grid.gridinfo_at_tilename(tile)
        if gridinfo.tile_type not in tile_types:
            tile_types[gridinfo.tile_type] = []
        tile_types[gridinfo.tile_type].append((tile, gridinfo))

    total_tile_count = 0
    total_have_bits = 0

    for tile_type, tiles in sorted(tile_types.items()):
        try:
            tile_type_info = db.get_tile_type(tile_type)

            # Skip empty tiles, as no base address is required.
            if len(tile_type_info.get_pips()) == 0 and len(
                    tile_type_info.get_sites()) == 0:
                continue
        except KeyError:
            pass

        # INT_INTERFACE tiles likely don't contain configuration? Remove this
        # if this ends up false.
        if 'INT_INTERFACE' in tile_type:
            continue

        if 'BRKH' in tile_type:
            continue

        have_bits = 0
        for tile_name, gridinfo in tiles:
            total_tile_count += 1
            if BlockType.CLB_IO_CLK in gridinfo.bits:
                have_bits += 1
                total_have_bits += 1

        if args.show_only_missing and have_bits == len(tiles):
            continue

        print(
            '{}: {}/{} ({:.2f} %)'.format(
                tile_type, have_bits, len(tiles),
                100. * float(have_bits) / len(tiles)))

        if args.verbose:
            # Tiles were already counted above; only collect the names of
            # tiles missing CLB_IO_CLK bits here.
            tiles_with_missing_bits = []
            for tile_name, gridinfo in tiles:
                if BlockType.CLB_IO_CLK not in gridinfo.bits:
                    tiles_with_missing_bits.append(tile_name)

            for tile_name in sorted(tiles_with_missing_bits):
                print('{} is missing CLB_IO_CLK'.format(tile_name))

    print('')
    print(
        'Summary: {}/{} ({:.2f} %)'.format(
            total_have_bits, total_tile_count,
            100. * float(total_have_bits) / total_tile_count))
def main():
    """Verify generated node connections against the raw node dump.

    Fixes over the original:
    - Removed a stray ``processes`` argument passed to the
      "Reading raw data index" format() call (it had no placeholder).
    - ``idx`` is pre-initialized so the trailing ``bar.update(idx + 1)``
      does not raise NameError when the node list is empty.
    - The worker pool is closed and joined once the raw data is read.
    """
    parser = argparse.ArgumentParser(
        description="Tests database against raw node list.")
    db_root_arg(parser)
    part_arg(parser)
    parser.add_argument('--raw_node_root', required=True)
    parser.add_argument('--error_nodes', default="error_nodes.json")
    parser.add_argument('--ignored_wires')
    args = parser.parse_args()

    processes = min(multiprocessing.cpu_count(), 10)
    print('{} Running {} processes'.format(datetime.datetime.now(), processes))
    pool = multiprocessing.Pool(processes=processes)

    print('{} Reading raw data index'.format(datetime.datetime.now()))
    _, nodes = prjxray.lib.read_root_csv(args.raw_node_root)

    print('{} Reading raw_node_data'.format(datetime.datetime.now()))
    raw_node_data = []
    idx = -1  # keep the final bar.update safe if `nodes` is empty
    with progressbar.ProgressBar(max_value=len(nodes)) as bar:
        for idx, node in enumerate(pool.imap_unordered(
                read_json5,
                nodes,
                chunksize=20,
        )):
            bar.update(idx)
            raw_node_data.append(
                (node['node'], tuple(wire['wire'] for wire in node['wires'])))
        bar.update(idx + 1)

    # Workers are no longer needed; release them promptly.
    pool.close()
    pool.join()

    print('{} Creating connections'.format(datetime.datetime.now()))
    generated_nodes = make_connections(args.db_root, args.part)

    print('{} Verifying connections'.format(datetime.datetime.now()))
    error_nodes = []
    prjxray.lib.verify_nodes(raw_node_data, generated_nodes, error_nodes)

    if len(error_nodes) > 0:
        if args.ignored_wires:
            with OpenSafeFile(args.ignored_wires, 'r') as f:
                ignored_wires = [l.strip() for l in f.readlines()]

        print('{} Found {} errors, writing errors to {}'.format(
            datetime.datetime.now(),
            len(error_nodes),
            args.error_nodes,
        ))
        with OpenSafeFile(args.error_nodes, 'w') as f:
            json.dump(error_nodes, f, indent=2)

        # Without an ignore list, any error is fatal.
        if not args.ignored_wires:
            sys.exit(1)

        if not prjxray.lib.check_errors(error_nodes, ignored_wires):
            print('{} Errors were not ignored via ignored_wires {}'.format(
                datetime.datetime.now(),
                args.ignored_wires,
            ))
            sys.exit(1)
        else:
            print('{} All errors were via ignored_wires {}'.format(
                datetime.datetime.now(),
                args.ignored_wires,
            ))
def main():
    """Create the timing-comparison workbook (Summary sheet plus per-net sheets).

    FIX: ``Workbook.get_sheet_by_name()`` is deprecated in openpyxl; use the
    mapping interface ``wb[name]`` instead, matching the sibling tool in this
    file that already does so.
    """
    parser = argparse.ArgumentParser(
        description="Create timing worksheet for 7-series timing analysis.")
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--timing_json', required=True)
    parser.add_argument('--output_xlsx', required=True)
    args = parser.parse_args()

    with open(args.timing_json) as f:
        timing = json.load(f)

    db = Database(args.db_root, args.part)

    # Index every node of every net by name for TimingLookup.
    nodes = {}
    for net in timing:
        for node in net['nodes']:
            nodes[node['name']] = node

    timing_lookup = TimingLookup(db, nodes)

    wb = Workbook()
    summary_ws = wb[wb.sheetnames[0]]
    summary_ws.title = 'Summary'
    summary_ws['A1'] = 'Name'

    cols = ['FAST_MAX', 'FAST_MIN', 'SLOW_MAX', 'SLOW_MIN']

    # Header row: each timing corner occupies 4 columns (truth, computed,
    # error, error fraction); only the first two carry header text.
    cur_col = 'B'
    for col in cols:
        summary_ws['{}1'.format(cur_col)] = col
        cur_col = chr(ord(cur_col) + 1)
        summary_ws['{}1'.format(cur_col)] = 'Computed ' + col
        cur_col = chr(ord(cur_col) + 3)

    summary_row = 2
    for net in timing:
        # Nets with fanout markers in the route are not handled.
        if '<' in net['route']:
            print(
                "WARNING: Skipping net {} because it has complicated route description."
                .format(net['net']))
            continue

        for summary_cells in add_net(wb, net, timing_lookup):
            summary_ws['A{}'.format(summary_row)] = summary_cells['Name']
            cur_col = 'B'
            for col in cols:
                truth_col = chr(ord(cur_col) + 0)
                computed_col = chr(ord(cur_col) + 1)
                error_col = chr(ord(cur_col) + 2)
                error_per_col = chr(ord(cur_col) + 3)

                summary_ws['{}{}'.format(
                    truth_col,
                    summary_row)] = '=' + summary_cells['truth'][col]
                summary_ws['{}{}'.format(
                    computed_col,
                    summary_row)] = '=' + summary_cells['computed'][col]
                summary_ws['{}{}'.format(
                    error_col,
                    summary_row)] = '={truth}{row}-{comp}{row}'.format(
                        truth=truth_col, comp=computed_col, row=summary_row)
                summary_ws['{}{}'.format(
                    error_per_col,
                    summary_row)] = '={error}{row}/{truth}{row}'.format(
                        error=error_col, truth=truth_col, row=summary_row)

                cur_col = chr(ord(cur_col) + 4)

            summary_row += 1

    wb.save(filename=args.output_xlsx)