def main(argv):
    """Convert PCF constraints into a VPR io.place file written to args.output.

    Relies on a module-level ``parser`` plus the ``csv``, ``pcf`` and ``eblif``
    helpers defined elsewhere in this module.
    """
    args = parser.parse_args()

    # Load the pin map CSV: pin name -> (x, y, z) VPR grid location.
    reader = csv.DictReader(args.map)
    pin_map = {}
    for row in reader:
        pin_map[row['name']] = (int(row['x']), int(row['y']), int(row['z']))

    locs = pcf.parse_pcf(args.pcf, pin_map)
    blif = eblif.parse_blif(args.blif)

    # Track the widest block name so the output columns line up.
    nl = len("Block name")
    # Snapshot the keys: entries for output nets are renamed (and the old
    # key deleted) inside this loop.
    for name in list(locs.keys()):
        if name in blif['inputs']['args']:
            net_type = 'in'
        elif name in blif['outputs']['args']:
            net_type = 'out'
        else:
            raise SyntaxError("""\
Unable to find net {} in blif {}
Found inputs: {}
Found outputs: {}
""".format(name, args.blif.name, blif['inputs']['args'],
           blif['outputs']['args']))

        nname = name
        if net_type == 'out':
            # VPR names output blocks "out:<net>"; rename the entry to match.
            nname = 'out:' + name
            locs[nname] = locs[name]
            del locs[name]

        nl = max(nl, len(nname))

    # Header row, padded to the widest block name.
    print("""\
#{name:<{nl}} x y z pcf_line
#{s:-^{nl}} -- -- - ----""".format(name="Block Name", nl=nl, s=""),
          file=args.output)

    # One line per constrained block, with the originating PCF line as a
    # trailing comment.
    for name, ((x, y, z), pcf_line) in locs.items():
        print("""\
{name:<{nl}} {x: 3} {y: 3} {z: 2} # {pcf_line}""".format(
            name=name, nl=nl, x=x, y=y, z=z, pcf_line=pcf_line),
              file=args.output)
def read_io_list_from_eblif(self, eblif_file):
    """Populate input/output net sets and the inout alias map from an EBLIF file.

    Sets ``self.inputs``, ``self.outputs``, ``self.inout_nets`` and
    ``self.net_map`` (split net name -> original port name).
    """
    parsed = eblif.parse_blif(eblif_file)

    self.inputs = set(parsed['inputs']['args'])
    self.outputs = set(parsed['outputs']['args'])

    # Map the products of an inout port split back to their former name.
    # For example, an inout port 'A' is split into 'A_$inp' and 'A_$out'.
    self.net_map = {}
    self.inout_nets = set()
    for net_name in itertools.chain(self.inputs, self.outputs):
        if net_name.endswith(("_$inp", "_$out")):
            base = net_name.rsplit("_", 1)[0]
            self.inout_nets.add(base)
            self.net_map[net_name] = base
        else:
            # Regular input/output net, keep the name as-is.
            self.net_map[net_name] = net_name
def read_io_list_from_eblif(self, eblif_file):
    """Populate input/output net sets and the inout alias map from an EBLIF file.

    Sets ``self.inputs``, ``self.outputs``, ``self.inout_nets`` and
    ``self.net_map`` (split net name -> original port name).
    """
    parsed = eblif.parse_blif(eblif_file)

    self.inputs = set(parsed['inputs']['args'])
    self.outputs = set(parsed['outputs']['args'])

    # Map the products of an inout port split back to their former name.
    # For example, an inout port 'A' is split into 'A_$inp' and 'A_$out',
    # port B[2] into 'B_$inp[2]' and 'B_$out[2]'.
    self.net_map = {}
    self.inout_nets = set()
    for net_name in itertools.chain(self.inputs, self.outputs):
        m = INOUT_REGEX.match(net_name)
        if not m:
            # Regular input/output net, keep the name as-is.
            self.net_map[net_name] = net_name
            continue

        # Drop the "_$inp"/"_$out" infix, keep any index suffix.
        base = m.group(1) + m.group(3)
        self.inout_nets.add(base)
        self.net_map[net_name] = base
def main():
    """Create VPR placement constraints from an IO place file plus clock placement.

    Passes the input IO constraints through to the output, constrains all
    LOCed blocks from the packed netlist, then constrains clock blocks.
    """
    parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.')
    parser.add_argument(
        "--input", '-i', "-I",
        type=argparse.FileType('r'),
        # BUGFIX: default was sys.stdout; an input stream must default to
        # stdin.
        default=sys.stdin,
        help='The input constraints place file')
    parser.add_argument(
        "--output", '-o', "-O",
        type=argparse.FileType('w'),
        default=sys.stdout,
        help='The output constraints place file')
    parser.add_argument(
        "--net", '-n',
        type=argparse.FileType('r'),
        required=True,
        help='top.net file')
    parser.add_argument(
        '--vpr_grid_map',
        help='Map of canonical to VPR grid locations',
        required=True)
    parser.add_argument('--arch', help='Arch XML', required=True)
    parser.add_argument(
        "--blif", '-b',
        type=argparse.FileType('r'),
        required=True,
        help='BLIF / eBLIF file')
    parser.add_argument('--roi', action='store_true', help='Using ROI')

    args = parser.parse_args()

    # Pass the IO constraints through unchanged while recording each block's
    # (x, y, z) location and marking it as in use.
    io_blocks = {}
    loc_in_use = set()
    for line in args.input:
        args.output.write(line)

        if line[0] == '#':
            continue
        block, x, y, z = line.strip().split()[0:4]

        io_blocks[block] = (int(x), int(y), int(z))
        loc_in_use.add(io_blocks[block])

    place_constraints = vpr_place_constraints.PlaceConstraints()
    place_constraints.load_loc_sites_from_net_file(args.net)

    grid_capacities = get_tile_capacities(args.arch)

    eblif_data = eblif.parse_blif(args.blif)

    vpr_grid = VprGrid(args.vpr_grid_map)

    # Constrain all LOCed blocks from the packed netlist.
    blocks = {}
    block_locs = {}
    for block, loc in place_constraints.get_loc_sites():
        vpr_loc = get_vpr_coords_from_site_name(vpr_grid, loc, grid_capacities)
        loc_in_use.add(vpr_loc)

        if block in io_blocks:
            # An already-placed IO block must agree with the LOC site.
            assert io_blocks[block] == vpr_loc, (block, vpr_loc,
                                                 io_blocks[block])

        blocks[block] = vpr_loc
        block_locs[block] = loc

        place_constraints.constrain_block(
            block, vpr_loc, "Constraining block {}".format(block))

    # Constrain clock resources, if the design has clock nets.
    clock_placer = ClockPlacer(vpr_grid, io_blocks, eblif_data, args.roi)
    if clock_placer.has_clock_nets():
        for block, loc in clock_placer.place_clocks(vpr_grid, loc_in_use,
                                                    block_locs, blocks,
                                                    grid_capacities):
            vpr_loc = get_vpr_coords_from_site_name(vpr_grid, loc,
                                                    grid_capacities)
            place_constraints.constrain_block(
                block, vpr_loc, "Constraining clock block {}".format(block))

    place_constraints.output_place_constraints(args.output)
def main():
    """Convert a FASM file into Verilog plus a Tcl constraints script.

    Uses the prjxray connection database to resolve wires/BELs; can optionally
    run bit2fasm first when given a bitstream.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument(
        '--allow_orphan_sinks',
        action='store_true',
        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument(
        '--iostandard_defs',
        help=
        "Specify a JSON file defining IOSTANDARD and DRIVE parameters for each IOB site"
    )
    parser.add_argument(
        '--fasm_file',
        help="FASM file to convert BELs and routes.",
        required=True)
    parser.add_argument('--bit_file', help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument('--top', default="top", help="Root level module name.")
    parser.add_argument('--pcf', help="Mapping of top-level pins to pads.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument('verilog_file', help="Filename of output verilog file")
    parser.add_argument('tcl_file', help="Filename of output tcl script.")

    args = parser.parse_args()

    # Open the connection database read-only.
    conn = sqlite3.connect(
        'file:{}?mode=ro'.format(args.connection_database), uri=True)

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    # Optionally convert a bitstream into the FASM file first.
    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    tiles = {}

    maybe_get_wire = create_maybe_get_wire(conn)

    top = Module(db, grid, conn, name=args.top)
    if args.pcf:
        top.set_site_to_signal(load_io_sites(args.db_root, args.part,
                                             args.pcf))

    if args.route_file:
        # The routing file can only be interpreted against an rr graph.
        assert args.rr_graph
        net_map = load_net_list(conn, args.rr_graph, args.route_file)
        top.set_net_map(net_map)

    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)

        top.add_to_cname_map(parsed_eblif)

    # Group FASM set-features by tile; 3-part features (tile.x.y) are PIPs
    # and may contribute routing.
    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        parts = fasm_line.set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(fasm_line.set_feature)

        if len(parts) == 3:
            maybe_add_pip(top, maybe_get_wire, fasm_line.set_feature)

    if args.iostandard_defs:
        with open(args.iostandard_defs) as fp:
            defs = json.load(fp)
            top.set_iostandard_defs(defs)

    # Instantiate BELs per tile from the collected features.
    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    with open(args.verilog_file, 'w') as f:
        for l in top.output_verilog():
            print(l, file=f)

    with open(args.tcl_file, 'w') as f:
        for l in top.output_bel_locations():
            print(l, file=f)

        for l in top.output_nets():
            print(l, file=f)
def main():
    """Create placement constraints for clock buffers (GMUXes) fed by CLOCK pads.

    Passes the IO constraints through unchanged, then traces each constrained
    input clock net through a CLOCK_CELL IOB to a GMUX_IP buffer in the EBLIF
    netlist and emits a placement constraint for the GMUX block.
    """
    parser = argparse.ArgumentParser(
        description='Creates placement constraints other than IOs')
    parser.add_argument(
        "--input", '-i', "-I",
        type=argparse.FileType('r'),
        default=sys.stdin,
        help='The input constraints place file.')
    parser.add_argument(
        "--output", '-o', "-O",
        type=argparse.FileType('w'),
        default=sys.stdout,
        help='The output constraints place file.')
    parser.add_argument(
        "--map",
        type=argparse.FileType('r'),
        required=True,
        help="Clock pinmap CSV file")
    parser.add_argument(
        "--blif", '-b',
        type=argparse.FileType('r'),
        required=True,
        help='BLIF / eBLIF file.')

    args = parser.parse_args()

    # Load clock map: CLOCK pad (x, y, z) -> (GMUX (x, y, z), name).
    clock_to_gmux = {}
    for row in csv.DictReader(args.map):
        name = row["name"]
        src_loc = (
            int(row["src.x"]),
            int(row["src.y"]),
            int(row["src.z"]),
        )
        dst_loc = (
            int(row["dst.x"]),
            int(row["dst.y"]),
            int(row["dst.z"]),
        )
        clock_to_gmux[src_loc] = (dst_loc, name)

    # Load EBLIF
    eblif_data = eblif.parse_blif(args.blif)

    # Process the IO constraints file. Pass the constraints unchanged, store
    # them.
    io_constraints = {}
    for line in args.input:
        # Strip, skip comments
        line = line.strip()
        if line.startswith("#"):
            continue
        args.output.write(line + "\n")

        # Get block and its location
        block, x, y, z = line.split()[0:4]
        io_constraints[block] = (
            int(x),
            int(y),
            int(z),
        )

    # Analyze the BLIF netlist. Find clock inputs that go through CLOCK IOB to
    # GMUXes.
    clock_connections = []

    # (cell type, input pin, output pin)
    IOB_CELL = ("CLOCK_CELL", "I_PAD", "O_CLK")
    BUF_CELL = ("GMUX_IP", "IP", "IZ")

    for inp_net in eblif_data["inputs"]["args"]:

        # This one is not constrained, skip it
        if inp_net not in io_constraints:
            continue

        # Search for a CLOCK cell connected to that net.
        # NOTE(review): only the cell type is matched; the connection of the
        # cell's I_PAD pin to `inp_net` is not verified here (IOB_CELL[1] is
        # unused) — confirm this is safe for designs with several clock pads.
        for cell in eblif_data["subckt"]:
            if cell["type"] == "subckt" and cell["args"][0] == IOB_CELL[0]:
                iob_cell = cell
            else:
                continue

            # Get the CLOCK to GMUX net (for/else: no O_CLK pin -> next cell)
            for i in range(1, len(iob_cell["args"])):
                pin, net = iob_cell["args"][i].split("=")
                if pin == IOB_CELL[2]:
                    con_net = net
                    break
            else:
                continue

            # Search for a GMUX connected to the CLOCK cell
            for cell in eblif_data["subckt"]:
                if cell["type"] == "subckt" and cell["args"][0] == BUF_CELL[0]:
                    buf_cell = cell
                else:
                    continue

                # Get the output net of the GMUX (for/else as above)
                for i in range(1, len(buf_cell["args"])):
                    pin, net = buf_cell["args"][i].split("=")
                    if pin == BUF_CELL[2]:
                        clk_net = net
                        break
                else:
                    continue

                # Store data
                clock_connections.append(
                    (inp_net, iob_cell, con_net, buf_cell, clk_net))

    # Emit constraints for GCLK cells
    for inp_net, iob_cell, con_net, buf_cell, clk_net in clock_connections:
        src_loc = io_constraints[inp_net]
        if src_loc not in clock_to_gmux:
            # BUGFIX: "fro" -> "for" in the error message.
            print(
                "ERROR: No GMUX location for input CLOCK pad for net '{}' at {}"
                .format(inp_net, src_loc))
            continue

        dst_loc, name = clock_to_gmux[src_loc]

        # FIXME: Silently assuming here that VPR will name the GMUX block as
        # the GMUX cell in EBLIF. In order to fix that there will be a need
        # to read & parse the packed netlist file.
        line = "{} {} {} {} # {}\n".format(buf_cell["cname"][0], *dst_loc,
                                           name)
        args.output.write(line)
def main():
    """Create VPR placement constraints for a xc7 design.

    Constrains IO/LOCed blocks, blocks directly connected to IOs, clock
    resources, and IDELAYCTRL sites.
    """
    parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.')
    parser.add_argument(
        "--input", '-i', "-I",
        type=argparse.FileType('r'),
        # BUGFIX: default was sys.stdout; an input stream must default to
        # stdin.
        default=sys.stdin,
        help='The input constraints place file')
    parser.add_argument(
        "--output", '-o', "-O",
        type=argparse.FileType('w'),
        default=sys.stdout,
        help='The output constraints place file')
    parser.add_argument(
        "--net", '-n',
        type=argparse.FileType('r'),
        required=True,
        help='top.net file')
    parser.add_argument(
        '--vpr_grid_map',
        help='Map of canonical to VPR grid locations',
        required=True)
    parser.add_argument('--arch', help='Arch XML', required=True)
    parser.add_argument('--db_root', required=True)
    parser.add_argument('--part', required=True)
    parser.add_argument(
        "--blif", '-b',
        type=argparse.FileType('r'),
        required=True,
        help='BLIF / eBLIF file')
    parser.add_argument('--roi', action='store_true', help='Using ROI')
    parser.add_argument(
        "--allow-bufg-logic-sources",
        action="store_true",
        help="When set allows BUFGs to be driven by logic")
    parser.add_argument('--graph_limit', help='Graph limit parameters')

    args = parser.parse_args()

    # Derive the prjxray device family from the part name prefix.
    part = args.part
    device_families = {
        "xc7a": "artix7",
        "xc7k": "kintex7",
        "xc7z": "zynq7",
    }

    device_family = None
    for device in device_families:
        if part.startswith(device):
            device_family = device_families[device]
            break

    assert device_family

    db_root = os.path.join(args.db_root, device_family)
    db = prjxray.db.Database(db_root, args.part)
    canon_grid = db.grid()

    # Pass the IO constraints through unchanged while recording each block's
    # (x, y, z) location and marking it as in use.
    io_blocks = {}
    loc_in_use = set()
    for line in args.input:
        args.output.write(line)

        if line[0] == '#':
            continue
        block, x, y, z = line.strip().split()[0:4]

        io_blocks[block] = (int(x), int(y), int(z))
        loc_in_use.add(io_blocks[block])

    place_constraints = vpr_place_constraints.PlaceConstraints(args.net)
    place_constraints.load_loc_sites_from_net_file()

    grid_capacities = get_tile_capacities(args.arch)

    eblif_data = eblif.parse_blif(args.blif)

    vpr_grid = VprGrid(args.vpr_grid_map, args.graph_limit)

    # Constrain IO blocks and LOCed resources
    blocks = {}
    block_locs = {}
    for block, loc in place_constraints.get_loc_sites():
        vpr_loc = get_vpr_coords_from_site_name(canon_grid, vpr_grid, loc,
                                                grid_capacities)
        loc_in_use.add(vpr_loc)

        if block in io_blocks:
            # An already-placed IO block must agree with the LOC site.
            assert io_blocks[block] == vpr_loc, (block, vpr_loc,
                                                 io_blocks[block])

        blocks[block] = vpr_loc
        block_locs[block] = loc

        place_constraints.constrain_block(
            block, vpr_loc, "Constraining block {}".format(block))

    # Constrain blocks directly connected to IO in the same x, y location
    constrain_special_ios(canon_grid, vpr_grid, io_blocks, eblif_data, blocks,
                          place_constraints)

    # Constrain clock resources
    clock_placer = ClockPlacer(vpr_grid, io_blocks, eblif_data, args.roi,
                               args.graph_limit,
                               args.allow_bufg_logic_sources)
    if clock_placer.has_clock_nets():
        for block, loc in clock_placer.place_clocks(canon_grid, vpr_grid,
                                                    loc_in_use, block_locs,
                                                    blocks, grid_capacities):
            vpr_loc = get_vpr_coords_from_site_name(canon_grid, vpr_grid, loc,
                                                    grid_capacities)
            place_constraints.constrain_block(
                block, vpr_loc, "Constraining clock block {}".format(block))
    """
    Constrain IDELAYCTRL sites

    Prior to the invocation of this script, the IDELAYCTRL sites must have
    been replicated accordingly to the IDELAY specifications.

    There can be three different usage combinations of IDELAYCTRL and IDELAYs
    in a design:
        1. IODELAYs and IDELAYCTRLs can be constrained to banks as needed,
           through an in-design LOC constraint. Manual replication of the
           constrained IDELAYCTRLs is necessary to provide a controller for
           each bank.
        2. IODELAYs and a single IDELAYCTRL can be left entirely
           unconstrained, becoming a default group. The IDELAYCTRL is
           replicated depending on bank usage. Replication must have happened
           prior to this step
        3. One or more IODELAY_GROUPs can be defined that contain IODELAYs
           and a single IDELAYCTRL each. These components can be otherwise
           unconstrained and the IDELAYCTRL for each group has to be
           replicated as needed (depending on bank usage).

    NOTE: IODELAY_GROUPS are not enabled at the moment.
    """
    # Collect the clock regions (banks) that contain used IDELAYs.
    idelayctrl_cmts = set()
    idelay_instances = place_constraints.get_used_instances("IDELAYE2")
    for inst in idelay_instances:
        x, y, z = io_blocks[inst]
        idelayctrl_cmt = vpr_grid.get_vpr_loc_cmt()[(x, y)]
        idelayctrl_cmts.add(idelayctrl_cmt)

    idelayctrl_instances = place_constraints.get_used_instances("IDELAYCTRL")

    assert len(idelayctrl_cmts) == len(
        idelayctrl_instances
    ), "The number of IDELAYCTRL blocks and IO banks with IDELAYs used do not match."

    # Pick one IDELAYCTRL site per clock region that needs one.
    idelayctrl_sites = dict()
    for site_name, _, clk_region in vpr_grid.get_site_type_dict(
    )['IDELAYCTRL']:
        if clk_region in idelayctrl_cmts:
            idelayctrl_sites[clk_region] = site_name

    # Check and remove user constrained IDELAYCTRLs.
    # BUGFIX: iterate over a snapshot — removing from the list while
    # iterating it would skip elements.
    for idelayctrl_block in list(idelayctrl_instances):
        if idelayctrl_block in blocks.keys():
            x, y, _ = blocks[idelayctrl_block]
            idelayctrl_cmt = vpr_grid.get_vpr_loc_cmt()[(x, y)]

            assert idelayctrl_cmt in idelayctrl_cmts

            idelayctrl_cmts.remove(idelayctrl_cmt)
            idelayctrl_instances.remove(idelayctrl_block)

    # TODO: Add possibility to bind IDELAY banks to IDELAYCTRL sites using
    # the IDELAY_GROUP attribute.
    for cmt, idelayctrl_block in zip(idelayctrl_cmts, idelayctrl_instances):
        x, y = vpr_grid.get_site_dict()[idelayctrl_sites[cmt]]['vpr_loc']
        vpr_loc = (x, y, 0)

        place_constraints.constrain_block(
            idelayctrl_block, vpr_loc,
            "Constraining idelayctrl block {}".format(idelayctrl_block))

    if len(idelayctrl_instances) > 0:
        print(
            "Warning: IDELAY_GROUPS parameters are currently being ignored!",
            file=sys.stderr)

    place_constraints.output_place_constraints(args.output)
def read_io_list_from_eblif(self, eblif_file):
    """Record the design's top-level input and output net names.

    Parses the given EBLIF file and stores the `.inputs` / `.outputs` net
    names as ``self.inputs`` and ``self.outputs`` sets.
    """
    parsed = eblif.parse_blif(eblif_file)

    self.inputs = set(parsed['inputs']['args'])
    self.outputs = set(parsed['outputs']['args'])
def main():
    """Convert a FASM file into Verilog plus an XDC constraints file.

    Uses the prjxray connection database to resolve wires/BELs; can optionally
    run bit2fasm first when given a bitstream, and inserts the PS7 block when
    present in the tile grid (Zynq parts).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument(
        '--allow_orphan_sinks',
        action='store_true',
        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument(
        '--fasm_file',
        help="FASM file to convert BELs and routes.",
        required=True)
    parser.add_argument('--bit_file', help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument(
        '--allow-non-dedicated-clk-routes',
        action='store_true',
        help="Effectively sets CLOCK_DEDICATED_ROUTE to FALSE on all nets.")
    parser.add_argument(
        '--iostandard',
        default=None,
        help="Default IOSTANDARD to use for IO buffers.")
    parser.add_argument(
        '--drive',
        type=int,
        default=None,
        help="Default DRIVE to use for IO buffers.")
    parser.add_argument('--top', default="top", help="Root level module name.")
    parser.add_argument('--pcf', help="Mapping of top-level pins to pads.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument(
        '--vpr_capnp_schema_dir',
        help='Directory container VPR schema files',
    )
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument('verilog_file', help="Filename of output verilog file")
    parser.add_argument(
        'xdc_file', help="Filename of output xdc constraints file.")

    args = parser.parse_args()

    # Open the connection database read-only.
    conn = sqlite3.connect(
        'file:{}?mode=ro'.format(args.connection_database), uri=True)

    db = prjxray.db.Database(args.db_root, args.part)
    grid = db.grid()

    # Optionally convert a bitstream into the FASM file first.
    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    tiles = {}

    maybe_get_wire = create_maybe_get_wire(conn)

    top = Module(db, grid, conn, name=args.top)

    # BUGFIX: initialize up front so that using --pcf without --eblif does not
    # raise NameError below; load_io_sites then receives None.
    parsed_eblif = None
    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)

    if args.eblif or args.pcf:
        top.set_site_to_signal(
            load_io_sites(args.db_root, args.part, args.pcf, parsed_eblif))

    if args.route_file:
        # The routing file can only be interpreted against an rr graph and
        # the matching capnp schema.
        assert args.rr_graph
        assert args.vpr_capnp_schema_dir
        net_map = load_net_list(conn, args.vpr_capnp_schema_dir, args.rr_graph,
                                args.route_file)
        top.set_net_map(net_map)

    if args.part:
        with open(os.path.join(args.db_root, args.part, 'part.json')) as f:
            part_data = json.load(f)
            top.set_io_banks(part_data['iobanks'])

    if args.eblif:
        top.add_to_cname_map(parsed_eblif)
        top.make_iosettings_map(parsed_eblif)

    top.set_default_iostandard(args.iostandard, args.drive)

    # Group FASM set-features by tile; 3-part features (tile.x.y) are PIPs
    # and may contribute routing.
    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        set_feature = process_set_feature(fasm_line.set_feature)

        parts = set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(set_feature)

        if len(parts) == 3:
            maybe_add_pip(top, maybe_get_wire, set_feature)

    # Instantiate BELs per tile from the collected features.
    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    # Check if the PS7 is present in the tilegrid. If so then insert it.
    pss_tile, ps7_site = get_ps7_site(db)
    if pss_tile is not None and ps7_site is not None:

        # First load the PS7 ports
        fname = os.path.join(args.db_root, "ps7_ports.json")
        with open(fname, "r") as fp:
            ps7_ports = json.load(fp)

        # Insert the PS7
        insert_ps7(top, pss_tile, ps7_site, ps7_ports)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    if args.allow_non_dedicated_clk_routes:
        top.add_extra_tcl_line(
            "set_property CLOCK_DEDICATED_ROUTE FALSE [get_nets]")

    with open(args.verilog_file, 'w') as f:
        for line in top.output_verilog():
            print(line, file=f)

    with open(args.xdc_file, 'w') as f:
        for line in top.output_bel_locations():
            print(line, file=f)

        for line in top.output_nets():
            print(line, file=f)

        for line in top.output_disabled_drcs():
            print(line, file=f)

        for line in top.output_extra_tcl():
            print(line, file=f)
def main():
    """Rewrite [get_ports ...] references in an SDC file using PCF pad-to-net mapping.

    Optionally validates referenced names against the EBLIF netlist's nets and
    the pin map CSV's pins.
    """
    # Parse arguments
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "--sdc-in", type=str, required=True, help="Input SDC file")
    parser.add_argument("--pcf", type=str, required=True, help="Input PCF file")
    parser.add_argument(
        "--sdc-out", type=str, required=True, help="Output SDC file")
    parser.add_argument(
        "--eblif", type=str, default=None, help="Input EBLIF netlist file")
    parser.add_argument(
        "--pin-map", type=str, default=None, help="Input CSV pin map file")

    args = parser.parse_args()

    # Read the input PCF file
    with open(args.pcf, "r") as fp:
        pcf_constraints = list(parse_simple_pcf(fp))

    # Build a pad-to-net map; each pad may be constrained at most once.
    pad_to_net = {}
    for constr in pcf_constraints:
        if isinstance(constr, PcfIoConstraint):
            assert constr.pad not in pad_to_net, \
                "Multiple nets constrained to pin '{}'".format(constr.pad)
            pad_to_net[constr.pad] = constr

    # Read the input SDC file
    with open(args.sdc_in, "r") as fp:
        sdc = fp.read()

    # Read the input EBLIF file, extract all valid net names from it
    valid_nets = None
    if args.eblif is not None:
        with open(args.eblif, "r") as fp:
            eblif = parse_blif(fp)
        valid_nets = collect_eblif_nets(eblif)

    # Read the input pinmap CSV file, extract valid pin names from it
    valid_pins = None
    if args.pin_map is not None:
        with open(args.pin_map, "r") as fp:
            reader = csv.DictReader(fp)
            csv_data = list(reader)
        valid_pins = [line["mapped_pin"] for line in csv_data]
        valid_pins = set(expand_indices(valid_pins))

    # Process the SDC
    def sub_cb(match):
        # re.sub callback: rewrite one "[get_ports ...]" occurrence.
        return process_get_ports(match, pad_to_net, valid_pins, valid_nets)

    # Substitute on every non-comment line.
    sdc_lines = sdc.splitlines()
    for i in range(len(sdc_lines)):
        if not sdc_lines[i].strip().startswith("#"):
            sdc_lines[i] = re.sub(
                r"\[\s*get_ports\s+(?P<arg>.*)\]", sub_cb, sdc_lines[i])

    # Write the output SDC file
    sdc = "\n".join(sdc_lines) + "\n"
    with open(args.sdc_out, "w") as fp:
        fp.write(sdc)
def main():
    """ Main """
    # Parse arguments
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "--json",
        default=None,
        type=str,
        help="Read IOMUX configuration from the given JSON file")
    parser.add_argument(
        "--eblif",
        default=None,
        type=str,
        help="EBLIF netlist file of a design")
    parser.add_argument(
        "--pcf",
        default=None,
        type=str,
        help="PCF constraints file for a design")
    parser.add_argument(
        "--map", "-m", "-M",
        type=argparse.FileType('r'),
        required=True,
        help='Pin map CSV file')
    parser.add_argument(
        "--output-format",
        default=None,
        type=str,
        help='Output format of IOMUX commands (openocd/jlink)')

    args = parser.parse_args()

    # Read the requested configuration from a JSON file
    if args.json is not None:
        if args.pcf is not None or args.eblif is not None:
            print("Use either '--json' or '--pcf' + '--eblif' options!")
            exit(-1)

        with open(args.json, "r") as fp:
            config = json.load(fp)

    # Generate the config according to the EBLIF netlist and PCF constraints.
    else:
        # NOTE(review): args.json is always None in this branch, so only the
        # eblif/pcf presence check below is effective.
        if args.json is not None or (args.eblif is None or args.pcf is None):
            print("Use either '--json' or '--pcf' + '--eblif' options!")
            exit(-1)

        # Build pad name <-> alias maps from the pin map CSV (IOB pins only).
        pad_map = {}
        pad_alias_map = {}

        for pin_map_entry in csv.DictReader(args.map):
            if pin_map_entry['type'] not in IOB_TYPES:
                continue

            name = pin_map_entry['name']
            alias = ""
            if 'alias' in pin_map_entry:
                alias = pin_map_entry['alias']
                pad_alias_map[alias] = name
                pad_map[name] = alias
            else:
                pad_map[name] = name

        # Read and parse PCF
        with open(args.pcf, "r") as fp:
            pcf = list(parse_simple_pcf(fp))

        # Read and parse BLIF/EBLIF
        with open(args.eblif, "r") as fp:
            eblif = parse_blif(fp)

        # Build the config
        config = {"pads": {}}
        eblif_inputs = eblif["inputs"]["args"]
        eblif_outputs = eblif["outputs"]["args"]

        for constraint in pcf:
            pad_name = constraint.pad
            if pad_name not in pad_map and pad_name not in pad_alias_map:
                print(
                    "PCF constraint '{}' from line {} constraints pad {} "
                    "which is not in available pad map:\n{}".format(
                        constraint.line_str, constraint.line_num, pad_name,
                        '\n'.join(sorted(pad_map.keys()))),
                    file=sys.stderr)
                sys.exit(1)

            # get pad alias to get IO pad count
            pad_alias = ""
            if pad_name in pad_map:
                pad_alias = pad_map[pad_name]

            # Alias is specified in pcf file so assign it to corresponding pad name
            if pad_name in pad_alias_map:
                pad_alias = pad_name

            # Extract the IO pad index from the alias (e.g. "IO_12" -> 12).
            pad = None
            match = re.match(r"^IO_([0-9]+)$", pad_alias)
            if match is not None:
                pad = int(match.group(1))

            # Pad not found or out of range
            if pad is None or pad < 0 or pad >= 46:
                continue

            # Detect inouts: a split inout net appears as both "<net>_$inp"
            # input and "<net>_$out" output.
            is_inout_in = constraint.net + '_$inp' in eblif_inputs
            is_inout_out = constraint.net + '_$out' in eblif_outputs
            if is_inout_in and is_inout_out:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "inout",
                }
            elif constraint.net in eblif_inputs:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "input",
                }
            # Configure as output
            elif constraint.net in eblif_outputs:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "output",
                }
            else:
                assert False, (constraint.net, constraint.pad)

            config["pads"][str(pad)] = pad_config

    # Convert the config to IOMUX register content
    iomux_regs = generate_iomux_register_content(config)

    if args.output_format == "openocd":
        # Output openOCD process
        for adr in sorted(iomux_regs.keys()):
            print(" mww 0x{:08x} 0x{:08x}".format(adr, iomux_regs[adr]))

    elif args.output_format == "jlink":
        # Output JLink commands
        for adr in sorted(iomux_regs.keys()):
            print("w4 0x{:08x} 0x{:08x}".format(adr, iomux_regs[adr]))

    elif args.output_format == "binary":
        # Output binary file: <REGADDR 4B><REGVAL 4B>...
        for adr in sorted(iomux_regs.keys()):
            # first the address
            addr_bytes = int(adr).to_bytes(4, byteorder='little')
            # output the address as raw bytes, bypass the print(), LE, 4B
            sys.stdout.buffer.write(addr_bytes)
            # second the value
            val_bytes = int(iomux_regs[adr]).to_bytes(4, byteorder='little')
            # output the value as raw bytes, bypass the print(), LE, 4B
            sys.stdout.buffer.write(val_bytes)

    else:
        print("Use either 'openocd' or 'jlink' or 'binary' output format!")
        exit(-1)