def load_pcf_constraints(pcf):
    """
    Loads constraints for the repacker from a parsed PCF file.

    Args:
        pcf: An open PCF file (or iterable of PCF lines) to parse.

    Returns:
        List of RepackingConstraint objects — one per block type per
        clock constraint found in the PCF.
    """
    logging.debug(" Repacking constraints:")

    constraints = []
    for pcf_constr in parse_simple_pcf(pcf):
        # NOTE: compared by class name rather than isinstance() because
        # PcfClkConstraint may not be imported in this module; kept as-is.
        if type(pcf_constr).__name__ == 'PcfClkConstraint':

            # There are only "clb" and "io" tile types.
            # We select the same global clock for each tile where the net
            # is used, so emit one identical constraint per block type.
            for block_type in ('clb', 'io'):
                constraint = RepackingConstraint(
                    net=pcf_constr.net,
                    block_type=block_type,
                    port_spec=pcf_constr.pin)
                constraints.append(constraint)

                logging.debug(" {}: {}.{}[{}]".format(
                    constraint.net,
                    constraint.block_type,
                    constraint.port,
                    constraint.pin))

    return constraints
def load_io_sites(db_root, part, pcf):
    """ Load map of sites to signal names from pcf and part pin definitions.

    Args:
        db_root (str): Path to database root folder
        part (str): Part name being targeted.
        pcf (str): Full path to pcf file for this bitstream.

    Returns:
        Dict from pad site name to net name.
    """
    # First collect pad -> net assignments from the PCF.
    pin_to_signal = {}
    with open(pcf) as f:
        for constr in parse_simple_pcf(f):
            # Each pad may be constrained at most once.
            assert constr.pad not in pin_to_signal, constr.pad
            pin_to_signal[constr.pad] = constr.net

    # Then translate pads into site names via the package pin CSV.
    csv_path = os.path.join(db_root, '{}_package_pins.csv'.format(part))
    site_to_signal = {}
    with open(csv_path) as f:
        for row in csv.DictReader(f):
            pin = row['pin']
            if pin in pin_to_signal:
                # pop() both records the site mapping and marks the pin
                # as consumed for the completeness check below.
                site_to_signal[row['site']] = pin_to_signal.pop(pin)

    # Every constrained pin must exist in the package pin map.
    assert len(pin_to_signal) == 0, pin_to_signal.keys()

    return site_to_signal
def main():
    """Convert each PCF IO constraint into an XDC set_property command."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--pcf', required=True, help="Input PCF file")
    # Fixed: help text previously said "Output PCF file" for the XDC output.
    parser.add_argument('--xdc', required=True, help="Output XDC file")
    parser.add_argument('--iostandard', required=True,
                        help="IOSTANDARD to use")

    args = parser.parse_args()

    with open(args.pcf) as f, open(args.xdc, 'w') as f_out:
        for pcf_constraint in parse_simple_pcf(f):
            # One set_property line per constrained pad.
            print(
                'set_property -dict "PACKAGE_PIN {pin} IOSTANDARD {iostandard}" [get_ports {port}]'
                .format(
                    pin=pcf_constraint.pad,
                    port=pcf_constraint.net,
                    iostandard=args.iostandard),
                file=f_out)
def load_io_sites(db_root, part, pcf, eblif):
    """ Load map of sites to signal names from pcf or eblif and part pin definitions.

    Args:
        db_root (str): Path to database root folder
        part (str): Part name being targeted.
        pcf (str): Full path to pcf file for this bitstream.
        eblif (str): Parsed contents of EBLIF file.

    Returns:
        Dict from pad site name to net name.
    """
    pin_to_signal = {}

    # Collect pad -> net assignments from the PCF file, when provided.
    if pcf:
        with open(pcf) as f:
            for constr in parse_simple_pcf(f):
                assert constr.pad not in pin_to_signal, constr.pad
                pin_to_signal[constr.pad] = constr.net

    # Merge in pad -> net assignments taken from the EBLIF, when provided.
    if eblif:
        io_place = vpr_io_place.IoPlace()
        io_place.read_io_loc_pairs(eblif)
        for net, pad in io_place.net_to_pad:
            if pad not in pin_to_signal:
                pin_to_signal[pad] = net
            elif net != pin_to_signal[pad]:
                # Same pad constrained to two different nets — hard error.
                print(
                    """ERROR: Conflicting pin constraints for pad {}:\n{}\n{}""".format(
                        pad, net, pin_to_signal[pad]),
                    file=sys.stderr)
                sys.exit(1)

    # Translate pads into site names using the part's package pin CSV.
    site_to_signal = {}
    with open(os.path.join(db_root, part, 'package_pins.csv')) as f:
        for row in csv.DictReader(f):
            pin = row['pin']
            if pin in pin_to_signal:
                site_to_signal[row['site']] = pin_to_signal.pop(pin)

    # Every constrained pin must have matched a package pin.
    assert len(pin_to_signal) == 0, pin_to_signal.keys()

    return site_to_signal
def main():
    """Entry point: build a VPR io.place file from a PCF constraint file."""
    parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.')
    parser.add_argument(
        "--pcf", '-p', "-P",
        type=argparse.FileType('r'), required=True,
        help='PCF input file')
    parser.add_argument(
        "--blif", '-b',
        type=argparse.FileType('r'), required=True,
        help='BLIF / eBLIF file')
    parser.add_argument(
        "--map", '-m', "-M",
        type=argparse.FileType('r'), required=True,
        help='Pin map CSV file')
    parser.add_argument(
        "--output", '-o', "-O",
        type=argparse.FileType('w'), default=sys.stdout,
        help='The output io.place file')
    args = parser.parse_args()

    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)

    # Map of pad names to VPR locations.
    pad_map = {
        row['name']: (int(row['x']), int(row['y']), int(row['z']))
        for row in csv.DictReader(args.map)
    }

    for pcf_constraint in parse_simple_pcf(args.pcf):
        # The constrained net must exist in the design netlist.
        if not io_place.is_net(pcf_constraint.net):
            print(
                'PCF constraint "{}" from line {} constraints net {} which is not in available netlist:\n{}'
                .format(pcf_constraint.line_str, pcf_constraint.line_num,
                        pcf_constraint.net, '\n'.join(io_place.get_nets())),
                file=sys.stderr)
            sys.exit(1)

        # The constrained pad must be a known package pin.
        if pcf_constraint.pad not in pad_map:
            print(
                'PCF constraint "{}" from line {} constraints pad {} which is not in available pad map:\n{}'
                .format(pcf_constraint.line_str, pcf_constraint.line_num,
                        pcf_constraint.pad, '\n'.join(sorted(pad_map.keys()))),
                file=sys.stderr)
            sys.exit(1)

        io_place.constrain_net(
            net_name=pcf_constraint.net,
            loc=pad_map[pcf_constraint.pad],
            comment=pcf_constraint.line_str)

    io_place.output_io_place(args.output)
def main():
    """
    Convert a PCF file into a VPR io.place file.

    Optionally writes a JSON file with IOSTANDARD definitions for each
    constrained IOB (temporary workaround for fasm2bels — see below).
    """
    parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.'
    )
    parser.add_argument(
        "--pcf", '-p', "-P",
        type=argparse.FileType('r'),
        required=False,
        help='PCF input file'
    )
    parser.add_argument(
        "--blif", '-b',
        type=argparse.FileType('r'),
        required=True,
        help='BLIF / eBLIF file'
    )
    parser.add_argument(
        "--map", '-m', "-M",
        type=argparse.FileType('r'),
        required=True,
        help='Pin map CSV file'
    )
    parser.add_argument(
        "--output", '-o', "-O",
        type=argparse.FileType('w'),
        default=sys.stdout,
        help='The output io.place file'
    )
    parser.add_argument(
        "--iostandard_defs", help='(optional) Output IOSTANDARD def file'
    )
    parser.add_argument(
        "--iostandard",
        default="LVCMOS33",
        help='Default IOSTANDARD to use for pins',
    )
    parser.add_argument(
        "--drive",
        type=int,
        default=12,
        help='Default drive to use for pins',
    )
    parser.add_argument(
        "--net", '-n',
        type=argparse.FileType('r'),
        required=True,
        help='top.net file'
    )

    args = parser.parse_args()

    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)
    io_place.load_block_names_from_net_file(args.net)

    # Map of pad names to (VPR location, is_output, iob, real_io_assoc).
    pad_map = {}
    for pin_map_entry in csv.DictReader(args.map):
        pad_map[pin_map_entry['name']] = (
            (
                int(pin_map_entry['x']),
                int(pin_map_entry['y']),
                int(pin_map_entry['z']),
            ),
            pin_map_entry['is_output'],
            pin_map_entry['iob'],
            pin_map_entry['real_io_assoc'],
        )

    iostandard_defs = {}

    # Load iostandard constraints. This is a temporary workaround that allows
    # to pass them into fasm2bels. As soon as there is support for XDC this
    # will not be needed anymore.
    # If there is a JSON file with the same name as the PCF file then it is
    # loaded and used as iostandard constraint source NOT for the design but
    # to be used in fasm2bels.
    iostandard_constraints = {}

    if args.pcf:
        fname = args.pcf.name.replace(".pcf", ".json")
        if os.path.isfile(fname):
            with open(fname, "r") as fp:
                iostandard_constraints = json.load(fp)

    # FIX: take a copy — the original code aliased io_place.net_to_pad and
    # the "|=" below mutated the IoPlace object's internal set in place.
    net_to_pad = set(io_place.net_to_pad)

    if args.pcf:
        pcf_constraints = parse_simple_pcf(args.pcf)
        # Only IO constraints carry a .pad attribute; skip other constraint
        # kinds (e.g. clock constraints) instead of raising AttributeError.
        net_to_pad |= set(
            (constr.net, constr.pad)
            for constr in pcf_constraints if hasattr(constr, 'pad')
        )

    # Check for conflicting pad constraints
    net_to_pad_map = dict()
    for (net, pad) in net_to_pad:
        if net not in net_to_pad_map:
            net_to_pad_map[net] = pad
        elif pad != net_to_pad_map[net]:
            print(
                """ERROR: Conflicting pad constraints for net {}:\n{}\n{}""".format(
                    net, pad, net_to_pad_map[net]
                ),
                file=sys.stderr
            )
            sys.exit(1)

    # Constrain nets. Iterate in sorted order so the output is deterministic
    # (plain set iteration order varies between runs).
    for net, pad in sorted(net_to_pad):
        if not io_place.is_net(net):
            print(
                """ERROR: Constrained net {} is not in available netlist:\n{}""".format(
                    net,
                    '\n'.join(io_place.get_nets())
                ),
                file=sys.stderr
            )
            sys.exit(1)

        if pad not in pad_map:
            print(
                """ERROR: Constrained pad {} is not in available pad map:\n{}""".format(
                    pad,
                    '\n'.join(sorted(pad_map.keys()))
                ),
                file=sys.stderr
            )
            sys.exit(1)

        loc, is_output, iob, real_io_assoc = pad_map[pad]

        io_place.constrain_net(
            net_name=net,
            loc=loc,
            comment="set_property LOC {} [get_ports {{{}}}]".format(pad, net)
        )

        if real_io_assoc == 'True':
            # Explicit per-pad iostandard constraint wins; otherwise fall
            # back to the command-line defaults (outputs also get DRIVE).
            if pad in iostandard_constraints:
                iostandard_defs[iob] = iostandard_constraints[pad]
            else:
                if is_output:
                    iostandard_defs[iob] = {
                        'DRIVE': args.drive,
                        'IOSTANDARD': args.iostandard,
                    }
                else:
                    iostandard_defs[iob] = {
                        'IOSTANDARD': args.iostandard,
                    }

    io_place.output_io_place(args.output)

    # Write iostandard definitions
    if args.iostandard_defs:
        with open(args.iostandard_defs, 'w') as f:
            json.dump(iostandard_defs, f, indent=2)
def main():
    """
    Convert a PCF file into a VPR io.place file and, optionally, dump an
    IOSTANDARD definitions JSON file for the constrained IOBs.
    """
    parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.')
    parser.add_argument(
        "--pcf", '-p', "-P", type=argparse.FileType('r'),
        required=True, help='PCF input file')
    parser.add_argument(
        "--blif", '-b', type=argparse.FileType('r'),
        required=True, help='BLIF / eBLIF file')
    parser.add_argument(
        "--map", '-m', "-M", type=argparse.FileType('r'),
        required=True, help='Pin map CSV file')
    parser.add_argument(
        "--output", '-o', "-O", type=argparse.FileType('w'),
        default=sys.stdout, help='The output io.place file')
    parser.add_argument(
        "--iostandard_defs", help='(optional) Output IOSTANDARD def file')
    parser.add_argument(
        "--iostandard", default="LVCMOS33",
        help='IOSTANDARD to use for pins')
    parser.add_argument(
        "--drive", type=int, default=12,
        help='Drive to use for pins')
    parser.add_argument(
        "--net", '-n', type=argparse.FileType('r'),
        required=True, help='top.net file')
    args = parser.parse_args()

    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)
    io_place.load_block_names_from_net_file(args.net)

    # Pad name -> ((x, y, z) VPR location, is_output flag, IOB name).
    pad_map = {}
    for row in csv.DictReader(args.map):
        loc = (int(row['x']), int(row['y']), int(row['z']))
        pad_map[row['name']] = (loc, row['is_output'], row['iob'])

    iostandard_defs = {}

    for pcf_constraint in parse_simple_pcf(args.pcf):
        net = pcf_constraint.net
        pad = pcf_constraint.pad

        # The constrained net must exist in the design netlist.
        if not io_place.is_net(net):
            print(
                'PCF constraint "{}" from line {} constraints net {} which is not in available netlist:\n{}'
                .format(pcf_constraint.line_str, pcf_constraint.line_num,
                        net, '\n'.join(io_place.get_nets())),
                file=sys.stderr)
            sys.exit(1)

        # The constrained pad must be a known package pin.
        if pad not in pad_map:
            print(
                'PCF constraint "{}" from line {} constraints pad {} which is not in available pad map:\n{}'
                .format(pcf_constraint.line_str, pcf_constraint.line_num,
                        pad, '\n'.join(sorted(pad_map.keys()))),
                file=sys.stderr)
            sys.exit(1)

        loc, is_output, iob = pad_map[pad]
        io_place.constrain_net(net_name=net, loc=loc,
                               comment=pcf_constraint.line_str)

        # Outputs additionally get a DRIVE attribute.
        if is_output:
            iostandard_defs[iob] = {
                'DRIVE': args.drive,
                'IOSTANDARD': args.iostandard,
            }
        else:
            iostandard_defs[iob] = {
                'IOSTANDARD': args.iostandard,
            }

    io_place.output_io_place(args.output)

    # Optionally dump the collected IOSTANDARD definitions as JSON.
    if args.iostandard_defs:
        with open(args.iostandard_defs, 'w') as f:
            json.dump(iostandard_defs, f, indent=2)
def gen_io_def(args):
    """
    Generate a VPR io.place file from a PCF file, a pinmap XML and a
    port-to-pin CSV mapping.
    """
    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)
    io_place.load_block_names_from_net_file(args.net)

    # Load all the necessary data from the pinmap_xml.
    # NOTE: io_cells is currently unused here but kept for interface parity
    # with read_pinmapfile_data().
    io_cells, port_map = read_pinmapfile_data(args.pinmap_xml)

    # Map of pad names to VPR locations (plain dict — values are tuples,
    # the previous defaultdict(dict) default was never used).
    pad_map = {}

    with open(args.csv_file, mode='r') as csv_fp:
        reader = csv.DictReader(csv_fp)
        for line in reader:
            port_name_list = vec_to_scalar(line['port_name'])
            pin_name = vec_to_scalar(line['mapped_pin'])
            gpio_type = line['GPIO_type'].strip()

            # Vector ports must map onto an equal-length vector of pins.
            if len(port_name_list) != len(pin_name):
                print(
                    'CSV port name "{}" length does not match with mapped pin name "{}" length'
                    .format(line['port_name'], line['mapped_pin']),
                    file=sys.stderr
                )
                sys.exit(1)

            for port, pin in zip(port_name_list, pin_name):
                if port in port_map:
                    curr_map = port_map[port]
                    if gpio_type is None or gpio_type == '':
                        pad_map[pin] = (
                            int(curr_map.x), int(curr_map.y), int(curr_map.z)
                        )
                    else:
                        # GPIO pads are keyed as "<pin>:<gpio_type>".
                        gpio_pin = pin + ":" + gpio_type
                        pad_map[gpio_pin] = (
                            int(curr_map.x), int(curr_map.y), int(curr_map.z)
                        )
                else:
                    print(
                        'Port name "{}" specified in csv file "{}" is invalid. {} "{}"'
                        .format(
                            line['port_name'], args.csv_file,
                            "Specify from port names in xml file",
                            args.pinmap_xml
                        ),
                        file=sys.stderr
                    )
                    sys.exit(1)

    for pcf_constraint in parse_simple_pcf(args.pcf):
        pad_name = pcf_constraint.pad

        # FIX: the format arguments were previously swapped — the netlist
        # dump filled the "{}" before the colon and the "which is not..."
        # text landed after it, garbling the error message.
        if not io_place.is_net(pcf_constraint.net):
            print(
                'PCF constraint "{}" from line {} constraints net {} {}:\n{}'.
                format(
                    pcf_constraint.line_str, pcf_constraint.line_num,
                    pcf_constraint.net,
                    "which is not in available netlist",
                    '\n'.join(io_place.get_nets())
                ),
                file=sys.stderr
            )
            sys.exit(1)

        if pad_name not in pad_map:
            print(
                'PCF constraint "{}" from line {} constraints pad {} {}:\n{}'.
                format(
                    pcf_constraint.line_str, pcf_constraint.line_num,
                    pad_name,
                    "which is not in available pad map",
                    '\n'.join(sorted(pad_map.keys()))
                ),
                file=sys.stderr
            )
            sys.exit(1)

        # Get the top-level block instance; nets with no top-level block
        # are silently skipped. Strip its index afterwards.
        inst = io_place.get_top_level_block_instance_for_net(
            pcf_constraint.net
        )
        if inst is None:
            continue

        match = BLOCK_INSTANCE_RE.match(inst)
        assert match is not None, inst
        inst = match.group("name")

        # Constrain the net (block).
        locs = pad_map[pad_name]
        io_place.constrain_net(
            net_name=pcf_constraint.net,
            loc=locs,
            comment=pcf_constraint.line_str
        )

    # Only emit a file when at least one constraint was placed.
    if io_place.constraints:
        io_place.output_io_place(args.output)
def main():
    # Convert a PCF file into a VPR io.place file, resolving pad aliases
    # and picking the location matching the net's top-level pb_type.
    parser = argparse.ArgumentParser(
        description='Convert a PCF file into a VPR io.place file.')
    parser.add_argument("--pcf", "-p", "-P", type=argparse.FileType('r'),
                        required=True, help='PCF input file')
    parser.add_argument("--blif", "-b", type=argparse.FileType('r'),
                        required=True, help='BLIF / eBLIF file')
    parser.add_argument("--map", "-m", "-M", type=argparse.FileType('r'),
                        required=True, help='Pin map CSV file')
    parser.add_argument("--output", "-o", "-O", type=argparse.FileType('w'),
                        default=sys.stdout, help='The output io.place file')
    parser.add_argument("--net", "-n", type=argparse.FileType('r'),
                        required=True, help='top.net file')
    args = parser.parse_args()

    io_place = vpr_io_place.IoPlace()
    io_place.read_io_list_from_eblif(args.blif)
    io_place.load_block_names_from_net_file(args.net)

    # Map of pad names to VPR locations.
    # pad_map[name][block_type] -> (x, y, z); pad_alias_map[alias] -> name.
    pad_map = defaultdict(lambda: dict())
    pad_alias_map = defaultdict(lambda: dict())

    for pin_map_entry in csv.DictReader(args.map):
        # Only IOB-type entries are placeable pads.
        if pin_map_entry['type'] not in IOB_TYPES:
            continue

        name = pin_map_entry['name']
        alias = ""
        if 'alias' in pin_map_entry:
            alias = pin_map_entry['alias']

        # Record one location per block type the pad can map to.
        for type in IOB_TYPES[pin_map_entry['type']]:
            pad_map[name][type] = (
                int(pin_map_entry['x']),
                int(pin_map_entry['y']),
                int(pin_map_entry['z']),
            )
        if 'alias' in pin_map_entry:
            pad_alias_map[alias] = name

    for pcf_constraint in parse_simple_pcf(args.pcf):
        pad_name = pcf_constraint.pad

        # The constrained net must exist in the design netlist.
        if not io_place.is_net(pcf_constraint.net):
            print('PCF constraint "{}" from line {} constraints net {} \
which is not in available netlist:\n{}'.format(
                pcf_constraint.line_str, pcf_constraint.line_num,
                pcf_constraint.net, '\n'.join(io_place.get_nets())),
                file=sys.stderr)
            sys.exit(1)

        # The constrained pad must be a known pad name or a known alias.
        if pad_name not in pad_map and pad_name not in pad_alias_map:
            print('PCF constraint "{}" from line {} constraints pad {} \
which is not in available pad map:\n{}'.format(
                pcf_constraint.line_str, pcf_constraint.line_num, pad_name,
                '\n'.join(sorted(pad_map.keys()))),
                file=sys.stderr)
            sys.exit(1)

        # Alias is specified in pcf file so assign it to corresponding pad name
        if pad_name in pad_alias_map:
            pad_name = pad_alias_map[pad_name]

        # Get the top-level block instance, strip its index.
        # Nets without a top-level block are skipped silently.
        inst = io_place.get_top_level_block_instance_for_net(
            pcf_constraint.net)
        if inst is None:
            continue

        match = BLOCK_INSTANCE_RE.match(inst)
        assert match is not None, inst

        inst = match.group("name")

        # Pick correct loc for that pb_type
        locs = pad_map[pad_name]
        if inst not in locs:
            print(
                'PCF constraint "{}" from line {} constraints net {} of a block type {} \
to a location for block types:\n{}'.format(
                    pcf_constraint.line_str, pcf_constraint.line_num,
                    pcf_constraint.net, inst,
                    '\n'.join(sorted(list(locs.keys())))),
                file=sys.stderr)
            sys.exit(1)

        # Constraint the net (block)
        loc = locs[inst]
        io_place.constrain_net(net_name=pcf_constraint.net,
                               loc=loc,
                               comment=pcf_constraint.line_str)

    io_place.output_io_place(args.output)
def main():
    """
    Rewrite an SDC file's get_ports references according to PCF constraints,
    optionally validating against the EBLIF netlist and a pin map CSV.
    """
    # Parse arguments
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("--sdc-in", type=str, required=True,
                        help="Input SDC file")
    parser.add_argument("--pcf", type=str, required=True,
                        help="Input PCF file")
    parser.add_argument("--sdc-out", type=str, required=True,
                        help="Output SDC file")
    parser.add_argument("--eblif", type=str, default=None,
                        help="Input EBLIF netlist file")
    parser.add_argument("--pin-map", type=str, default=None,
                        help="Input CSV pin map file")
    args = parser.parse_args()

    # Read the input PCF file
    with open(args.pcf, "r") as fp:
        pcf_constraints = list(parse_simple_pcf(fp))

    # Build a pad-to-net map from the IO constraints only.
    pad_to_net = {}
    for constr in pcf_constraints:
        if not isinstance(constr, PcfIoConstraint):
            continue
        assert constr.pad not in pad_to_net, \
            "Multiple nets constrained to pin '{}'".format(constr.pad)
        pad_to_net[constr.pad] = constr

    # Read the input SDC file
    with open(args.sdc_in, "r") as fp:
        sdc = fp.read()

    # Optionally extract all valid net names from the EBLIF netlist.
    valid_nets = None
    if args.eblif is not None:
        with open(args.eblif, "r") as fp:
            valid_nets = collect_eblif_nets(parse_blif(fp))

    # Optionally extract valid pin names from the pinmap CSV file.
    valid_pins = None
    if args.pin_map is not None:
        with open(args.pin_map, "r") as fp:
            csv_data = list(csv.DictReader(fp))
        valid_pins = set(
            expand_indices([line["mapped_pin"] for line in csv_data]))

    # Substitution callback: rewrites one "[get_ports ...]" occurrence.
    def sub_cb(match):
        return process_get_ports(match, pad_to_net, valid_pins, valid_nets)

    # Process every non-comment line of the SDC.
    sdc_lines = sdc.splitlines()
    for i, line in enumerate(sdc_lines):
        if line.strip().startswith("#"):
            continue
        sdc_lines[i] = re.sub(r"\[\s*get_ports\s+(?P<arg>.*)\]",
                              sub_cb, line)

    # Write the output SDC file
    with open(args.sdc_out, "w") as fp:
        fp.write("\n".join(sdc_lines) + "\n")
def main():
    """ Main """
    # Parse arguments
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "--json",
        default=None,
        type=str,
        help="Read IOMUX configuration from the given JSON file")
    parser.add_argument("--eblif", default=None, type=str,
                        help="EBLIF netlist file of a design")
    parser.add_argument("--pcf", default=None, type=str,
                        help="PCF constraints file for a design")
    parser.add_argument("--map", "-m", "-M", type=argparse.FileType('r'),
                        required=True, help='Pin map CSV file')
    parser.add_argument("--output-format", default=None, type=str,
                        help='Output format of IOMUX commands (openocd/jlink)')
    args = parser.parse_args()

    # Read the requested configuration from a JSON file.
    # '--json' is mutually exclusive with '--pcf' + '--eblif'.
    if args.json is not None:
        if args.pcf is not None or args.eblif is not None:
            print("Use either '--json' or '--pcf' + '--eblif' options!")
            exit(-1)

        with open(args.json, "r") as fp:
            config = json.load(fp)

    # Generate the config according to the EBLIF netlist and PCF constraints.
    else:
        if args.json is not None or (args.eblif is None or args.pcf is None):
            print("Use either '--json' or '--pcf' + '--eblif' options!")
            exit(-1)

        # pad_map: pad name -> alias (or itself); pad_alias_map: alias -> name.
        pad_map = {}
        pad_alias_map = {}

        for pin_map_entry in csv.DictReader(args.map):
            # Only IOB-type entries are relevant for IOMUX configuration.
            if pin_map_entry['type'] not in IOB_TYPES:
                continue

            name = pin_map_entry['name']
            alias = ""
            if 'alias' in pin_map_entry:
                alias = pin_map_entry['alias']
                pad_alias_map[alias] = name
                pad_map[name] = alias
            else:
                pad_map[name] = name

        # Read and parse PCF
        with open(args.pcf, "r") as fp:
            pcf = list(parse_simple_pcf(fp))

        # Read and parse BLIF/EBLIF
        with open(args.eblif, "r") as fp:
            eblif = parse_blif(fp)

        # Build the config
        config = {"pads": {}}
        eblif_inputs = eblif["inputs"]["args"]
        eblif_outputs = eblif["outputs"]["args"]

        for constraint in pcf:
            pad_name = constraint.pad

            # The constrained pad must be a known pad name or alias.
            if pad_name not in pad_map and pad_name not in pad_alias_map:
                print("PCF constraint '{}' from line {} constraints pad {} "
                      "which is not in available pad map:\n{}".format(
                          constraint.line_str, constraint.line_num, pad_name,
                          '\n'.join(sorted(pad_map.keys()))),
                      file=sys.stderr)
                sys.exit(1)

            # get pad alias to get IO pad count
            pad_alias = ""
            if pad_name in pad_map:
                pad_alias = pad_map[pad_name]

            # Alias is specified in pcf file so assign it to corresponding pad name
            if pad_name in pad_alias_map:
                pad_alias = pad_name

            # The alias encodes the IO pad index as "IO_<n>".
            pad = None
            match = re.match(r"^IO_([0-9]+)$", pad_alias)
            if match is not None:
                pad = int(match.group(1))

            # Pad not found or out of range — skip silently.
            # NOTE(review): upper bound 46 looks like the device IO pad
            # count — confirm against the target part.
            if pad is None or pad < 0 or pad >= 46:
                continue

            # Detect inouts: an inout net appears as both "<net>_$inp"
            # input and "<net>_$out" output in the EBLIF.
            is_inout_in = constraint.net + '_$inp' in eblif_inputs
            is_inout_out = constraint.net + '_$out' in eblif_outputs

            if is_inout_in and is_inout_out:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "inout",
                }

            # Configure as input
            elif constraint.net in eblif_inputs:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "input",
                }

            # Configure as output
            elif constraint.net in eblif_outputs:
                pad_config = {
                    "ctrl_sel": "fabric",
                    "mode": "output",
                }

            # Constrained net is neither an input nor an output of the design.
            else:
                assert False, (constraint.net, constraint.pad)

            config["pads"][str(pad)] = pad_config

    # Convert the config to IOMUX register content
    iomux_regs = generate_iomux_register_content(config)

    if args.output_format == "openocd":
        # Output openOCD process
        for adr in sorted(iomux_regs.keys()):
            print(" mww 0x{:08x} 0x{:08x}".format(adr, iomux_regs[adr]))

    elif args.output_format == "jlink":
        # Output JLink commands
        for adr in sorted(iomux_regs.keys()):
            print("w4 0x{:08x} 0x{:08x}".format(adr, iomux_regs[adr]))

    elif args.output_format == "binary":
        # Output binary file: <REGADDR 4B><REGVAL 4B>...
        for adr in sorted(iomux_regs.keys()):
            # first the address
            addr_bytes = int(adr).to_bytes(4, byteorder='little')
            # output the address as raw bytes, bypass the print(), LE, 4B
            sys.stdout.buffer.write(addr_bytes)
            # second the value
            val_bytes = int(iomux_regs[adr]).to_bytes(4, byteorder='little')
            # output the value as raw bytes, bypass the print(), LE, 4B
            sys.stdout.buffer.write(val_bytes)

    else:
        print("Use either 'openocd' or 'jlink' or 'binary' output format!")
        exit(-1)