Example No. 1
def main():
    global max_row, max_col
    pytrellis.load_database(database.get_db_root())
    args = parser.parse_args()

    # Read port pin file
    with open(args.portspins) as f:
        for line in f:
            line = line.replace("(", " ")
            line = line.replace(")", " ")
            line = line.split()
            if len(line) == 0:
                continue
            assert len(line) == 2
            assert line[0] == "X"
            idx = len(portpins) + 1
            portpins[line[1]] = idx

    # print("Initialising chip...")
    chip = pytrellis.Chip(dev_names[args.device])
    # print("Building routing graph...")
    ddrg = pytrellis.make_dedup_chipdb(chip)
    max_row = chip.get_max_row()
    max_col = chip.get_max_col()
    process_pio_db(ddrg, args.device)
    process_loc_globals(chip)
    # print("{} unique location types".format(len(ddrg.locationTypes)))
    bba = write_database(args.device, chip, ddrg, "le")
Example No. 2
def main():
    db = libpyprjoxide.Database(database.get_db_root())
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B5_1", [
        "SYSIO_B5_1_V18", "SYSIO_B5_1_15K_DQS51", "SYSIO_B5_1_15K_DQS50",
        "SYSIO_B5_1_15K_ECLK_L_V52"
    ], "PEWC", "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B5_0", ["SYSIO_B5_0_15K_DQS52"],
                          "PEWC", "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B4_0", [
        "SYSIO_B4_0_DQS1", "SYSIO_B4_0_DQS3", "SYSIO_B4_0_DLY50",
        "SYSIO_B4_0_DLY42", "SYSIO_B4_0_15K_DQS42", "SYSIO_B4_0_15K_BK4_V42",
        "SYSIO_B4_0_15K_V31"
    ], "PEWC", "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B4_1", [
        "SYSIO_B4_1_DQS0", "SYSIO_B4_1_DQS2", "SYSIO_B4_1_DQS4",
        "SYSIO_B4_1_DLY52", "SYSIO_B4_1_15K_DQS41"
    ], "PEWC", "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B3_0", [
        "SYSIO_B3_0_DLY30_V18", "SYSIO_B3_0_DQS1", "SYSIO_B3_0_DQS3",
        "SYSIO_B3_0_15K_DQS32"
    ], "PEWC", "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B3_1", [
        "SYSIO_B3_1_DLY32", "SYSIO_B3_1_DQS0", "SYSIO_B3_1_DQS2",
        "SYSIO_B3_1_DQS4", "SYSIO_B3_1_ECLK_R", "SYSIO_B3_1_V18",
        "SYSIO_B3_1_15K_DQS30", "SYSIO_B3_1_15K_ECLK_R_DQS31"
    ], "PEWC", "")

    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B1_0_ODD", ["SYSIO_B1_0_C"], "C",
                          "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B2_0_ODD", ["SYSIO_B2_0_C"], "C",
                          "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B6_0_ODD", ["SYSIO_B6_0_C"], "C",
                          "")
    libpyprjoxide.copy_db(db, "LIFCL", "SYSIO_B7_0_ODD", ["SYSIO_B7_0_C"], "C",
                          "")
Example No. 3
def main():
    for cfg, ip_sites in cfgs:
        cfg.setup(skip_specimen=True)
        ip_base = {}
        for site, prim in ip_sites:
            prim_type = prim
            wid_idx = prim_type.find("_WID")
            if wid_idx != -1:
                prim_type = prim_type[0:wid_idx]
            bit = cfg.build_design(
                cfg.sv,
                dict(cmt="",
                     prim=prim_type,
                     site=site,
                     config=ip_settings[prim]))
            chip = libpyprjoxide.Chip.from_bitstream(fuzzconfig.db, bit)
            ipv = chip.get_ip_values()
            assert len(ipv) > 0
            addr = ipv[0][0]
            ip_name = site
            if "EBR_CORE" in ip_name:
                ip_name = prim.replace("_CORE", "")
            #if "LRAM_CORE" in ip_name:
            #    ip_name = "LRAM"
            ip_base[ip_name] = {
                "addr": addr & ~((1 << ip_abits[prim_type]) - 1),
                "abits": ip_abits[prim_type]
            }
        with open(
                path.join(database.get_db_root(), "LIFCL", cfg.device,
                          "baseaddr.json"), "w") as jf:
            print(json.dumps(dict(regions=ip_base), sort_keys=True, indent=4),
                  file=jf)
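
The baseaddr.json written above maps each IP region name to its masked base address. A hedged sketch of the output shape (the region name and numbers below are invented, not read from a real device):

# ip_base entries as serialised by json.dumps(dict(regions=ip_base), ...):
baseaddr_example = {
    "regions": {
        "PLL_LLC": {
            "abits": 8,      # address bits spanned by the IP region
            "addr": 1792     # base address with the low 8 bits masked off
        }
    }
}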
Example No. 4
def main():
    pytrellis.load_database(database.get_db_root())
    des = design.Design("LFE5U-45F")
    ctr_q = make_bus("Q", 8)
    des.router.bind_net_to_port("clk", "R62C89_JQ5")  # randomly chosen input pin; check!
    make_counter(des, "ctr", 8, "clk", ctr_q)
    des.make_bitstream("counter.bit")
Example No. 5
def main():
    global max_row, max_col, const_id_count

    pytrellis.load_database(database.get_db_root())
    args = parser.parse_args()

    const_id_count = 1 # count ID_NONE
    with open(args.constids) as f:
        for line in f:
            line = line.replace("(", " ")
            line = line.replace(")", " ")
            line = line.split()
            if len(line) == 0:
                continue
            assert len(line) == 2
            assert line[0] == "X"
            idx = len(constids) + 1
            constids[line[1]] = idx
            const_id_count += 1

    constids["SLICE"] = constids["FACADE_SLICE"]
    constids["PIO"] = constids["FACADE_IO"]

    chip = pytrellis.Chip(dev_names[args.device])
    rg = pytrellis.make_optimized_chipdb(chip)
    max_row = chip.get_max_row()
    max_col = chip.get_max_col()
    process_pio_db(rg, args.device)
    bba = write_database(args.device, chip, rg, "le")
Example No. 6
def main():
    global max_row, max_col, const_id_count
    pytrellis.load_database(database.get_db_root())
    args = parser.parse_args()

    # Read constant ID file
    const_id_count = 1 # count ID_NONE
    with open(args.constids) as f:
        for line in f:
            line = line.replace("(", " ")
            line = line.replace(")", " ")
            line = line.split()
            if len(line) == 0:
                continue
            assert len(line) == 2
            assert line[0] == "X"
            idx = len(constids) + 1
            constids[line[1]] = idx
            const_id_count += 1

    constids["SLICE"] = constids["TRELLIS_SLICE"]
    constids["PIO"] = constids["TRELLIS_IO"]

    # print("Initialising chip...")
    chip = pytrellis.Chip(dev_names[args.device])
    # print("Building routing graph...")
    ddrg = pytrellis.make_dedup_chipdb(chip)
    max_row = chip.get_max_row()
    max_col = chip.get_max_col()
    process_timing_data()
    process_pio_db(ddrg, args.device)
    process_loc_globals(chip)
    # print("{} unique location types".format(len(ddrg.locationTypes)))
    bba = write_database(args.device, chip, ddrg, "le")
Example No. 7
def main(argv):
    args = parser.parse_args(argv[1:])
    db = libpyprjoxide.Database(database.get_db_root())
    libpyprjoxide.write_tilegrid_html(db, args.family, args.device,
                                      args.outfile)
    libpyprjoxide.write_region_html(db, args.family, args.device,
                                    args.routfile)
Example No. 8
def process_pio_db(ddrg, device):
    piofile = path.join(database.get_db_root(), "ECP5", dev_names[device],
                        "iodb.json")
    with open(piofile, 'r') as f:
        piodb = json.load(f)
        for pkgname, pkgdata in sorted(piodb["packages"].items()):
            pins = []
            for name, pinloc in sorted(pkgdata.items()):
                x = pinloc["col"]
                y = pinloc["row"]
                loc = pytrellis.Location(x, y)
                pio = "PIO" + pinloc["pio"]
                bel_idx = get_bel_index(ddrg, loc, pio)
                if bel_idx is not None:
                    pins.append((name, loc, bel_idx))
            packages[pkgname] = pins
        for metaitem in piodb["pio_metadata"]:
            x = metaitem["col"]
            y = metaitem["row"]
            loc = pytrellis.Location(x, y)
            pio = "PIO" + metaitem["pio"]
            bank = metaitem["bank"]
            if "function" in metaitem:
                pinfunc = metaitem["function"]
            else:
                pinfunc = None
            bel_idx = get_bel_index(ddrg, loc, pio)
            if bel_idx is not None:
                pindata.append((loc, bel_idx, bank, pinfunc))
Example No. 9
def cells_db_path(family, speedgrade):
    tmgroot = path.join(database.get_db_root(), family, "timing")
    if not path.exists(tmgroot):
        os.mkdir(tmgroot)
    sgroot = path.join(tmgroot, "speed_{}".format(speedgrade))
    if not path.exists(sgroot):
        os.mkdir(sgroot)
    return path.join(sgroot, "cells.json")
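
A usage sketch for reference (the family and speed grade arguments below are illustrative):

# With the database root at <db_root>, this returns
# <db_root>/ECP5/timing/speed_8/cells.json, creating the timing/ and
# speed_8/ directories along the way if they do not exist yet.
cells_json = cells_db_path("ECP5", "8")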
Example No. 10
def main():
	devices = database.get_devices()
	tang_root = database.get_tangdinasty_root()
	shutil.rmtree(database.get_db_root(), ignore_errors=True)
	os.mkdir(database.get_db_root())

	shutil.copy(path.join(database.get_tang_root(), "devices.json"), path.join(database.get_db_root(), "devices.json"))

	for family in devices["families"].keys():
		print("Family: " + family)
		for device in devices["families"][family]["devices"].keys():
			print("Device: " + device)
			selected_device = devices["families"][family]["devices"][device]

			database_dir = database.get_db_subdir(family, device)
			if not path.exists(path.join(database_dir,"bits")):
				os.mkdir(path.join(database_dir,"bits"))
			chipdb = path.join(tang_root, "arch", device + ".db")
			unlogic.decode_chipdb(["create_database", chipdb, "--db_dir", database_dir])
Example No. 11
def main(args):
    pytrellis.load_database(database.get_db_root())
    ci = pytrellis.get_chip_info(pytrellis.find_device_by_name(args.device))
    chip_size = (ci.max_row, ci.max_col)

    globals_json = dict()
    globals_json["lr-conns"] = {
        "lr1": {
            "row": center_map[chip_size][0],
            "row-span": row_spans[chip_size]
        }
    }

    globals_json["ud-conns"] = {}

    for n, c in enumerate(column_routing(chip_size[1],
                                         start_stride[chip_size])):
        globals_json["ud-conns"][str(n)] = c
        if n == chip_size[1] - 1:
            last_stride = c

    globals_json["branch-spans"] = {}

    for col, grps in enumerate(
            branch_spans(chip_size[1], start_stride[chip_size])):
        span_dict = {}
        for gn, span in enumerate(grps):
            if span:
                for glb_no in inv_global_group[gn]:
                    span_dict[str(glb_no)] = span

        globals_json["branch-spans"][str(col)] = span_dict

    # For the first and last columns, globals at the stride's current
    # position have DCCs when viewed in EPIC. These DCCs don't appear to
    # physically exist on-chip. See minitests/machxo2/dcc/dcc2.v. However,
    # in the bitstream (for the first and last columns) global conns going
    # into "DCCs" have different bits controlling them as opposed to globals
    # without DCC connections.
    zero_col_dccs = set(
        inv_global_group[(global_group[start_stride[chip_size]] - 1) % 4])
    zero_col_conns = set(globals_json["ud-conns"]["0"])
    missing_dccs_l = tuple(zero_col_conns.difference(zero_col_dccs))

    last_col_dccs = set(inv_global_group[(global_group[last_stride] + 1) % 4])
    last_col_conns = set(globals_json["ud-conns"][str(chip_size[1])])
    missing_dccs_r = tuple(last_col_conns.difference(last_col_dccs))

    globals_json["missing-dccs"] = {
        "0": missing_dccs_l,
        str(chip_size[1]): missing_dccs_r
    }

    with args.outfile as jsonf:
        jsonf.write(json.dumps(globals_json, indent=4, separators=(',', ': ')))
Example No. 12
def main(argv):
    global bitmap, labels
    bitmap = dict()
    labels = dict()
    args = parser.parse_args(argv[1:])
    f = args.outfile
    print("""<html>
            <head><title>{} Bit Data</title>
        """.format(args.tile),
          file=f)
    print("""
            <script type="text/javascript">
            origClr = {};
            origClass = "";
            
            function mov(event) {
                if (event.target.className != "unknown") {
                    origClass = event.target.className;
                    var elems = document.getElementsByClassName(origClass);
                    for(var i = 0; i < elems.length; i++) {
                       if(!(elems[i].id in origClr)) {
                          origClr[elems[i].id] = elems[i].style.backgroundColor;
                       }
                       elems[i].style.backgroundColor = "white";
                    }

                }
            }
            
            function mou(event) {
                var elems = document.getElementsByClassName(origClass);
                for(var i = 0; i < elems.length; i++) {
                   elems[i].style.backgroundColor = origClr[elems[i].id] || "#ffffff";
                }
            }
            </script>
            </head>
            <body>
        """,
          file=f)
    print("""<h1>{} Bit Data</h1>
    """.format(args.tile), file=f)
    pytrellis.load_database(database.get_db_root())
    tdb = pytrellis.get_tile_bitdata(
        pytrellis.TileLocator(args.family, args.device, args.tile))
    ch = pytrellis.Chip(args.device)
    ti = ch.get_tiles_by_type(args.tile)[0].info
    find_bits(tdb)
    bit_grid_html(ti, f)
    muxes_html(tdb, f)
    setwords_html(tdb, f)
    setenums_html(tdb, f)
    fixed_conns_html(tdb, f)
    print("""</body></html>""", file=f)
Example No. 13
def main():
    pytrellis.load_database(database.get_db_root())
    chip = pytrellis.Chip("LFE5U-45F")
    rt = Autorouter(chip)
    config = {_.info.name: pytrellis.TileConfig() for _ in chip.get_all_tiles()}
    rt.bind_net_to_port("x", "R15C10_Q0")
    rt.route_net_to_wire("x", "R15C50_A0", config)
    for tile, tcfg in sorted(config.items()):
        cfgtext = tcfg.to_string()
        if len(cfgtext.strip()) > 0:
            print(".tile {}".format(tile))
            print(cfgtext)
Example No. 14
    def setup(self, skip_specimen=False):
        """
        Create a working directory, and run Radiant on a minimal Verilog file to create a udb for Tcl usage, etc.
        """

        # Load the global database if it hasn't been loaded already
        global db
        if db is None:
            db = libpyprjoxide.Database(database.get_db_root())

        self.make_workdir()
        if not skip_specimen:
            self.build_design(self.sv, {})
Example No. 15
def process_pio_db(rg, device):
    piofile = path.join(database.get_db_root(), "MachXO2", dev_names[device], "iodb.json")
    with open(piofile, 'r') as f:
        piodb = json.load(f)
        for pkgname, pkgdata in sorted(piodb["packages"].items()):
            pins = []
            for name, pinloc in sorted(pkgdata.items()):
                x = pinloc["col"]
                y = pinloc["row"]
                if x == 0 or x == max_col:
                    # FIXME: Oversight in read_pinout.py. We use 0-based
                    # columns for 0 and max row, but we otherwise extract
                    # the names from the CSV, and...
                    loc = pytrellis.Location(x, y)
                else:
                    # Lattice uses 1-based columns!
                    loc = pytrellis.Location(x - 1, y)
                pio = "PIO" + pinloc["pio"]
                bel_idx = get_bel_index(rg, loc, pio)
                if bel_idx is not None:
                    pins.append((name, loc, bel_idx))
            packages[pkgname] = pins
        for metaitem in piodb["pio_metadata"]:
            x = metaitem["col"]
            y = metaitem["row"]
            if x == 0 or x == max_col:
                loc = pytrellis.Location(x, y)
            else:
                loc = pytrellis.Location(x - 1, y)
            pio = "PIO" + metaitem["pio"]
            bank = metaitem["bank"]
            if "function" in metaitem:
                pinfunc = metaitem["function"]
            else:
                pinfunc = None
            dqs = -1
            if "dqs" in metaitem:
                pass
                # tdqs = metaitem["dqs"]
                # if tdqs[0] == "L":
                #     dqs = 0
                # elif tdqs[0] == "R":
                #     dqs = 2048
                # suffix_size = 0
                # while tdqs[-(suffix_size+1)].isdigit():
                #     suffix_size += 1
                # dqs |= int(tdqs[-suffix_size:])
            bel_idx = get_bel_index(rg, loc, pio)
            if bel_idx is not None:
                pindata.append((loc, bel_idx, bank, pinfunc, dqs))
Example No. 16
def main():
    devices = database.get_devices()
    for fam, famdata in sorted(devices["families"].items()):
        tdroot = path.join(database.get_db_root(), fam, "tiledata")
        if not path.exists(tdroot):
            os.mkdir(tdroot)
        for device in sorted(famdata["devices"].keys()):
            if famdata["devices"][device]["fuzz"]:
                tilegrid = database.get_tilegrid(fam, device)
                for tilename in sorted(tilegrid.keys()):
                    tile = tiles.type_from_fullname(tilename)
                    tile_dir = path.join(tdroot, tile)
                    if not path.exists(tile_dir):
                        os.mkdir(tile_dir)
                    tile_db = path.join(tile_dir, "bits.db")
                    if not path.exists(tile_db):
                        with open(tile_db, 'w') as f:
                            f.write('\n')
Example No. 17
def main(argv):
    args = parser.parse_args(argv[1:])
    if not path.exists(args.fld):
        os.mkdir(args.fld)
    commit_hash = database.get_db_commit()
    build_dt = time.strftime('%Y-%m-%d %H:%M:%S')
    docs_toc = ""
    pytrellis.load_database(database.get_db_root())
    for fam, fam_data in sorted(database.get_devices()["families"].items()):
        fdir = path.join(args.fld, fam)
        if not path.exists(fdir):
            os.mkdir(fdir)
        thdir = path.join(fdir, "tilehtml")
        if not path.exists(thdir):
            os.mkdir(thdir)
        docs_toc += "<h3>{} Family</h3>".format(fam)
        docs_toc += "<ul>"
        tiles = get_device_tiles(fam, fam_data["devices"])
        for dev, devdata in sorted(fam_data["devices"].items()):
            if devdata["fuzz"]:
                ddir = path.join(fdir, dev)
                if not path.exists(ddir):
                    os.mkdir(ddir)
                print(
                    "********* Generating documentation for device {}".format(
                        dev))
                generate_device_docs(fam, dev, ddir)
                if (fam, dev) in tiles:
                    for tile in tiles[fam, dev]:
                        print(
                            "*** Generating documentation for tile {}".format(
                                tile))
                        generate_tile_docs(fam, dev, tile, thdir)
                docs_toc += '<li><a href="{}">{} Documentation</a></li>'.format(
                    '{}/{}/index.html'.format(fam, dev), dev)

        docs_toc += "</ul>"

    index_html = Template(trellis_docs_index).substitute(datetime=build_dt,
                                                         commit=commit_hash,
                                                         docs_toc=docs_toc)
    with open(path.join(args.fld, "index.html"), 'w') as f:
        f.write(index_html)
Example No. 18
def process_pio_db(ddrg, device):
    piofile = path.join(database.get_db_root(), "ECP5", dev_names[device],
                        "iodb.json")
    with open(piofile, 'r') as f:
        piodb = json.load(f)
        for pkgname, pkgdata in sorted(piodb["packages"].items()):
            pins = []
            for name, pinloc in sorted(pkgdata.items()):
                x = pinloc["col"]
                y = pinloc["row"]
                loc = pytrellis.Location(x, y)
                pio = "PIO" + pinloc["pio"]
                bel_idx = get_bel_index(ddrg, loc, pio)
                if bel_idx is not None:
                    pins.append((name, loc, bel_idx))
            packages[pkgname] = pins
        for metaitem in piodb["pio_metadata"]:
            x = metaitem["col"]
            y = metaitem["row"]
            loc = pytrellis.Location(x, y)
            pio = "PIO" + metaitem["pio"]
            bank = metaitem["bank"]
            if "function" in metaitem:
                pinfunc = metaitem["function"]
            else:
                pinfunc = None
            dqs = -1
            if "dqs" in metaitem:
                tdqs = metaitem["dqs"]
                if tdqs[0] == "L":
                    dqs = 0
                elif tdqs[0] == "R":
                    dqs = 2048
                suffix_size = 0
                while tdqs[-(suffix_size + 1)].isdigit():
                    suffix_size += 1
                dqs |= int(tdqs[-suffix_size:])
            bel_idx = get_bel_index(ddrg, loc, pio)
            if bel_idx is not None:
                pindata.append((loc, bel_idx, bank, pinfunc, dqs))
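
The DQS decode above packs the pad side and the group number into a single integer. A minimal standalone sketch of the same logic (the pin strings in the comment are hypothetical):

def decode_dqs(tdqs):
    # Mirrors the inline logic above: an "L" prefix maps to base 0, "R" to
    # base 2048, and the trailing digits give the DQS group number.
    dqs = 0 if tdqs[0] == "L" else 2048
    suffix_size = 0
    while tdqs[-(suffix_size + 1)].isdigit():
        suffix_size += 1
    return dqs | int(tdqs[-suffix_size:])

# decode_dqs("L7") == 7; decode_dqs("R25") == 2048 | 25 == 2073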
Example No. 19
def main():
    pytrellis.load_database(database.get_db_root())
    c = pytrellis.Chip("LFE5U-45F")
    chip_size = (c.get_max_row(), c.get_max_col())

    # Get fan-in to a net
    # Returns (source, configurable, loc)
    def get_fanin(net):
        drivers = []
        npos = tiles.pos_from_name(net)
        for tile in c.get_all_tiles():
            tinf = tile.info
            tname = tinf.name
            pos = tiles.pos_from_name(tname)
            if abs(pos[0] - npos[0]) >= 10 or abs(pos[1] - npos[1]) >= 10:
                continue
            if net.startswith("G_"):
                tnet = net
            else:
                tnet = nets.normalise_name(chip_size, tname, net)
            tdb = pytrellis.get_tile_bitdata(
                pytrellis.TileLocator(c.info.family, c.info.name, tinf.type))
            try:
                mux = tdb.get_mux_data_for_sink(tnet)
                for src in mux.get_sources():
                    drivers.append(
                        (nets.canonicalise_name(chip_size, tname,
                                                src), True, tname))
            except IndexError:
                pass
            for fc in tdb.get_fixed_conns():
                if fc.sink == tnet:
                    drivers.append(
                        (nets.canonicalise_name(chip_size, tname,
                                                fc.source), False, tname))
        return drivers

    # Get fan-out of a net
    # Returns (dest, configurable, loc)
    def get_fanout(net):
        drivers = []
        npos = tiles.pos_from_name(net)
        for tile in c.get_all_tiles():
            tinf = tile.info
            tname = tinf.name
            pos = tiles.pos_from_name(tname)
            if abs(pos[0] - npos[0]) >= 12 or abs(pos[1] - npos[1]) >= 12:
                continue
            if net.startswith("G_"):
                tnet = net
            else:
                tnet = nets.normalise_name(chip_size, tname, net)
            tdb = pytrellis.get_tile_bitdata(
                pytrellis.TileLocator(c.info.family, c.info.name, tinf.type))
            for sink in tdb.get_sinks():
                mux = tdb.get_mux_data_for_sink(sink)
                if tnet in mux.arcs:
                    drivers.append(
                        (nets.canonicalise_name(chip_size, tname,
                                                sink), True, tname))
            for fc in tdb.get_fixed_conns():
                if fc.source == tnet:
                    drivers.append(
                        (nets.canonicalise_name(chip_size, tname,
                                                fc.sink), False, tname))
        return drivers

    # Get all nets at a location
    net_tile_cache = {}
    non_tile_re = re.compile(r"^([NS]\d+)?([EW]\d+)?[GLR]?_.*")

    def get_nets_at(loc):
        if loc in net_tile_cache:
            return net_tile_cache[loc]
        row, col = loc
        nets = set()
        for tile in c.get_tiles_by_position(row, col):
            tinf = tile.info
            tdb = pytrellis.get_tile_bitdata(
                pytrellis.TileLocator(c.info.family, c.info.name, tinf.type))
            for sink in tdb.get_sinks():
                if not non_tile_re.match(sink):
                    nets.add(sink)
                mux = tdb.get_mux_data_for_sink(sink)
                for src in mux.get_sources():
                    if not non_tile_re.match(src):
                        nets.add(src)
            for fc in tdb.get_fixed_conns():
                if not non_tile_re.match(fc.sink):
                    nets.add(fc.sink)
                if not non_tile_re.match(fc.source):
                    nets.add(fc.source)
        nets = sorted("R{}C{}_{}".format(row, col, _) for _ in nets)
        net_tile_cache[loc] = nets
        return nets

    tile_net_re = re.compile(r"^R\d+C\d+_.*")

    def completer(text, idx):
        if not tile_net_re.match(text):
            return None
        loc = tiles.pos_from_name(text)
        nets = get_nets_at(loc)
        for n in nets:
            if n.startswith(text):
                if idx > 0:
                    idx -= 1
                else:
                    return n
        return None

    readline.parse_and_bind("tab: complete")
    readline.set_completer(completer)

    hist_buf = []
    while True:
        net = input("> ")
        if net.strip() == "":
            continue
        if net == "quit":
            return
        if net.isdigit():
            idx = int(net)
            if idx >= len(hist_buf):
                print("Invalid index into last result")
                continue
            else:
                net = hist_buf[idx]
        if not tile_net_re.match(net):
            print("Bad netname, expected RyCx_...")
            continue
        hist_buf = []
        fi = get_fanin(net)
        for drv in fi:
            finet, conf, tile = drv
            if finet is None: continue
            arrow = "<--" if conf else "<=="
            print("[{:3d}]  {} {} {:25s} [in {}]".format(
                len(hist_buf), net, arrow, finet, tile))
            hist_buf.append(finet)
        print()
        fo = get_fanout(net)
        for src in fo:
            fonet, conf, tile = src
            if fonet is None: continue
            arrow = "-->" if conf else "==>"
            print("[{:3d}]  {} {} {:25s} [in {}]".format(
                len(hist_buf), net, arrow, fonet, tile))
            hist_buf.append(fonet)
Example No. 20
def main(argv):
    args = parser.parse_args(argv[1:])
    db = libpyprjoxide.Database(database.get_db_root())
    docs_root = path.join(database.get_oxide_root(), "docs")
    libpyprjoxide.write_tilebits_html(db, docs_root, args.family, args.device,
                                      args.tiletype, args.outdir)
Example No. 21
def main(args):
    global max_row, max_col

    pytrellis.load_database(database.get_db_root())
    chip = pytrellis.Chip(args.device)

    max_row = chip.get_max_row()
    max_col = chip.get_max_col()

    if chip.info.family == "MachXO2":
        # I/O Grouping is present in MachXO2 pinouts but not ECP5.
        pkg_index_start = 8
    else:
        pkg_index_start = 7

    metadata = dict()
    package_data = dict()
    package_indices = None
    found_header = False
    with args.infile as csvf:
        for line in csvf:
            trline = line.strip()
            splitline = trline.split(",")
            if len(splitline) < (pkg_index_start + 1):
                continue
            if len(splitline[0].strip()) == 0:
                continue
            if splitline[0] == "PAD":
                # is header
                found_header = True
                package_indices = splitline[pkg_index_start:]
                for pkg in package_indices:
                    package_data[pkg] = {}
            elif found_header:
                if splitline[1][0] != "P" or splitline[1].startswith(
                        "PROGRAM"):
                    continue
                bel = get_bel(splitline[1])
                bank = int(splitline[2])
                function = splitline[3]
                dqs = splitline[6]
                if chip.info.family == "MachXO2":
                    io_grouping = splitline[7]
                    metadata[bel] = bank, function, dqs, io_grouping
                else:
                    metadata[bel] = bank, function, dqs
                for i in range(len(package_indices)):
                    if splitline[pkg_index_start + i] == "-":
                        continue
                    package_data[package_indices[i]][splitline[pkg_index_start + i]] = bel
    json_data = {"packages": {}, "pio_metadata": []}
    for pkg, pins in package_data.items():
        json_data["packages"][pkg] = {}
        for pin, bel in pins.items():
            json_data["packages"][pkg][pin] = {
                "col": bel[0],
                "row": bel[1],
                "pio": bel[2]
            }
    for bel, data in sorted(metadata.items()):
        if chip.info.family == "MachXO2":
            bank, function, dqs, io_grouping = data
        else:
            bank, function, dqs = data
        meta = {"col": bel[0], "row": bel[1], "pio": bel[2], "bank": bank}
        if function != "-":
            meta["function"] = function
        if dqs != "-":
            meta["dqs"] = dqs

        if chip.info.family == "MachXO2":
            # Since "+" is used, "-" means "minus" presumably, as opposed to
            # "not applicable".
            meta["io_grouping"] = io_grouping

        json_data["pio_metadata"].append(meta)
    with args.outfile as jsonf:
        jsonf.write(
            json.dumps(json_data,
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': ')))
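
The file written here is the iodb.json consumed by process_pio_db in the earlier examples. A hedged sample of its shape (package, pin, and function names below are invented):

iodb_example = {
    "packages": {
        "CABGA256": {
            "A4": {"col": 0, "row": 5, "pio": "A"}
        }
    },
    "pio_metadata": [
        {"col": 0, "row": 5, "pio": "A", "bank": 0, "function": "PCLKT0_0"}
    ]
}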
Example No. 22
def main(argv: List[str]) -> None:
    import argparse
    import json

    parser = argparse.ArgumentParser(
        description="Convert a .bit file into a .v verilog file for simulation")

    parser.add_argument("bitfile", help="Input .bit file")
    parser.add_argument(
        "--package",
        help="Physical package (e.g. CABGA256), for renaming I/O ports")
    parser.add_argument(
        "--lpf",
        help="Use LOCATE COMP commands from this LPF file to name I/O ports")
    parser.add_argument("-n",
                        "--module-name",
                        help="Name for the top-level module (default: top)",
                        default="top")
    args = parser.parse_args(argv)

    if args.lpf and not args.package:
        parser.error(
            "Cannot use a LPF file without specifying the chip package")

    pytrellis.load_database(database.get_db_root())

    print("Loading bitstream...", file=sys.stderr)
    bitstream = pytrellis.Bitstream.read_bit(args.bitfile)
    chip = bitstream.deserialise_chip()

    if args.package:
        dbfn = os.path.join(
            database.get_db_subdir(chip.info.family, chip.info.name),
            "iodb.json")
        with open(dbfn, "r") as f:
            iodb = json.load(f)

        if args.lpf:
            lpf_map = parse_lpf(args.lpf)
        else:
            lpf_map = {}

        # Rename PIO and IOLOGIC BELs based on their connected pins, for readability
        mod_renames = {}
        for pin_name, pin_data in iodb["packages"][args.package].items():
            if pin_name in lpf_map:
                # escape LPF name in case it has funny characters
                pin_name = "\\" + lpf_map[pin_name]
            # PIO and IOLOGIC do not share pin names except for IOLDO/IOLTO
            mod_renames["R{row}C{col}_PIO{pio}".format(
                **pin_data)] = f"{pin_name}"
            mod_renames["R{row}C{col}_IOLOGIC{pio}".format(
                **pin_data)] = f"{pin_name}"

        # Note: the mod_name_map only affects str(node), not node.mod_name
        Node.mod_name_map = mod_renames

    print("Computing routing graph...", file=sys.stderr)
    rgraph = chip.get_routing_graph()

    print("Computing connection graph...", file=sys.stderr)
    tiles_by_loc = make_tiles_by_loc(chip)
    graph = gen_config_graph(chip, rgraph, tiles_by_loc)

    print("Generating Verilog...", file=sys.stderr)
    print_verilog(graph, tiles_by_loc, args.module_name)

    print("Done!", file=sys.stderr)
Example No. 23
def main():
    # Import SDF and pickle files
    folder = sys.argv[1]
    for picklef in glob.glob(path.join(folder, "*_route.pickle")):
        if path.exists(picklef.replace("_route.pickle", "_10.sdf.pickle")):
            print("Importing {}...".format(picklef))
            process_design(picklef,
                           picklef.replace("_route.pickle", "_10.sdf.pickle"))

    row_ind = []
    col_ind = []
    data_values = []
    rhs = []
    for i, coeff in enumerate(eqn_coeffs):
        for j, val in coeff:
            row_ind.append(i)
            col_ind.append(j)
            data_values.append(val)
    rows = len(eqn_coeffs)
    A = csc_matrix((data_values, (row_ind, col_ind)), (rows, len(var_names)))
    bmin = np.zeros(rows)
    bmax = np.zeros(rows)

    speedgrades = ["4", "5", "6", "10", "11", "12", "M"]
    for speed in speedgrades:
        # For each speedgrade, set up the right hand side of the equation system by using
        # the delays in the interconnect section of the SDF file
        for design, arc2row in design_arc2row.items():
            sdf = "{}_{}.sdf.pickle".format(design, speed)
            with open(sdf, "rb") as sdff:
                parsed_sdf = pickle.load(sdff).cells["top"]
            for from_pin, to_pin in sorted(parsed_sdf.interconnect.keys()):
                if (from_pin, to_pin) not in arc2row:
                    continue
                dly = parsed_sdf.interconnect[from_pin, to_pin]
                bmin[arc2row[from_pin, to_pin]] = max(dly.rising.minv,
                                                      dly.falling.minv)
                bmax[arc2row[from_pin, to_pin]] = max(dly.rising.maxv,
                                                      dly.falling.maxv)
        print("Running least squares solver for speed {}...".format(speed))

        # Run the least squares solver on the system of equations
        xmin, istop, itn, r1norm = lsqr(A, bmin)[:4]
        xmax, istop, itn, r1norm = lsqr(A, bmax)[:4]

        delay_json = {"pip_classes": {}}

        for i, var in sorted(enumerate(var_names), key=lambda x: x[1]):
            print("  {:32s} {:20s} {:6.0f} {:6.0f}".format(
                var[0], var[1], xmin[i], xmax[i]))
            if var[0] not in delay_json["pip_classes"]:
                delay_json["pip_classes"][var[0]] = {}
            delay_json["pip_classes"][var[0]][var[1]] = [
                int(xmin[i] + 0.5), int(xmax[i] + 0.5)
            ]
        for zd in sorted(zero_delay_classes):
            print("  {:32s} {:20s} {:6.0f} {:6.0f} (fixed)".format(
                zd, "base", 0, 0))
            delay_json["pip_classes"][zd] = dict(base=[0, 0])

        # Write JSON so it can be used as part of the database import process
        timing_root = path.join(database.get_db_root(), "LIFCL", "timing")
        os.makedirs(timing_root, exist_ok=True)
        with open(path.join(timing_root, "interconnect_{}.json".format(speed)),
                  "w") as jf:
            json.dump(delay_json, jf, indent=4, sort_keys=True)

        # Clamp negative delay estimates to zero, then compute Ax and compare
        # to b for a simple estimation of the model error
        for i, var in sorted(enumerate(var_names), key=lambda x: x[1]):
            if xmin[i] < 0:
                xmin[i] = 0
            if xmax[i] < 0:
                xmax[i] = 0
        min_err = 99999
        max_err = -99999
        rms_err = 0
        N = 0
        min_coeffs = {}
        for i, coeff in enumerate(eqn_coeffs):
            model = 0
            for j, val in coeff:
                model += val * xmax[j]
            err = model - bmax[i]
            if err < min_err:
                min_coeffs = coeff
                min_err = err
            min_err = min(err, min_err)
            max_err = max(err, max_err)
            rms_err += err**2
            N += 1

        print("  error: neg={:.1f}ps, max={:.1f}ps, rms={:.1f}ps".format(
            min_err, max_err, math.sqrt(rms_err / N)))
        print("            neg eqn {}".format(" + ".join(
            "{}*{}".format(val, var_names[j][0]) for j, val in min_coeffs)))
Example No. 24
import database
import tiles
import json
from os import path
"""
Despite Lattice assigning them the same tile type; "odd" and "even" top/left/right IO
locations have slightly different routing - swapped output tristate and data

This script fixes this by patching tile names
"""

for f, d in [("LIFCL", "LIFCL-40"), ("LIFCL", "LFD2NX-40"),
             ("LFCPNX", "LFCPNX-100")]:
    tgp = path.join(database.get_db_root(), f, d, "tilegrid.json")
    with open(tgp, "r") as infile:
        tg = json.load(infile)["tiles"]

    tiles_by_xy = [[]]
    max_row = 0
    max_col = 0
    for tile in sorted(tg.keys()):
        r, c = tiles.pos_from_name(tile)
        max_row = max(r, max_row)
        max_col = max(c, max_col)
        while r >= len(tiles_by_xy):
            tiles_by_xy.append([])
        while c >= len(tiles_by_xy[r]):
            tiles_by_xy[r].append([])
        tiles_by_xy[r][c].append(tile)

    # Top tiles
Example No. 25
import pytrellis
import database

pytrellis.load_database(database.get_db_root())
chip = pytrellis.Chip("LFE5U-12F")
graph = chip.get_routing_graph()
tile = graph.tiles[pytrellis.Location(12, 6)]


def rid_to_arc(routingId):
    return graph.tiles[routingId.loc].arcs[routingId.id]


def rid_to_wire(routingId):
    return graph.tiles[routingId.loc].wires[routingId.id]


def rid_to_bel(bel):
    return graph.tiles[bel.loc].bels[bel.id]


routingBelIds = dict()
routingPortIds = dict()
routingArcIds = dict()
routingWiresIds = dict()
foreignRoutingWiresIds = dict()
counter = 0

toBeMatched = set()
for arc in [
        x.data() for x in tile.arcs if x.data().source.loc == tile.loc and (
Example No. 26
def main():
    for name, max_row, max_col in configs:
        cfg = FuzzConfig(job="GLOBAL_{}".format(name), device=name, sv="../shared/empty_40.v", tiles=[])
        cfg.setup()
        db_path = path.join(database.get_db_root(), "LIFCL", name, "globals.json")
        def load_db():
            if path.exists(db_path):
                with open(db_path, "r") as dbf:
                    return json.load(dbf)
            else:
                return {"branches": []}
        def save_db():
            with open(db_path, "w") as dbf:
                print(json.dumps(gdb, sort_keys=True, indent=4), file=dbf)
        gdb = load_db()
        # Determine branch driver locations
        test_row = 4
        clock_wires = ["R{}C{}_JCLK0".format(test_row, c) for c in range(1, max_col)]
        clock_info = lapie.get_node_data(cfg.udb, clock_wires)
        branch_to_col = {}
        for n in clock_info:
            r, c = pos_from_name(n.name)
            hpbx_c = None
            for uh in n.uphill_pips:
                if "_HPBX0" in uh.from_wire:
                    hpbx_r, hpbx_c = pos_from_name(uh.from_wire)
                    assert hpbx_r == r
                    break
            assert hpbx_c is not None
            if hpbx_c not in branch_to_col:
                branch_to_col[hpbx_c] = []
            branch_to_col[hpbx_c].append(c)
        branches = []

        branch_wires = ["R{}C{}_HPBX0000".format(test_row, bc) for bc in sorted(branch_to_col.keys())]
        branch_wire_info = lapie.get_node_data(cfg.udb, branch_wires)
        branch_driver_col = {}
        # Branches directly driven by a VPSX
        # Also, get a test column for the spine exploration later
        sp_test_col = None
        for bw in branch_wire_info:
            r, c = pos_from_name(bw.name)
            for uh in bw.uphill_pips:
                if "VPSX" in uh.from_wire:
                    vpsx_r, vpsx_c = pos_from_name(uh.from_wire)
                    branch_driver_col[c] = vpsx_c
                    if sp_test_col is None:
                        sp_test_col = c
        # Branches driven by another branch
        for bw in branch_wire_info:
            r, c = pos_from_name(bw.name)
            if c in branch_driver_col:
                continue
            for uh in bw.uphill_pips:
                if "HPBX0" in uh.from_wire:
                    hpbx_r, hpbx_c = pos_from_name(uh.from_wire)
                    branch_driver_col[c] = branch_driver_col[hpbx_c]            
        for bc, scs in sorted(branch_to_col.items()):
            tap_drv_col = branch_driver_col[bc] + 1
            side = "R" if tap_drv_col < bc else "L" 
            branches.append(dict(branch_col=bc, tap_driver_col=tap_drv_col, tap_side=side, from_col=min(scs), to_col=max(scs)))
        gdb["branches"] = branches
        save_db()
        # Spines
        sp_branch_wires = ["R{}C{}_HPBX0000".format(r, sp_test_col) for r in range(1, max_row)]
        spine_to_branch_row = {}
        sp_info = lapie.get_node_data(cfg.udb, sp_branch_wires)
        for n in sp_info:
            r, c = pos_from_name(n.name)
            vpsx_r = None
            for uh in n.uphill_pips:
                if "VPSX" in uh.from_wire:
                    vpsx_r, vpsx_c = pos_from_name(uh.from_wire)
                    break
            assert vpsx_r is not None
            if vpsx_r not in spine_to_branch_row:
                spine_to_branch_row[vpsx_r] = []
            spine_to_branch_row[vpsx_r].append(r)
        spines = []
        sp_test_row = None
        for sr, brs in sorted(spine_to_branch_row.items()):
            if sp_test_row is None:
                sp_test_row = sr
            spines.append(dict(spine_row=sr, from_row=min(brs), to_row=max(brs)))
        gdb["spines"] = spines
        save_db()
        # HROWs
        hrow_to_spine_col = {}
        spine_wires = ["R{}C{}_VPSX0000".format(sp_test_row, c) for c in sorted(set(branch_driver_col.values()))]
        hr_info = lapie.get_node_data(cfg.udb, spine_wires)
        for n in hr_info:
            r, c = pos_from_name(n.name)
            hrow_c = None
            for uh in n.uphill_pips:
                if "HPRX0" in uh.from_wire:
                    hrow_r, hrow_c = pos_from_name(uh.from_wire)
                    break
            assert hrow_c is not None
            if hrow_c not in hrow_to_spine_col:
                hrow_to_spine_col[hrow_c] = []
            hrow_to_spine_col[hrow_c].append(c)
        hrows = []
        for hrc, scs in sorted(hrow_to_spine_col.items()):
            hrows.append(dict(hrow_col=hrc, spine_cols=scs))
        gdb["hrows"] = hrows
        save_db()
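
The globals.json assembled above ends up with three sections; a hedged sketch of its shape (all numbers below are invented):

globals_example = {
    "branches": [
        {"branch_col": 3, "tap_driver_col": 2, "tap_side": "R",
         "from_col": 1, "to_col": 6}
    ],
    "spines": [
        {"spine_row": 28, "from_row": 5, "to_row": 52}
    ],
    "hrows": [
        {"hrow_col": 43, "spine_cols": [14, 43, 72]}
    ]
}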
Example No. 27
def main(argv):
    args = parser.parse_args(argv[1:])
    if not path.exists(args.fld):
        os.mkdir(args.fld)
    commit_hash = database.get_db_commit()
    build_dt = time.strftime('%Y-%m-%d %H:%M:%S')
    docs_toc = ""
    pytrellis.load_database(database.get_db_root())
    for fam, fam_data in sorted(database.get_devices()["families"].items()):
        if fam == "MachXO2":
            continue
        fdir = path.join(args.fld, fam)
        if not path.exists(fdir):
            os.mkdir(fdir)
        thdir = path.join(fdir, "tilehtml")
        if not path.exists(thdir):
            os.mkdir(thdir)
        docs_toc += "<h3>{} Family</h3>".format(fam)
        docs_toc += "<h4>Bitstream Documentation</h4>"
        docs_toc += "<ul>"
        tiles = get_device_tiles(fam, fam_data["devices"])
        for dev, devdata in sorted(fam_data["devices"].items()):
            if devdata["fuzz"]:
                ddir = path.join(fdir, dev)
                if not path.exists(ddir):
                    os.mkdir(ddir)
                print(
                    "********* Generating documentation for device {}".format(
                        dev))
                generate_device_docs(fam, dev, ddir)
                if (fam, dev) in tiles:
                    for tile in tiles[fam, dev]:
                        print(
                            "*** Generating documentation for tile {}".format(
                                tile))
                        generate_tile_docs(fam, dev, tile, thdir)
                docs_toc += '<li><a href="{}">{} Documentation</a></li>'.format(
                    '{}/{}/index.html'.format(fam, dev), dev)

        docs_toc += "</ul>"
        docs_toc += "<h4>Cell Timing Documentation</h4>"
        docs_toc += "<ul>"
        for spgrade in ["6", "7", "8", "8_5G"]:
            tdir = path.join(fdir, "timing")
            if not path.exists(tdir):
                os.mkdir(tdir)
            docs_toc += '<li><a href="{}">Speed Grade -{}</a></li>'.format(
                '{}/timing/cell_timing_{}.html'.format(fam, spgrade), spgrade)
            cell_html.make_cell_timing_html(
                timing_dbs.cells_db_path(fam, spgrade), fam, spgrade,
                path.join(tdir, 'cell_timing_{}.html'.format(spgrade)))
        docs_toc += "</ul>"
        docs_toc += "<h4>Interconnect Timing Documentation</h4>"
        docs_toc += "<ul>"
        for spgrade in ["6", "7", "8", "8_5G"]:
            tdir = path.join(fdir, "timing")
            if not path.exists(tdir):
                os.mkdir(tdir)
            docs_toc += '<li><a href="{}">Speed Grade -{}</a></li>'.format(
                '{}/timing/interconn_timing_{}.html'.format(fam, spgrade),
                spgrade)
            interconnect_html.make_interconn_timing_html(
                timing_dbs.interconnect_db_path(fam, spgrade), fam, spgrade,
                path.join(tdir, 'interconn_timing_{}.html'.format(spgrade)))
        docs_toc += "</ul>"

    index_html = Template(trellis_docs_index).substitute(datetime=build_dt,
                                                         commit=commit_hash,
                                                         docs_toc=docs_toc)
    with open(path.join(args.fld, "index.html"), 'w') as f:
        f.write(index_html)
Example No. 28
def main(argv):
    db = libpyprjoxide.Database(database.get_db_root())
    libpyprjoxide.build_sites(db, "LIFCL-40", "PLC")
Example No. 29
def main():
    db = libpyprjoxide.Database(database.get_db_root())
    libpyprjoxide.copy_db(db, "LIFCL", "EBR_10", ["TRUNK_L_EBR_10"], "PEWC", "")