Ejemplo n.º 1
0
def run(fnin, fnout=None, strict=False, verbose=False):
    """Validate a segbits database file and optionally write a sorted copy.

    Parameters
    ----------
    fnin:
        Input database file name.
    fnout:
        Optional output file name; when given, the non-empty input lines
        are written back sorted.
    strict:
        When True, assert on ill-defined modes, duplicate tags and
        duplicate bit patterns.
    verbose:
        Currently unused; kept for interface compatibility.
    """
    with OpenSafeFile(fnin) as f:
        lines = f.read().split('\n')
    tags = dict()
    bitss = dict()
    for line in lines:
        line = line.strip()
        if line == '':
            continue
        # TODO: figure out what to do with masks
        if line.startswith("bit "):
            continue
        tag, bits, mode, _ = parse_db_line(line)
        if strict:
            if mode != "always":
                assert not mode, "strict: got ill defined line: %s" % (line, )
            if tag in tags:
                print("Original line: %s" % tags[tag], file=sys.stderr)
                print("New line: %s" % line, file=sys.stderr)
                assert 0, "strict: got duplicate tag %s" % (tag, )
            assert bits not in bitss, "strict: got duplicate bits %s: %s %s" % (
                bits, tag, bitss[bits])
        tags[tag] = line
        # PEP 8: compare to None with identity, not equality.
        if bits is not None:
            bitss[bits] = tag

    if fnout:
        with OpenSafeFile(fnout, "w") as fout:
            for line in sorted(lines):
                line = line.strip()
                if line == '':
                    continue
                fout.write(line + '\n')
Ejemplo n.º 2
0
def main():
    """Parse command line arguments, merge the given SDF files and emit
    the result (plus an optional JSON debug dump)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--sdfs',
                        nargs='+',
                        type=str,
                        help="List of sdf files to merge")
    parser.add_argument('--site', type=str, help="Site we want to merge")
    parser.add_argument('--json', type=str, help="Debug JSON")
    parser.add_argument('--out', type=str, help="Merged sdf name")
    args = parser.parse_args()

    # Parse every input SDF file.
    timings_list = []
    for sdf_file in args.sdfs:
        with OpenSafeFile(sdf_file, 'r') as fp:
            timings_list.append(sdfparse.parse(fp.read()))

    # Merge and write the combined SDF.
    merged_sdf = merge(timings_list, args.site)
    with OpenSafeFile(args.out, 'w') as fp:
        fp.write(sdfparse.emit(merged_sdf, timescale='1ns'))

    # Optional debug dump of the merged model as JSON.
    if args.json is not None:
        with OpenSafeFile(args.json, 'w') as fp:
            json.dump(merged_sdf, fp, indent=4, sort_keys=True)
Ejemplo n.º 3
0
    def __init__(self, tile_db):
        """Load the ppips and segbits databases for a tile.

        Also builds ``feature_addresses``: for every indexed feature
        ``NAME[i]`` found in the segbits, it maps
        ``NAME -> {i: (block_type, full_feature_name)}``.
        """
        self.segbits = {}
        self.ppips = {}
        self.feature_addresses = {}

        if tile_db.ppips is not None:
            with OpenSafeFile(tile_db.ppips) as f:
                self.ppips = read_ppips(f)

        if tile_db.segbits is not None:
            with OpenSafeFile(tile_db.segbits) as f:
                self.segbits[BlockType.CLB_IO_CLK] = read_segbits(f)

        if tile_db.block_ram_segbits is not None:
            with OpenSafeFile(tile_db.block_ram_segbits) as f:
                self.segbits[BlockType.BLOCK_RAM] = read_segbits(f)

        # Index features of the form NAME[idx] by their base name.
        for block_type, features in self.segbits.items():
            for feature in features:
                open_idx = feature.rfind('[')
                close_idx = feature.rfind(']')

                if open_idx == -1:
                    continue
                assert close_idx != -1

                base_feature = feature[:open_idx]
                address = int(feature[open_idx + 1:close_idx])
                self.feature_addresses.setdefault(base_feature, {})[
                    address] = (block_type, feature)
Ejemplo n.º 4
0
def sort_db_text(n):
    """Sort the lines of file *n* in place.

    Each line is keyed on the list of numbers extracted from its
    whitespace-separated words. Returns True on completion.
    """
    keyed_lines = []
    with OpenSafeFile(n) as f:
        for line in f:
            key = [extract_num(word) for word in line.split()]
            keyed_lines.append((key, line))

    # Stable sort on the numeric key only.
    keyed_lines.sort(key=lambda pair: pair[0])

    with OpenSafeFile(n, 'w') as f:
        for pair in keyed_lines:
            f.write(pair[-1])

    return True
Ejemplo n.º 5
0
def sort_json(filename):
    """Sort a XXX.json file."""

    # Report and bail out when the file is not valid JSON.
    try:
        with OpenSafeFile(filename) as f:
            data = json.load(f)
    except json.JSONDecodeError as e:
        print(e)
        return False

    # Rewrite the file in canonical sorted form.
    with OpenSafeFile(filename, 'w') as f:
        xjson.pprint(f, data)

    return True
Ejemplo n.º 6
0
def write_segbits(file_name, all_tags, all_bits, W):
    """
    Writes solution to a raw database file.

    Parameters
    ----------

    file_name:
        Name of the .rdb file.
    all_tags:
        List of considered tags.
    all_bits:
        List of considered bits.
    W:
        Matrix with binary solution.
    """
    lines = []

    for row in range(W.shape[0]):
        # Negative weights produce negated bits, positive ones plain bits.
        bits = []
        for col in range(W.shape[1]):
            weight = W[row, col]
            if weight < 0:
                bits.append("!" + all_bits[col])
            if weight > 0:
                bits.append(all_bits[col])

        if not bits:
            bits = ["<0 candidates>"]

        lines.append("{} {}\n".format(all_tags[row], " ".join(bits)))

    with OpenSafeFile(file_name, "w") as fp:
        fp.writelines(lines)
Ejemplo n.º 7
0
def load_and_sort_segbits(file_name, tagmap=lambda tag: tag):
    """
    Loads a segbit file (.db or .rdb). Skips bits containing '<' or '>'
    """

    segbits = {}

    with OpenSafeFile(file_name, "r") as fp:
        for raw_line in fp.readlines():
            stripped = raw_line.strip()
            fields = stripped.split()

            # A valid line has a tag followed by at least one bit.
            if len(fields) < 2:
                print("Malformed line: '%s'" % stripped)
                continue

            # Map the tag name through the user-supplied function.
            feature = tagmap(fields[0])

            # Decode bits, skipping placeholder entries such as '<...>'.
            bits = [
                parse_bit(field) for field in fields[1:]
                if "<" not in field and ">" not in field
            ]

            # Keep bits ordered by (frame, offset).
            bits.sort(key=lambda bit: (bit[0], bit[1]))
            segbits[feature] = bits

    return segbits
Ejemplo n.º 8
0
    def load_bits(self, bitsfile):
        '''Load self.bits holding the bits that occured in the bitstream'''
        '''
        Format:
        self.bits[base_frame][bit_wordidx] = set()
        Where elements are (bit_frame, bit_wordidx, bit_bitidx))
        bit_frame is a relatively large number forming the FDRI address
        base_frame is a truncated bit_frame address of related FDRI addresses
        0 <= bit_wordidx <= 100
        0 <= bit_bitidx < 31

        Sample bits input
        bit_00020500_000_08
        bit_00020500_000_14
        bit_00020500_000_17
        '''
        self.bits = dict()
        print("Loading bits from %s." % bitsfile)
        with OpenSafeFile(bitsfile, "r") as f:
            for line in f:
                # ex: bit_00020500_000_17
                fields = line.split("_")
                bit_frame = int(fields[1], 16)
                bit_wordidx = int(fields[2], 10)
                bit_bitidx = int(fields[3], 10)
                # Related FDRI addresses are grouped under a truncated
                # base frame address.
                base_frame = bit_frame & ~0x7f

                word_map = self.bits.setdefault(base_frame, dict())
                word_map.setdefault(bit_wordidx, set()).add(
                    (bit_frame, bit_wordidx, bit_bitidx))
        if self.verbose:
            print('Loaded bits: %u bits in %u base frames' %
                  (recurse_sum(self.bits), len(self.bits)))
Ejemplo n.º 9
0
def load_segbits(file_name):
    """
    Loads a segbits file.
    """

    segbits = {}

    with OpenSafeFile(file_name, "r") as fp:
        for raw in fp:
            line = raw.strip()
            fields = line.split()

            if len(fields) < 2:
                raise RuntimeError("Malformed line: '%s'" % line)

            tag = fields[0]

            # Lines containing '<'/'>' placeholders are kept verbatim as a
            # string; ordinary lines are parsed into a set of bit tuples.
            if "<" in line or ">" in line:
                segbits[tag] = " ".join(fields[1:])
            else:
                segbits[tag] = {parse_bit(bit) for bit in fields[1:]}

    return segbits
Ejemplo n.º 10
0
    def write(self, suffix=None, roi=False, allow_empty=False):
        """Write one segdata_*.txt file per non-empty segment type.

        Asserts when there is nothing to write unless allow_empty is set.
        """
        assert self.segments_by_type, 'No data to write'

        if not allow_empty:
            total = sum(
                len(segments)
                for segments in self.segments_by_type.values())
            assert total != 0, "Didn't  generate any segments"

        for segtype, segments in self.segments_by_type.items():
            # File name carries the segment type and an optional suffix.
            if suffix is not None:
                filename = "segdata_%s_%s.txt" % (segtype.lower(), suffix)
            else:
                filename = "segdata_%s.txt" % (segtype.lower())

            if not segments:
                continue

            print("Writing %s." % filename)
            with OpenSafeFile(filename, "w") as f:
                for segname, segdata in sorted(segments.items()):
                    # seg 00020300_010
                    print("seg %s" % segname, file=f)
                    for bitname in sorted(segdata["bits"]):
                        print("bit %s" % bitname, file=f)
                    for tagname, tagval in sorted(segdata["tags"].items()):
                        print("tag %s %d" % (tagname, tagval), file=f)
Ejemplo n.º 11
0
 def load_grid(self):
     '''Load self.grid holding tile addresses'''
     # tilegrid.json lives under <db_root>/<fabric>/.
     with OpenSafeFile(
             os.path.join(self.db_root, self.fabric, "tilegrid.json"),
             "r") as f:
         self.grid = json.load(f)
     # A top-level "segments" key marks the deprecated tilegrid schema.
     assert "segments" not in self.grid, "Old format tilegrid.json"
Ejemplo n.º 12
0
def load_tag_groups(file_name):
    """
    Loads tag groups from a text file.

    A tag group is defined by specifying a space separated list of tags within
    a single line. Lines that are empty or start with '#' are ignored.
    """
    tag_groups = []

    # Read group specifications, one group per non-comment line.
    with OpenSafeFile(file_name, "r") as fp:
        for raw in fp:
            line = raw.strip()

            if not line or line.startswith("#"):
                continue

            group = set(line.split())
            if group:
                tag_groups.append(group)

    # Verify that no tag belongs to more than one group.
    for group_a, group_b in itertools.combinations(tag_groups, 2):
        common = group_a & group_b
        if common:
            raise RuntimeError(
                "Tag(s) {} are present in multiple groups".format(
                    " ".join(common)))

    return tag_groups
Ejemplo n.º 13
0
 def load_from_root_csv(self, nodes):
     # Local imports: pyjson5 and progressbar are third-party and only
     # needed by this code path.
     import pyjson5 as json5
     import progressbar
     # Each node file maps a node name to the wires it contains; node
     # names must be unique across all files.
     for node in progressbar.progressbar(nodes):
         with OpenSafeFile(node) as f:
             node_wires = json5.load(f)
             assert node_wires['node'] not in self.nodes
             self.nodes[node_wires['node']] = node_wires['wires']
Ejemplo n.º 14
0
def process_db(db, tile_type, process, verbose):
    """Run *process* on every line of the segbits and ppips databases
    for *tile_type*.

    Parameters
    ----------
    db:
        Database object providing get_tile_type().
    tile_type:
        Name of the tile type whose databases are read.
    process:
        Callable invoked with each raw database line.
    verbose:
        When True, print which files are processed.
    """
    ttdb = db.get_tile_type(tile_type)

    fns = [ttdb.tile_dbs.segbits, ttdb.tile_dbs.ppips]
    # Explicit 'if' instead of the 'verbose and print(...)'
    # expression-as-statement idiom.
    if verbose:
        print("process_db(%s): %s" % (tile_type, fns))
    for fn in fns:
        if fn:
            with OpenSafeFile(fn, "r") as f:
                for line in f:
                    process(line)
Ejemplo n.º 15
0
def build_address_map(tilegrid_file):
    """
    Loads the tilegrid and generates a map (baseaddr, offset) -> tile name(s).

    Parameters
    ----------

    tilegrid_file:
        The tilegrid.json file/

    Returns
    -------

    A dict with lists of tile names.

    """

    address_map = {}

    # Load tilegrid
    with OpenSafeFile(tilegrid_file, "r") as fp:
        tilegrid = json.load(fp)

    for tile_name, tile_data in tilegrid.items():

        # Skip tiles with no bit information.
        if "bits" not in tile_data:
            continue
        bits = tile_data["bits"]
        if not len(bits):
            continue

        # Only the CLB_IO_CLK bus carries the addresses of interest.
        if "CLB_IO_CLK" not in bits:
            continue
        bus = bits["CLB_IO_CLK"]

        # Decode the address as a pair of integers.
        address = (
            int(bus["baseaddr"], 16),
            int(bus["offset"]),
        )

        # Group tile names sharing the same address.
        address_map.setdefault(address, []).append(tile_name)

    return address_map
Ejemplo n.º 16
0
def main():
    """Read a timing JSON file and write SDF files to the output
    directory."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--json', type=str, help="Input JSON file")
    parser.add_argument('--sdf', type=str, help="SDF files output directory")
    args = parser.parse_args()

    # Load the timing model.
    with OpenSafeFile(args.json, 'r') as fp:
        timings = json.load(fp)

    # Emit it as SDF.
    produce_sdf(timings, args.sdf)
Ejemplo n.º 17
0
 def write_frames(self, file_name):
     '''Write configuration data to frames file'''
     # Frames are 65 words long: each line starts with the frame index,
     # followed by the 65 words separated by commas.
     frame_stream = StringIO()
     for i in range(len(self.frame_data)):
         if i % 65 == 0:
             frame_stream.write("0x{:08x} ".format(i // 65))
         frame_stream.write("0x{:04x}".format(self.frame_data[i]))
         if i % 65 == 64:
             # End of frame.
             frame_stream.write("\n")
         elif i < len(self.frame_data) - 1:
             # Separator, omitted after the very last word.
             frame_stream.write(",")
     with OpenSafeFile(file_name, "w") as f:
         print(frame_stream.getvalue(), file=f)
Ejemplo n.º 18
0
def bits_to_fasm(db_root, part, bits_file, verbose, canonical):
    """Disassemble a bitstream file and print it as FASM to stdout."""
    database = Database(db_root, part)
    grid = database.grid()
    disassembler = fasm_disassembler.FasmDisassembler(database)

    with OpenSafeFile(bits_file) as f:
        bitdata = bitstream.load_bitdata(f)

    # Decode the features and emit them merged and sorted by tile.
    features = disassembler.find_features_in_bitstream(
        bitdata, verbose=verbose)
    model = fasm.output.merge_and_sort(
        features,
        zero_function=disassembler.is_zero_feature,
        sort_key=grid.tile_key,
    )

    print(fasm.fasm_tuple_to_string(model, canonical=canonical), end='')
Ejemplo n.º 19
0
 def write_frames_txt(self, file_name):
     '''Write frame data in a more readable format'''
     # Frames are 65 words long; each frame starts with a header line.
     frame_stream = StringIO()
     for i in range(len(self.frame_data)):
         if i % 65 == 0:
             frame_stream.write("\nFrame {:4}\n".format(i // 65))
         #IOB word
         if i % 65 == 32:
             # Word 32 is set off on its own line.
             frame_stream.write("\n#{:3}:{:6}\n".format(
                 i % 65, hex(self.frame_data[i])))
         else:
             frame_stream.write("#{:3}:{:6},".format(
                 i % 65, hex(self.frame_data[i])))
     with OpenSafeFile(file_name, "w") as f:
         print(frame_stream.getvalue(), file=f)
Ejemplo n.º 20
0
def read_segbits(segbits_file):
    """
    Loads and parses segbits_*.db file. Returns only segbit names.
    """
    segbits = []

    with OpenSafeFile(segbits_file, "r") as fp:
        for raw in fp.readlines():
            fields = raw.split()
            if len(fields) > 1:
                # Strip the tile-type prefix from the dotted name.
                name_parts = fields[0].split(".")
                segbits.append(".".join(name_parts[1:]))

    return segbits
Ejemplo n.º 21
0
def read_ppips(ppips_file):
    """
    Loads and parses ppips_*.db file. Returns a dict indexed by PIP name which
    contains their types ("always", "default" or "hint")
    """
    ppips = {}

    with OpenSafeFile(ppips_file, "r") as fp:
        for raw in fp.readlines():
            fields = raw.split()
            if len(fields) == 2:
                # Strip the tile-type prefix from the dotted PIP name.
                name_parts = fields[0].split(".")
                ppips[".".join(name_parts[1:])] = fields[1]

    return ppips
Ejemplo n.º 22
0
    def __init__(self, db, tile_type, bits_map):
        """Build alias-resolution state for a tile type.

        For every block type this computes the bits of the aliased tile
        type (offset shifted back by the alias start offset) and a
        reverse map from aliased site names to this tile's site names.

        Parameters
        ----------
        db:
            Database object providing tile_types and get_tile_segbits().
        tile_type:
            Name of the tile type that is using the alias.
        bits_map:
            BlockType -> Bits map for this tile.
        """
        # Name of tile_type that is using the alias
        self.tile_type = tile_type

        # Name of aliased tile_type
        self.alias_tile_type = None

        # BlockType -> BitAlias map
        self.alias = {}

        self.bits_map = bits_map

        # BlockType -> aliased Bits map
        self.alias_bits_map = {}

        # aliased site name to site name map
        self.sites_rev_map = {}

        for block_type in bits_map:
            self.alias[block_type] = bits_map[block_type].alias
            # Shift the offset back so the Bits address the aliased
            # tile's segbits rather than this tile's.
            self.alias_bits_map[block_type] = Bits(
                base_address=bits_map[block_type].base_address,
                frames=bits_map[block_type].frames,
                offset=bits_map[block_type].offset -
                self.alias[block_type].start_offset,
                words=bits_map[block_type].words,
                alias=None,
            )

            # Every block type must alias the same tile type.
            if self.alias_tile_type is None:
                self.alias_tile_type = self.alias[block_type].tile_type
            else:
                assert self.alias_tile_type == self.alias[block_type].tile_type

            # Invert the site map; aliased site names must be unique.
            self.sites_rev_map[block_type] = {}
            for site, alias_site in self.alias[block_type].sites.items():
                assert alias_site not in self.sites_rev_map[block_type]
                self.sites_rev_map[block_type][alias_site] = site

        # PPIPs come from this tile type; segbits from the aliased one.
        tile_db = db.tile_types[self.tile_type]
        self.ppips = {}

        if tile_db.ppips is not None:
            with OpenSafeFile(tile_db.ppips) as f:
                self.ppips = read_ppips(f)
        self.tile_segbits = db.get_tile_segbits(self.alias_tile_type)
Ejemplo n.º 23
0
def save_segbits(file_name, segbits):
    """
    Save segbits to a .db or .rdb file

    Parameters
    ----------
    file_name:
        Output file name.
    segbits:
        Dict mapping tag -> raw string (.rdb style) or set of bit tuples.

    Raises
    ------
    TypeError
        If a value is neither a string nor a set.
    """

    with OpenSafeFile(file_name, "w") as fp:
        for tag, bits in segbits.items():

            if isinstance(bits, str):
                line = tag + " " + bits

            elif isinstance(bits, set):
                line = tag + " "
                line += " ".join(
                    [bit_to_str(bit) for bit in sorted(list(bits))])

            else:
                # Previously 'line' was left undefined (NameError) or
                # stale from the prior iteration for unexpected types.
                raise TypeError(
                    "Unsupported bits type for tag '%s': %s" %
                    (tag, type(bits).__name__))

            fp.write(line + "\n")
Ejemplo n.º 24
0
def read_pips_from_tile(tile_file):
    """
    Loads pip definition from a tile type JSON file and returns non-pseudo
    PIP name strings. Names are formatted as <dst_wire>.<src_wire>
    """

    with OpenSafeFile(tile_file, "r") as fp:
        root = json.load(fp)
        # Keep only real (non-pseudo) PIPs.
        pip_names = [
            "{}.{}".format(pip["dst_wire"], pip["src_wire"])
            for pip in root["pips"].values()
            if int(pip["is_pseudo"]) == 0
        ]

    return pip_names
Ejemplo n.º 25
0
 def __init__(self, file_name, verbose=False):
     '''Read a bitstream file and parse it into header and frame data.'''
     # Parser state, filled in by parse_bitstream().
     self.frame_data = []
     self.idcode = 0
     self.exp_sign = 0
     self.far_min = 0
     self.far_maj = 0
     self.curr_fdri_write_len = 0
     self.curr_crc_check = 0
     self.fdri_in_progress = False
     with OpenSafeFile(file_name, "rb") as f:
         self.bytes = f.read()
     pos, self.header = self.get_header()
     # Interpret the remainder as big-endian 16-bit words.
     self.body = [
         (i << 8) | j
         for i, j in zip(self.bytes[pos::2], self.bytes[pos + 1::2])
     ]
     self.parse_bitstream(verbose)
Ejemplo n.º 26
0
def run(db_root,
        part,
        bits_file,
        segnames,
        omit_empty_segs=False,
        flag_unknown_bits=False,
        flag_decode_emit=False,
        flag_decode_omit=False,
        bit_only=False,
        verbose=False):
    """Decode a bitstream and print the requested segments."""
    db = prjxraydb.Database(db_root, part)
    tiles = load_tiles(db_root, part)
    segments = mk_segments(tiles)
    with OpenSafeFile(bits_file) as f:
        bitdata = bitstream.load_bitdata2(f)

    if flag_unknown_bits:
        print_unknown_bits(tiles, bitdata)
        print("")

    if not segnames:
        # Default: print all segments.
        segnames = sorted(tile_segnames(tiles))
    else:
        for i, segname in enumerate(segnames):
            # Default to common tile config area if tile given without
            # explicit block.
            if ':' not in segname:
                segnames[i] = mksegment(segname, 'CLB_IO_CLK')
    print('Segments: %u' % len(segnames))

    # XXX: previously this was sorted by address, not name
    # revisit?
    for segname in segnames:
        handle_segment(db,
                       segname,
                       bitdata,
                       flag_decode_emit,
                       flag_decode_omit,
                       omit_empty_segs,
                       segments,
                       bit_only=bit_only,
                       verbose=verbose)
Ejemplo n.º 27
0
def build_wire_filter(wire_filter):
    """Return a predicate accepting nets that contain any listed wire.

    The filter file holds one wire name per line; blank lines are
    ignored.
    """
    wires_to_include = set()

    with OpenSafeFile(wire_filter) as f:
        for raw in f:
            name = raw.strip()
            if name:
                wires_to_include.add(name)

    def filter_net(net):
        # Collect every wire name appearing in the net's nodes.
        wires_in_net = {
            wire['name']
            for node in net['nodes'] for wire in node['wires']
        }
        # Accept when the net intersects the include list.
        return len(wires_in_net & wires_to_include) > 0

    return filter_net
Ejemplo n.º 28
0
def read_root_csv(root_dir):
    """Read root.csv from a raw db directory.

    Returns (tiles, nodes): tile file paths grouped by subtype, and a
    flat list of node file paths. This should only be used during
    database generation.
    """
    tiles = {}
    nodes = []

    with OpenSafeFile(os.path.join(root_dir, 'root.csv')) as f:
        for row in csv.DictReader(f):
            path = os.path.join(root_dir, row['filename'])
            if row['filetype'] == 'tile':
                # Group tile files by their subtype.
                tiles.setdefault(row['subtype'], []).append(path)
            elif row['filetype'] == 'node':
                nodes.append(path)

    return tiles, nodes
Ejemplo n.º 29
0
def load_just_bits(file_name):
    """
    Read bits from a .db or .rdb file. Ignores tags and bit values.

    Returns a set of (frame, bit) integer tuples; a leading '!' on a bit
    is accepted and ignored.
    """

    # Compile the pattern once instead of re-looking it up per word.
    bit_re = re.compile(r"^(!?)([0-9]+)_([0-9]+)$")

    with OpenSafeFile(file_name, "r") as fp:
        lines = fp.readlines()

    bits = set()
    for line in lines:
        for word in line.split(" "):
            match = bit_re.match(word)
            if match is not None:
                frm = int(match.group(2))
                bit = int(match.group(3))

                bits.add((
                    frm,
                    bit,
                ))

    return bits
Ejemplo n.º 30
0
def main(argv):
    """Print tiles/wires connected to a given wire of a tile type.

    Parameters
    ----------
    argv:
        Command line as a list: [prog, tile_type, wire_name].
    """
    if len(argv) != 3:
        print("Usage example: python3 %s HCLK_R HCLK_SW6E3" % argv[0])
        sys.exit(1)

    # Consistency fix: use the argv parameter throughout. Previously the
    # length check used argv while the values were read from sys.argv,
    # breaking callers that pass anything other than sys.argv itself.
    tile_type = argv[1]
    wire_name = argv[2]

    with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
                                       os.getenv("XRAY_DATABASE")), "r") as f:
        tileconn = json.load(f)

    outdata = list()
    max_tiletype_len = 1

    for entry in tileconn:
        # Determine which side of the connection entry matches our tile
        # type; deltas are negated when we match the second side.
        if entry["tile_types"][0] == tile_type:
            this_idx, other_idx = 0, 1
            delta_x, delta_y = entry["grid_deltas"]
        elif entry["tile_types"][1] == tile_type:
            this_idx, other_idx = 1, 0
            delta_x, delta_y = -entry["grid_deltas"][0], -entry["grid_deltas"][
                1]
        else:
            continue

        for wire_pair in entry["wire_pairs"]:
            if wire_pair[this_idx] != wire_name:
                continue

            outdata.append(
                (
                    delta_x, delta_y, entry["tile_types"][other_idx],
                    wire_pair[other_idx]))
            max_tiletype_len = max(
                max_tiletype_len, len(entry["tile_types"][other_idx]))

    # Aligned output: dx dy tile_type wire.
    for entry in outdata:
        print(
            "%3d %3d  %-*s  %s" %
            (entry[0], entry[1], max_tiletype_len, entry[2], entry[3]))