Example #1
def bits_to_fasm(db_root, part, bits_file, verbose, canonical,
                 suppress_zero_features):
    """ Disassemble a .bits file into FASM and print it to stdout. """
    db = Database(db_root, part)
    grid = db.grid()
    disassembler = fasm_disassembler.FasmDisassembler(db)

    with open(bits_file) as f:
        bitdata = bitstream.load_bitdata(f, bitstream.WORD_SIZE_BITS)

    model = fasm.output.merge_and_sort(
        disassembler.find_features_in_bitstream(bitdata, verbose=verbose),
        zero_function=disassembler.is_zero_feature,
        sort_key=grid.tile_key,
    )

    if suppress_zero_features:
        # Keep non-feature lines (comments/annotations) as-is and keep any
        # feature whose value is not all zeros; drop zero-valued features.
        output_lines = []

        for line in model:
            if line.set_feature is None:
                output_lines.append(line)
            elif not disassembler.is_zero_feature(line.set_feature.feature):
                output_lines.append(line)

        print(fasm.fasm_tuple_to_string(output_lines, canonical=canonical),
              end='')
    else:
        print(fasm.fasm_tuple_to_string(model, canonical=canonical), end='')
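None of these snippets show their imports. Assuming they come from the prjxray utilities together with the fasm library, the preamble for Example #1 most likely looks roughly like the following (a best guess at the module paths, not copied from the original source):

# Assumed imports for Example #1; module paths are an educated guess based on
# the prjxray project layout and the fasm library.
import fasm
import fasm.output
from prjxray import bitstream
from prjxray import fasm_disassembler
from prjxray.db import Database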
Example #2
def bit2fasm(db_root, db, grid, bit_file, fasm_file, bitread, part):
    """ Convert bitstream to FASM file. """
    part_yaml = os.path.join(db_root, '{}.yaml'.format(part))
    with tempfile.NamedTemporaryFile() as f:
        bits_file = f.name
        # Invoke the external bitread tool to convert the .bit bitstream into
        # a textual .bits file that bitstream.load_bitdata can read back.
        subprocess.check_output(
            '{} --part_file {} -o {} -z -y {}'.format(
                bitread, part_yaml, bits_file, bit_file
            ),
            shell=True
        )

        disassembler = fasm_disassembler.FasmDisassembler(db)

        with open(bits_file) as f:
            bitdata = bitstream.load_bitdata(f)

    model = fasm.output.merge_and_sort(
        disassembler.find_features_in_bitstream(bitdata, verbose=True),
        zero_function=disassembler.is_zero_feature,
        sort_key=grid.tile_key,
    )

    with open(fasm_file, 'w') as f:
        print(
            fasm.fasm_tuple_to_string(model, canonical=False), end='', file=f
        )
Example #3
def patch_fasm_with_mem(initfile, fasmfile, outfile, width, depth):
    # 'memfasm.fasm' is expected to have been generated from `initfile`
    # before this function runs; that step is not part of this snippet.
    fasm_tuples = fasm.parse_fasm_filename(fasmfile)
    mem_tuples = fasm.parse_fasm_filename('memfasm.fasm')
    merged_tuples = merge_tuples(fasm_tuples, mem_tuples)
    with open(outfile, 'w') as out:
        out.write(fasm.fasm_tuple_to_string(merged_tuples))
    # Remove the intermediate memory FASM file (`import os` would normally
    # live at module level).
    import os
    os.remove('memfasm.fasm')
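patch_fasm_with_mem (like main() in Example #6 below) relies on a merge_tuples helper defined elsewhere in its repository, and its exact behaviour is not shown here. A minimal sketch of what such a merge might do, purely as an assumption, is to chain both tuple streams and let fasm.output.merge_and_sort fold them into one ordered set of lines:

import itertools

import fasm.output

def merge_tuples(cleared_tups, mem_tups):
    # Hypothetical stand-in for the repository's real merge_tuples helper:
    # concatenate both FASM tuple streams and let the fasm library merge and
    # sort them into a single consistent sequence of lines.
    return fasm.output.merge_and_sort(itertools.chain(cleared_tups, mem_tups))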
Example #4
def bits_to_fasm(db_root, bits_file, verbose, canonical):
    disassembler = fasm_disassembler.FasmDisassembler(db.Database(db_root))

    with open(bits_file) as f:
        bitdata = bitstream.load_bitdata(f)

    print(
        fasm.fasm_tuple_to_string(
            disassembler.find_features_in_bitstream(bitdata, verbose=verbose),
            canonical=canonical))
Example #5
def bits_to_fasm(db_root, bits_file, verbose, canonical):
    db = Database(db_root)
    grid = db.grid()
    disassembler = fasm_disassembler.FasmDisassembler(db)

    with open(bits_file) as f:
        bitdata = bitstream.load_bitdata(f)

    model = fasm.output.merge_and_sort(
        disassembler.find_features_in_bitstream(bitdata, verbose=verbose),
        zero_function=disassembler.is_zero_feature,
        sort_key=grid.tile_key,
    )

    print(fasm.fasm_tuple_to_string(model, canonical=canonical), end='')
Example #6
def main():
    myargs = parse_args()
    assert myargs.fasm is not None
    assert myargs.init is not None
    assert myargs.width is not None
    assert myargs.depth is not None
    fasm_to_patch = myargs.fasm
    new_init = myargs.init
    width = int(myargs.width)
    depth = int(myargs.depth)
    outfile = myargs.outfile

    # init_data = read_meminit(fname=new_init)
    fasm_tups = read_fasm(fasm_to_patch)
    # cleared_tups = fasmread.clear_init(fasm_tups)
    # in_use_tiles = fasmread.get_in_use_tiles(fasm_tups)

    # memfasm_tups = initdata_to_memfasm(init_data=init_data, tileorder=in_use_tiles,
    #                                    width=width, depth=depth, write_per_block=1, memfasm_name=f'{DIRECTORY}/mem.fasm')
    # merged = merge_tuples(cleared_tups=cleared_tups, mem_tups=memfasm_tups)

    # with open(outfile, 'w+') as out:
    #     out.write(fasm.fasm_tuple_to_string(merged))

    # print(f'Patched {outfile} successfully (probably)')
    # mem_from_reinit = f'{"/".join(new_init.split("/")[0:-1])}/init_frm_reinit.txt'
    # os.system(
    #     f'python3 ../meminit/fasmchange.py -extract_mem -infile {outfile} -outfile {mem_from_reinit}')
    # print(f'Memory extracted from {outfile} to {mem_from_reinit}')
    # print()

    # sorted_tiles = fasmread.get_sorted_tiledata(fasm_tups)
    # print()
    # rw_widths = fasmread.get_rw_widths(sorted_tiles)
    # for tile, width in rw_widths.items():
    #     print(f'{tile}: {width}')
    # print()
    # print()

    memfasm = initfile_to_memfasm(infile=new_init,
                                  fasm_tups=fasm_tups,
                                  memfasm_name=f'{DIRECTORY}/mem.fasm',
                                  width=width,
                                  depth=depth)
    cleared_tups = fasmread.clear_init(fasm_tups)
    merged = merge_tuples(cleared_tups=cleared_tups, mem_tups=memfasm)
    with open(outfile, 'w+') as out:
        out.write(fasm.fasm_tuple_to_string(merged))
Exemple #7
0
    def test_one_line_feature(self):
        result = list(fasm.parse_fasm_filename(example('feature_only.fasm')))
        self.assertEqual(result, [
            fasm.FasmLine(
                set_feature=fasm.SetFasmFeature(
                    feature='EXAMPLE_FEATURE.X0.Y0.BLAH',
                    start=None,
                    end=None,
                    value=1,
                    value_format=None,
                ),
                annotations=None,
                comment=None,
            )
        ])

        self.assertEqual(fasm.fasm_tuple_to_string(result),
                         'EXAMPLE_FEATURE.X0.Y0.BLAH\n')
        check_round_trip(self, result)
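Judging from the expected tuple and the rendered string, the example('feature_only.fasm') fixture evidently contains just the single line EXAMPLE_FEATURE.X0.Y0.BLAH; in FASM, naming a feature with no explicit value sets it to 1, which is why the parsed SetFasmFeature carries value=1 with no value_format.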
Example #8
def main():
    parser = argparse.ArgumentParser('FASM tool')
    parser.add_argument('file', help='Filename to process')
    parser.add_argument('--canonical',
                        action='store_true',
                        help='Return canonical form of FASM.')
    parser.add_argument(
        '--parser',
        type=nullable_string,
        help='Select FASM parser to use. '
        'Default is to choose the best implementation available.')

    args = parser.parse_args()

    try:
        fasm_parser = get_fasm_parser(args.parser)
        fasm_tuples = fasm_parser.parse_fasm_filename(args.file)
        print(fasm_tuple_to_string(fasm_tuples, args.canonical))
    except Exception as e:
        print('Error: ' + str(e))
Example #9
def run(db_root, part, fasm_file, canonical):
    print(
        fasm.fasm_tuple_to_string(
            process_fasm(db_root, part, fasm_file, canonical),
            canonical=canonical))
Example #10
def print_tiledata(datatups, outfile):
    with open(outfile, 'w') as f:
        datatups = fasm.output.merge_and_sort(datatups)
        f.write(fasm.fasm_tuple_to_string(datatups))
Example #11
def run():
    """
    Main.
    """

    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("--design",
                        type=str,
                        required=True,
                        help="Design JSON file")
    parser.add_argument("--fasm",
                        type=str,
                        required=True,
                        help="Decoded fasm file")
    parser.add_argument("-o",
                        type=str,
                        default="results.csv",
                        help="Output CSV file")
    parser.add_argument("-j", type=str, default=None, help="Output JSON file")

    args = parser.parse_args()

    # Load IOB features
    features = load_iob_segbits()

    # Load the design data
    with open(args.design, "r") as fp:
        design = json.load(fp)

    # Load disassembled fasm
    fasm_tuples = fasm.parse_fasm_filename(args.fasm)
    set_features = fasm.fasm_tuple_to_string(fasm_tuples).split("\n")

    # Correlate features for given IOB types
    results = []
    for region in design:
        result = dict(region["iosettings"])

        for l in ["input", "output", "inout", "unused_sites"]:

            # TODO: Check if this is true eg. for all unused sites, not just
            # one random site.
            tile, site = random.choice(region[l]).split(".")
            matches = correlate_features(features, tile, site, set_features)

            result[l] = matches

        results.append(result)

    # Save results
    if args.j:
        with open(args.j, "w") as fp:
            json.dump(results, fp, indent=2, sort_keys=True)

    # Save results to CSV
    with open(args.o, "w") as fp:
        csv_data = defaultdict(lambda: {})

        # Collect data
        for result in results:
            iostandard = result["iostandard"]
            drive = result["drive"]
            slew = result["slew"]

            if drive is None:
                drive = "_FIXED"

            iosettings = "{}.I{}.{}".format(iostandard, drive, slew)

            is_diff = "DIFF" in iostandard

            for feature in sorted(features):
                I = [f[1] for f in result["input"] if f[0] == feature and f[1]]
                O = [
                    f[1] for f in result["output"] if f[0] == feature and f[1]
                ]
                T = [f[1] for f in result["inout"] if f[0] == feature and f[1]]
                U = [
                    f[1] for f in result["unused_sites"]
                    if f[0] == feature and f[1]
                ]

                s = "".join([
                    "I" if len(I) > 0 else "",
                    "O" if len(O) > 0 else "",
                    "T" if len(T) > 0 else "",
                    "U" if len(U) > 0 else "",
                ])

                csv_data[iosettings][feature] = s

        # Write header
        line = ["iosettings"] + sorted(features)
        fp.write(",".join(line) + "\n")

        # Write data
        for iosettings in sorted(csv_data.keys()):
            data = csv_data[iosettings]
            line = [iosettings] + [
                data[feature] for feature in sorted(features)
            ]

            fp.write(",".join(line) + "\n")
Example #12
def main():
    parser = argparse.ArgumentParser(
        description=
        "Creates design.json from output of ROI generation tcl script.")
    parser.add_argument('--design_txt', required=True)
    parser.add_argument('--design_info_txt', required=True)
    parser.add_argument('--pad_wires', required=True)
    parser.add_argument('--design_fasm', required=True)

    args = parser.parse_args()

    design_json = {}
    design_json['ports'] = []
    design_json['info'] = {}
    with open(args.design_txt) as f:
        for d in csv.DictReader(f, delimiter=' '):
            if d['name'].startswith('dout['):
                d['type'] = 'out'
            elif d['name'].startswith('din['):
                d['type'] = 'in'
            elif d['name'].startswith('clk'):
                d['type'] = 'clk'
            else:
                assert False, d

            design_json['ports'].append(d)

    with open(args.design_info_txt) as f:
        for l in f:
            name, value = l.strip().split(' = ')

            design_json['info'][name] = int(value)

    db = Database(get_db_root(), get_part())
    grid = db.grid()

    roi = Roi(
        db=db,
        x1=design_json['info']['GRID_X_MIN'],
        y1=design_json['info']['GRID_Y_MIN'],
        x2=design_json['info']['GRID_X_MAX'],
        y2=design_json['info']['GRID_Y_MAX'],
    )

    with open(args.pad_wires) as f:
        for l in f:
            parts = l.strip().split(' ')
            name = parts[0]
            pin = parts[1]
            wires = parts[2:]

            wires_outside_roi = []

            for wire in wires:
                tile = wire.split('/')[0]

                loc = grid.loc_of_tilename(tile)

                if not roi.tile_in_roi(loc):
                    wires_outside_roi.append(wire)

            set_port_wires(design_json['ports'], name, pin, wires_outside_roi)

    frames_in_use = set()
    for tile in roi.gen_tiles():
        gridinfo = grid.gridinfo_at_tilename(tile)

        for bit in gridinfo.bits.values():
            frames_in_use.add(bit.base_address)

    required_features = []
    for fasm_line in fasm.parse_fasm_filename(args.design_fasm):
        if fasm_line.annotations:
            for annotation in fasm_line.annotations:
                if annotation.name != 'unknown_segment':
                    continue

                unknown_base_address = int(annotation.value, 0)

                assert False, "Found unknown bit in base address 0x{:08x}".format(
                    unknown_base_address)

        if not fasm_line.set_feature:
            continue

        tile = fasm_line.set_feature.feature.split('.')[0]

        loc = grid.loc_of_tilename(tile)
        gridinfo = grid.gridinfo_at_tilename(tile)

        not_in_roi = not roi.tile_in_roi(loc)

        if not_in_roi:
            required_features.append(fasm_line)

    design_json['required_features'] = sorted(
        fasm.fasm_tuple_to_string(required_features,
                                  canonical=True).split('\n'),
        key=extract_numbers)

    design_json['ports'].sort(key=lambda x: extract_numbers(x['name']))

    xjson.pprint(sys.stdout, design_json)
Example #13
def check_round_trip(test, result):
    s = fasm.fasm_tuple_to_string(result)
    test.assertEqual(list(fasm.parse_fasm_string(s)), result)
Example #14
def write_fasm(outfile, merged_tups):
    with open(outfile, 'w+') as out:
        out.write(fasm.fasm_tuple_to_string(merged_tups))
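Taken together, the examples reduce to one pattern: obtain an iterable of FASM tuples (either from a bitstream disassembler or from fasm.parse_fasm_filename), optionally pass it through fasm.output.merge_and_sort, and serialise it with fasm.fasm_tuple_to_string. A minimal self-contained sketch of that pipeline using only the fasm library (the file paths and the function name are placeholders, not taken from any of the sources above):

import fasm
import fasm.output

def normalize_fasm(in_path, out_path, canonical=False):
    # Parse an existing FASM file into tuples, merge and sort them, then
    # write the (optionally canonical) text form back out.
    tuples = fasm.parse_fasm_filename(in_path)
    merged = fasm.output.merge_and_sort(tuples)
    with open(out_path, 'w') as f:
        f.write(fasm.fasm_tuple_to_string(merged, canonical=canonical))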