Example #1
import argparse
import datetime
import json
import multiprocessing
import os

import prjxray.lib
import prjxray.node_lookup

# reduce_tile, annotate_speed_model and extract_numbers are helpers defined
# elsewhere in this prjxray script.


def main():
    parser = argparse.ArgumentParser(
        description=
        "Reduces raw database dump into prototype tiles, grid, and connections."
    )
    parser.add_argument('--root_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--ignore_cache', action='store_true')

    args = parser.parse_args()

    print('{} Reading root.csv'.format(datetime.datetime.now()))
    tiles, nodes = prjxray.lib.read_root_csv(args.root_dir)

    print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
    database_file = os.path.join(args.output_dir, 'nodes.db')
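    # Reuse the cached nodes.db unless --ignore_cache was given; otherwise
    # (re)build the node<->wire database from the raw dump.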
    if os.path.exists(database_file) and not args.ignore_cache:
        node_lookup = prjxray.node_lookup.NodeLookup(database_file)
    else:
        node_lookup = prjxray.node_lookup.NodeLookup(database_file)
        node_lookup.build_database(nodes=nodes, tiles=tiles)

    site_types = {}

    processes = multiprocessing.cpu_count()
    print('Running {} processes'.format(processes))
    pool = multiprocessing.Pool(processes=processes)

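    # One tile_type_<name>.json is written per tile type; an existing file is
    # treated as a cached result and skipped.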
    for tile_type in sorted(tiles.keys()):
        #for tile_type in ['CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R', 'INT_L', 'INT_R']:
        tile_type_file = os.path.join(args.output_dir,
                                      'tile_type_{}.json'.format(tile_type))
        site_types = {}
        if os.path.exists(tile_type_file):
            print('{} Skip reduced tile for {}'.format(datetime.datetime.now(),
                                                       tile_type))
            continue
        print('{} Generating reduced tile for {}'.format(
            datetime.datetime.now(), tile_type))
        reduced_tile = reduce_tile(pool, site_types, tile_type,
                                   tiles[tile_type], database_file)

        annotate_speed_model(tile_type, reduced_tile, args.root_dir)

        for site_type in site_types:
            with open(
                    os.path.join(
                        args.output_dir,
                        'tile_type_{}_site_type_{}.json'.format(
                            tile_type, site_types[site_type]['type'])),
                    'w') as f:
                json.dump(site_types[site_type], f, indent=2, sort_keys=True)

        reduced_tile['sites'] = sorted(
            reduced_tile['sites'],
            key=lambda site: extract_numbers('{}_{}'.format(
                site['name'], site['prefix'])))

        with open(tile_type_file, 'w') as f:
            json.dump(reduced_tile, f, indent=2, sort_keys=True)
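
A usage sketch for the entry point above; the script name and directory paths
are placeholders, not taken from the source.

import sys

sys.argv = [
    'reduce_tile_types.py',  # hypothetical script name
    '--root_dir', 'path/to/root_dir',
    '--output_dir', 'path/to/output_dir',
    '--ignore_cache',        # optional: force nodes.db to be rebuilt
]
main()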
Example #2
import argparse
import csv
import sys

import fasm

# Database, Roi, get_db_root, get_part, xjson, set_port_wires and
# extract_numbers are provided by the enclosing prjxray script; their exact
# import paths are not shown here.


def main():
    parser = argparse.ArgumentParser(
        description=
        "Creates design.json from output of ROI generation tcl script.")
    parser.add_argument('--design_txt', required=True)
    parser.add_argument('--design_info_txt', required=True)
    parser.add_argument('--pad_wires', required=True)
    parser.add_argument('--design_fasm', required=True)

    args = parser.parse_args()

    design_json = {}
    design_json['ports'] = []
    design_json['info'] = {}
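    # Classify each port listed in design.txt by its net name:
    # dout[...] -> out, din[...] -> in, clk* -> clk.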
    with open(args.design_txt) as f:
        for d in csv.DictReader(f, delimiter=' '):
            if d['name'].startswith('dout['):
                d['type'] = 'out'
            elif d['name'].startswith('din['):
                d['type'] = 'in'
            elif d['name'].startswith('clk'):
                d['type'] = 'clk'
            else:
                assert False, d

            design_json['ports'].append(d)

    with open(args.design_info_txt) as f:
        for l in f:
            name, value = l.strip().split(' = ')

            design_json['info'][name] = int(value)

    db = Database(get_db_root(), get_part())
    grid = db.grid()

    roi = Roi(
        db=db,
        x1=design_json['info']['GRID_X_MIN'],
        y1=design_json['info']['GRID_Y_MIN'],
        x2=design_json['info']['GRID_X_MAX'],
        y2=design_json['info']['GRID_Y_MAX'],
    )

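    # For every pad, keep only the wires that fall outside the ROI and attach
    # them to the matching port entry.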
    with open(args.pad_wires) as f:
        for l in f:
            parts = l.strip().split(' ')
            name = parts[0]
            pin = parts[1]
            wires = parts[2:]

            wires_outside_roi = []

            for wire in wires:
                tile = wire.split('/')[0]

                loc = grid.loc_of_tilename(tile)

                if not roi.tile_in_roi(loc):
                    wires_outside_roi.append(wire)

            set_port_wires(design_json['ports'], name, pin, wires_outside_roi)

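    # Record the base address of every configuration frame touched by a tile
    # inside the ROI.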
    frames_in_use = set()
    for tile in roi.gen_tiles():
        gridinfo = grid.gridinfo_at_tilename(tile)

        for bit in gridinfo.bits.values():
            frames_in_use.add(bit.base_address)

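    # FASM features set outside the ROI are collected as required features;
    # any unknown_segment annotation aborts with its base address.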
    required_features = []
    for fasm_line in fasm.parse_fasm_filename(args.design_fasm):
        if fasm_line.annotations:
            for annotation in fasm_line.annotations:
                if annotation.name != 'unknown_segment':
                    continue

                unknown_base_address = int(annotation.value, 0)

                assert False, "Found unknown bit in base address 0x{:08x}".format(
                    unknown_base_address)

        if not fasm_line.set_feature:
            continue

        tile = fasm_line.set_feature.feature.split('.')[0]

        loc = grid.loc_of_tilename(tile)
        gridinfo = grid.gridinfo_at_tilename(tile)

        not_in_roi = not roi.tile_in_roi(loc)

        if not_in_roi:
            required_features.append(fasm_line)

    design_json['required_features'] = sorted(
        fasm.fasm_tuple_to_string(required_features,
                                  canonical=True).split('\n'),
        key=extract_numbers)

    design_json['ports'].sort(key=lambda x: extract_numbers(x['name']))

    xjson.pprint(sys.stdout, design_json)
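
A usage sketch for the entry point above; the script name and file names are
placeholders, not taken from the source.

sys.argv = [
    'create_design_json.py',  # hypothetical script name
    '--design_txt', 'design.txt',
    '--design_info_txt', 'design_info.txt',
    '--pad_wires', 'pad_wires.txt',
    '--design_fasm', 'design.fasm',
]
main()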
Example #3
import argparse
import datetime
import json
import multiprocessing
import os
import pickle
import sys

import progressbar

# lib and util are the prjxray helper modules (import paths assumed);
# generate_tilegrid, generate_tileconn, connect_wires, read_json5 and
# extract_numbers are defined elsewhere in this script.


def main():
    parser = argparse.ArgumentParser(
        description=
        "Reduces raw database dump into prototype tiles, grid, and connections."
    )
    parser.add_argument('--root_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--verify_only', action='store_true')
    parser.add_argument('--ignored_wires')
    parser.add_argument('--max_cpu', type=int, default=10)

    args = parser.parse_args()

    tiles, nodes = lib.read_root_csv(args.root_dir)

    processes = min(multiprocessing.cpu_count(), args.max_cpu)
    print('{} Running {} processes'.format(datetime.datetime.now(), processes))
    pool = multiprocessing.Pool(processes=processes)

    node_tree_file = os.path.join(args.output_dir, 'node_tree.json')

    tileconn_file = os.path.join(args.output_dir, 'tileconn.json')
    wire_map_file = os.path.join(args.output_dir, 'wiremap.pickle')

    print('{} Reading tilegrid'.format(datetime.datetime.now()))
    with open(os.path.join(util.get_db_root(), util.get_fabric(),
                           'tilegrid.json')) as f:
        grid = json.load(f)

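    # Normal run: regenerate the tile map and tile connections, cross-check
    # them against the 005-tilegrid result, and write tileconn.json.
    # With --verify_only, previously written artifacts are loaded instead.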
    if not args.verify_only:
        print('{} Creating tile map'.format(datetime.datetime.now()))
        grid2, wire_map = generate_tilegrid(pool, tiles)

        # Make sure tilegrid from 005-tilegrid matches tilegrid from
        # generate_tilegrid.
        db_grid_keys = set(grid.keys())
        generated_grid_keys = set(grid2.keys())
        assert db_grid_keys == generated_grid_keys, (
            db_grid_keys ^ generated_grid_keys)

        for tile in db_grid_keys:
            for k in grid2[tile]:
                if k == 'ignored':
                    continue

                if k == 'sites' and grid2[tile]['ignored']:
                    continue

                assert k in grid[tile], k
                assert grid[tile][k] == grid2[tile][k], (
                    tile, k, grid[tile][k], grid2[tile][k])

        with open(wire_map_file, 'wb') as f:
            pickle.dump(wire_map, f)

        print('{} Reading node tree'.format(datetime.datetime.now()))
        with open(node_tree_file) as f:
            node_tree = json.load(f)

        print('{} Creating tile connections'.format(datetime.datetime.now()))
        tileconn, raw_node_data = generate_tileconn(
            pool, node_tree, nodes, wire_map, grid)

        for data in tileconn:
            data['wire_pairs'] = tuple(
                sorted(
                    data['wire_pairs'],
                    key=lambda x: tuple(extract_numbers(s) for s in x)))

        tileconn = tuple(
            sorted(
                tileconn, key=lambda x: (x['tile_types'], x['grid_deltas'])))

        print('{} Writing tileconn'.format(datetime.datetime.now()))
        with open(tileconn_file, 'w') as f:
            json.dump(tileconn, f, indent=2, sort_keys=True)
    else:
        with open(wire_map_file, 'rb') as f:
            wire_map = pickle.load(f)

        print('{} Reading raw_node_data'.format(datetime.datetime.now()))
        raw_node_data = []
        with progressbar.ProgressBar(max_value=len(nodes)) as bar:
            for idx, node in enumerate(pool.imap_unordered(
                    read_json5,
                    nodes,
                    chunksize=20,
            )):
                bar.update(idx)
                raw_node_data.append(node)
                bar.update(idx + 1)

        print('{} Reading tileconn'.format(datetime.datetime.now()))
        with open(tileconn_file) as f:
            tileconn = json.load(f)

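    # On --verify_only runs an existing wire_nodes pickle is reused; otherwise
    # the wires are connected now and the result is cached for later runs.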
    wire_nodes_file = os.path.join(args.output_dir, 'wire_nodes.pickle')
    if os.path.exists(wire_nodes_file) and args.verify_only:
        with open(wire_nodes_file, 'rb') as f:
            wire_nodes = pickle.load(f)
    else:
        print(
            "{} Connecting wires to verify tileconn".format(
                datetime.datetime.now()))
        wire_nodes = connect_wires(grid, tileconn, wire_map)
        with open(wire_nodes_file, 'wb') as f:
            pickle.dump(wire_nodes, f)

    print('{} Verifying tileconn'.format(datetime.datetime.now()))
    error_nodes = []
    lib.verify_nodes(
        [
            (node['node'], tuple(wire['wire']
                                 for wire in node['wires']))
            for node in raw_node_data
        ], wire_nodes, error_nodes)

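    # Mismatched nodes are written to error_nodes.json; errors covered by the
    # --ignored_wires list are reported but do not fail the run.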
    if len(error_nodes) > 0:
        error_nodes_file = os.path.join(args.output_dir, 'error_nodes.json')
        with open(error_nodes_file, 'w') as f:
            json.dump(error_nodes, f, indent=2, sort_keys=True)

        ignored_wires = set()
        ignored_wires_file = args.ignored_wires
        # --ignored_wires is optional, so guard against it being unset.
        if ignored_wires_file and os.path.exists(ignored_wires_file):
            with open(ignored_wires_file) as f:
                ignored_wires = set(l.strip() for l in f)

        if not lib.check_errors(error_nodes, ignored_wires):
            print(
                '{} errors detected, see {} for details.'.format(
                    len(error_nodes), error_nodes_file))
            sys.exit(1)
        else:
            print(
                '{} errors ignored because of {}\nSee {} for details.'.format(
                    len(error_nodes), ignored_wires_file, error_nodes_file))
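
A usage sketch for the entry point above; the script name and paths are
placeholders, not taken from the source.

sys.argv = [
    'generate_tileconn.py',  # hypothetical script name
    '--root_dir', 'path/to/root_dir',
    '--output_dir', 'path/to/output_dir',
    '--max_cpu', '4',
    # add --verify_only (and optionally --ignored_wires ignored_wires.txt)
    # to re-check an existing tileconn.json instead of regenerating it
]
main()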