예제 #1
0
def run(json_in_fn, json_out_fn, verbose=False):
    """Load a tilegrid database, propagate bit/frame info across tiles, save it.

    Args:
        json_in_fn: Path to the input tilegrid JSON database.
        json_out_fn: Path the updated database is written to.
        verbose: Passed through to propagate_INT_lr_bits for extra logging.
    """
    # Load input files; the context manager closes the handle promptly
    # (the original leaked it via json.load(open(...))).
    with open(json_in_fn, "r") as f:
        database = json.load(f)
    tiles_by_grid = make_tiles_by_grid(database)

    tile_frames_map = localutil.TileFrames()
    propagate_INT_lr_bits(database,
                          tiles_by_grid,
                          tile_frames_map,
                          verbose=verbose)
    propagate_INT_bits_in_column(database, tiles_by_grid, tile_frames_map)
    # Both interface columns are handled by the same propagation routine.
    for interface_tile in ("GTP_INT_INTERFACE", "PCIE_INT_INTERFACE"):
        propagate_INT_INTERFACE_bits_in_column(database, tiles_by_grid,
                                               interface_tile,
                                               tile_frames_map)
    propagate_rebuf(database, tiles_by_grid)
    propagate_IOB_SING(database, tiles_by_grid)
    propagate_IOI_SING(database, tiles_by_grid)
    propagate_IOI_Y9(database, tiles_by_grid)
    alias_HCLKs(database)

    # Save; closing on exit guarantees the output is flushed to disk.
    with open(json_out_fn, "w") as f:
        xjson.pprint(f, database)
예제 #2
0
def run(tiles_fn, json_fn, verbose=False):
    """Build a database from a tiles file and write it out as JSON.

    Args:
        tiles_fn: Path to the input tiles file.
        json_fn: Output JSON path.
        verbose: Unused; kept for interface compatibility with sibling scripts.
    """
    # Load input files
    tiles = load_tiles(tiles_fn)

    # Index input
    database = make_database(tiles)

    # Save; the context manager flushes and closes the output file
    # (the original left the handle open).
    with open(json_fn, 'w') as f:
        xjson.pprint(f, database)
예제 #3
0
def run(json_in_fn, json_out_fn, verbose=False):
    """Load a tilegrid database, propagate INT bits, and write it back out.

    Args:
        json_in_fn: Path to the input tilegrid JSON database.
        json_out_fn: Path the updated database is written to.
        verbose: Passed through to propagate_INT_lr_bits for extra logging.
    """
    # Load input files; the context manager closes the handle promptly
    # (the original leaked it via json.load(open(...))).
    with open(json_in_fn, "r") as f:
        database = json.load(f)
    tiles_by_grid = make_tiles_by_grid(database)

    propagate_INT_lr_bits(database, tiles_by_grid, verbose=verbose)
    propagate_INT_bits_in_column(database, tiles_by_grid)

    # Save; closing on exit guarantees the output is flushed to disk.
    with open(json_out_fn, "w") as f:
        xjson.pprint(f, database)
예제 #4
0
def run(tiles_fn, pin_func_fn, json_fn, verbose=False):
    """Build a database from tiles and pin functions, then write it as JSON.

    Args:
        tiles_fn: Path to the input tiles file.
        pin_func_fn: Path to the pin-function (site map) file.
        json_fn: Output JSON path.
        verbose: Unused; kept for interface compatibility with sibling scripts.
    """
    # Load input files
    tiles = load_tiles(tiles_fn)

    # Read site map
    pin_func = load_pin_functions(pin_func_fn)

    # Index input
    database = make_database(tiles, pin_func)

    # Save; the context manager flushes and closes the output file
    # (the original left the handle open).
    with open(json_fn, 'w') as f:
        xjson.pprint(f, database)
예제 #5
0
def sort_json(filename):
    """Sort (canonicalize) a XXX.json file in place.

    Args:
        filename: Path of the JSON file to rewrite.

    Returns:
        True on success, False if the file is not valid JSON.
    """
    try:
        # Read inside a context manager so the handle is closed even if
        # parsing fails (the original leaked it via json.load(open(...))).
        with open(filename) as f:
            d = json.load(f)
    except json.JSONDecodeError as e:
        print(e)
        return False

    with open(filename, 'w') as f:
        xjson.pprint(f, d)

    return True
예제 #6
0
def main():
    """Reduce a raw database dump into per-tile-type and per-site-type JSON.

    For each tile type not already reduced, runs reduce_tile over a process
    pool and writes one tile_type_*.json plus one
    tile_type_*_site_type_*.json per discovered site type.
    """
    parser = argparse.ArgumentParser(
        description=
        "Reduces raw database dump into prototype tiles, grid, and connections."
    )
    parser.add_argument('--root_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--ignore_cache', action='store_true')

    args = parser.parse_args()

    print('{} Reading root.csv'.format(datetime.datetime.now()))
    tiles, nodes = prjxray.lib.read_root_csv(args.root_dir)

    print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
    node_lookup = prjxray.lib.NodeLookup()
    node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
    # Reuse the cached lookup pickle unless the user opted out of the cache.
    if os.path.exists(node_lookup_file) and not args.ignore_cache:
        node_lookup.load_from_file(node_lookup_file)
    else:
        node_lookup.load_from_root_csv(nodes)
        node_lookup.save_to_file(node_lookup_file)

    processes = min(multiprocessing.cpu_count(), 10)
    print('Running {} processes'.format(processes))
    # Context manager terminates the worker processes on exit; the original
    # also had a dead module-scope `site_types = {}` here, shadowed by the
    # per-iteration assignment below.
    with multiprocessing.Pool(processes=processes) as pool:
        for tile_type in sorted(tiles.keys()):
            tile_type_file = os.path.join(
                args.output_dir, 'tile_type_{}.json'.format(tile_type))
            # Fresh accumulator per tile type; reduce_tile fills it in.
            site_types = {}
            if os.path.exists(tile_type_file):
                print('{} Skip reduced tile for {}'.format(
                    datetime.datetime.now(), tile_type))
                continue
            print('{} Generating reduced tile for {}'.format(
                datetime.datetime.now(), tile_type))
            reduced_tile = reduce_tile(pool, site_types, tile_type,
                                       tiles[tile_type], node_lookup)
            for site_type in site_types:
                with open(
                        os.path.join(
                            args.output_dir,
                            'tile_type_{}_site_type_{}.json'.format(
                                tile_type, site_types[site_type]['type'])),
                        'w') as f:
                    xjson.pprint(f, site_types[site_type])

            with open(tile_type_file, 'w') as f:
                xjson.pprint(f, reduced_tile)
예제 #7
0
def run(json_in_fn, json_out_fn, verbose=False):
    """Load a tilegrid database, propagate column/row bits, and save it.

    Args:
        json_in_fn: Path to the input tilegrid JSON database.
        json_out_fn: Path the updated database is written to.
        verbose: Unused; kept for interface compatibility with sibling scripts.
    """
    # Load input files; the context manager closes the handle promptly
    # (the original leaked it via json.load(open(...))).
    with open(json_in_fn, "r") as f:
        database = json.load(f)
    tiles_by_grid = make_tiles_by_grid(database)

    tile_frames_map = localutil.TileFrames()
    propagate_INT_bits_in_column(database, tiles_by_grid, tile_frames_map)
    propagate_PS8_INTF_bits_in_column(database, tiles_by_grid, tile_frames_map)
    propagate_RCLK_bits_in_row(database, tiles_by_grid, tile_frames_map)
    propagate_XIPHY_bits_in_column(database, tiles_by_grid, tile_frames_map)

    # Save; closing on exit guarantees the output is flushed to disk.
    with open(json_out_fn, "w") as f:
        xjson.pprint(f, database)
예제 #8
0
def main():
    """Merge per-tile site-type JSON files into one prototype per site type.

    Scans output_dir for tile_type_*_site_type_*.json files, groups them by
    site type, verifies each instance matches the first one seen, and writes
    a single site_type_*.json prototype per group.
    """
    parser = argparse.ArgumentParser(
        description="Reduces per tile site types to generic site types.")
    parser.add_argument('--output_dir', required=True)

    args = parser.parse_args()

    # Raw string: '\.' in a plain literal is an invalid escape sequence
    # (SyntaxWarning on modern Python).
    site_type_re = re.compile(r'^tile_type_(.+)_site_type_(.+)\.json$')
    site_types = {}
    for path in os.listdir(args.output_dir):
        match = site_type_re.fullmatch(path)
        if match is None:
            continue

        # Group 2 is the site type; group 1 (the tile type) is unused here.
        site_types.setdefault(match.group(2), []).append(path)

    for site_type, instances in site_types.items():
        proto_site_type = None
        for instance in instances:
            with open(os.path.join(args.output_dir, instance)) as f:
                instance_site_type = json.load(f)

                # 'index_in_site' varies between instances, so strip it
                # before comparing against the prototype.
                for site_pin in instance_site_type['site_pins'].values():
                    if 'index_in_site' in site_pin:
                        del site_pin['index_in_site']

            if proto_site_type is None:
                proto_site_type = instance_site_type
            else:
                prjxray.lib.compare_prototype_site(
                    proto_site_type,
                    instance_site_type,
                )

        with open(
                os.path.join(args.output_dir,
                             'site_type_{}.json'.format(site_type)), 'w') as f:
            xjson.pprint(f, proto_site_type)
예제 #9
0
def main():
    """Normalize site names and coordinates in a site_pins JSON from stdin.

    Reads json5 from stdin, rewrites each site name into an origin-relative
    X{}Y{} form, attaches wires to connected site pins, warns about
    disconnected pins, and pretty-prints the result to stdout.
    """
    site_pins = json5.load(sys.stdin)

    output_site_pins = {}
    output_site_pins["tile_type"] = site_pins["tile_type"]
    # Deep copy so the mutations below never alias the parsed input.
    output_site_pins["sites"] = copy.deepcopy(site_pins["sites"])

    site_pin_to_wires = create_site_pin_to_wire_maps(
        site_pins['tile_name'], site_pins['nodes'])
    min_x_coord, min_y_coord = find_origin_coordinate(site_pins['sites'])

    for site in output_site_pins['sites']:
        orig_site_name = site['name']
        coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)

        x_coord = int(coordinate.group(2))
        y_coord = int(coordinate.group(3))
        # Rebase coordinates so the lowest-numbered site becomes X0Y0.
        site['name'] = 'X{}Y{}'.format(
            x_coord - min_x_coord, y_coord - min_y_coord)
        site['prefix'] = coordinate.group(1)
        site['x_coord'] = x_coord - min_x_coord
        site['y_coord'] = y_coord - min_y_coord

        for site_pin in site['site_pins']:
            assert site_pin['name'].startswith(orig_site_name + '/')
            if site_pin['name'] in site_pin_to_wires:
                site_pin['wire'] = site_pin_to_wires[site_pin['name']]
            else:
                # Typo fix in the warning text: "instaces ... has" ->
                # "instances ... have".
                print(
                    (
                        '***WARNING***: Site pin {} for tile type {} is not connected, '
                        'make sure all instances of this tile type have this site_pin '
                        'disconnected.').format(
                            site_pin['name'], site_pins['tile_type']),
                    file=sys.stderr)

            # Strip the "<site>/" prefix, leaving the bare pin name.
            site_pin['name'] = site_pin['name'][len(orig_site_name) + 1:]

    xjson.pprint(sys.stdout, output_site_pins)
예제 #10
0
def tojson(f):
    """Parse *f* with load() and return its pretty-printed JSON as a string."""
    buffer = io.StringIO()
    xjson.pprint(buffer, load(f))
    return buffer.getvalue()
예제 #11
0
def main():
    """Generate (or verify) tileconn.json and check node connectivity.

    In generation mode the script builds the tile grid and tile connections
    from a raw dump; in --verify_only mode it re-reads cached artifacts.
    Either way it then re-derives wire<->node connectivity and compares it
    against the raw node data, writing error_nodes.json and exiting non-zero
    on unexplained mismatches.
    """
    parser = argparse.ArgumentParser(
        description=
        "Reduces raw database dump into prototype tiles, grid, and connections."
    )
    parser.add_argument('--root_dir', required=True)
    parser.add_argument('--output_dir', required=True)
    parser.add_argument('--verify_only', action='store_true')
    parser.add_argument('--ignored_wires')

    args = parser.parse_args()

    tiles, nodes = lib.read_root_csv(args.root_dir)

    processes = min(multiprocessing.cpu_count(), 10)
    print('{} Running {} processes'.format(datetime.datetime.now(), processes))
    pool = multiprocessing.Pool(processes=processes)

    node_tree_file = os.path.join(args.output_dir, 'node_tree.json')

    tileconn_file = os.path.join(args.output_dir, 'tileconn.json')
    wire_map_file = os.path.join(args.output_dir, 'wiremap.pickle')

    print('{} Reading tilegrid'.format(datetime.datetime.now()))
    with open(os.path.join(util.get_db_root(), util.get_part(),
                           'tilegrid.json')) as f:
        grid = json.load(f)

    if not args.verify_only:
        print('{} Creating tile map'.format(datetime.datetime.now()))
        grid2, wire_map = generate_tilegrid(pool, tiles)

        # Make sure tilegrid from 005-tilegrid matches tilegrid from
        # generate_tilegrid.
        db_grid_keys = set(grid.keys())
        generated_grid_keys = set(grid2.keys())
        assert db_grid_keys == generated_grid_keys, (
            db_grid_keys ^ generated_grid_keys)

        for tile in db_grid_keys:
            for k in grid2[tile]:
                assert k in grid[tile], k
                assert grid[tile][k] == grid2[tile][k], (
                    tile, k, grid[tile][k], grid2[tile][k])

        with open(wire_map_file, 'wb') as f:
            pickle.dump(wire_map, f)

        print('{} Reading node tree'.format(datetime.datetime.now()))
        with open(node_tree_file) as f:
            node_tree = json.load(f)

        print('{} Creating tile connections'.format(datetime.datetime.now()))
        tileconn, raw_node_data = generate_tileconn(
            pool, node_tree, nodes, wire_map, grid)

        print('{} Writing tileconn'.format(datetime.datetime.now()))
        with open(tileconn_file, 'w') as f:
            xjson.pprint(f, tileconn)
    else:
        # Verify-only mode: reuse cached artifacts instead of regenerating.
        with open(wire_map_file, 'rb') as f:
            wire_map = pickle.load(f)

        print('{} Reading raw_node_data'.format(datetime.datetime.now()))
        raw_node_data = []
        with progressbar.ProgressBar(max_value=len(nodes)) as bar:
            for idx, node in enumerate(pool.imap_unordered(
                    read_json5,
                    nodes,
                    chunksize=20,
            )):
                bar.update(idx)
                raw_node_data.append(node)
                bar.update(idx + 1)

        print('{} Reading tileconn'.format(datetime.datetime.now()))
        with open(tileconn_file) as f:
            tileconn = json.load(f)

    wire_nodes_file = os.path.join(args.output_dir, 'wire_nodes.pickle')
    if os.path.exists(wire_nodes_file) and args.verify_only:
        with open(wire_nodes_file, 'rb') as f:
            wire_nodes = pickle.load(f)
    else:
        print(
            "{} Connecting wires to verify tileconn".format(
                datetime.datetime.now()))
        wire_nodes = connect_wires(grid, tileconn, wire_map)
        with open(wire_nodes_file, 'wb') as f:
            pickle.dump(wire_nodes, f)

    print('{} Verifing tileconn'.format(datetime.datetime.now()))
    error_nodes = []
    lib.verify_nodes(
        [
            (node['node'], tuple(wire['wire']
                                 for wire in node['wires']))
            for node in raw_node_data
        ], wire_nodes, error_nodes)

    if len(error_nodes) > 0:
        error_nodes_file = os.path.join(args.output_dir, 'error_nodes.json')
        with open(error_nodes_file, 'w') as f:
            xjson.pprint(f, error_nodes)

        ignored_wires = set()
        ignored_wires_file = args.ignored_wires
        # Guard against --ignored_wires being omitted: the original called
        # os.path.exists(None), which raises TypeError.
        if ignored_wires_file and os.path.exists(ignored_wires_file):
            with open(ignored_wires_file) as f:
                ignored_wires = set(l.strip() for l in f)

        if not lib.check_errors(error_nodes, ignored_wires):
            print(
                '{} errors detected, see {} for details.'.format(
                    len(error_nodes), error_nodes_file))
            sys.exit(1)
        else:
            print(
                '{} errors ignored because of {}\nSee {} for details.'.format(
                    len(error_nodes), ignored_wires_file, error_nodes_file))
예제 #12
0
def main():
    """Build a node tree mapping each node to its ordered downhill/uphill wires.

    Reads root.csv from the dump directory and the ordered wire lists, builds
    (or loads cached) node and wire<->node indices, then writes node_tree.json
    to the output directory.
    """
    parser = argparse.ArgumentParser(description="")
    parser.add_argument('--dump_all_root_dir', required=True)
    parser.add_argument('--ordered_wires_root_dir', required=True)
    parser.add_argument('--output_dir', required=True)

    args = parser.parse_args()

    downhill_wires = os.path.join(args.ordered_wires_root_dir,
                                  'downhill_wires.txt')
    uphill_wires = os.path.join(args.ordered_wires_root_dir,
                                'uphill_wires.txt')

    # Fail fast if the ordered wire dumps are missing.
    assert os.path.exists(downhill_wires)
    assert os.path.exists(uphill_wires)

    print('{} Reading root.csv'.format(datetime.datetime.now()))
    tiles, nodes = prjxray.lib.read_root_csv(args.dump_all_root_dir)

    print('{} Loading node<->wire mapping'.format(datetime.datetime.now()))
    node_lookup = prjxray.lib.NodeLookup()
    node_lookup_file = os.path.join(args.output_dir, 'nodes.pickle')
    # Reuse the cached lookup pickle when present; otherwise build and cache it.
    if os.path.exists(node_lookup_file):
        node_lookup.load_from_file(node_lookup_file)
    else:
        node_lookup.load_from_root_csv(nodes)
        node_lookup.save_to_file(node_lookup_file)

    # Same cache pattern for the wire<->node index over both wire files.
    wire_index_file = os.path.join(args.output_dir, 'wire_index.pickle')
    if os.path.exists(wire_index_file):
        print('{} Reading wire<->node index'.format(datetime.datetime.now()))
        with open(wire_index_file, 'rb') as f:
            wire_index = pickle.load(f)

        downhill_wire_node_index = wire_index['downhill']
        uphill_wire_node_index = wire_index['uphill']
    else:
        print('{} Creating wire<->node index'.format(datetime.datetime.now()))
        downhill_wire_node_index = build_node_index(downhill_wires)
        uphill_wire_node_index = build_node_index(uphill_wires)

        with open(wire_index_file, 'wb') as f:
            pickle.dump(
                {
                    'downhill': downhill_wire_node_index,
                    'uphill': uphill_wire_node_index,
                }, f)

    print('{} Creating node tree'.format(datetime.datetime.now()))
    # NOTE(review): rebinding `nodes` shadows the csv node list read above;
    # the list is no longer needed at this point, but the reuse is easy to
    # misread.
    nodes = collections.OrderedDict()
    for node in progressbar.progressbar(sorted(node_lookup.nodes)):
        # A node absent from an index has no wires in that direction; pass
        # an empty list so read_node yields nothing for it.
        nodes[node] = create_ordered_wires_for_node(
            node, tuple(wire['wire'] for wire in node_lookup.nodes[node]),
            tuple(
                read_node(
                    node, downhill_wires, downhill_wire_node_index[node]
                    if node in downhill_wire_node_index else [])),
            tuple(
                read_node(
                    node, uphill_wires, uphill_wire_node_index[node]
                    if node in uphill_wire_node_index else [])))

    print('{} Writing node tree'.format(datetime.datetime.now()))
    with open(os.path.join(args.output_dir, 'node_tree.json'), 'w') as f:
        xjson.pprint(f, nodes)