Example No. 1
def patch_fasm_with_mem(initfile, fasmfile, outfile, width, depth):
    import os

    fasm_tuples = fasm.parse_fasm_filename(fasmfile)
    mem_tuples = fasm.parse_fasm_filename('memfasm.fasm')
    merged_tuples = merge_tuples(fasm_tuples, mem_tuples)
    with open(outfile, 'w') as out:
        out.write(fasm.fasm_tuple_to_string(merged_tuples))
    os.remove('memfasm.fasm')
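
A minimal round-trip sketch of the two fasm calls used above, assuming the SymbiFlow fasm package; 'design.fasm' and 'design_copy.fasm' are placeholder file names:

import fasm

# parse_fasm_filename yields FasmLine namedtuples; fasm_tuple_to_string
# serializes them back to FASM text.
lines = list(fasm.parse_fasm_filename('design.fasm'))  # placeholder path
with open('design_copy.fasm', 'w') as out:
    out.write(fasm.fasm_tuple_to_string(lines))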
Example No. 2
    def parse_fasm_filename(self, filename):
        missing_features = []
        for line in fasm.parse_fasm_filename(filename):
            if not line.set_feature:
                continue

            line_strs = tuple(fasm.fasm_line_to_string(line))
            assert len(line_strs) == 1
            line_str = line_strs[0]

            parts = line.set_feature.feature.split('.')
            tile = parts[0]
            feature = '.'.join(parts[1:])

            # canonical_features flattens multibit feature enables to only
            # single bit features, which is what enable_feature expects.
            #
            # canonical_features also filters out features that are not enabled,
            # which are no-ops.
            for flat_set_feature in fasm.canonical_features(line.set_feature):
                address = 0
                if flat_set_feature.start is not None:
                    address = flat_set_feature.start

                try:
                    self.enable_feature(tile, feature, address, line_str)
                except FasmLookupError as e:
                    missing_features.append(str(e))

        if missing_features:
            raise FasmLookupError('\n'.join(missing_features))
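
A small sketch of the flattening that canonical_features performs, assuming the SymbiFlow fasm package; the tile and feature names below are made up:

import fasm

# A 4-bit assignment with bits 0 and 2 set.  canonical_features should
# flatten it into one single-bit feature per set bit (value 1), which is
# why the loop above only needs `start` as the address.
line = list(fasm.parse_fasm_string("TILE_X0Y0.SOME_FEATURE[3:0] = 4'b0101\n"))[0]
for flat in fasm.canonical_features(line.set_feature):
    print(flat.feature, flat.start, flat.value)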
Example No. 3
    def parse_fasm_filename(self, filename, extra_features=[]):
        missing_features = []
        for line in fasm.parse_fasm_filename(filename):
            self.add_fasm_line(line, missing_features)

        for line in extra_features:
            self.add_fasm_line(line, missing_features)

        if missing_features:
            raise FasmLookupError('\n'.join(missing_features))
Example No. 4
    def test_comment_file(self):
        result = list(fasm.parse_fasm_filename(example('comment.fasm')))
        self.assertEqual(result, [
            fasm.FasmLine(
                set_feature=None,
                annotations=None,
                comment=' Only a comment.',
            )
        ])

        check_round_trip(self, result)
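
The check_round_trip helper is not included in these excerpts; a plausible sketch of its shape, assuming it simply serializes and re-parses the tuples:

import fasm

def check_round_trip(test, result):
    # Serialize the parsed tuples back to FASM text, parse that text again,
    # and require the resulting tuples to be identical.
    as_string = fasm.fasm_tuple_to_string(result)
    test.assertEqual(list(fasm.parse_fasm_string(as_string)), result)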
Example No. 5
def main():
    parser = argparse.ArgumentParser()

    util.db_root_arg(parser)
    util.part_arg(parser)

    parser.add_argument('input')

    args = parser.parse_args()

    db = Database(args.db_root, args.part)

    grid = db.grid()

    base_address_to_tiles = {}

    for tile in grid.tiles():
        gridinfo = grid.gridinfo_at_tilename(tile)
        if BlockType.CLB_IO_CLK in gridinfo.bits:
            base_address = gridinfo.bits[BlockType.CLB_IO_CLK].base_address
            if base_address not in base_address_to_tiles:
                base_address_to_tiles[base_address] = []
            base_address_to_tiles[base_address].append(
                (tile, gridinfo.bits[BlockType.CLB_IO_CLK]))

    for line in fasm.parse_fasm_filename(args.input):
        is_unknown = False

        annotation_data = {}
        for annotation in line.annotations:
            annotation_data[annotation.name] = annotation.value

        if 'unknown_bit' not in annotation_data:
            continue

        base_address = int(annotation_data['unknown_segment'], 0)
        frame_offset = int(annotation_data['unknown_segbit'].split('_')[0])
        bit = int(annotation_data['unknown_segbit'].split('_')[1])
        offset = bit // 16

        if base_address not in base_address_to_tiles:
            print('# No tile for base address')
        else:
            for tile, bits in base_address_to_tiles[base_address]:
                if offset >= bits.offset and offset - bits.offset < bits.words:
                    print('# {} : {:02d}_{:02d}'.format(
                        tile, frame_offset,
                        bit - bitstream.WORD_SIZE_BITS * bits.offset))

        for l in fasm.fasm_line_to_string(line):
            print(l)
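
For reference, annotations attach to a FasmLine as a tuple of fasm.Annotation(name, value) entries; the values below are illustrative assumptions shaped after how the loop above reads them:

import fasm

annotated = fasm.FasmLine(
    set_feature=None,
    annotations=(
        fasm.Annotation(name='unknown_segment', value='0x00401580'),  # illustrative
        fasm.Annotation(name='unknown_segbit', value='26_33'),        # illustrative
        fasm.Annotation(name='unknown_bit', value='00401580_26_33'),  # illustrative
    ),
    comment=None,
)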
Example No. 6
    def inner():
        for line in fasm.parse_fasm_filename(args.fn_in):
            if not line.set_feature:
                continue

            parts = line.set_feature.feature.split('.')
            tile = parts[0]
            gridinfo = grid.gridinfo_at_tilename(tile)

            tile_type = database.get_tile_type(gridinfo.tile_type)

            for pip in tile_type.pips:
                if pip.net_from == parts[2] and pip.net_to == parts[1]:
                    yield '{}/{}'.format(tile, pip.name)
Example No. 7
    def test_one_line_feature(self):
        result = list(fasm.parse_fasm_filename(example('feature_only.fasm')))
        self.assertEqual(result, [
            fasm.FasmLine(
                set_feature=fasm.SetFasmFeature(
                    feature='EXAMPLE_FEATURE.X0.Y0.BLAH',
                    start=None,
                    end=None,
                    value=1,
                    value_format=None,
                ),
                annotations=None,
                comment=None,
            )
        ])

        self.assertEqual(fasm.fasm_tuple_to_string(result),
                         'EXAMPLE_FEATURE.X0.Y0.BLAH\n')
        check_round_trip(self, result)
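
A related sketch: when a feature carries an explicit bit range and value, the parsed SetFasmFeature records the range and the value format so serialization can round-trip; the field values are printed rather than asserted:

import fasm

line = list(fasm.parse_fasm_string("EXAMPLE_FEATURE.X0.Y0.BLAH[3:0] = 4'h5\n"))[0]
# Expect non-None start/end, value 5, and a Verilog hex value_format.
print(line.set_feature)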
Example No. 8
def process_fasm(db_root, part, fasm_file, canonical):
    database = db.Database(db_root, part)
    grid = database.grid()

    for fasm_line in fasm.parse_fasm_filename(fasm_file):
        if not fasm_line.set_feature:
            if not canonical:
                yield fasm_line
            # Nothing to flatten; skip to the next line.
            continue

        for feature in fasm.canonical_features(fasm_line.set_feature):
            parts = feature.feature.split('.')
            tile = parts[0]

            gridinfo = grid.gridinfo_at_tilename(tile)
            tile_segbits = database.get_tile_segbits(gridinfo.tile_type)

            address = 0
            if feature.start is not None:
                address = feature.start

            feature_name = '{}.{}'.format(gridinfo.tile_type,
                                          '.'.join(parts[1:]))

            # Convert feature to bits.  If no bits are set, the feature is a
            # pseudo pip and should not be output in canonical FASM.
            bits = tuple(
                tile_segbits.feature_to_bits(feature_name, address=address))
            if len(bits) == 0 and canonical:
                continue

            # In canonical output, only output the canonical features.
            if canonical:
                yield fasm.FasmLine(
                    set_feature=feature,
                    annotations=None,
                    comment=None,
                )

        # If not in canonical mode, output original FASM line
        if not canonical:
            yield fasm_line
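
A hypothetical way to drive the generator above; the database root, part name, and FASM path are placeholders:

import fasm

for out_line in process_fasm(
        db_root='/path/to/prjxray-db/artix7',  # placeholder
        part='xc7a35tcpg236-1',                # placeholder
        fasm_file='design.fasm',               # placeholder
        canonical=True):
    print(fasm.fasm_tuple_to_string([out_line]), end='')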
Example No. 9
def run():
    """
    Main.
    """

    # Parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("--design",
                        type=str,
                        required=True,
                        help="Design JSON file")
    parser.add_argument("--fasm",
                        type=str,
                        required=True,
                        help="Decoded fasm file")
    parser.add_argument("-o",
                        type=str,
                        default="results.csv",
                        help="Output CSV file")
    parser.add_argument("-j", type=str, default=None, help="Output JSON file")

    args = parser.parse_args()

    # Load IOB features
    features = load_iob_segbits()

    # Load the design data
    with open(args.design, "r") as fp:
        design = json.load(fp)

    # Load disassembled fasm
    fasm_tuples = fasm.parse_fasm_filename(args.fasm)
    set_features = fasm.fasm_tuple_to_string(fasm_tuples).split("\n")

    # Correlate features for given IOB types
    results = []
    for region in design:
        result = dict(region["iosettings"])

        for l in ["input", "output", "inout", "unused_sites"]:

            # TODO: Check if this is true, e.g. for all unused sites, not just
            # one random site.
            tile, site = random.choice(region[l]).split(".")
            matches = correlate_features(features, tile, site, set_features)

            result[l] = matches

        results.append(result)

    # Save results
    if args.j:
        with open(args.j, "w") as fp:
            json.dump(results, fp, indent=2, sort_keys=True)

    # Save results to CSV
    with open(args.o, "w") as fp:
        csv_data = defaultdict(lambda: {})

        # Collect data
        for result in results:
            iostandard = result["iostandard"]
            drive = result["drive"]
            slew = result["slew"]

            if drive is None:
                drive = "_FIXED"

            iosettings = "{}.I{}.{}".format(iostandard, drive, slew)

            is_diff = "DIFF" in iostandard

            for feature in sorted(features):
                I = [f[1] for f in result["input"] if f[0] == feature and f[1]]
                O = [
                    f[1] for f in result["output"] if f[0] == feature and f[1]
                ]
                T = [f[1] for f in result["inout"] if f[0] == feature and f[1]]
                U = [
                    f[1] for f in result["unused_sites"]
                    if f[0] == feature and f[1]
                ]

                s = "".join([
                    "I" if len(I) > 0 else "",
                    "O" if len(O) > 0 else "",
                    "T" if len(T) > 0 else "",
                    "U" if len(U) > 0 else "",
                ])

                csv_data[iosettings][feature] = s

        # Write header
        line = ["iosettings"] + sorted(features)
        fp.write(",".join(line) + "\n")

        # Write data
        for iosettings in sorted(csv_data.keys()):
            data = csv_data[iosettings]
            line = [iosettings
                    ] + [data[feature] for feature in sorted(features)]

            fp.write(",".join(line) + "\n")
Example No. 10
def main():
    parser = argparse.ArgumentParser(
        description=
        "Creates design.json from output of ROI generation tcl script.")
    parser.add_argument('--design_txt', required=True)
    parser.add_argument('--design_info_txt', required=True)
    parser.add_argument('--pad_wires', required=True)
    parser.add_argument('--design_fasm', required=True)

    args = parser.parse_args()

    design_json = {}
    design_json['ports'] = []
    design_json['info'] = {}
    with open(args.design_txt) as f:
        for d in csv.DictReader(f, delimiter=' '):
            if d['name'].startswith('dout['):
                d['type'] = 'out'
            elif d['name'].startswith('din['):
                d['type'] = 'in'
            elif d['name'].startswith('clk'):
                d['type'] = 'clk'
            else:
                assert False, d

            design_json['ports'].append(d)

    with open(args.design_info_txt) as f:
        for l in f:
            name, value = l.strip().split(' = ')

            design_json['info'][name] = int(value)

    db = Database(get_db_root(), get_part())
    grid = db.grid()

    roi = Roi(
        db=db,
        x1=design_json['info']['GRID_X_MIN'],
        y1=design_json['info']['GRID_Y_MIN'],
        x2=design_json['info']['GRID_X_MAX'],
        y2=design_json['info']['GRID_Y_MAX'],
    )

    with open(args.pad_wires) as f:
        for l in f:
            parts = l.strip().split(' ')
            name = parts[0]
            pin = parts[1]
            wires = parts[2:]

            wires_outside_roi = []

            for wire in wires:
                tile = wire.split('/')[0]

                loc = grid.loc_of_tilename(tile)

                if not roi.tile_in_roi(loc):
                    wires_outside_roi.append(wire)

            set_port_wires(design_json['ports'], name, pin, wires_outside_roi)

    frames_in_use = set()
    for tile in roi.gen_tiles():
        gridinfo = grid.gridinfo_at_tilename(tile)

        for bit in gridinfo.bits.values():
            frames_in_use.add(bit.base_address)

    required_features = []
    for fasm_line in fasm.parse_fasm_filename(args.design_fasm):
        if fasm_line.annotations:
            for annotation in fasm_line.annotations:
                if annotation.name != 'unknown_segment':
                    continue

                unknown_base_address = int(annotation.value, 0)

                assert False, "Found unknown bit in base address 0x{:08x}".format(
                    unknown_base_address)

        if not fasm_line.set_feature:
            continue

        tile = fasm_line.set_feature.feature.split('.')[0]

        loc = grid.loc_of_tilename(tile)
        gridinfo = grid.gridinfo_at_tilename(tile)

        not_in_roi = not roi.tile_in_roi(loc)

        if not_in_roi:
            required_features.append(fasm_line)

    design_json['required_features'] = sorted(
        fasm.fasm_tuple_to_string(required_features,
                                  canonical=True).split('\n'),
        key=extract_numbers)

    design_json['ports'].sort(key=lambda x: extract_numbers(x['name']))

    xjson.pprint(sys.stdout, design_json)
Example No. 11
    def test_examples_file(self):
        result = list(fasm.parse_fasm_filename(example('many.fasm')))
        check_round_trip(self, result)
Example No. 12
    # Load data from the database
    with open(args.vpr_db, "rb") as fp:
        db = pickle.load(fp)

    f2b = Fasm2Bels(db, args.package_name)

    if args.input_type == 'bitstream':
        qlfasmdb = load_quicklogic_database()
        assembler = QL732BAssembler(qlfasmdb)
        assembler.read_bitstream(args.input_file)
        fasmlines = assembler.disassemble()
        fasmlines = list(fasm.parse_fasm_string('\n'.join(fasmlines)))
    else:
        fasmlines = list(fasm.parse_fasm_filename(args.input_file))

    verilog, pcf, qcf = f2b.convert_to_verilog(fasmlines)

    with open(args.output_verilog, 'w') as outv:
        outv.write(verilog)
    if args.output_pcf:
        with open(args.output_pcf, 'w') as outpcf:
            outpcf.write(pcf)
    if args.output_qcf:
        with open(args.output_qcf, 'w') as outqcf:
            outqcf.write(qcf)
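
Both entry points should yield the same FasmLine tuples for the same content, which is why the two branches above can share the downstream conversion; a quick sketch with a placeholder file name:

import fasm

with open('design.fasm') as f:  # placeholder path
    from_string = list(fasm.parse_fasm_string(f.read()))
from_file = list(fasm.parse_fasm_filename('design.fasm'))
assert from_string == from_file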
Example No. 13
def get_fasm_tups(fname):
    return list(fasm.parse_fasm_filename(fname))
Example No. 14
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument('--allow_orphan_sinks',
                        action='store_true',
                        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument(
        '--iostandard_defs',
        help=
        "Specify a JSON file defining IOSTANDARD and DRIVE parameters for each IOB site"
    )
    parser.add_argument('--fasm_file',
                        help="FASM file to convert BELs and routes.",
                        required=True)
    parser.add_argument('--bit_file',
                        help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument('--top', default="top", help="Root level module name.")
    parser.add_argument('--pcf', help="Mapping of top-level pins to pads.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument('verilog_file', help="Filename of output verilog file")
    parser.add_argument('tcl_file', help="Filename of output tcl script.")

    args = parser.parse_args()

    conn = sqlite3.connect('file:{}?mode=ro'.format(args.connection_database),
                           uri=True)

    db = prjxray.db.Database(args.db_root)
    grid = db.grid()

    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    tiles = {}

    maybe_get_wire = create_maybe_get_wire(conn)

    top = Module(db, grid, conn, name=args.top)
    if args.pcf:
        top.set_site_to_signal(load_io_sites(args.db_root, args.part,
                                             args.pcf))

    if args.route_file:
        assert args.rr_graph
        net_map = load_net_list(conn, args.rr_graph, args.route_file)
        top.set_net_map(net_map)

    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)

        top.add_to_cname_map(parsed_eblif)

    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        parts = fasm_line.set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(fasm_line.set_feature)

        if len(parts) == 3:
            maybe_add_pip(top, maybe_get_wire, fasm_line.set_feature)

    if args.iostandard_defs:
        with open(args.iostandard_defs) as fp:
            defs = json.load(fp)
            top.set_iostandard_defs(defs)

    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    with open(args.verilog_file, 'w') as f:
        for l in top.output_verilog():
            print(l, file=f)

    with open(args.tcl_file, 'w') as f:
        for l in top.output_bel_locations():
            print(l, file=f)

        for l in top.output_nets():
            print(l, file=f)
Example No. 15
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument('--allow_orphan_sinks',
                        action='store_true',
                        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument('--fasm_file',
                        help="FASM file to convert BELs and routes.",
                        required=True)
    parser.add_argument('--bit_file',
                        help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument(
        '--allow-non-dedicated-clk-routes',
        action='store_true',
        help="Effectively sets CLOCK_DEDICATED_ROUTE to FALSE on all nets.")
    parser.add_argument('--iostandard',
                        default=None,
                        help="Default IOSTANDARD to use for IO buffers.")
    parser.add_argument('--drive',
                        type=int,
                        default=None,
                        help="Default DRIVE to use for IO buffers.")
    parser.add_argument('--top', default="top", help="Root level module name.")
    parser.add_argument('--pcf', help="Mapping of top-level pins to pads.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument(
        '--vpr_capnp_schema_dir',
        help='Directory containing VPR schema files',
    )
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument('verilog_file', help="Filename of output verilog file")
    parser.add_argument('tcl_file', help="Filename of output tcl script.")

    args = parser.parse_args()

    conn = sqlite3.connect('file:{}?mode=ro'.format(args.connection_database),
                           uri=True)

    db = prjxray.db.Database(args.db_root, args.part)
    grid = db.grid()

    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    tiles = {}

    maybe_get_wire = create_maybe_get_wire(conn)

    top = Module(db, grid, conn, name=args.top)
    if args.pcf:
        top.set_site_to_signal(load_io_sites(args.db_root, args.part,
                                             args.pcf))

    if args.route_file:
        assert args.rr_graph
        assert args.vpr_capnp_schema_dir
        net_map = load_net_list(conn, args.vpr_capnp_schema_dir, args.rr_graph,
                                args.route_file)
        top.set_net_map(net_map)

    if args.part:
        with open(os.path.join(args.db_root, args.part, 'part.json')) as f:
            part_data = json.load(f)
            top.set_io_banks(part_data['iobanks'])

    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)

        top.add_to_cname_map(parsed_eblif)
        top.make_iosettings_map(parsed_eblif)

    top.set_default_iostandard(args.iostandard, args.drive)

    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        set_feature = process_set_feature(fasm_line.set_feature)

        parts = set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(set_feature)

        if len(parts) == 3:
            maybe_add_pip(top, maybe_get_wire, set_feature)

    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    # Check if the PS7 is present in the tilegrid. If so then insert it.
    pss_tile, ps7_site = get_ps7_site(db)
    if pss_tile is not None and ps7_site is not None:

        # First load the PS7 ports
        fname = os.path.join(args.db_root, "ps7_ports.json")
        with open(fname, "r") as fp:
            ps7_ports = json.load(fp)

        # Insert the PS7
        insert_ps7(top, pss_tile, ps7_site, ps7_ports)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    if args.allow_non_dedicated_clk_routes:
        top.add_extra_tcl_line(
            "set_property CLOCK_DEDICATED_ROUTE FALSE [get_nets]")

    with open(args.verilog_file, 'w') as f:
        for line in top.output_verilog():
            print(line, file=f)

    with open(args.tcl_file, 'w') as f:
        for line in top.output_bel_locations():
            print(line, file=f)

        for line in top.output_nets():
            print(line, file=f)

        for line in top.output_disabled_drcs():
            print(line, file=f)

        for line in top.output_extra_tcl():
            print(line, file=f)
Example No. 16
def main():
    fasm_file = 'top.fasm'
    fasm_model = list(parse_fasm_filename(fasm_file))

    unknown_bits = {
        'HCLK_IOI': {},
        'IOI3': {},
    }

    total_unknown = 0
    for l in fasm_model:
        if l.annotations is None:
            continue

        annotations = {}
        for annotation in l.annotations:
            annotations[annotation.name] = annotation.value

        if 'unknown_bit' not in annotations:
            continue

        total_unknown += 1

        frame, word, bit = annotations['unknown_bit'].split('_')

        frame = int(frame, 16)
        word = int(word)
        bit = int(bit)

        frame_offset = frame % 0x80
        base_frame = frame - frame_offset

        # All remaining LiteX bits appear to be in this one IO bank, so limit
        # the tool to this one IO bank.
        assert base_frame == 0x00401580, hex(frame)

        SIZE = 4
        INITIAL_OFFSET = -2

        if word == 50:
            group = 'HCLK_IOI'
            offset = 45
        elif word < 50:
            group = 'IOI3'
            offset = ((word - INITIAL_OFFSET) // SIZE) * SIZE + INITIAL_OFFSET
        else:
            group = 'IOI3'
            word -= 1
            offset = ((word - INITIAL_OFFSET) // SIZE) * SIZE + INITIAL_OFFSET
            offset += 1
            word += 1

        bit = '{}_{:02d}'.format(
            frame_offset,
            (word - offset) * 32 + bit,
        )

        if bit not in unknown_bits[group]:
            unknown_bits[group][bit] = 0
        unknown_bits[group][bit] += 1

    print('Total unknown bits: {}'.format(total_unknown))
    for group in unknown_bits:
        print('Group {} (count = {}):'.format(group, len(unknown_bits[group])))
        for bit in sorted(unknown_bits[group]):
            print('  {} (count = {})'.format(bit, unknown_bits[group][bit]))
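
A worked instance of the word-grouping arithmetic above (SIZE = 4, INITIAL_OFFSET = -2):

# For word = 7 (word < 50, so the IOI3 branch):
#   offset = ((7 - (-2)) // 4) * 4 + (-2) = (9 // 4) * 4 - 2 = 6
#   so the reported bit label is '{frame_offset}_{(7 - 6) * 32 + bit}'.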
Example No. 17
    def test_blank_file(self):
        result = list(fasm.parse_fasm_filename(example('blank.fasm')))
        self.assertEqual(result, [])

        check_round_trip(self, result)
Example No. 18
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--connection_database',
        required=True,
        help="Path to SQLite3 database for given FASM file part.")
    parser.add_argument(
        '--db_root',
        required=True,
        help="Path to prjxray database for given FASM file part.")
    parser.add_argument('--allow_orphan_sinks',
                        action='store_true',
                        help="Allow sinks to have no connection.")
    parser.add_argument(
        '--prune-unconnected-ports',
        action='store_true',
        help="Prune top-level I/O ports that are not connected to any logic.")
    parser.add_argument('--fasm_file',
                        help="FASM file to convert BELs and routes.",
                        required=True)
    parser.add_argument('--bit_file',
                        help="Bitstream file to convert to FASM.")
    parser.add_argument(
        '--bitread',
        help="Path to bitread executable, required if --bit_file is provided.")
    parser.add_argument(
        '--part',
        help="Name of part being targeted, required if --bit_file is provided."
    )
    parser.add_argument(
        '--allow-non-dedicated-clk-routes',
        action='store_true',
        help="Effectively sets CLOCK_DEDICATED_ROUTE to FALSE on all nets.")
    parser.add_argument('--iostandard',
                        default=None,
                        help="Default IOSTANDARD to use for IO buffers.")
    parser.add_argument('--drive',
                        type=int,
                        default=None,
                        help="Default DRIVE to use for IO buffers.")
    parser.add_argument('--top', default="top", help="Root level module name.")
    parser.add_argument('--pcf',
                        help="Mapping of top-level pins to pads, PCF format.")
    parser.add_argument('--input_xdc',
                        help="Mapping of top-level pints to pads, XDC format.")
    parser.add_argument('--route_file', help="VPR route output file.")
    parser.add_argument('--rr_graph', help="Real or virt xc7 graph")
    parser.add_argument('--vpr_capnp_schema_dir',
                        help="VPR capnp schemas directory.")
    parser.add_argument('--eblif', help="EBLIF file used to generate design")
    parser.add_argument('--vpr_grid_map',
                        help="VPR grid to Canonical grid map")
    parser.add_argument('--verilog_file',
                        help="Filename of output verilog file")
    parser.add_argument('--xdc_file',
                        help="Filename of output xdc constraints file.")
    parser.add_argument(
        '--logical_netlist',
        help="Filename of output interchange logical netlist capnp.")
    parser.add_argument(
        '--physical_netlist',
        help="Filename of output interchange physical netlist capnp.")
    parser.add_argument('--interchange_xdc',
                        help="Filename of output interchange XDC.")
    parser.add_argument(
        '--interchange_capnp_schema_dir',
        help="Folder containing interchange capnp definitions.")

    args = parser.parse_args()

    if not os.path.exists(
            os.path.join(os.path.realpath(__file__),
                         args.connection_database)):
        create_channels(args.db_root, args.part, args.connection_database)

    conn = sqlite3.connect('file:{}?mode=ro'.format(args.connection_database),
                           uri=True)

    db = prjxray.db.Database(args.db_root, args.part)
    grid = db.grid()

    if args.bit_file:
        bit2fasm(args.db_root, db, grid, args.bit_file, args.fasm_file,
                 args.bitread, args.part)

    tiles = {}

    top = Module(db, grid, conn, name=args.top)
    if args.eblif:
        with open(args.eblif) as f:
            parsed_eblif = eblif.parse_blif(f)
    else:
        parsed_eblif = None

    if args.eblif or args.pcf or args.input_xdc:
        top.set_site_to_signal(
            load_io_sites(args.db_root, args.part, args.pcf, args.input_xdc,
                          parsed_eblif, top))

    if args.route_file:
        assert args.rr_graph, "RR graph file required."
        assert args.vpr_grid_map, "VPR grid map required."
        assert args.vpr_capnp_schema_dir, "VPR capnp schemas dir path required."

        grid_map = dict()
        with open(args.vpr_grid_map, 'r') as csv_grid_map:
            csv_reader = csv.DictReader(csv_grid_map)
            for row in csv_reader:
                vpr_x = int(row['vpr_x'])
                vpr_y = int(row['vpr_y'])
                can_x = int(row['canon_x'])
                can_y = int(row['canon_y'])

                if (vpr_x, vpr_y) in grid_map:
                    grid_map[(vpr_x, vpr_y)].append((can_x, can_y))
                else:
                    grid_map[(vpr_x, vpr_y)] = [(can_x, can_y)]

        net_map = load_net_list(conn, args.vpr_capnp_schema_dir, args.rr_graph,
                                args.route_file, grid_map)
        top.set_net_map(net_map)

    if args.part:
        with open(os.path.join(args.db_root, args.part, 'part.json')) as f:
            part_data = json.load(f)
            top.set_io_banks(part_data['iobanks'])

    if args.eblif:
        top.add_to_cname_map(parsed_eblif)
        top.make_iosettings_map(parsed_eblif)

    top.set_default_iostandard(args.iostandard, args.drive)

    for fasm_line in fasm.parse_fasm_filename(args.fasm_file):
        if not fasm_line.set_feature:
            continue

        set_feature = process_set_feature(fasm_line.set_feature)

        parts = set_feature.feature.split('.')
        tile = parts[0]

        if tile not in tiles:
            tiles[tile] = []

        tiles[tile].append(set_feature)

        if len(parts) == 3 and set_feature.value == 1:
            top.maybe_add_pip(set_feature.feature)

    for tile, tile_features in tiles.items():
        process_tile(top, tile, tile_features)

    # Check if the PS7 is present in the tilegrid. If so then insert it.
    pss_tile, ps7_site = get_ps7_site(db)
    if pss_tile is not None and ps7_site is not None:

        # First load the PS7 ports
        fname = os.path.join(args.db_root, "ps7_ports.json")
        with open(fname, "r") as fp:
            ps7_ports = json.load(fp)

        # Insert the PS7
        insert_ps7(top, pss_tile, ps7_site, ps7_ports)

    top.make_routes(allow_orphan_sinks=args.allow_orphan_sinks)

    if args.prune_unconnected_ports:
        top.prune_unconnected_ports()

    # IBUF IOSTANDARDS are checked here, after routing and pruning,
    # as we don't need to issue IOSTANDARD warnings/errors for
    # removed IBUFs (eg the PUDC pin)
    ibufs_append_iostandard_params(top)

    if args.allow_non_dedicated_clk_routes:
        top.add_extra_tcl_line(
            "set_property CLOCK_DEDICATED_ROUTE FALSE [get_nets]")

    if args.verilog_file:
        assert args.xdc_file
        with open(args.verilog_file, 'w') as f:
            for line in top.output_verilog():
                print(line, file=f)

        with open(args.xdc_file, 'w') as f:
            for line in top.output_bel_locations():
                print(line, file=f)

            for line in top.output_nets():
                print(line, file=f)

            for line in top.output_disabled_drcs():
                print(line, file=f)

            for line in top.output_extra_tcl():
                print(line, file=f)

    if args.logical_netlist:
        assert args.physical_netlist
        assert args.interchange_capnp_schema_dir
        assert args.part

        with open(args.logical_netlist, 'wb') as f_log, open(
                args.physical_netlist,
                'wb') as f_phys, open(args.interchange_xdc, 'w') as f_xdc:
            output_interchange(top, args.interchange_capnp_schema_dir,
                               args.part, f_log, f_phys, f_xdc)