コード例 #1
0
def main():
    """
    Merge several SDF timing files for one site and write the result.

    Command-line interface:
      --sdfs  one or more input SDF files to merge
      --site  name of the site whose timings are merged
      --json  optional path for a debug JSON dump of the merged data
      --out   path of the merged SDF file to write
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--sdfs',
                        nargs='+',
                        type=str,
                        help="List of sdf files to merge")
    parser.add_argument('--site', type=str, help="Site we want to merge")
    parser.add_argument('--json', type=str, help="Debug JSON")
    parser.add_argument('--out', type=str, help="Merged sdf name")

    args = parser.parse_args()

    timings_list = []
    for sdf in args.sdfs:
        with open(sdf, 'r') as fp:
            timings_list.append(sdfparse.parse(fp.read()))

    merged_sdf = merge(timings_list, args.site)

    # Use a context manager so the output handle is flushed and closed
    # deterministically (the original left the file object dangling).
    with open(args.out, 'w') as fp:
        fp.write(sdfparse.emit(merged_sdf, timescale='1ns'))

    if args.json is not None:
        with open(args.json, 'w') as fp:
            json.dump(merged_sdf, fp, indent=4, sort_keys=True)
コード例 #2
0
def load_sdf_timings(sdf_dir):
    """
    Loads and merges SDF timing data from all *.sdf files in the given
    directory.

    Each file's delays are rescaled to seconds using its header timescale
    (1.0 is assumed when the header has none) and the per-cell timing
    dicts are merged into a single dictionary, which is returned.
    """

    def apply_scale(cells, scale=1.0):
        """
        Scales all timings represented by the given SDF structure (in place).
        """
        for cell_data in cells.values():
            for instance_data in cell_data.values():
                for timing_data in instance_data.values():
                    for path_data in timing_data["delay_paths"].values():
                        for key, value in path_data.items():
                            if value is not None:
                                path_data[key] = value * scale

    cell_timings = {}

    for entry in os.listdir(sdf_dir):
        # Only *.sdf files are of interest (case-insensitive).
        if not entry.lower().endswith(".sdf"):
            continue

        print("Loading SDF: '{}'".format(entry))

        # Read and parse the file; keep the handle open only for the read.
        with open(os.path.join(sdf_dir, entry), "r") as fp:
            sdf = sdfparse.parse(fp.read())

        # Determine the timing scale from the SDF header.
        header = sdf["header"]
        if "timescale" in header:
            timescale = get_scale_seconds(header["timescale"])
        else:
            print("WARNING: the SDF has no timescale, assuming 1.0")
            timescale = 1.0

        # Rescale the delays, then fold this file's cells into the result.
        cells = sdf["cells"]
        apply_scale(cells, timescale)
        cell_timings.update(cells)

    return cell_timings
コード例 #3
0
def main():
    """
    Patch timing placeholders in a VPR arch.xml with values taken from SDF
    files.

    All *.sdf files in --sdf_dir are parsed and merged, then every
    delay_matrix / delay_constant / T_clock_to_Q / T_setup / T_hold element
    in the arch XML has its format placeholders filled in via
    get_bel_timings() (SLOW/max for worst-case fields, FAST/min for
    best-case fields).  The patched tree is written to --out_arch.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument('--input_arch',
                        required=True,
                        help="Input arch.xml file")
    parser.add_argument('--sdf_dir', required=True, help="SDF files directory")
    parser.add_argument('--out_arch',
                        required=True,
                        help="Output arch.xml file")
    parser.add_argument('--bels_map',
                        required=True,
                        help="VPR <-> timing info bels mapping json file")

    args = parser.parse_args()

    arch_xml = ET.ElementTree()
    root_element = arch_xml.parse(args.input_arch)

    # read bels json
    import json
    with open(args.bels_map, 'r') as fp:
        bels = json.load(fp)

    # Parse and merge every SDF file in the directory.
    timings = dict()
    for f in os.listdir(args.sdf_dir):
        if not f.endswith('.sdf'):
            continue
        # os.path.join instead of manual '/'-concatenation.
        with open(os.path.join(args.sdf_dir, f), 'r') as fp:
            tmp = sdfparse.parse(fp.read())
            mergedicts(tmp, timings)

    # Debug dump of the merged timings (fixed path, debugging aid only).
    with open("/tmp/dump.json", 'w') as fp:
        json.dump(timings, fp, indent=4)

    for dm in root_element.iter('delay_matrix'):
        if dm.attrib['type'] == 'max':
            bel_timings = get_bel_timings(dm, timings, bels, 'SLOW', 'max')
        elif dm.attrib['type'] == 'min':
            bel_timings = get_bel_timings(dm, timings, bels, 'FAST', 'min')
        else:
            # Bug fix: the original asserted on the (truthy) type string,
            # which always passed and then crashed below with an unbound
            # `bel_timings` NameError.  Fail explicitly instead.
            assert False, "Unknown delay_matrix type: {}".format(
                dm.attrib['type'])

        if bel_timings is None:
            continue

        dm.text = dm.text.format(**bel_timings)

    for dc in root_element.iter('delay_constant'):
        format_s = dc.attrib['max']
        max_tim = get_bel_timings(dc, timings, bels, 'SLOW', 'max')
        if max_tim is not None:
            dc.attrib['max'] = format_s.format(**max_tim)

        min_tim = get_bel_timings(dc, timings, bels, 'FAST', 'min')
        if min_tim is not None:
            dc.attrib['min'] = format_s.format(**min_tim)

    for tq in root_element.iter('T_clock_to_Q'):
        format_s = tq.attrib['max']
        max_tim = get_bel_timings(tq, timings, bels, 'SLOW', 'max')
        if max_tim is not None:
            tq.attrib['max'] = format_s.format(**max_tim)

        min_tim = get_bel_timings(tq, timings, bels, 'FAST', 'min')
        if min_tim is not None:
            tq.attrib['min'] = format_s.format(**min_tim)

    for ts in root_element.iter('T_setup'):
        bel_timings = get_bel_timings(ts, timings, bels, 'SLOW', 'max')
        if bel_timings is None:
            continue
        ts.attrib['value'] = ts.attrib['value'].format(**bel_timings)

    for th in root_element.iter('T_hold'):
        bel_timings = get_bel_timings(th, timings, bels, 'FAST', 'min')
        if bel_timings is None:
            continue
        th.attrib['value'] = th.attrib['value'].format(**bel_timings)

    # NOTE(review): ET.tostring() on an ElementTree object only works with
    # lxml; the stdlib xml.etree expects an Element here — confirm which
    # ET the file imports.
    with open(args.out_arch, 'wb') as fp:
        fp.write(ET.tostring(arch_xml))
コード例 #4
0
def test_parse_generated():
    """Every generated SDF string must parse without raising."""
    for sdf_text in generated_sdfs:
        sdfparse.parse(sdf_text)
コード例 #5
0
def test_parse():
    """
    Parse every .sdf data file and collect the results in the module-level
    `parsed_sdfs` list.
    """
    for fname in sorted(os.listdir(datafiles_path)):
        if not fname.endswith('.sdf'):
            continue
        # os.path.join instead of `datafiles_path + fname`, which silently
        # required datafiles_path to end with a path separator.
        with open(os.path.join(datafiles_path, fname)) as sdffile:
            parsed_sdfs.append(sdfparse.parse(sdffile.read()))
コード例 #6
0
ファイル: carry4delays.py プロジェクト: tmichalak/prjxray
def compute_delays(model, fin_name, fout_name):
    """
    Derive routing-mux interconnect delays from a parsed SDF and emit them
    as a new SDF file.

    For each pin in model['pins'], the SLOW/max iopath delays of the
    matching source cells are collected per output, normalized against the
    smallest delay of each output, and the worst-case difference (rounded
    to 3 decimals) becomes the delay of a synthetic ROUTING_BEL
    interconnect entry written to fout_name.

    model  -- dict describing the cell: 'type', 'pins', 'out', 'srcs',
              'mux' (structure inferred from usage here; confirm with
              callers)
    fin_name  -- input SDF file path
    fout_name -- output SDF file path
    """
    with open(fin_name, 'r') as f:
        sdf = sdfparse.parse(f.read())

    # Work out whether this is a SLICEL or SLICEM from the instance keys.
    keys = sdf['cells'][model['type']].keys()
    if 'slicel'.upper() in keys:
        sl = 'L'
    elif 'slicem'.upper() in keys:
        sl = 'M'
    else:
        print("Unknown slice type!")
        return

    slice_name = 'SLICE' + sl  # sl is already upper-case

    nsdf = {
        'header': sdf['header'],
        'cells': {'ROUTING_BEL': {}},
    }

    cells = sdf['cells']

    for p in model['pins']:
        pin = model['pins'][p]

        # Collected SLOW/max delays, grouped per output pin.
        outs = {o: [] for o in model['out']}

        for src in model['srcs']:
            source = src.replace('?', pin['type'])
            _type = model['type'] + '_' + source

            if _type in cells:
                cell = cells[_type][slice_name]

                for o in model['out']:
                    iopath = 'iopath_' + p + '_' + o
                    if iopath in cell:
                        delay = cell[iopath]['delay_paths']['slow']['max']
                        outs[o].append(delay)

        # Normalize each output's delays against its own minimum; the pin
        # delay is the worst-case spread across all outputs.
        res = []
        for o in outs:
            ordered = sorted(outs[o])
            for val in ordered:
                res.append(val - ordered[0])

        # NOTE(review): assumes at least one matching iopath was found;
        # max() raises ValueError on an empty list, as in the original.
        delay = round(max(res), 3)

        muxname = str(model['mux'].replace('?', pin['type']))
        rbel = nsdf['cells']['ROUTING_BEL'][slice_name + '/' +
                                            muxname] = dict()

        iname = 'interconnect_' + pin['type'].lower() + 'x_' + str(p).lower()

        rbel[iname] = {
            'is_absolute': True,
            'to_pin_edge': None,
            'from_pin_edge': None,
            'from_pin': pin['type'].lower() + 'x',
            'to_pin': str(p).lower(),
            'type': 'interconnect',
            'is_timing_check': False,
            'is_timing_env': False,
            'delay_paths': {
                'slow': {'min': delay, 'avg': None, 'max': delay},
                'fast': {'min': delay, 'avg': None, 'max': delay},
            },
        }

    # Bug fix: the original opened and rewrote fout_name inside the pin
    # loop, re-emitting the whole SDF once per pin.  Write it once, after
    # all pins have been processed.
    with open(fout_name, 'w') as f:
        f.write(sdfparse.emit(nsdf))