Example #1
0
def create_init_data():
    '''
    Create `place_and_route_data.h5`, seeding one `net_files` row per MCNC
    `.net` benchmark and, per benchmark, a group containing:

      * a `placements` table (one row per `placed-<namebase>-*.out` file
        found under `../place_results`), and
      * a flat `block_positions` table that each placement row indexes into
        via its `block_position_offset`/`block_count` columns.
    '''
    # Open a file in "w"rite mode with blosc compression.
    filters = ts.Filters(complib='blosc', complevel=2)
    fileh = ts.openFile("place_and_route_data.h5", mode="w", filters=filters)

    # Get the HDF5 root group
    root = fileh.root
    net_files = fileh.createTable(root, 'net_files',
                                  NET_FILES_TABLE_LAYOUT)
    net_paths = path('/var/benchmarks/mcnc').files('*.net')

    placements = fileh.createGroup(root, 'placements', 'Placements')

    m = cMain()

    for i, p in enumerate(net_paths):
        parser = cVprNetFileParser(p)
        net_file = net_files.row
        net_file['id'] = i
        net_file['md5'] = p.read_hexhash('md5')
        net_file['block_count'] = len(parser.block_labels)
        net_file['net_count'] = len(parser.net_labels)

        group = fileh.createGroup(placements, p.namebase,
                                  'Placements for %s' % p.namebase)
        placement_table = fileh.createTable(group, 'placements',
                                            PLACEMENT_TABLE_LAYOUT)
        placement_table.flush()
        positions_table = fileh.createTable(
                group, 'block_positions', BLOCK_POSITIONS_TABLE_LAYOUT)
        positions_table.flush()

        # Continue id sequences from any rows already in the tables.
        placement_id = len(placement_table)
        positions_id = len(positions_table)
        for j, d in enumerate(path('../place_results')
                              .files('placed-%s-*.out' % p.namebase)):
            # Parse the placement file with VPR to recover block positions.
            positions = m.read_placement(
                    p, '/var/benchmarks/4lut_sanitized.arch', d)

            placement = placement_table.row
            placement['id'] = placement_id + j
            placement['net_file_id'] = net_file['id']
            # Record where this placement's positions start in the flat
            # `block_positions` table.
            placement['block_position_offset'] = positions_id
            placement['block_count'] = net_file['block_count']
            placement['md5'] = d.read_hexhash('md5')

            for k, b in enumerate(positions):
                row = positions_table.row
                row['id'] = positions_id + k
                row['placement_id'] = placement['id']
                row['position'] = b
                row.append()
            positions_id += len(positions)
            placement.append()
            positions_table.flush()
            placement_table.flush()
        net_file.append()
        net_files.flush()
    fileh.close()
Example #2
0
def test_route():
    '''
    Smoke-test VPR place-and-route on the `e64-4lut` benchmark: place the
    net-file, route the placement, and check that the last successful
    routing state reports per-net data for every net.
    '''
    data_root = path(cyvpr.get_data_root()[0])
    arch_path = data_root.joinpath('4lut_sanitized.arch')
    net_file = data_root.joinpath('e64-4lut.net')

    vpr = cMain()
    positions = vpr.place(net_file, arch_path, 'placed.out')
    assert len(positions) == vpr.block_count

    routed_data = vpr.route(net_file, arch_path, 'placed.out', 'routed.out')
    # Use the final successful routing state for the per-net checks.
    successes = [s for s in routed_data['states'] if s.success]
    final_state = successes[-1]
    for attr in ('wire_lengths', 'bends', 'segments'):
        assert len(getattr(final_state, attr)) == vpr.net_count
    return routed_data
Example #3
0
File: placement.py  Project: cfobel/cyvpr
    def sync_placements_from_paths(self, arch_path):
        """
        Populate the `placement_results` group of the placements HDF5 file
        from the placement file-paths recorded in the paths HDF5 file.

        One table is created per net-file namebase; each row stores the
        net-file MD5, the SHA1 of the block positions, and the block
        positions themselves (parsed from the placement file via VPR).
        Placements whose SHA1 is already present in the matching table are
        skipped.

        :param arch_path: architecture file path, forwarded to VPR when
            parsing each placement.
        :raises RuntimeError: if the net-file named in a placement file's
            header is not listed in `net_file_paths`.
        """
        self.sync_net_files_from_paths(arch_path)
        vpr_main = cMain()
        h5f = self.h5f['placements']
        if not hasattr(h5f.root, 'placement_results'):
            h5f.createGroup(h5f.root, 'placement_results')
        placement_results = h5f.root.placement_results
        # The placement file's header names the net-file it was produced
        # from; grab the namebase of the first `<token>.net` occurrence.
        cre_net_file_name = re.compile(r'(?P<net_file_namebase>[^\/\s]+)\.net')
        for placement_path in self.h5f['paths'].root.placement_paths:
            with open(placement_path['path'], 'rb') as f:
                header = f.readline()
            match = cre_net_file_name.search(header)
            net_file_namebase = match.group('net_file_namebase')
            net_file_name = net_file_namebase + '.net'
            if hasattr(placement_results, net_file_namebase):
                placement_table = getattr(placement_results, net_file_namebase)
                if (placement_path['block_positions_sha1'] in
                    placement_table.cols.block_positions_sha1):
                    # This placement has already been entered into the table.
                    continue

            # The placement has not been entered into the results table, so
            # process it now.
            net_file = None
            for net_file_data in self.h5f['paths'].root.net_file_paths:
                if net_file_data['path'].endswith(net_file_name):
                    # The net-file used for the placement is available.
                    net_file = net_file_data
                    break
            if net_file is None:
                raise RuntimeError, 'The net-file used for the placement is _not_ available.'

            # Parse using VPR.
            block_positions = (vpr_main
                                .read_placement(net_file_data['path'],
                                                arch_path,
                                                placement_path['path']))
            if not hasattr(placement_results, net_file_namebase):
                # First placement for this net-file: create its results
                # table, sized by the block-count VPR just reported.
                table = h5f.createTable(placement_results, net_file_namebase,
                                        get_PLACEMENT_TABLE_LAYOUT(
                                                vpr_main.block_count))
                # Index hash columns for fast duplicate look-ups.
                table.cols.net_file_md5.createIndex()
                table.cols.block_positions_sha1.createIndex()
            placements = getattr(placement_results, net_file_namebase)
            row = placements.row
            row['net_file_md5'] = net_file['md5']
            row['block_positions_sha1'] = placement_path['block_positions_sha1']
            row['block_positions'] = block_positions
            row.append()
            placements.flush()
Example #4
0
def process_placement_path(net_file_paths_table, placement_paths_table,
                           file_path, architecture, update):
    vpr_main = cMain()
    cre_net_file_name = re.compile(r'(?P<net_file_namebase>[^\/\s]+)\.net')
    with open(file_path, 'rb') as f:
        header = f.readline()
    match = cre_net_file_name.search(header)
    net_file_namebase = match.group('net_file_namebase')
    net_file_name = net_file_namebase + '.net'

    # The placement has not been entered into the results table, so
    # process it now.
    net_file = None
    for net_file_data in net_file_paths_table:
        if net_file_data['path'].endswith(net_file_name):
            # The net-file used for the placement is available.
            net_file = net_file_data
            break
    if net_file is None:
        raise RuntimeError, ('The net-file used for the placement is _not_ '
                             'available.')

    # Parse using VPR.
    block_positions = vpr_main.read_placement(net_file['path'], architecture,
                                              file_path)
    sha1 = hashlib.sha1()
    sha1.update(block_positions.data)
    block_positions_sha1 = sha1.hexdigest()

    row_count = 0
    for row in placement_paths_table.where('block_positions_sha1 == "%s"'
                                           % block_positions_sha1):
        # The table already contains an entry for this file.
        print ('The table already contains block_positions with this SHA1: %s'
               % block_positions_sha1)
        if update and not row['path'] == file_path.abspath():
            row['path'] = file_path.abspath()
            row.update()
            print '  \--> updated path to: %s' % row['path']
        row_count += 1

    if row_count == 0:
        row = placement_paths_table.row
        row['block_positions_sha1'] = block_positions_sha1
        row['path'] = file_path.abspath()
        row.append()
Example #5
0
File: placement.py  Project: cfobel/cyvpr
 def sync_net_files_from_paths(self, arch_path):
     """
     Ensure the placements HDF5 file has a `net_files` table with one row
     (MD5, block-count, net-count) per net-file listed in the paths HDF5
     file.  Net-files whose MD5 is already present are skipped.

     :param arch_path: architecture file path, forwarded to VPR's `init`
         so it can parse each net-file.
     """
     vpr_main = cMain()
     h5f = self.h5f['placements']
     if not hasattr(h5f.root, 'net_files'):
         table = h5f.createTable(h5f.root, 'net_files', NET_FILES_TABLE_LAYOUT)
         # Index the MD5 column for fast membership tests below.
         table.cols.md5.createCSIndex()
     net_files = h5f.root.net_files
     for net_file_path in self.h5f['paths'].root.net_file_paths:
         if net_file_path['md5'] not in net_files.cols.md5:
             # Initialize VPR with dummy output files to read in net-file to
             # get block-count and net-count.
             vpr_main.init([net_file_path['path'], arch_path, 'placed.out',
                            'routed.out', '-place_only', '-nodisp'])
             row = net_files.row
             row['md5'] = net_file_path['md5']
             row['block_count'] = vpr_main.block_count
             row['net_count'] = vpr_main.net_count
             row.append()
     net_files.flush()
Example #6
0
def place(net_path, arch_path, output_path=None, output_dir=None, place_algorithm="bounding_box", fast=True, seed=0):
    """
    Perform VPR placement and write result to HDF file with the following
    structure:

        <net-file_namebase _(e.g., `ex5p`, `clma`, etc.)_> (Group)
            \--> `placements` (Table)

    The intention here is to structure the results such that they can be merged
    together with the results from other placements.
    """
    vpr_main = cMain()
    # We just hard-code `placed.out` as the output path, since we aren't using
    # the output file.  Instead, the block-positions are returned from the
    # `place` method.
    place_state, block_positions = vpr_main.place(net_path, arch_path, "placed.out", seed=seed, fast=fast)
    # Use a hash of the block-positions to name the HDF file.
    block_positions_sha1 = hashlib.sha1(block_positions.astype("uint32").data).hexdigest()
    filters = ts.Filters(complib="blosc", complevel=6)
    if output_path is not None:
        output_path = str(output_path)
    else:
        output_file_name = "placed-%s-s%d-%s.h5" % (net_path.namebase, seed, block_positions_sha1)
        if output_dir is not None:
            output_path = str(output_dir.joinpath(output_file_name))
        else:
            output_path = output_file_name
    parent_dir = path(output_path).parent
    if parent_dir and not parent_dir.isdir():
        parent_dir.makedirs_p()
    print "writing output to: %s" % output_path

    h5f = ts.openFile(output_file_name, mode="w", filters=filters)

    net_file_results = h5f.createGroup(
        h5f.root,
        net_path.namebase,
        title="Placement results for %s VPR "
        "with `fast`=%s, `place_algorithm`=%s" % (net_path.namebase, fast, place_algorithm),
    )

    placements = h5f.createTable(
        net_file_results,
        "placements",
        get_PLACEMENT_TABLE_LAYOUT(vpr_main.block_count),
        title="Placements for %s VPR with args: %s" % (net_path.namebase, " ".join(vpr_main.most_recent_args())),
    )
    placements.setAttr("net_file_namebase", net_path.namebase)

    placements.cols.block_positions_sha1.createIndex()
    row = placements.row
    row["net_file_md5"] = net_path.read_hexhash("md5")
    row["block_positions"] = block_positions
    row["block_positions_sha1"] = block_positions_sha1
    row["seed"] = seed
    # Convert start-date-time to UTC unix timestamp
    row["start"] = unix_time(place_state.start)
    row["end"] = unix_time(place_state.end)

    placer_opts = place_state.placer_opts
    row["placer_options"] = (
        placer_opts.timing_tradeoff,
        placer_opts.block_dist,
        placer_opts.place_cost_exp,
        placer_opts.place_chan_width,
        placer_opts.num_regions,
        placer_opts.recompute_crit_iter,
        placer_opts.enable_timing_computations,
        placer_opts.inner_loop_recompute_divider,
        placer_opts.td_place_exp_first,
        placer_opts.td_place_exp_last,
        placer_opts.place_cost_type,
        placer_opts.place_algorithm,
    )
    row.append()
    placements.flush()

    stats_group = h5f.createGroup(
        net_file_results,
        "placement_stats",
        title="Placement statistics for each "
        "outer-loop iteration of a VPR anneal for "
        "%s with args: %s" % (net_path.namebase, " ".join(vpr_main.most_recent_args())),
    )

    # Prefix `block_positions_sha1` with `P_` to ensure the table-name is
    # compatible with Python natural-naming.  This is necessary since SHA1
    # hashes may start with a number, in which case the name would not be a
    # valid Python attribute name.
    placement_stats = h5f.createTable(
        stats_group,
        "P_" + block_positions_sha1,
        get_VPR_PLACEMENT_STATS_TABLE_LAYOUT(),
        title="Placement statistics for each "
        "outer-loop iteration of a VPR anneal "
        "for %s with args: `%s`, which produced "
        "the block-positions with SHA1 hash `%s`"
        % (net_path.namebase, " ".join(vpr_main.most_recent_args()), block_positions_sha1),
    )
    placement_stats.setAttr("net_file_namebase", net_path.namebase)
    placement_stats.setAttr("block_positions_sha1", block_positions_sha1)

    for stats in place_state.stats:
        stats_row = placement_stats.row
        for field in (
            "temperature",
            "mean_cost",
            "mean_bounding_box_cost",
            "mean_timing_cost",
            "mean_delay_cost",
            "place_delay_value",
            "success_ratio",
            "std_dev",
            "radius_limit",
            "criticality_exponent",
            "total_iteration_count",
        ):
            stats_row[field] = getattr(stats, field)
            stats_row["start"] = stats.start["tv_sec"] + stats.start["tv_nsec"] * 1e-9
            stats_row["end"] = stats.end["tv_sec"] + stats.end["tv_nsec"] * 1e-9
        stats_row.append()
    placement_stats.flush()

    h5f.close()
    return place_state
Example #7
0
File: do_route.py  Project: cfobel/cyvpr
def route(net_path, arch_path, placement_path, output_path=None,
          output_dir=None, fast=True, clbs_per_pin_factor=None,
          channel_width=None, timing_driven=True, max_router_iterations=None):
    '''
    Perform VPR routing and write result to HDF file with the following
    structure:

        <net-file_namebase _(e.g., `ex5p`, `clma`, etc.)_> (Group)
            \--> `route_states` (Table)

    The intention here is to structure the results such that they can be merged
    together with the results from other routings.

    :param net_path: net-file path.
    :param arch_path: architecture file path.
    :param placement_path: placement file to route.
    :param output_path: explicit HDF output path; when `None`, a name is
        derived from the namebase, block-positions SHA1 and routing options.
    :param output_dir: directory for the derived output file (ignored when
        `output_path` is given).
    :param fast, channel_width, timing_driven, max_router_iterations:
        forwarded to VPR's `route`.
    :return: the routing results structure returned by VPR.

    NOTE(review): `clbs_per_pin_factor` is accepted but never used below —
    confirm whether it should be forwarded to `vpr_main.route`.
    '''
    net_path = path(net_path)
    arch_path = path(arch_path)
    placement_path = path(placement_path)
    vpr_main = cMain()

    # Route into a throw-away temp directory; only the in-memory results are
    # kept and the directory is always removed in the `finally` below.
    routed_temp_dir = path(tempfile.mkdtemp(prefix='routed-'))
    try:
        routed_path = routed_temp_dir.joinpath('routed.out')

        # We just hard-code `routed.out` as the output path, since we aren't using
        # the output file.  Instead, the routing results and states are returned
        # from the `route` method, as an `OrderedDict` with the keys `result` and
        # `states`.
        route_results = vpr_main.route(net_path, arch_path, placement_path,
                                    routed_path, timing_driven=timing_driven,
                                    fast=fast, route_chan_width=channel_width,
                                    max_router_iterations=max_router_iterations)
    finally:
        routed_temp_dir.rmtree()

    block_positions = vpr_main.extract_block_positions()
    block_positions_sha1 = hashlib.sha1(block_positions
                                        .astype('uint32').data).hexdigest()

    # Use a hash of the block-positions to name the HDF file.
    filters = ts.Filters(complib='blosc', complevel=6)
    if output_path is not None:
        output_path = str(output_path)
    else:
        # Encode the routing options in the default file-name so results
        # from different configurations do not collide.
        output_file_name = 'routed-%s-%s' % (net_path.namebase,
                                             block_positions_sha1)
        if fast:
            output_file_name += '-fast'

        if timing_driven:
            output_file_name += '-timing_driven'
        else:
            output_file_name += '-breadth_first'

        if channel_width:
            output_file_name += '-w%d' % channel_width

        if max_router_iterations:
            output_file_name += '-m%d' % max_router_iterations

        output_file_name += '.h5'

        if output_dir is not None:
            output_path = str(output_dir.joinpath(output_file_name))
        else:
            output_path = output_file_name
    parent_dir = path(output_path).parent
    if parent_dir and not parent_dir.isdir():
        parent_dir.makedirs_p()
    print 'writing output to: %s' % output_path

    h5f = ts.openFile(output_path, mode='w', filters=filters)

    net_file_results = h5f.createGroup(h5f.root, net_path.namebase,
                                       title='Routing results for %s VPR '
                                       'with `fast`=%s, `timing_driven`=%s, '
                                       'with `route_chan_width`=%s, '
                                       '`max_router_iterations`=%s'
                                       % (net_path.namebase, fast,
                                          timing_driven, channel_width,
                                          max_router_iterations))

    # TODO: Finish modifying this function for route _(instead of placement)_.
    route_states = h5f.createTable(net_file_results, 'route_states',
                                   get_ROUTE_TABLE_LAYOUT(vpr_main.net_count),
                                   title='Routings for %s VPR with args: %s' %
                                   (net_path.namebase,
                                    ' '.join(vpr_main.most_recent_args())))
    route_states.setAttr('net_file_namebase', net_path.namebase)

    # Index some columns for fast look-up.
    route_states.cols.block_positions_sha1.createIndex()
    route_states.cols.success.createIndex()
    route_states.cols.width_fac.createCSIndex()

    # One row per routing state (i.e., per channel-width attempt).
    for i, route_state in enumerate(route_results['states']):
        state_row = route_states.row
        state_row['block_positions_sha1'] = block_positions_sha1
        state_row['success'] = route_state.success
        state_row['width_fac'] = route_state.width_fac
        state_row['critical_path_delay'] = route_state.critical_path_delay
        state_row['total_logic_delay'] = route_state.total_logic_delay
        state_row['total_net_delay'] = route_state.total_net_delay
        state_row['tnodes_on_crit_path'] = route_state.tnodes_on_crit_path
        state_row['non_global_nets_on_crit_path'] = (
                route_state.non_global_nets_on_crit_path)
        state_row['global_nets_on_crit_path'] = (route_state
                                                 .global_nets_on_crit_path)

        # Convert start-date-time to UTC unix timestamp
        state_row['start'] = unix_time(route_state.start)
        state_row['end'] = unix_time(route_state.end)

        # Snapshot router options in the order the table layout expects.
        state_row['router_options'] = tuple(getattr(route_state.router_opts,
                                                    attr) for attr in
                                            ('max_router_iterations',
                                             'first_iter_pres_fac',
                                             'initial_pres_fac',
                                             'pres_fac_mult', 'acc_fac',
                                             'bend_cost', 'bb_factor',
                                             'astar_fac', 'max_criticality',
                                             'criticality_exp'))

        # Per-net data rows: 0=bends, 1=wire_lengths, 2=segments (only
        # present for states that produced per-net results).
        if len(route_state.bends) > 0:
            state_row['net_data'][0][:] = route_state.bends[:]
            state_row['net_data'][1][:] = route_state.wire_lengths[:]
            state_row['net_data'][2][:] = route_state.segments[:]
        state_row.append()
    route_states.flush()

    h5f.close()
    return route_results