Example #1
import os

import sniper_config
import sniper_stats
# SniperResultsException is assumed to live in sniper_lib alongside these tools
from sniper_lib import SniperResultsException


def parse_results_from_dir(resultsdir, partial=None, metrics=None):
    results = []

    ## sim.cfg
    simcfg = os.path.join(resultsdir, 'sim.cfg')
    if not os.path.exists(simcfg):
        raise SniperResultsException("No valid configuration found")
    simcfg = sniper_config.parse_config(open(simcfg).read())
    ncores = int(simcfg['general/total_cores'])

    results += [('ncores', -1, ncores)]
    results += [('corefreq', idx, 1e9 * float(
        sniper_config.get_config(simcfg, 'perf_model/core/frequency', idx)))
                for idx in range(ncores)]

    ## sim.info or graphite.out
    siminfo = os.path.join(resultsdir, 'sim.info')
    graphiteout = os.path.join(resultsdir, 'graphite.out')
    if os.path.exists(siminfo):
        siminfo = eval(open(siminfo).read())
    elif os.path.exists(graphiteout):
        siminfo = eval(open(graphiteout).read())
    else:
        # If we're called from inside run-graphite, sim.info may not yet exist
        siminfo = None
    if siminfo:
        results.append(('walltime', -1, siminfo['t_elapsed']))
        results.append(('vmem', -1, siminfo['vmem']))

    ## sim.stats
    if partial:
        k1, k2 = partial[:2]
    else:
        k1, k2 = 'roi-begin', 'roi-end'

    stats = sniper_stats.SniperStats(resultsdir)
    results += stats.parse_stats((k1, k2), ncores, metrics=metrics)

    if not partial:
        walltime = [v for k, _, v in results if k == 'time.walltime']
        instrs = [v for k, _, v in results if k == 'core.instructions']
        if walltime and instrs:
            walltime = walltime[0] / 1e6  # microseconds -> seconds
            instrs = sum(instrs)
            results.append(('roi.walltime', -1, walltime))
            results.append(('roi.instrs', -1, instrs))
            results.append(('roi.ipstotal', -1, instrs / walltime))
            results.append(('roi.ipscore', -1, instrs / (walltime * ncores)))

    ## power.py
    power = {}
    powerfile = os.path.join(resultsdir, 'power.py')
    if os.path.exists(powerfile):
        # power.py is expected to fill in the 'power' dict defined above
        exec(open(powerfile).read())
        for key, value in power.items():
            results.append(('power.%s' % key, -1, value))

    return results
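
A minimal usage sketch (placeholder path; assumes a completed Sniper run with sim.cfg and sim.stats present):

results = parse_results_from_dir('/path/to/results')
# Collect the aggregate (core == -1) entries into a dict for easy lookup
totals = dict((name, value) for name, core, value in results if core == -1)
print totals.get('roi.ipstotal')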
Example #2
import sniper_stats
import sniper_lib

# initialize, collectCPIStackData*, writetojson, writeinfo, writemarkers,
# writelabels, writeIPCvaluestoJSON and collectMcPATData are helpers defined
# elsewhere in this module
def createJSONData(native_interval_,
                   nativenum_intervals_,
                   interval_,
                   num_intervals_,
                   resultsdir_,
                   outputdir_,
                   title_,
                   mcpat,
                   verbose=False,
                   requested_cores_list=None):
    # A None default avoids the shared-mutable-default-argument pitfall
    requested_cores_list = requested_cores_list or []

    if verbose:
        print 'Generate JSON data for Level 2'

    global native_interval, nativenum_intervals, interval, num_intervals, resultsdir, outputdir, title, use_mcpat, stats, config
    native_interval = native_interval_
    nativenum_intervals = nativenum_intervals_
    interval = interval_
    num_intervals = num_intervals_
    resultsdir = resultsdir_
    outputdir = outputdir_
    title = title_
    use_mcpat = mcpat
    stats = sniper_stats.SniperStats(resultsdir_)
    config = sniper_lib.get_config(resultsdir=resultsdir_)

    initialize()

    collectCPIStackDataFIC(verbose=verbose,
                           requested_cores_list=requested_cores_list)
    collectCPIStackDataFCC(verbose=verbose,
                           requested_cores_list=requested_cores_list)

    writetojson(outputdir, "cpipercentage", "cpi", 1, verbose=verbose)
    writetojson(outputdir,
                "cpipercentagesimplified",
                "cpisimplified",
                1,
                verbose=verbose)

    writeinfo(outputdir, verbose)
    writemarkers(outputdir, verbose)

    writelabels(outputdir, "cpipercentage", "cpi")
    writelabels(outputdir, "cpipercentagesimplified", "cpisimplified")
    writelabels(outputdir, "cpific", "cpific")
    writelabels(outputdir, "simple", "cpisimplified")

    writeIPCvaluestoJSON(outputdir)

    if (use_mcpat):
        collectMcPATData(verbose)
        writetojson(outputdir, "power", "mcpat", 1, verbose)
        writetojson(outputdir, "energy", "mcpat", 2, verbose)
        writetojson(outputdir, "energypercentage", "mcpat", 3, verbose)
        writelabels(outputdir, "power", "mcpat")
        writelabels(outputdir, "energy", "mcpat")
        writelabels(outputdir, "energypercentage", "mcpat")
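
A hedged invocation sketch; all values below are placeholders, and the module-level helpers listed above must already be defined:

createJSONData(native_interval_=1000000, nativenum_intervals_=100,
               interval_=1000000, num_intervals_=100,
               resultsdir_='/path/to/results', outputdir_='/path/to/output',
               title_='my_run', mcpat=False, verbose=True)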
Example #3

import os

import sniper_config
import sniper_stats


def createJSONData(resultsdir, outputdir, title=None, source=None, doxygenpath=None):

  resultsdir = os.path.abspath(resultsdir)
  outputdir = os.path.abspath(outputdir)
  if not title:
    title = os.path.basename(resultsdir)
  title = title.replace(' ', '_')

  global config, stats
  config = sniper_config.parse_config(file(os.path.join(resultsdir, 'sim.cfg')).read())
  stats = sniper_stats.SniperStats(resultsdir)

  readInputData(os.path.join(resultsdir,"sim.rtntrace"))
  if not os.path.exists(os.path.join(outputdir,"levels","functionbased")):
    os.makedirs(os.path.join(outputdir,"levels","functionbased"))

  writeiptstats(os.path.join(outputdir,"levels","functionbased","iptstats.json"))
  writerooflinestats(os.path.join(outputdir,"levels","functionbased","rooflinestats.json"))

  if not os.path.exists(os.path.join(HOME,"levels","functionbased","doxygen")):
    os.makedirs(os.path.join(HOME,"levels","functionbased","doxygen"))

  if source and doxygenpath:
    createDoxyGenOutput(source,doxygenpath,title)
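
Sketch of a call (placeholder paths; the Doxygen step only runs when both source and doxygenpath are given):

createJSONData('/path/to/results', '/path/to/output', title='function view')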
Example #4
        if o == '-v' or o == '--verbose':
            verbose = True
        if o == '-N':
            requested_cores_list += map(int, a.split(':'))

    if verbose:
        print 'This script generates data for the second Level 2 visualization'

    resultsdir = os.path.abspath(resultsdir)
    outputdir = os.path.abspath(outputdir)
    if not title:
        title = os.path.basename(resultsdir)
    title = title.replace(' ', '_')

    try:
        stats = sniper_stats.SniperStats(resultsdir)
        snapshots = stats.get_snapshots()
    except:
        print "No valid results found in " + resultsdir
        sys.exit(1)

    snapshots = sorted([
        long(name.split('-')[1]) for name in snapshots
        if re.match(r'periodic-[0-9]+', name)
    ])
    defaultinterval = snapshots[1] - snapshots[0]
    defaultnum_intervals = len(snapshots) - 1

    if num_intervals == 0 or num_intervals > defaultnum_intervals:
        print 'No number of intervals specified or number of intervals is too big.'
        print 'Now using all intervals (' + str(defaultnum_intervals) + ').'
Example #5
def format_event(timestamp, core, thread, message):
    # Timestamps are kept in femtoseconds; divide by 1e6 to print nanoseconds
    return '%9ld ns: core(%2d) thread(%2d)  %s' % (timestamp / 1e6, core,
                                                   thread, message)


def format_marker(value0, value1, description):
    if description:
        return 'a = %3d,  str = "%s"' % (value0, description)
    else:
        return 'a = %3d,  b = %3d' % (value0, value1)


if do_list:
    import sniper_stats
    stats = sniper_stats.SniperStats(resultsdir=resultsdir, jobid=jobid)
    print ', '.join(stats.get_snapshots())

if do_topo:
    import sniper_stats
    stats = sniper_stats.SniperStats(resultsdir=resultsdir, jobid=jobid)
    for t in stats.get_topology():
        print ', '.join(map(str, t))

if do_markers:
    import sniper_stats
    stats = sniper_stats.SniperStats(resultsdir=resultsdir, jobid=jobid)
    try:
        markers = stats.get_markers()
    except Exception, e:
        print >> sys.stderr, e
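
The two formatters can be exercised standalone; the values below are hypothetical, and timestamps are femtoseconds, so 2500000 fs prints as 2 ns:

print format_event(2500000, 0, 1, 'thread started')
# ->         2 ns: core( 0) thread( 1)  thread started
print format_marker(1, 0, 'init done')   # -> a =   1,  str = "init done"
print format_marker(1, 2, None)          # -> a =   1,  b =   2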
Example #6
import os

import sniper_stats

# translateThreadNameJikes is a thread-name mapping helper defined elsewhere
def bottlegraph(jobid=None,
                resultsdir=None,
                outputfile='./bottlegraph',
                partial=None,
                no_text=False,
                thread_names_translate=translateThreadNameJikes):
    stats = sniper_stats.SniperStats(resultsdir=resultsdir, jobid=jobid)
    results = stats.get_results(partial=partial)['results']

    thread_names = {}
    for threadid, name in stats.get_thread_names().items():
        thread_names[threadid] = thread_names_translate(name)

    runtime = dict(enumerate(results['thread.bottle_runtime_time']))
    contrib = dict(enumerate(results['thread.bottle_contrib_time']))
    total_runtime = results['barrier.global_time'][0] / 1e15

    xs = dict([(thread, runtime[thread] / float(contrib[thread]))
               for thread in runtime if runtime[thread] > 0])
    ys = dict([(thread, contrib[thread] / 1e15) for thread in runtime
               if runtime[thread] > 0])
    # Threads in descending order of parallelism
    threads = sorted(xs.keys(), key=lambda thread: xs[thread], reverse=True)

    if not no_text:
        print 'Runtime (s)   Parallelism   Thread name'
        for thread in threads:
            print '%11.5f' % ys[thread], '%13.2f' % xs[
                thread], ' ' * 3, thread_names[thread] or 'Thread-%d' % thread

    max_x = int(max(xs.values()) + 1.2)
    max_y = total_runtime * 1.1
    fd = open('%s.input' % outputfile, 'w')
    print >> fd, '''\
set terminal png font "FreeSans,10" size 450,400
set output "%s.png"
set grid
set xlabel "Parallelism"
set ylabel "Runtime (seconds)"
set key outside bottom horizontal
set style fill solid 1.0 noborder
set xrange [-%d:%d]
set yrange [0:%f]
set xtics (%s) nomirror
''' % (os.path.basename(outputfile), max_x, max_x, max_y, ','.join(
        ['"%d" %d' % (abs(x), x) for x in range(-max_x, max_x + 1)]))

    y = 0
    colors = ('#00FFFF', '#A52A2A', '#A9A9A9', '#FF1493', '#8FBC8F', '#FF6347',
              '#006400')
    color = lambda i: colors[i % len(colors)]

    for i, thread in enumerate(threads):
        print >> fd, 'set object rect from %f,%f to %f,%f fc rgb "%s"' % (
            -xs[thread], y, xs[thread], y + ys[thread], color(i))
        y += ys[thread]

    print >> fd, 'plot %s' % ', '.join([
        '-1 with boxes title "%s" lc rgb "%s"' %
        (thread_names[thread] or 'Thread-%d' % thread, color(i))
        for i, thread in reversed(list(enumerate(threads)))
        if ys[thread] > .01 * total_runtime
    ])
    fd.close()

    os.system('cd "%s" && gnuplot %s.input' %
              (os.path.dirname(outputfile), os.path.basename(outputfile)))
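
Typical invocation (placeholder paths; gnuplot must be on PATH for the final plotting step):

bottlegraph(resultsdir='/path/to/results', outputfile='/path/to/bottlegraph')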
Example #7

import sys
import collections

import sniper_config
import sniper_lib
import sniper_stats


def gen_topology(resultsdir='.',
                 jobid=None,
                 outputobj=sys.stdout,
                 format='svg',
                 embedded=False):
    names = ('hwcontext', 'smt', 'L1-I', 'L1-D', 'L2', 'L3', 'L4', 'tag-dir',
             'nuca-cache', 'dram-cache', 'dram-cntlr')
    ids = dict([(name, collections.defaultdict(lambda: None))
                for name in names])

    stats = sniper_stats.SniperStats(resultsdir, jobid)
    config = sniper_lib.get_config(resultsdir=resultsdir, jobid=jobid)

    try:
        topology = stats.get_topology()
    except:
        print >> sys.stderr, "Failed getting topology information"
        topology = None

    max_id = 0
    if topology:
        # Iterate the topology fetched above rather than querying it again
        for name, lid, mid in topology:
            if name not in names:
                print >> sys.stderr, 'Unknown component', name
                continue
            ids[name][int(lid)] = int(mid)
            max_id = max(max_id, int(lid))

    def format_config(name, lid):
        caches = {
            'L1-I': 'l1_icache',
            'L1-D': 'l1_dcache',
            'L2': 'l2_cache',
            'L3': 'l3_cache',
            'L4': 'l4_cache'
        }
        if name in caches:
            value = sniper_config.get_config(
                config, 'perf_model/%s/cache_size' % caches[name], lid)
            return sniper_lib.format_size(1024 * long(value), digits=0)
        elif name == 'dram-cache':
            value = sniper_config.get_config(
                config, 'perf_model/dram/cache/cache_size', lid)
            return sniper_lib.format_size(1024 * long(value), digits=0)
        else:
            return ''

    if format == 'text':
        print >> outputobj, ' ' * 20,
        for lid in range(max_id + 1):
            print >> outputobj, '%3d' % lid,
        print >> outputobj

        for name in names:
            if ids[name].keys():
                print >> outputobj, '%-20s' % name,
                for lid in range(max_id + 1):
                    mid = ids[name][lid]
                    if mid is None:
                        value = ' '
                    elif mid == lid:
                        value = 'X'
                    else:
                        value = '<'
                    print >> outputobj, '%3s' % value,
                print >> outputobj

    elif format == 'svg':

        class Svg:
            def __init__(self):
                self.margin_x = 50
                self.step_x = 110
                self.margin_y = 50
                self.step_y = 50
                self.size_x = 0
                self.size_y = 0
                self.items = []

            def paint_box(self,
                          (x, y),
                          (w, h),
                          name='',
                          label=0,
                          color='#ffffff',
                          zorder=0,
                          margin=(.2, .2),
                          root=(0, 0)):
                x += root[0]
                y += root[1]
                self.size_x = max(self.size_x, (x + w) * self.step_x)
                self.size_y = max(self.size_y, (y + h) * self.step_y)
                svg = '''\
<rect id="%s" x="%d" y="%d" width="%d" height="%d" rx="0"
   style="stroke:#000000;stroke-width:1;stroke-linejoin:miter; stroke-linecap:butt;fill:%s;"/>
  ''' % (name, self.margin_x + x * self.step_x,
                self.margin_y + y * self.step_y, (w - margin[0]) * self.step_x,
                (h - margin[1]) * self.step_y, color)
                if label:
                    svg += '''\
<text xml:space="preserve" x="%d" y="%d" fill="#000000"  font-family="Times" font-style="normal" font-weight="normal" font-size="12" text-anchor="start">%s</text>
  ''' % (self.margin_x + (x + .1) * self.step_x, self.margin_y +
                    (y + .3) * self.step_y, label)
                self.items.append((zorder, svg))
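
The text backend is the simplest way to try this; a minimal call sketch with a placeholder results path:

gen_topology(resultsdir='/path/to/results', outputobj=sys.stdout, format='text')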
Example #8
import os
import collections
import json

import sniper_config
import sniper_stats
import sniper_lib
import gen_topology

# mkdir_p is a small os.makedirs wrapper defined elsewhere in this module
def createJSONData(interval,
                   num_intervals,
                   resultsdir,
                   outputdir,
                   verbose=False):
    topodir = os.path.join(outputdir, 'levels', 'topology')
    mkdir_p(topodir)

    gen_topology.gen_topology(resultsdir=resultsdir,
                              outputobj=file(os.path.join(topodir, 'topo.svg'),
                                             'w'),
                              format='svg',
                              embedded=True)

    config = sniper_config.parse_config(
        file(os.path.join(resultsdir, 'sim.cfg')).read())
    ncores = int(config['general/total_cores'])
    stats = sniper_stats.SniperStats(resultsdir)

    ids = collections.defaultdict(lambda: {})
    for name, lid, mid in stats.get_topology():
        ids[name][int(lid)] = int(mid)

    caches = ['L1-I', 'L1-D', 'L2', 'L3', 'L4', 'dram-cache']
    items = sum(
        [['%s-%d' % (name, core) for name in ['core', 'dram-cntlr'] + caches]
         for core in range(ncores)], [])
    data = dict([(item, {'info': '', 'sparkdata': []}) for item in items])
    dramcntlrs = [
        lid for (name, lid, mid) in stats.get_topology()
        if name == 'dram-cntlr'
    ]

    for i in range(num_intervals):
        results = sniper_lib.get_results(
            config=config,
            stats=stats,
            partial=('periodic-' + str(i * interval), 'periodic-' + str(
                (i + 1) * interval)))['results']
        if 'barrier.global_time_begin' in results:
            # Most accurate: ask the barrier
            results['time_begin'] = results['barrier.global_time_begin'][0]
            results['time_end'] = results['barrier.global_time_end'][0]
        elif 'performance_model.elapsed_time_end' in results:
            # Guess based on core that has the latest time (future wakeup is less common than sleep on futex)
            results['time_begin'] = max(
                results['performance_model.elapsed_time_begin'])
            results['time_end'] = max(
                results['performance_model.elapsed_time_end'])
        else:
            raise ValueError(
                'Need either performance_model.elapsed_time or barrier.global_time, simulation is probably too old'
            )

        for core in range(ncores):
            if 'fs_to_cycles_cores' in results:
                cycles_scale = results['fs_to_cycles_cores'][core]
            else:
                cycles_scale = 1.
            cycles = cycles_scale * (results['time_end'] -
                                     results['time_begin'])
            ninstrs = results['performance_model.instruction_count'][core]
            data['core-%d' % core]['sparkdata'].append('%.3f' %
                                                       (ninstrs / cycles))
            data['core-%d' % core]['info'] = 'IPC (core-%d)' % core
            for cache in caches:
                if cache not in ids:
                    # Cache level does not exist
                    continue
                if ids[cache][core] != core:
                    # Non-master cache
                    continue
                if '%s.loads' % cache in results:
                    # Sum misses and instruction counts over all cores sharing this cache
                    misses = 0
                    ninstrs = 0
                    for _core in range(ncores):
                        if ids[cache][_core] == ids[cache][core]:
                            misses += results['%s.load-misses' %
                                              cache][_core] + results[
                                                  '%s.store-misses-I' %
                                                  cache][_core]
                            ninstrs += results[
                                'performance_model.instruction_count'][_core]
                    data['%s-%d' % (cache, core)]['sparkdata'].append(
                        '%.3f' % (1000. * misses / float(ninstrs or 1.)))
                    data['%s-%d' %
                         (cache, core)]['info'] = 'MPKI (%s-%d)' % (cache,
                                                                    core)

        for dramcntlr in dramcntlrs:
            ninstrs = sum(results['performance_model.instruction_count'])
            if ninstrs == 0:
                # FIXME ninstrs should not be zero while we are accessing dram
                # Keep sparkdata entries consistently formatted as strings
                data['dram-cntlr-%d' % dramcntlr]['sparkdata'].append('%.3f' % 0.)
            else:
                data['dram-cntlr-%d' % dramcntlr]['sparkdata'].append(
                    '%.3f' % (1000. * (results['dram.reads'][dramcntlr] +
                                       results['dram.writes'][dramcntlr]) /
                              (ninstrs or 1.)))
            data['dram-cntlr-%d' %
                 dramcntlr]['info'] = 'APKI (dram-cntlr-%d)' % dramcntlr

    jsonfile = open(os.path.join(topodir, 'topology.txt'), "w")
    jsonfile.write('topology = %s' % json.dumps(data))
    jsonfile.close()
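
Call sketch (placeholder values; interval must match the spacing of the periodic-* snapshot names in the results):

createJSONData(interval=1000000000, num_intervals=100,
               resultsdir='/path/to/results', outputdir='/path/to/output')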
Example #9

import os
import json

import sniper_config
import sniper_stats
import cpistack

# mkdir_p is a small os.makedirs wrapper defined elsewhere in this module
def createJSONData(interval,
                   num_intervals,
                   resultsdir,
                   outputdir,
                   title,
                   verbose=False):
    if verbose:
        print 'Generate JSON data for Level 3'

    stats = sniper_stats.SniperStats(resultsdir)
    config = sniper_config.parse_config(
        file(os.path.join(resultsdir, 'sim.cfg')).read())

    ncores = int(config['general/total_cores'])
    if verbose:
        print ncores, "cores detected"

    intervaldata = [0 for x in xrange(num_intervals)]
    num_exceptions = 0
    for i in range(0, num_intervals):
        if verbose:
            print "Parsing interval " + str(i + 1) + "/" + str(
                num_intervals) + "\r",

        try:
            results = cpistack.cpistack_compute(
                config=config,
                stats=stats,
                partial=[
                    "periodic-" + str(i * interval), "periodic-" + str(
                        (i + 1) * interval)
                ],
                use_simple=False,
                use_simple_mem=True,
                no_collapse=False,
                aggregate=False)
            data = results.get_data('cpi')

            intervaldata[i] = [0 for x in xrange(ncores)]

            for core in xrange(ncores):
                if core in results.cores:
                    intervaldata[i][core] = {
                        'time': (i * interval / 1000000),
                        'ipc': 1. / sum(data[core].itervalues())
                    }
                else:
                    intervaldata[i][core] = {
                        'time': (i * interval / 1000000),
                        'ipc': 0
                    }

        except ValueError:
            intervaldata[i] = [0 for x in xrange(ncores)]
            for j in range(0, ncores):
                intervaldata[i][j] = dict(time=(i * interval / 1000000), ipc=0)
            num_exceptions += 1
            continue

    # Write JSON to file
    mkdir_p(os.path.join(outputdir, 'levels', 'level3', 'data'))
    f = open(
        os.path.join(outputdir, 'levels', 'level3', 'data', 'ipcvalues.txt'),
        "w")
    f.write("intervalsize = " + str(interval) + ";\n")
    f.write("ipcvaluestr = '" + json.dumps(intervaldata) + "';")
    f.close()
    f = open(
        os.path.join(outputdir, 'levels', 'level3', 'data', 'ipcvalues.json'),
        "w")
    f.write(json.dumps(intervaldata, indent=4))
    f.close()
    if verbose:
        print
        if num_exceptions > 0:
            print("There was no useful information for " +
                  str(num_exceptions) + " intervals.")
            print("You might want to increase the interval size.")
        print('[OK]')
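
A hedged invocation sketch (placeholder values; as above, interval must match the spacing of the periodic-* snapshots):

createJSONData(interval=1000000000, num_intervals=100,
               resultsdir='/path/to/results', outputdir='/path/to/output',
               title='level3 ipc', verbose=True)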