Code example #1
File: sniper_lib.py  Project: yonggang985/Sniper
def parse_results_from_dir(resultsdir, partial=None, metrics=None):
    results = []

    ## sim.cfg
    simcfg = os.path.join(resultsdir, "sim.cfg")
    if not os.path.exists(simcfg):
        raise SniperResultsException("No valid configuration found")
    simcfg = sniper_config.parse_config(open(simcfg).read())
    ncores = int(simcfg["general/total_cores"])

    results += [("ncores", -1, ncores)]
    results += [
        ("corefreq", idx, 1e9 * float(sniper_config.get_config(simcfg, "perf_model/core/frequency", idx)))
        for idx in range(ncores)
    ]

    ## sim.info or graphite.out
    siminfo = os.path.join(resultsdir, "sim.info")
    graphiteout = os.path.join(resultsdir, "graphite.out")
    if os.path.exists(siminfo):
        siminfo = eval(open(siminfo).read())
    elif os.path.exists(graphiteout):
        siminfo = eval(open(graphiteout).read())
    else:
        siminfo = None
    if siminfo:
        # If we're called from inside run-graphite, sim.info may not yet exist
        results.append(("walltime", -1, siminfo["t_elapsed"]))
        results.append(("vmem", -1, siminfo["vmem"]))

    ## sim.stats
    if partial:
        k1, k2 = partial[:2]
    else:
        k1, k2 = "roi-begin", "roi-end"

    stats = sniper_stats.SniperStats(resultsdir)
    results += stats.parse_stats((k1, k2), ncores, metrics=metrics)

    if not partial:
        walltime = [v for k, _, v in results if k == "time.walltime"]
        instrs = [v for k, _, v in results if k == "core.instructions"]
        if walltime and instrs:
            walltime = walltime[0] / 1e6  # microseconds -> seconds
            instrs = sum(instrs)
            results.append(("roi.walltime", -1, walltime))
            results.append(("roi.instrs", -1, instrs))
            results.append(("roi.ipstotal", -1, instrs / walltime))
            results.append(("roi.ipscore", -1, instrs / (walltime * ncores)))

    ## power.py
    power = {}
    powerfile = os.path.join(resultsdir, "power.py")
    if os.path.exists(powerfile):
        # power.py assigns entries into the local `power` dict
        # (relies on Python 2 exec-in-function semantics)
        exec(open(powerfile).read())
        for key, value in power.items():
            results.append(("power.%s" % key, -1, value))

    return results
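A minimal usage sketch for the function above (hedged: it assumes Sniper's tools/ directory is on sys.path so sniper_lib imports cleanly; the results path is hypothetical):

import pprint
import sniper_lib  # ships in Sniper's tools/ directory; path setup assumed

# Full run: statistics between the default roi-begin/roi-end markers.
results = sniper_lib.parse_results_from_dir('/path/to/resultsdir')
pprint.pprint([r for r in results if r[0] in ('ncores', 'walltime', 'roi.ipstotal')])

# Partial region: any two snapshot marker names known to sim.stats,
# optionally restricted to a subset of metrics.
partial = sniper_lib.parse_results_from_dir(
    '/path/to/resultsdir',
    partial=('roi-begin', 'roi-end'),
    metrics=('performance_model.instruction_count',))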
Code example #2
def parse_results_from_dir(resultsdir, partial=None, metrics=None):
    results = []

    ## sim.cfg
    simcfg = os.path.join(resultsdir, 'sim.cfg')
    if not os.path.exists(simcfg):
        raise SniperResultsException("No valid configuration found")
    simcfg = sniper_config.parse_config(open(simcfg).read())
    ncores = int(simcfg['general/total_cores'])

    results += [('ncores', -1, ncores)]
    results += [('corefreq', idx, 1e9 * float(
        sniper_config.get_config(simcfg, 'perf_model/core/frequency', idx)))
                for idx in range(ncores)]

    ## sim.info or graphite.out
    siminfo = os.path.join(resultsdir, 'sim.info')
    graphiteout = os.path.join(resultsdir, 'graphite.out')
    if os.path.exists(siminfo):
        siminfo = eval(open(siminfo).read())
    elif os.path.exists(graphiteout):
        siminfo = eval(open(graphiteout).read())
    else:
        siminfo = None
    if siminfo:
        # If we're called from inside run-graphite, sim.info may not yet exist
        results.append(('walltime', -1, siminfo['t_elapsed']))
        results.append(('vmem', -1, siminfo['vmem']))

    ## sim.stats
    if partial:
        k1, k2 = partial[:2]
    else:
        k1, k2 = 'roi-begin', 'roi-end'

    stats = sniper_stats.SniperStats(resultsdir)
    results += stats.parse_stats((k1, k2), ncores, metrics=metrics)

    if not partial:
        walltime = [v for k, _, v in results if k == 'time.walltime']
        instrs = [v for k, _, v in results if k == 'core.instructions']
        if walltime and instrs:
            walltime = walltime[0] / 1e6  # microseconds -> seconds
            instrs = sum(instrs)
            results.append(('roi.walltime', -1, walltime))
            results.append(('roi.instrs', -1, instrs))
            results.append(('roi.ipstotal', -1, instrs / walltime))
            results.append(('roi.ipscore', -1, instrs / (walltime * ncores)))

    ## power.py
    power = {}
    powerfile = os.path.join(resultsdir, 'power.py')
    if os.path.exists(powerfile):
        exec(open(powerfile).read())
        for key, value in power.items():
            results.append(('power.%s' % key, -1, value))

    return results
Code example #3
def get_config(jobid = None, resultsdir = None, force_deleted = True):
  if jobid:
    if ic_invalid:
      raise RuntimeError('Cannot fetch results from server, make sure BENCHMARKS_ROOT points to a valid copy of benchmarks+iqlib')
    simcfg = ic.job_output(jobid, 'sim.cfg', force_deleted)
  elif resultsdir:
    cfgfile = os.path.join(resultsdir, 'sim.cfg')
    if not os.path.exists(cfgfile):
      raise ValueError('Cannot find config file at %s' % resultsdir)
    simcfg = file(cfgfile).read()
  else:
    # Without either source of sim.cfg, simcfg would be unbound below
    raise ValueError('Need either a jobid or a resultsdir')
  config = sniper_config.parse_config(simcfg)
  return config
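A brief call sketch for get_config (the jobid path needs the iqlib client `ic` that the module sets up; the directory below is hypothetical):

config = get_config(resultsdir = '/path/to/resultsdir')
ncores = int(config['general/total_cores'])
freq0 = float(sniper_config.get_config(config, 'perf_model/core/frequency', 0))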
Code example #4
File: functionbased.py  Project: ramsrivatsa/583
def createJSONData(resultsdir, outputdir, title = None, source = None, doxygenpath = None):

  resultsdir = os.path.abspath(resultsdir)
  outputdir = os.path.abspath(outputdir)
  if not title:
    title = os.path.basename(resultsdir)
  title = title.replace(' ', '_')

  global config, stats
  config = sniper_config.parse_config(file(os.path.join(resultsdir, 'sim.cfg')).read())
  stats = sniper_stats.SniperStats(resultsdir)

  readInputData(os.path.join(resultsdir,"sim.rtntrace"))
  if not os.path.exists(os.path.join(outputdir,"levels","functionbased")):
    os.makedirs(os.path.join(outputdir,"levels","functionbased"))

  writeiptstats(os.path.join(outputdir,"levels","functionbased","iptstats.json"))
  writerooflinestats(os.path.join(outputdir,"levels","functionbased","rooflinestats.json"))

  if not os.path.exists(os.path.join(HOME,"levels","functionbased","doxygen")):
    os.makedirs(os.path.join(HOME,"levels","functionbased","doxygen"))

  if source and doxygenpath:
    createDoxyGenOutput(source,doxygenpath,title)
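A sketch of driving this entry point, with hypothetical paths; source and doxygenpath are optional and only take effect together:

createJSONData(
  '/path/to/resultsdir',            # must contain sim.cfg and sim.rtntrace
  '/path/to/vizdir',                # levels/functionbased/*.json is written here
  title = 'my run',                 # spaces become underscores
  source = '/path/to/app/source',   # optional: input for the Doxygen output
  doxygenpath = '/path/to/doxygen') # optional: Doxygen installation to use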
Code example #6
File: topology.py  Project: Jashinta/570_project
def createJSONData(interval, num_intervals, resultsdir, outputdir, verbose = False):
  topodir = os.path.join(outputdir,'levels','topology')
  mkdir_p(topodir)

  gen_topology.gen_topology(resultsdir = resultsdir, outputobj = file(os.path.join(topodir, 'topo.svg'), 'w'), format = 'svg', embedded = True)

  config = sniper_config.parse_config(file(os.path.join(resultsdir, 'sim.cfg')).read())
  ncores = int(config['general/total_cores'])
  stats = sniper_stats.SniperStats(resultsdir)

  ids = collections.defaultdict(lambda: {})
  for name, lid, mid in stats.get_topology():
    ids[name][int(lid)] = int(mid)

  caches = [ 'L1-I', 'L1-D', 'L2', 'L3', 'L4', 'dram-cache' ]
  items = sum([ [ '%s-%d' % (name, core) for name in ['core','dram-cntlr']+caches ] for core in range(ncores) ], [])
  data = dict([ (item, {'info':'', 'sparkdata':[]}) for item in items ])
  dramcntlrs = [ lid for (name, lid, mid) in stats.get_topology() if name == 'dram-cntlr' ]


  for i in range(num_intervals):
    results = sniper_lib.get_results(config = config, stats = stats, partial = ('periodic-'+str(i*interval), 'periodic-'+str((i+1)*interval)))['results']
    if 'barrier.global_time_begin' in results:
      # Most accurate: ask the barrier
      results['time_begin'] = results['barrier.global_time_begin'][0]
      results['time_end'] = results['barrier.global_time_end'][0]
    elif 'performance_model.elapsed_time_end' in results:
      # Guess based on core that has the latest time (future wakeup is less common than sleep on futex)
      results['time_begin'] = max(results['performance_model.elapsed_time_begin'])
      results['time_end'] = max(results['performance_model.elapsed_time_end'])
    else:
      raise ValueError('Need either performance_model.elapsed_time or barrier.global_time, simulation is probably too old')

    for core in range(ncores):
      if 'fs_to_cycles_cores' in results:
        cycles_scale = results['fs_to_cycles_cores'][core]
      else:
        cycles_scale = 1.
      cycles = cycles_scale * (results['time_end'] - results['time_begin'])
      ninstrs = results['performance_model.instruction_count'][core]
      data['core-%d' % core]['sparkdata'].append('%.3f' % (ninstrs / cycles))
      data['core-%d' % core]['info'] = 'IPC (core-%d)' % core
      for cache in caches:
        if cache not in ids:
          # Cache level does not exist
          continue
        if ids[cache][core] != core:
          # Non-master cache
          continue
        if '%s.loads' % cache in results:
          # Sum misses and instruction counts over all cores sharing this cache
          misses = 0; ninstrs = 0
          for _core in range(ncores):
            if ids[cache][_core] == ids[cache][core]:
              misses += results['%s.load-misses'%cache][_core] + results['%s.store-misses-I'%cache][_core]
              ninstrs += results['performance_model.instruction_count'][_core]
          data['%s-%d' % (cache, core)]['sparkdata'].append('%.3f' % (1000. * misses / float(ninstrs or 1.)))
          data['%s-%d' % (cache, core)]['info'] = 'MPKI (%s-%d)' % (cache, core)

    for dramcntlr in dramcntlrs:
      ninstrs = sum(results['performance_model.instruction_count'])
      if ninstrs == 0:
        data['dram-cntlr-%d' % dramcntlr]['sparkdata'].append(0.)  # FIXME ninstrs should not be zero while we are accessing dram
      else:
        data['dram-cntlr-%d' % dramcntlr]['sparkdata'].append('%.3f' % (1000. * (results['dram.reads'][dramcntlr] + results['dram.writes'][dramcntlr]) / (ninstrs or 1.)))
      data['dram-cntlr-%d' % dramcntlr]['info'] = 'APKI (dram-cntlr-%d)' % dramcntlr

  jsonfile = open(os.path.join(topodir, 'topology.txt'), "w")
  jsonfile.write('topology = %s' % json.dumps(data))
  jsonfile.close()
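An invocation sketch: the periodic-N markers read above are emitted when Sniper takes periodic statistics snapshots, so interval must match the snapshot period used during simulation (the numbers here are illustrative):

interval = 1000000        # snapshot period, in the same units as the periodic-N markers
num_intervals = 50        # number of consecutive snapshot pairs to chart
createJSONData(interval, num_intervals,
               '/path/to/resultsdir',  # hypothetical
               '/path/to/vizdir')      # writes levels/topology/topo.svg and topology.txt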
Code example #7
def createJSONData(interval,
                   num_intervals,
                   resultsdir,
                   outputdir,
                   verbose=False):
    topodir = os.path.join(outputdir, 'levels', 'topology')
    mkdir_p(topodir)

    gen_topology.gen_topology(resultsdir=resultsdir,
                              outputobj=file(os.path.join(topodir, 'topo.svg'),
                                             'w'),
                              format='svg',
                              embedded=True)

    config = sniper_config.parse_config(
        file(os.path.join(resultsdir, 'sim.cfg')).read())
    ncores = int(config['general/total_cores'])
    stats = sniper_stats.SniperStats(resultsdir)

    ids = collections.defaultdict(lambda: {})
    for name, lid, mid in stats.get_topology():
        ids[name][int(lid)] = int(mid)

    caches = ['L1-I', 'L1-D', 'L2', 'L3', 'L4', 'dram-cache']
    items = sum(
        [['%s-%d' % (name, core) for name in ['core', 'dram-cntlr'] + caches]
         for core in range(ncores)], [])
    data = dict([(item, {'info': '', 'sparkdata': []}) for item in items])
    dramcntlrs = [
        lid for (name, lid, mid) in stats.get_topology()
        if name == 'dram-cntlr'
    ]

    for i in range(num_intervals):
        results = sniper_lib.get_results(
            config=config,
            stats=stats,
            partial=('periodic-' + str(i * interval), 'periodic-' + str(
                (i + 1) * interval)))['results']
        if 'barrier.global_time_begin' in results:
            # Most accurate: ask the barrier
            results['time_begin'] = results['barrier.global_time_begin'][0]
            results['time_end'] = results['barrier.global_time_end'][0]
        elif 'performance_model.elapsed_time_end' in results:
            # Guess based on core that has the latest time (future wakeup is less common than sleep on futex)
            results['time_begin'] = max(
                results['performance_model.elapsed_time_begin'])
            results['time_end'] = max(
                results['performance_model.elapsed_time_end'])
        else:
            raise ValueError(
                'Need either performance_model.elapsed_time or barrier.global_time, simulation is probably too old'
            )

        for core in range(ncores):
            if 'fs_to_cycles_cores' in results:
                cycles_scale = results['fs_to_cycles_cores'][core]
            else:
                cycles_scale = 1.
            cycles = cycles_scale * (results['time_end'] -
                                     results['time_begin'])
            ninstrs = results['performance_model.instruction_count'][core]
            data['core-%d' % core]['sparkdata'].append('%.3f' %
                                                       (ninstrs / cycles))
            data['core-%d' % core]['info'] = 'IPC (core-%d)' % core
            for cache in caches:
                if cache not in ids:
                    # Cache level does not exist
                    continue
                if ids[cache][core] != core:
                    # Non-master cache
                    continue
                if '%s.loads' % cache in results:
                    # Sum misses and instruction counts over all cores sharing this cache
                    misses = 0
                    ninstrs = 0
                    for _core in range(ncores):
                        if ids[cache][_core] == ids[cache][core]:
                            misses += results['%s.load-misses' %
                                              cache][_core] + results[
                                                  '%s.store-misses-I' %
                                                  cache][_core]
                            ninstrs += results[
                                'performance_model.instruction_count'][_core]
                    data['%s-%d' % (cache, core)]['sparkdata'].append(
                        '%.3f' % (1000. * misses / float(ninstrs or 1.)))
                    data['%s-%d' %
                         (cache, core)]['info'] = 'MPKI (%s-%d)' % (cache,
                                                                    core)

        for dramcntlr in dramcntlrs:
            ninstrs = sum(results['performance_model.instruction_count'])
            if ninstrs == 0:
                data['dram-cntlr-%d' % dramcntlr]['sparkdata'].append(0.)
                # FIXME ninstrs should not be zero while we are accessing dram
            else:
                data['dram-cntlr-%d' % dramcntlr]['sparkdata'].append(
                    '%.3f' % (1000. * (results['dram.reads'][dramcntlr] +
                                       results['dram.writes'][dramcntlr]) /
                              (ninstrs or 1.)))
            data['dram-cntlr-%d' %
                 dramcntlr]['info'] = 'APKI (dram-cntlr-%d)' % dramcntlr

    jsonfile = open(os.path.join(topodir, 'topology.txt'), "w")
    jsonfile.write('topology = %s' % json.dumps(data))
    jsonfile.close()
Code example #8
def createJSONData(interval,
                   num_intervals,
                   resultsdir,
                   outputdir,
                   title,
                   verbose=False):
    if verbose:
        print 'Generate JSON data for Level 3'

    stats = sniper_stats.SniperStats(resultsdir)
    config = sniper_config.parse_config(
        file(os.path.join(resultsdir, 'sim.cfg')).read())

    ncores = int(config['general/total_cores'])
    if verbose:
        print ncores, "cores detected"

    intervaldata = [0 for x in xrange(num_intervals)]
    num_exceptions = 0
    for i in range(0, num_intervals):
        if verbose:
            print "Parsing interval " + str(i + 1) + "/" + str(
                num_intervals) + "\r",

        try:
            results = cpistack.cpistack_compute(
                config=config,
                stats=stats,
                partial=[
                    "periodic-" + str(i * interval), "periodic-" + str(
                        (i + 1) * interval)
                ],
                use_simple=False,
                use_simple_mem=True,
                no_collapse=False,
                aggregate=False)
            data = results.get_data('cpi')

            intervaldata[i] = [0 for x in xrange(ncores)]

            for core in xrange(ncores):
                if core in results.cores:
                    intervaldata[i][core] = {
                        'time': (i * interval / 1000000),
                        'ipc': 1. / sum(data[core].itervalues())
                    }
                else:
                    intervaldata[i][core] = {
                        'time': (i * interval / 1000000),
                        'ipc': 0
                    }

        except ValueError:
            intervaldata[i] = [0 for x in xrange(ncores)]
            for j in range(0, ncores):
                intervaldata[i][j] = dict(time=(i * interval / 1000000), ipc=0)
            num_exceptions += 1
            continue

    # Write JSON to file
    mkdir_p(os.path.join(outputdir, 'levels', 'level3', 'data'))
    f = open(
        os.path.join(outputdir, 'levels', 'level3', 'data', 'ipcvalues.txt'),
        "w")
    f.write("intervalsize = " + str(interval) + ";\n")
    f.write("ipcvaluestr = '" + json.dumps(intervaldata) + "';")
    f.close()
    f = open(
        os.path.join(outputdir, 'levels', 'level3', 'data', 'ipcvalues.json'),
        "w")
    f.write(json.dumps(intervaldata, indent=4))
    f.close()
    if verbose:
        print
    if num_exceptions > 0:
        if verbose:
            print("There was no useful information for " +
                  str(num_exceptions) + " intervals.")
            print("You might want to increase the interval size.")
    if verbose:
        print('[OK]')
Code example #9
File: level3.py  Project: Jashinta/570_project
def createJSONData(interval, num_intervals, resultsdir, outputdir, title, verbose = False):
  if verbose:
    print 'Generate JSON data for Level 3'

  stats = sniper_stats.SniperStats(resultsdir)
  config = sniper_config.parse_config(file(os.path.join(resultsdir, 'sim.cfg')).read())

  ncores = int(config['general/total_cores'])
  if verbose:
    print ncores, "cores detected"

  intervaldata = [0 for x in xrange(num_intervals)]
  num_exceptions = 0
  for i in range(num_intervals):
    if verbose:
      print "Parsing interval "+str(i+1)+"/"+str(num_intervals)+"\r",

    try:
      results = cpistack.cpistack_compute(
        config = config,
        stats = stats,
        partial = ["periodic-"+str(i*interval),"periodic-"+str((i+1)*interval)],
        use_simple = False,
        use_simple_mem = True,
        no_collapse = False,
        aggregate = False
      )
      data = results.get_data('cpi')

      intervaldata[i] = [0 for x in xrange(ncores)]

      for core in xrange(ncores):
        if core in results.cores:
          intervaldata[i][core] = {'time':(i*interval/1000000), 'ipc':1./sum(data[core].itervalues())}
        else:
          intervaldata[i][core] = {'time':(i*interval/1000000), 'ipc':0}

    except ValueError:
      intervaldata[i] = [0 for x in xrange(ncores)]
      for j in range(0,ncores):
        intervaldata[i][j] = dict(time=(i*interval/1000000), ipc=0)
      num_exceptions += 1
      continue

  # Write JSON to file
  mkdir_p(os.path.join(outputdir,'levels','level3','data'))
  f = open(os.path.join(outputdir,'levels','level3','data','ipcvalues.txt'), "w")
  f.write("intervalsize = "+str(interval)+";\n")
  f.write("ipcvaluestr = '"+json.dumps(intervaldata)+"';")
  f.close()
  f = open(os.path.join(outputdir,'levels','level3','data','ipcvalues.json'), "w")
  f.write(json.dumps(intervaldata, indent=4))
  f.close()
  if verbose:
    print
  if num_exceptions > 0:
    if verbose:
      print("There was no useful information for "+str(num_exceptions)+" intervals.")
      print("You might want to increase the interval size.")
  if verbose:
    print('[OK]')
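A matching call sketch for the Level 3 variant, under the same periodic-snapshot assumption as the topology example; note that title is accepted but unused in the body:

createJSONData(1000000, 50,
               '/path/to/resultsdir',  # hypothetical
               '/path/to/vizdir',      # writes levels/level3/data/ipcvalues.{txt,json}
               'my run',
               verbose = True)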