Example #1
def init(benchmark, acf=createChoiceSiteMutators, taf=createTunableMutators):
  if config.debug:
    logging.basicConfig(level=logging.DEBUG)
    config.pause_on_crash = True
  if not config.threads:
    config.threads = pbutil.cpuCount()
  for k in filter(len, config.abort_on.split(',')):
    warnings.simplefilter('error', getattr(tunerwarnings,k))
  infoxml = TrainingInfo(pbutil.benchmarkToInfo(benchmark))
  if not config.main:
    config.main = mainname([pbutil.benchmarkToBin(benchmark)])
  tester = CandidateTester(benchmark, config.min_input_size)
  if config.seed is None:
    cfg = defaultConfigFile(pbutil.benchmarkToBin(tester.app))
  else:
    cfg = configtool.ConfigFile(config.seed)
  candidate = Candidate(cfg, infoxml.transform(config.main))
  addMutators(candidate, infoxml.globalsec(), acf, taf)
  addMutators(candidate, infoxml.transform(config.main), acf, taf)
  candidate.addMutator(mutators.MultiMutator(2))
  if not config.delete_output_dir:
    storagedirs.cur.dumpConfig()
    storagedirs.cur.dumpGitStatus()
    storagedirs.cur.saveFile(pbutil.benchmarkToInfo(benchmark))
    storagedirs.cur.saveFile(pbutil.benchmarkToBin(benchmark))
  return candidate, tester
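
Example #1 above builds a seed Candidate from the benchmark's info XML, attaches choice-site and tunable mutators, and returns it together with a CandidateTester. The sketch below is a minimal, hypothetical driver for that pair; run_smoke_test, its defaults, and its return value are illustrative only, and it assumes the tuner modules referenced in the example (config, candidatetester) are importable in the same way.

import sys

def run_smoke_test(benchmark, trials=5, timeout=30.0):
    # Hypothetical driver around the init() shown above; the function name,
    # defaults and return value are illustrative, not part of the original tuner.
    candidate, tester = init(benchmark)
    try:
        # testN() is the same call Example #2 uses to collect timing samples.
        tester.testN(candidate, trials, timeout)
    except candidatetester.CrashException as e:
        sys.stderr.write(str(e) + '\n')
        return None
    # metrics[0][size] holds the timing results for one input size (cf. Example #2).
    return candidate.metrics[0][config.min_input_size].min()

Because the candidate binary can crash under test, Example #2 wraps the same testN() call in a try/except inside a loop and keeps retrying until enough samples have been collected.
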
Example #2
def main(benchmark, n, filename):
    if os.path.isdir(filename):
        filename = os.path.join(filename, 'stats/candidatelog.csv')
    f = open(filename)
    infoxml = TrainingInfo(pbutil.benchmarkToInfo(benchmark))
    main = mainname([pbutil.benchmarkToBin(benchmark)])
    infoxml = infoxml.transform(main)
    binpath = pbutil.benchmarkToBin(benchmark)
    tester = CandidateTester(benchmark, n)
    root = os.path.dirname(filename)

    def findconfig(c):
        if c[0] == '/':
            c = c[1:]
        if os.path.isfile(os.path.join(root, c)):
            return os.path.join(root, c)
        if os.path.isfile(os.path.join(root, '..', c)):
            return os.path.join(root, '..', c)
        return None

    rows = list(csv.DictReader(f))
    for i, row in enumerate(rows):
        if options.onlyrounds \
            and i+1<len(rows) \
            and row.has_key('round_number') \
            and rows[i+1]['round_number']==row['round_number']:
            continue
        config = findconfig(row['config_path'])
        row['tests'] = int(row['tests_complete']) + int(
            row['tests_timeout']) + int(row['tests_crashed'])
        candidate = Candidate(ConfigFile(config), infoxml)
        while candidate.numTests(n) < options.trials:
            try:
                tester.testN(candidate, options.trials, options.timeout)
            except candidatetester.CrashException, e:
                print >> sys.stderr, e
        try:
            row['minperf'] = candidate.metrics[0][n].min()
            row['perf_on_%d' % n], row['perf_on_%d_ci' % n] = \
                candidate.metrics[0][n].interval(options.confidence)
            row['invperf'] = 1.0 / row['perf_on_%d' % n]
        except Exception, e:
            row['minperf'] = -1
            row['perf_on_%d' % n] = -1
            print >> sys.stderr, e
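
main() above re-runs every configuration recorded in stats/candidatelog.csv and annotates each row with fresh timing statistics. It reads a module-level options object; the harness below is a hypothetical front end that supplies the four attributes the loop actually uses (trials, timeout, confidence, onlyrounds), assuming it lives in the same module as main() so that the options global is visible. The flag names and default values are assumptions, not part of the original script.

import optparse
import sys

def _parse_args(argv=None):
    # Hypothetical option parsing; only the attribute names are taken from the
    # loop above, the flags and defaults are made up for this sketch.
    parser = optparse.OptionParser(usage='%prog [options] BENCHMARK N CANDIDATELOG')
    parser.add_option('--trials', type='int', default=5,
                      help='minimum number of timing samples per configuration')
    parser.add_option('--timeout', type='float', default=30.0,
                      help='per-run timeout in seconds')
    parser.add_option('--confidence', type='float', default=0.95,
                      help='confidence level for the reported timing interval')
    parser.add_option('--onlyrounds', action='store_true', default=False,
                      help='retest only the last candidate of each tuning round')
    return parser.parse_args(argv)

if __name__ == '__main__':
    options, args = _parse_args()
    if len(args) != 3:
        sys.exit('usage: BENCHMARK N CANDIDATELOG')
    main(args[0], int(args[1]), args[2])
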
Example #3
def init(benchmark,
         tester_lambda=None,
         pop_lambda=None,
         hlconfig_lambda=None,
         config_lambda=None):
    if config.debug:
        logging.basicConfig(level=logging.DEBUG)
        config.pause_on_crash = True
    if not config.threads:
        config.threads = pbutil.cpuCount()
    for k in filter(len, config.abort_on.split(',')):
        warnings.simplefilter('error', getattr(tunerwarnings, k))
    if hlconfig_lambda is not None:
        hlconfig = hlconfig_lambda()
    else:
        infoxml = TrainingInfo(pbutil.benchmarkToInfo(benchmark))
        hlconfig = HighLevelConfig(infoxml)
    if not config.main:
        if tester_lambda is None and pop_lambda is None and hlconfig_lambda is None:
            config.main = mainname([pbutil.benchmarkToBin(benchmark)])
    if tester_lambda is not None:
        tester = tester_lambda(benchmark, config.min_input_size)
    else:
        tester = CandidateTester(benchmark, config.min_input_size)
    if config_lambda is not None:
        cfg = config_lambda()
    else:
        if config.seed is None:
            cfg = defaultConfigFile(pbutil.benchmarkToBin(tester.app))
        else:
            cfg = configtool.ConfigFile(config.seed)
    candidate = Candidate(cfg)
    if hlconfig_lambda is None:
        if not config.delete_output_dir:
            storagedirs.cur.dumpConfig()
            storagedirs.cur.dumpGitStatus()
            storagedirs.cur.saveFile(pbutil.benchmarkToInfo(benchmark))
            storagedirs.cur.saveFile(pbutil.benchmarkToBin(benchmark))
        if not infoxml.transform(config.main).isVariableAccuracy() \
                and config.accuracy_target:
            logging.info("clearing accuracy_target")
            config.accuracy_target = None
    return candidate, tester, hlconfig
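
This variant of init() lets the caller inject factories for the tester, the high-level config, and the seed configuration instead of the defaults built from the benchmark's info XML. Below is a minimal sketch of such an injection; the benchmark name 'example_benchmark' and the factory bodies are placeholders, and it assumes CandidateTester, HighLevelConfig, TrainingInfo and pbutil are importable exactly as in the example.

def make_tester(benchmark, min_input_size):
    # Stand-in factory; a caller could return a CandidateTester subclass here.
    return CandidateTester(benchmark, min_input_size)

def make_hlconfig():
    # Stand-in factory mirroring the default branch of init() above.
    info = TrainingInfo(pbutil.benchmarkToInfo('example_benchmark'))
    return HighLevelConfig(info)

candidate, tester, hlconfig = init('example_benchmark',
                                   tester_lambda=make_tester,
                                   hlconfig_lambda=make_hlconfig)

Note that when hlconfig_lambda is supplied, init() never loads the info XML itself, so the storagedirs dumps and the accuracy_target check in the final branch are skipped.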