Example #1
 def __init__(self, initial, tester, hlconfig):
   self.initial  = initial
   self.members  = [initial.clone()]
   self.best     = None
   self.notadded = []
   self.removed  = []
   self.failed   = set()
   self.testers  = [tester]
   self.roundNumber = -1
   self.firstRound = True
   self.hlconfig = hlconfig
   if config.candidatelog:
     self.candidatelog = storagedirs.openCsvStats("candidatelog",
                                                  ['time',
                                                   'candidates',
                                                   'tests_complete',
                                                   'tests_timeout',
                                                   'tests_crashed',
                                                   'config_path',
                                                   'input_size',
                                                   'end_of_round',
                                                   'round_number'])
     self.candidateloglast = None
   self.starttime = time.time()
   self.onMembersChanged(True)
   self.members[0].log_mutation(MutationLog.seed)
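
A pitfall worth noting for seeding logic like the constructor above: testing
random.seed itself (rather than calling it) checks a function object, which is
always truthy, so such a guard can never take its else branch. A minimal
demonstration:

import random

print bool(random.seed)   # True: the attribute is a function object
random.seed(1234)         # calling it actually seeds the RNG
print bool(random.seed)   # still True; truthiness never reflects seeding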
Example #2
 def __init__(self, initial, tester, hlconfig):
     self.initial = initial
      self.members = [initial.clone()]
     self.best = None
     self.notadded = []
     self.removed = []
     self.failed = set()
     self.testers = [tester]
     self.roundNumber = -1
     self.firstRound = True
     self.hlconfig = hlconfig
     if config.candidatelog:
         self.candidatelog = storagedirs.openCsvStats(
             "candidatelog", [
                 'time', 'candidates', 'tests_complete', 'tests_timeout',
                 'tests_crashed', 'config_path', 'input_size',
                 'end_of_round', 'round_number'
             ])
         self.candidateloglast = None
     self.starttime = time.time()
     self.onMembersChanged(True)
     self.members[0].log_mutation(MutationLog.seed)
Example #3
 def __init__(self, initial, tester, baseline=None):
   self.members  = [initial]
   self.best     = None
   self.notadded = []
   self.removed  = []
   self.failed   = set()
   self.testers  = [tester]
   self.baseline = baseline
   self.roundNumber = -1
   self.firstRound = True
   if config.candidatelog:
     self.candidatelog     = storagedirs.openCsvStats("candidatelog",
                                                      ['time',
                                                       'candidates',
                                                       'tests_complete',
                                                       'tests_timeout',
                                                       'tests_crashed',
                                                       'config_path',
                                                       'input_size',
                                                       'end_of_round',
                                                       'round_number'])
     self.candidateloglast = None
   self.starttime = time.time()
   self.onMembersChanged(True)
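
These constructors lean on storagedirs.openCsvStats(name, header), which,
judging from the call sites here (a header list at creation, writerow calls
later), returns a csv-writer-like object. A minimal stand-in under that
assumption, handy for running the snippets outside the tuner; the file layout
and behavior are inferred from usage, not taken from storagedirs itself:

import csv
import os

def open_csv_stats(dirpath, name, header):
  # hypothetical stand-in for storagedirs.openCsvStats: create
  # <dirpath>/<name>.csv, emit the header row, return the writer
  f = open(os.path.join(dirpath, name + ".csv"), "wb")  # Python 2 csv wants binary mode
  writer = csv.writer(f)
  writer.writerow(header)
  return writer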
Example #4
def autotuneInner(benchmark, returnBest=None, tester_lambda=None, pop_lambda=None, hlconfig_lambda=None, config_lambda=None):
  """Function running the autotuning process.
If returnBest is specified, it should be a list. The best candidate found will 
be added to that list"""
  with progress.Scope("autotuning "+benchmark,
     config.rounds_per_input_size*math.log(config.max_input_size,2)+config.final_rounds) as pr:
    config.benchmark = benchmark
    candidate, tester, hlconfig = init(benchmark, tester_lambda, pop_lambda, hlconfig_lambda, config_lambda)
    try:
      if pop_lambda is not None:
        pop = pop_lambda(candidate, tester, hlconfig)
      else:
        pop = Population(candidate, tester, hlconfig)

      stats = storagedirs.openCsvStats("roundstats", 
          ("round",
           "input_size",
           "cumulative_sec",
           "incremental_sec",
           "testing_sec",
           "inputgen_sec")+pop.statsHeader())
      timers.total.start()
      config.end_time = time.time() + config.max_time
      def generation():
        pop.generation()
        pr()
        stats.writerow((pop.roundNumber,
                        pop.inputSize(),
                        timers.total.total(),
                        timers.total.lap(),
                        timers.testing.lap(),
                        timers.inputgen.lap())+pop.stats())
      try:
        while pop.inputSize() < config.max_input_size:
          for z in xrange(config.rounds_per_input_size):
            generation()
          pop.nextInputSize()
        for z in xrange(config.final_rounds):
          generation()
      except TrainingTimeout:
        pass
      timers.total.stop()

      #check to make sure we did something:
      if pop.firstRound:
        warnings.warn(tunerwarnings.AlwaysCrashes())
        
      logging.info("TODO: using acc target: "+str(config.accuracy_target))
      return pop.best
    except:
      traceback.print_exc()     # Connelly: Print exceptions (are not otherwise displayed...not sure why)
      raise
    finally:
      if pop.best and config.output_cfg:
        print pop.best.cfgfile(),"=>" , config.output_cfg
        shutil.copyfile(pop.best.cfgfile(), config.output_cfg)
      if pop.best and returnBest is not None:
        returnBest.append(pop.best)
      at = storagedirs.getactivetimers()
      if len(at):
        storagedirs.openCsvStats("timers", at.keys()).writerow(at.values())
      if tester and hasattr(tester, 'cleanup'):     # Connelly: only call if has cleanup attr
        tester.cleanup()
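
Per the docstring, a caller that wants the winning candidate back passes a
list as returnBest; a minimal driver under that contract (the benchmark name
is illustrative):

best = []
autotuneInner("sort", returnBest=best)
if best:
  print "best config:", best[0].cfgfile()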
Example #5
def onlinelearnInner(benchmark):
  candidate, tester = sgatuner.init(benchmark, createChoiceSiteMutatorsOnline)
  pop = OnlinePopulation()
  objectives = ObjectiveTuner(pop)

  # mutators in the last time window that produced improved candidates,
  # ordered by descending fitness of the candidates
  mutatorLog = MutatorLog(name="acc and time log")

  ostats = storagedirs.openCsvStats("onlinestats", ObjectiveTuner.statsHeader)
  pstats = storagedirs.openCsvStats("population", OnlinePopulation.statsHeader)
  clog = storagedirs.openCsvStats("onlinecandidates", ['gen',
                                                       'timesafe','accsafe','timeexp','accexp',
                                                       'safe','seed','experimental',
                                                       ])

    
  try:
    timers.total.start()

    # seed first round
    p = candidate
    if config.online_baseline:
      c = None
    else:
      c = p.clone()
    if not tester.race(p, c):
      raise Exception("seed round failed to produce results")
    if not p.wasTimeout:
      pop.add(p)
    if c and not c.wasTimeout:
      pop.add(c)

    if not config.online_baseline:
      mlog = MutatorLogFile(c.mutators)

    # now normal rounds
    for gen in itertools.count(1):
      if config.max_time and objectives.elapsed>config.max_time:
        break
      if config.max_gen and gen>config.max_gen:
        break
      if gen%config.reweight_interval==0:
        pop.reweight()

      p = pop.select(objectives.fitness)
      #s = pop.choice(parentlimit(p), getacc)
      s = p

      if config.fixed_safe_alg:
        p = candidate

      if config.online_baseline:
        c = None
      else:
        if objectives.needAccuracy():
          mfilter = lambda x: x.accuracyHint
        else:
          mfilter = lambda x: True
        
        c = s.cloneAndMutate(tester.n,
                             adaptive = True,
                             mutatorLog = mutatorLog,
                             objectives = objectives,
                             mutatorFilter = mfilter)
      tlim, atarg = objectives.getlimits(p, s, c)
      # guard c: in baseline mode c is None and has no wasTimeout attribute
      if tester.race(p, c, tlim, atarg) and \
         not (p.wasTimeout and (c is None or c.wasTimeout)):
        p.discardResults(config.max_trials)
        if c and not c.wasTimeout:
          pop.add(c)
          pop.prune()

        if c is None:
          c=p
        
        logging.debug("Child vs parent, better=%d, %f vs. %f" % (int(gettime(c) < gettime(p)), gettime(c), gettime(p)))
        clog.writerow([gen, lasttime(p), lastacc(p), lasttime(c), lastacc(c)]
                      +map(storagedirs.relpath,[p.cfgfile(), s.cfgfile(), c.cfgfile()]))

        dtime = gettime(c) - gettime(p)
        dacc = None if c.wasTimeout else (getacc(c) - getacc(p))

        # c cannot be None here (it was replaced by p above)
        mutatorLog.add(c, dtime, dacc, gettime(c), None if c.wasTimeout else getacc(c))

        if not config.online_baseline:
          mlog.logPerformance(gen, gettime(c), "None" if c.wasTimeout else getacc(c), dtime, dacc, str(c.lastMutator))
          mlog.logScores(gen, c.mutatorScores)

        t,a = resultingTimeAcc(p, c)
        print "Generation", gen, "elapsed",objectives.elapsed,"time", t,"accuracy",a, getconf(p)
        print "Objectives", objectives
        if a is not None and t is not None:
          objectives.result(t,a)
        pop.output((p,c,s))
        ostats.writerow(objectives.stats(gen))
        pstats.writerow(pop.stats(gen))
      else:
        print 'error: race failed or both candidates timed out'

    timers.total.stop()
  finally:
    at = storagedirs.getactivetimers()
    if len(at):
      storagedirs.openCsvStats("timers", at.keys()).writerow(at.values())
    tester.cleanup()
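
The loop above only touches the tester through race(), the attribute n, and
cleanup(), so its control flow can be dry-run with a stub; this interface is
inferred from the call sites, not from the tester's actual source:

class FakeTester(object):
  # hypothetical stub exposing just what onlinelearnInner uses
  n = 100  # input-size hint handed to cloneAndMutate

  def race(self, p, c, tlim=None, atarg=None):
    # pretend both candidates ran and finished within any time limit
    for cand in (p, c):
      if cand is not None:
        cand.wasTimeout = False
    return True  # truthy: results were collected

  def cleanup(self):
    pass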
Example #6
 def __init__(self, mutators):
   self.mutators = sorted(mutators, key=str)
   self.mutatorPerf = storagedirs.openCsvStats("mutatorperf", ["gen", "time", "accuracy", "dtime", "daccuracy", "selected_mutator"])
   self.mutatorScores = storagedirs.openCsvStats("mutatorscores", ["gen"] + map(str, mutators))
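
Header lists like these are vulnerable to a classic pitfall: adjacent string
literals concatenate implicitly, so a comma typed inside a closing quote
silently merges two column names into one:

header = ["gen", "time", "accuracy", "dtime", "daccuracy," "selected_mutator"]
print len(header)   # 5, not 6
print header[-1]    # daccuracy,selected_mutator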
Example #7
def onlinelearnInner(benchmark):
    candidate, tester = sgatuner.init(benchmark,
                                      createChoiceSiteMutatorsOnline)
    pop = OnlinePopulation()
    objectives = ObjectiveTuner(pop)
    # mutators in the last time window that produced improved candidates,
    # ordered by descending fitness of the candidates
    mutatorLog = MutatorLog(name="acc and time log")

    ostats = storagedirs.openCsvStats("onlinestats",
                                      ObjectiveTuner.statsHeader)
    pstats = storagedirs.openCsvStats("population",
                                      OnlinePopulation.statsHeader)
    clog = storagedirs.openCsvStats("onlinecandidates", [
        'gen',
        'timesafe',
        'accsafe',
        'timeexp',
        'accexp',
        'safe',
        'seed',
        'experimental',
    ])

    try:
        timers.total.start()
        # seed first round
        p = candidate
        if config.online_baseline:
            c = None
        else:
            c = p.clone()
        if not tester.race(p, c):
            raise Exception("seed round failed to produce results")
        if not p.wasTimeout:
            pop.add(p)
        if c and not c.wasTimeout:
            pop.add(c)

        if not config.online_baseline:
            mlog = MutatorLogFile(c.mutators)
        # now normal rounds
        for gen in itertools.count(1):
            if config.max_time and objectives.elapsed > config.max_time:
                break
            if config.max_gen and gen > config.max_gen:
                break
            if gen % config.reweight_interval == 0:
                pop.reweight()

            p = pop.select(objectives.fitness)
            #s = pop.choice(parentlimit(p), getacc)
            s = p

            if config.fixed_safe_alg:
                p = candidate

            if config.online_baseline:
                c = None
            else:
                if objectives.needAccuracy():
                    mfilter = lambda x: x.accuracyHint
                else:
                    mfilter = lambda x: True

                c = s.cloneAndMutate(tester.n,
                                     adaptive=True,
                                     mutatorLog=mutatorLog,
                                     objectives=objectives,
                                     mutatorFilter=mfilter)
            tlim, atarg = objectives.getlimits(p, s, c)
            # guard c: in baseline mode c is None and has no wasTimeout attribute
            if tester.race(p, c, tlim, atarg) and \
                    not (p.wasTimeout and (c is None or c.wasTimeout)):
                p.discardResults(config.max_trials)
                if c and not c.wasTimeout:
                    pop.add(c)
                    pop.prune()

                if c is None:
                    c = p

                logging.debug(
                    "Child vs parent, better=%d, %f vs. %f" %
                    (int(gettime(c) < gettime(p)), gettime(c), gettime(p)))
                clog.writerow(
                    [gen,
                     lasttime(p),
                     lastacc(p),
                     lasttime(c),
                     lastacc(c)] +
                    map(storagedirs.relpath,
                        [p.cfgfile(), s.cfgfile(),
                         c.cfgfile()]))

                dtime = gettime(c) - gettime(p)
                dacc = None if c.wasTimeout else (getacc(c) - getacc(p))

                # c cannot be None here (it was replaced by p above)
                mutatorLog.add(c, dtime, dacc, gettime(c),
                               None if c.wasTimeout else getacc(c))

                if not config.online_baseline:
                    mlog.logPerformance(gen, gettime(c),
                                        "None" if c.wasTimeout else getacc(c),
                                        dtime, dacc, str(c.lastMutator))
                    mlog.logScores(gen, c.mutatorScores)

                t, a = resultingTimeAcc(p, c)
                print "Generation", gen, "elapsed", objectives.elapsed, "time", t, "accuracy", a, getconf(
                    p)
                print "Objectives", objectives
                if a is not None and t is not None:
                    objectives.result(t, a)
                pop.output((p, c, s))
                ostats.writerow(objectives.stats(gen))
                pstats.writerow(pop.stats(gen))
            else:
                print 'error: race failed or both candidates timed out'

        timers.total.stop()
    finally:
        at = storagedirs.getactivetimers()
        if len(at):
            storagedirs.openCsvStats("timers", at.keys()).writerow(at.values())
        tester.cleanup()
Example #8
 def __init__(self, mutators):
   self.mutators = sorted(mutators, key=str)
   self.mutatorPerf = storagedirs.openCsvStats("mutatorperf", ["gen", "time", "accuracy", "dtime", "daccuracy", "selected_mutator"])
   self.mutatorScores = storagedirs.openCsvStats("mutatorscores", ["gen"] + map(str, mutators))
Example #9
def autotuneInner(benchmark):
  progress.push()
  config.benchmark = benchmark
  candidate, tester = init(benchmark)
  try:
    pop = Population(candidate, tester, None)
    
    if not pop.isVariableAccuracy() and config.accuracy_target:
      logging.info("clearing accuracy_target")
      config.accuracy_target = None

    stats = storagedirs.openCsvStats("roundstats", 
        ("round",
         "input_size",
         "cumulative_sec",
         "incremental_sec",
         "testing_sec",
         "inputgen_sec")+pop.statsHeader())
    timers.total.start()
    config.end_time = time.time() + config.max_time
    try:
      progress.remaining(config.max_input_size*(1+config.final_rounds))
      while pop.inputSize() < config.max_input_size:
        progress.status("autotuning %s: input %d of %d" % (config.benchmark, pop.inputSize(), config.max_input_size))
        pop.generation()
        stats.writerow((pop.roundNumber,
                        pop.inputSize(),
                        timers.total.total(),
                        timers.total.lap(),
                        timers.testing.lap(),
                        timers.inputgen.lap())+pop.stats())
        pop.nextInputSize()
        progress.remaining(config.max_input_size - pop.inputSize() + config.max_input_size*config.final_rounds)
      for z in xrange(config.final_rounds):
        pop.generation()
        stats.writerow((pop.roundNumber,
                        pop.inputSize(),
                        timers.total.total(),
                        timers.total.lap(),
                        timers.testing.lap(),
                        timers.inputgen.lap())+pop.stats())
        progress.remaining((config.final_rounds - z)*config.max_input_size)
    except TrainingTimeout:
      pass
    timers.total.stop()

    #check to make sure we did something:
    if pop.firstRound:
      warnings.warn(tunerwarnings.AlwaysCrashes())
      
    logging.info("TODO: using acc target: "+str(config.accuracy_target))
    return pop.best
  finally:
    if pop.best and config.output_cfg:
      print pop.best.cfgfile(),"=>" , config.output_cfg
      shutil.copyfile(pop.best.cfgfile(), config.output_cfg)
    at = storagedirs.getactivetimers()
    if len(at):
      storagedirs.openCsvStats("timers", at.keys()).writerow(at.values())
    if tester:
      tester.cleanup()
    progress.pop()
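
The timers dump in the finally blocks relies on a documented guarantee: for a
dict that is not modified in between, keys() and values() are returned in
corresponding order, so the CSV header and its row line up:

at = {"testing": 12.5, "inputgen": 3.2, "total": 40.1}
assert zip(at.keys(), at.values()) == at.items()  # pairs always correspond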
Example #10
def autotuneInner(benchmark,
                  returnBest=None,
                  tester_lambda=None,
                  pop_lambda=None,
                  hlconfig_lambda=None,
                  config_lambda=None):
    """Function running the autotuning process.
If returnBest is specified, it should be a list. The best candidate found will 
be added to that list"""
    with progress.Scope(
            "autotuning " + benchmark,
            config.rounds_per_input_size * math.log(config.max_input_size, 2) +
            config.final_rounds) as pr:
        config.benchmark = benchmark
        candidate, tester, hlconfig = init(benchmark, tester_lambda,
                                           pop_lambda, hlconfig_lambda,
                                           config_lambda)
        try:
            if pop_lambda is not None:
                pop = pop_lambda(candidate, tester, hlconfig)
            else:
                pop = Population(candidate, tester, hlconfig)

            stats = storagedirs.openCsvStats(
                "roundstats",
                ("round", "input_size", "cumulative_sec", "incremental_sec",
                 "testing_sec", "inputgen_sec") + pop.statsHeader())
            timers.total.start()
            config.end_time = time.time() + config.max_time

            def generation():
                pop.generation()
                pr()
                stats.writerow((pop.roundNumber, pop.inputSize(),
                                timers.total.total(), timers.total.lap(),
                                timers.testing.lap(), timers.inputgen.lap()) +
                               pop.stats())

            try:
                while pop.inputSize() < config.max_input_size:
                    for z in xrange(config.rounds_per_input_size):
                        generation()
                    pop.nextInputSize()
                for z in xrange(config.final_rounds):
                    generation()
            except TrainingTimeout:
                pass
            timers.total.stop()

            #check to make sure we did something:
            if pop.firstRound:
                warnings.warn(tunerwarnings.AlwaysCrashes())

            logging.info("TODO: using acc target: " +
                         str(config.accuracy_target))
            return pop.best
        except:
            traceback.print_exc()
            # Connelly: Print exceptions (are not otherwise displayed...not sure why)
            raise
        finally:
            if pop.best and config.output_cfg:
                print pop.best.cfgfile(), "=>", config.output_cfg
                shutil.copyfile(pop.best.cfgfile(), config.output_cfg)
            if pop.best and returnBest is not None:
                returnBest.append(pop.best)
            at = storagedirs.getactivetimers()
            if len(at):
                storagedirs.openCsvStats("timers",
                                         at.keys()).writerow(at.values())
            # Connelly: only call if has cleanup attr
            if tester and hasattr(tester, 'cleanup'):
                tester.cleanup()