Exemple #1
0
def setup():
   "Read CEF collection, set up directories, and reset best found params."
   global update_cefs, best_info, best_params, start_time, iter_counter
   
   # Seed the search with the 50 best-known CEFs from the epymils collection.
   cefs = epymils.cefs.bests(50)
   if len(sys.argv) == 2:
      # One command-line argument: a file of whitespace-separated parameter
      # tokens (Python 2 `file()` builtin).  Its CEFs are merged into `cefs`.
      params = epymils.params.make_params(file(sys.argv[1]).read().strip().split())
      extend_cefs(cefs, params)
      update_cefs = True
   else:
      # No input params: start from scratch, do not write back to the collection.
      params = None
      update_cefs = False
      
   log("CEFS:", ["[%2d] %s"%(n,s) for (n,s) in enumerate(map(epymils.params.block2cef,cefs))])

   # Fresh scratch directory for prototype files produced during tuning.
   system("rm -fr protos")
   system("mkdir -p protos")

   # Reset the globally tracked best solution and progress counters
   # (consumed by update_best / run).
   best_params = None
   best_info = None

   start_time = time.time()
   iter_counter = 0

   return (cefs, params)
Exemple #2
0
def extend_cefs(cefs, params):
   "Extend cefs with CEFs from params."

   # Every parameter whose key starts with "cef" names a CEF block;
   # append the ones not already present (in-place mutation of `cefs`).
   for key in [k for k in params if k.startswith("cef")]:
      candidate = params[key]
      if candidate in cefs:
         continue
      log("Extending CEFS with %s"%candidate)
      cefs.append(candidate)
Exemple #3
0
def train_enigma(results, problems, name, boost="POS3", debug=False):
   "Build Enigma training data from solver results, train a model, save it."
   # The sha1 digest fingerprints the training inputs (problem list plus
   # generated training vectors); callers use it as a stable model id.
   digest = hashlib.sha1()
   problem_text = "\n".join(problems)
   digest.update(problem_text)
   file("train.txt","w").write(problem_text)
   # Positive/negative clause samples extracted from the run results.
   positives = enigma.collect_pos(problems, results)
   negatives = enigma.collect_neg(problems, results)
   symbol_map = enigma.make_symbols(positives, negatives, "symbols.map")
   samples = enigma.make_train(positives, negatives, boost=boost)
   digest.update("\n".join(samples))
   accuracy = enigma.train()
   log("> Enigma candidate model: %s (samples=%s/%s symbols=%s trains=%s): %s"%(name,len(positives),len(negatives),len(symbol_map),len(samples),accuracy))
   enigma.save_model(name, debug=debug)
   return (name, digest.hexdigest())
Exemple #4
0
def update_best(params, info):
   "Remember copies of `params`/`info` when `info` beats the current best."
   global best_info, best_params, start_time, iter_counter

   # Derived progress figures; also validate that info[1]/info[2] are numeric.
   quality = float(info[1])
   total = float(info[2])
   solved = round(total - (quality*total/10**6))
   runtime = time.time() - start_time
   #print "PROGRES: %d %d %d %d %.1f %.2f" % (iter_counter,runtime,solved,total,(solved/total)*100.0 if total!=0 else 0, quality)

   if paramils.params_better(info, best_info):
      log("Q OUT:", ["currently best solution"])
      # Store copies so later mutation of the caller's dict/list is harmless.
      best_params = dict(params)
      best_info = list(info)
Exemple #5
0
def register_enigma_model(params, models):
   "Assign permanent id to the temporary enigma model in params."

   # Scan the CEF slots for the first Enigma-based CEF whose temporary
   # model name is known in `models`; rename it and stop.
   for i in range(int(params["slots"])):
      key = "cef%d"%i
      block = params[key]
      if not block.startswith("Enigma"):
         continue
      # NOTE(review): other code here uses epymils.params.block2cef —
      # confirm epymils.block2cef is the intended alias.
      cef = epymils.block2cef(block)
      model = cef.split(",")[1]
      if model not in models:
         continue
      # Permanent name derived from the sha1 hash of the training data.
      name = "enigmodel"+models[model]
      params[key] = block.replace(model, name)
      log("> Enigma new model (%s): %s"%(model,name))
      enigma.rename_model(model, name)
      return name
   return None
Exemple #6
0
def enigma_tune(cefs, params, REGISTER=False):
   "Train Enigma models from the current params, then tune over them with ParamILS."
   global iter_counter, best_params
   log("\nENIGMA_TUNE %s" % iter_counter)
   # Nothing to tune without a starting parameter assignment.
   if not params:
      return None
   # Work on a copy so the caller's dict is never mutated.
   params = dict(params)
   # get training samples
   proto = eargs.e_proto(params)
   file("proto","w").write(proto) # for debug=True in train_enigma
   json.dump(params,file("params.json","w"),indent=3,sort_keys=True) # as well for debug
   # Problem lists: `problems_train` for model training, `problems_all` for evaluation.
   problems_train = file("data/problems.txt").read().strip().split("\n")
   problems_all = file("data/all.txt").read().strip().split("\n")
   results = training_results(proto, problems_all, EVAL_LIMIT, setting.CORES)
   # create enigma model and run ParamILS
   models = make_enigma_models(results, problems_all, problems_train)
   (params, info) = paramils.run_enigmatune(iter_counter, cefs, params, models.keys())
   update_best(params, info)
   # register the model permanently if this became the global best
   if params == best_params:
      register_enigma_model(params, models)
      # re-copy: register_enigma_model renames CEF slots in-place
      best_params = dict(params)
   elif REGISTER:
      register_enigma_model(params, models)
   # verify results: re-run the tuned proto and compare per-problem stats
   if VERIFY_ENIGMA:
      log("\nVERIFY_ENIGMA: Trial runs.")
      proto = eargs.e_proto(params)
      check = training_results(proto, problems_all, EVAL_LIMIT, setting.CORES)
      for r in results:
         # "?" = ran but unsolved, "-" = no PROCESSED stat, "!" = missing from re-run
         if "PROCESSED" in results[r]:
            left = results[r]["PROCESSED"] if results[r].solved() else "?"
         else:
            left = "-"
         if r in check and "PROCESSED" in check[r]:
            right = check[r]["PROCESSED"] if check[r].solved() else "?"
         else:
            right = "-" if r in check else "!"
         print "%s: %10s --> %10s" % (r, left, right)
   return params
Exemple #7
0
def run():
   "Top-level tuning loop: setup, optional Enigma pre-tuning, ITERS rounds of global/fine tuning, then report results."
   global iter_counter, best_params, best_info

   (cefs, params) = setup()
   if params:
      log_params(params, "EPYMILS INPUT:")
   
   if USE_ENIGMA:
      # Pre-tune with Enigma models; register them immediately unless
      # ENFORCE_ENIGMA keeps them for the final forced output instead.
      enigma_params = enigma_tune(cefs, params, REGISTER=(not ENFORCE_ENIGMA))
      if ENFORCE_ENIGMA:
         # Discard the tracked best so the enigma result is reported separately.
         best_params = None
         best_info = None
      else:
         params = enigma_params
   else:
      enigma_params = None

   # Main tuning iterations: global pass, then fine pass, optionally
   # followed by another Enigma tuning round per iteration.
   for it in range(setting.ITERS):
      iter_counter = it+1
      params = global_tune(cefs, params)
      params = fine_tune(cefs, params)
      if ITER_ENIGMA:
         params = enigma_tune(cefs, params, REGISTER=True)

   # Report the best parameters found (tracked by update_best).
   if best_params:
      best_params = dict(best_params)
      res = ["%s %s" % (x,best_params[x]) for x in sorted(best_params.keys())]
      log("\nRESULT: %s" % (" ".join(res)))
      log_proto("best_params", best_params)
      log_params(best_params, "EPYMILS OUTPUT:")
      cefs_update(best_params)
   # With ENFORCE_ENIGMA, additionally report the enigma-tuned parameters.
   if ENFORCE_ENIGMA and enigma_params:
      res = ["%s %s" % (x,enigma_params[x]) for x in sorted(enigma_params.keys())]
      log("\nRESULT: %s" % (" ".join(res)))
      log_params(enigma_params, "EPYMILS ENIGMA OUTPUT:")
      log_proto("enigma", enigma_params)
Exemple #8
0
def fine_tune(cefs, params):
   "Run one ParamILS fine-tuning round and track the best result found."
   global iter_counter
   log("\nFINE_TUNE %s" % iter_counter)
   tuned, info = paramils.run_finetune(iter_counter, cefs, params)
   # Record the round's outcome against the global best before returning.
   update_best(tuned, info)
   return tuned
Exemple #9
0
def global_tune(cefs, params):
   "Run one ParamILS global-tuning round and track the best result found."
   global iter_counter
   log("\nGLOBAL_TUNE %s" % iter_counter)
   tuned, info = paramils.run_globaltune(iter_counter, cefs, params)
   # Record the round's outcome against the global best before returning.
   update_best(tuned, info)
   return tuned