Exemple #1
0
    def __call__(self, x):
        """Objective function: negated score of decision vector ``x``.

        Tracks the evaluation count and the best value seen so far in
        shared ``.value`` counters (presumably multiprocessing shared
        values - TODO confirm) and logs a progress line on improvement.
        """
        # determine the minimal station mass
        min_mass, slot_mass = select(self.asteroid, self.station,
                                     self.trajectory, self.mass,
                                     self.transfer_start, self.transfer_time,
                                     x)
        # delta-v values for the selected trajectories
        sdv = select_dvs(self.trajectory_dv, x)
        # negated: the surrounding optimizer minimizes, score is maximized
        y = -score(min_mass, sdv)
        self.evals.value += 1
        if y < self.best_y.value:
            self.best_y.value = y
            trajectories = trajectory_selection(x, TRAJECTORY_NUM)[0]
            stations = dyson_stations(x, STATION_NUM)
            times = timings(x, STATION_NUM)

            # score recomputed from the minimal slot mass, for logging only
            sc = score(np.amin(slot_mass), sdv)

            logger().info(
                "evals = {0}: time = {1:.1f} s = {2:.0f} a = {3:.0f} t = {4:s} s = {5:s} b = {6:s} m = {7:s} dv = {8:s}"
                .format(self.evals.value, dtime(self.t0), sc,
                        ast_num(x, self.asteroid, self.trajectory),
                        str([round(ti, 2) for ti in times[1:-1]]),
                        str([int(si) for si in stations]),
                        str([int(ti) for ti in trajectories]),
                        str([round(mi, 2) for mi in slot_mass * 1E-15]),
                        str([round(di, 2) for di in sdv])))
        return y
Exemple #2
0
def messengerFullLoop():
    """Endlessly re-run the C++ CMA-ES retry on the full Messenger problem."""
    while True:
        prob = MessFull()
        logger().info(prob.name + ' cmaes c++')
        ret = minimize(prob.fun,
                       bounds=prob.bounds,
                       num_retries=40000,
                       max_evaluations=50000,
                       value_limit=10.0,
                       logger=logger(),
                       useCpp=True)
Exemple #3
0
def messengerFullLoop():
    """Endlessly re-run the DE -> CMA-ES retry on the full Messenger problem."""
    while True:
        prob = MessFull()
        logger().info(prob.name + ' de + cmaes c++')
        ret = minimize(
            prob.fun, bounds=prob.bounds, num_retries=60000,
            value_limit=10.0, logger=logger())
Exemple #4
0
 def fun(self, x):
     """Evaluate the wrapped objective, tracking and logging improvements."""
     self.evals.value += 1
     y = self.f8fun(x)
     if y >= self.best_y.value:  # no improvement: nothing to log
         return y
     self.best_y.value = y
     logger().info(
         str(dtime(self.t0)) + ' ' + str(self.evals.value) + ' ' +
         str(self.best_y.value) + ' ' + str(list(x)))
     return y
Exemple #5
0
def messengerFullLoop():
    """Loop forever, restarting the DE -> CMA-ES smart retry on MessFull."""
    while True:
        prob = MessFull()
        logger().info(prob.name + ' de + cmaes c++')
        minimize(prob.fun, bounds=prob.bounds, num_retries=50000,
                 value_limit=12.0, logger=logger(),
                 optimizer=de_cma(1500))
Exemple #6
0
 def dump_all(self, iter):
     """Sort the collected stats by value and log one line per problem."""
     order = self.values_all().argsort()
     self.all_stats = list(np.asarray(self.all_stats)[order])
     logger().info("iteration " + str(iter))
     for ps in self.all_stats:
         line = ("problem " + ray.get(ps.name.remote())
                 + ' ' + str(ray.get(ps.id.remote()))
                 + ' ' + str(ray.get(ps.value.remote()))
                 + ' time = ' + str(dtime(self.t0)))
         logger().info(line)
Exemple #7
0
def nsgaII_test(problem, fname, NGEN=2000, MU=100, value_limits=None):
    """Run NSGA-II on `problem`, store and plot the resulting pareto front.

    Args:
        problem: objective with `name`, `fun` and `bounds` attributes.
        fname: suffix of the result file name.
        NGEN: number of generations.
        MU: population size.
        value_limits: optional per-objective upper limits; points of the
            front violating any limit are dropped before plotting.
    """
    time0 = time.perf_counter()  # optimization start time
    name = problem.name
    logger().info('optimize ' + name + ' nsgaII')
    pbounds = np.array(list(zip(problem.bounds.lb, problem.bounds.ub)))
    pop, logbook, front = nsgaII(2, problem.fun, pbounds, NGEN=NGEN, MU=MU)
    logger().info(name + ' nsgaII time ' + str(dtime(time0)))
    # NOTE(review): no '_' between MU and name - verify file naming is intended
    name = 'nsgaII_' + str(NGEN) + '_' + str(MU) + name + '_' + fname
    np.savez_compressed(name, xs=pop, ys=front)
    if value_limits is not None:
        # keep only front points below all objective limits
        front = np.array([
            y for y in front
            if all(y[i] < value_limits[i] for i in range(len(y)))
        ])
    retry.plot(front, name)
Exemple #8
0
def test_retry_cpp(problem, num):
    """Parallel retry of `problem` with the C++ optimizer.

    Args:
        problem: objective with `fun` and `bounds` attributes.
        num: number of retries.

    Returns:
        the optimization result (previously assigned to an unused local).
    """
    return retry.minimize(problem.fun,
                          bounds=problem.bounds,
                          num_retries=num,
                          max_evaluations=50000,
                          logger=logger(),
                          useCpp=True)
Exemple #9
0
def _test_optimizer(opt, problem, num_retries = 32, num = 1):
    """Execute `num` minimize_plot runs of optimizer `opt` on `problem`."""
    log = logger()
    log.info(problem.name + ' ' + opt.name)
    for i in range(num):
        if num > 1:
            name = str(i + 1) + ' ' + problem.name
        else:
            name = problem.name
        retry.minimize_plot(name, opt, problem.fun, problem.bounds,
                            math.inf, 10.0, num_retries, logger=log)
Exemple #10
0
def test_multiretry(num_retries = min(256, 8*mp.cpu_count()), 
             keep = 0.7, optimizer = de2_cma(1500), logger = logger(), repeat = 10):
    """Compare all Tandem sequence variants, then refine the best one.

    Each round runs multiretry over all variants and re-optimizes the
    best variant from scratch on all ray nodes.

    Note: the `optimizer` and `logger` defaults are evaluated once at
    import time and shared between calls.
    """
    seqs = Tandem(0).seqs
    n = len(seqs)
    problems = [Tandem(i) for i in range(n)]
    ids = [str(seqs[i]) for i in range(n)]
    t0 = time.perf_counter()
    for _ in range(repeat):
        # check all variants; the best one comes first in problem_stats
        problem_stats = multiretry.minimize(problems, ids, num_retries,
                                            keep, optimizer, logger)
        ps = problem_stats[0]
        # optimize best variant starting from scratch using all nodes
        logger.info("improve best variant " + ray.get(ps.name.remote())
                    + ' ' + str(ray.get(ps.id.remote()))
                    + ' ' + str(ray.get(ps.value.remote()))
                    + ' time = ' + str(dtime(t0)))
        problem = problems[ray.get(ps.index.remote())]
        _rayoptimizer(optimizer, problem, 1, max_time = 1200, log = logger)
Exemple #11
0
def _retry_loop(pid,
                rgs,
                store,
                optimize,
                num_retries,
                value_limit,
                stop_fitness=-math.inf):
    """Worker loop: repeatedly run `optimize` from random initial points.

    Args:
        pid: worker index selecting this worker's random generator in `rgs`.
        rgs: per-worker random generators.
        store: shared result store.
        optimize: optimizer callable.
        num_retries: total retry budget shared between workers.
        value_limit: results above this value are not stored.
        stop_fitness: terminate once the best stored y drops below this.
    """
    fun = store.wrapper if store.statistic_num > 0 else store.fun
    # reinitialize logging config for windows - multi threading fix
    if 'win' in sys.platform and store.logger is not None:
        store.logger = logger()

    lower = store.lower
    while store.get_runs_compare_incr(
            num_retries) and store.best_y.value > stop_fitness:
        try:
            rg = rgs[pid]
            sol, y, evals = optimize(fun, Bounds(store.lower,
                                                 store.upper), None,
                                     [rg.uniform(0.05, 0.1)] * len(lower), rg,
                                     store)
            store.add_result(y, sol, evals, value_limit)
            if store.plot_name is not None:
                name = store.plot_name + "_retry_" + str(
                    store.get_count_evals())
                xs = np.array(store.get_xs())
                ys = np.array(store.get_ys())
                np.savez_compressed(name, xs=xs, ys=ys)
                # NOTE(review): plots the scalar y, not ys - verify intended
                plot(y, name, interp=False)
        except Exception as ex:
            print(str(ex))
Exemple #12
0
def test_cma_cordinated_retry(dim=6):
    # coordinated retry with CMA-ES optimizer with reduced popsize
    # faster for small dimension, use default for dim > 12
    # NOTE(review): `dim` is unused and `problem` is not defined in this
    # function - presumably a module-level problem instance; compare the
    # sibling test_gclde_cordinated_retry(dim) which uses bounds(dim).
    return advretry.minimize(problem.fun,
                             problem.bounds,
                             logger=logger(),
                             optimizer=Cma_cpp(2000, popsize=13))
Exemple #13
0
def _retry_loop(pid,
                rgs,
                store,
                optimize,
                value_limit,
                stop_fitness=-math.inf):
    """Coordinated-retry worker loop mixing crossover and fresh restarts.

    Args:
        pid: worker index selecting this worker's random generator in `rgs`.
        rgs: per-worker random generators.
        store: shared result store (also provides the retry budget).
        optimize: optimizer callable.
        value_limit: results above this value are not stored.
        stop_fitness: terminate once the best stored y drops below this.
    """
    fun = store.wrapper if store.statistic_num > 0 else store.fun
    # reinitialize logging config for windows - multi threading fix
    if 'win' in sys.platform and store.logger is not None:
        store.logger = logger()

    while store.get_runs_compare_incr(
            store.num_retries) and store.best_y.value > stop_fitness:
        if _crossover(fun, store, optimize, rgs[pid]):
            continue
        try:
            rg = rgs[pid]
            dim = len(store.lower)
            sol, y, evals = optimize(fun, Bounds(store.lower,
                                                 store.upper), None,
                                     [rg.uniform(0.05, 0.1)] * dim, rg, store)
            store.add_result(y, sol, store.lower, store.upper, evals,
                             value_limit)
        except Exception as ex:
            # was silently swallowed; report like the sibling retry loop
            print(str(ex))
            continue
Exemple #14
0
def optimize():
    """Optimize the solar orbiter problem using BiteOpt parallel retry.

    Returns:
        the retry.minimize optimization result.
    """
    solo_mgar = solo_mgar_udp([7000, 8000])
    prob = pg.problem(solo_mgar)
    fprob = single_objective(prob)

    # alternative: advretry.minimize(..., optimizer=de_cma(1500)) for the
    # DE -> CMA-ES smart retry (removed dead commented-out code)
    logger().info('solar orbiter' + ' BiteOpt parallel retry')
    ret = retry.minimize(fprob.fun,
                         bounds=fprob.bounds,
                         num_retries=32000,
                         logger=logger(),
                         optimizer=Bite_cpp(120000, M=6))
    return ret
Exemple #15
0
def _test_archipelago(algo, problem, num=10000, stop_val=-1E99, log=logger()):
    """Repeatedly evolve a pygmo archipelago, tracking the best champion.

    Args:
        algo: pygmo algorithm instance.
        problem: objective with `name`, `fun` and `bounds` attributes.
        num: maximal number of archipelago restarts.
        stop_val: stop as soon as the best value drops below this.
        log: logger (note: default is evaluated once at import time).

    Returns:
        OptimizeResult with the best x and objective value found.
    """
    udp = pygmo_udp(problem.fun, problem.bounds)
    prob = pg.problem(udp)
    best_y = math.inf
    best_x = None
    t0 = time.perf_counter()

    for _ in range(num):
        archi = pg.archipelago(n=mp.cpu_count(),
                               algo=algo,
                               prob=prob,
                               pop_size=64)
        archi.evolve()
        archi.wait()
        ys = archi.get_champions_f()
        if ys is not None and len(ys) > 0:
            # champion with the smallest objective value
            sort = np.argsort([y[0] for y in ys])
            y = ys[sort[0]][0]
            if y < best_y:
                best_y = y
                best_x = archi.get_champions_x()[sort[0]]
                log.info('{0} {1} {2} {3!s}'.format(problem.name, dtime(t0),
                                                    best_y, list(best_x)))
                if best_y < stop_val:
                    break
    return OptimizeResult(x=best_x, fun=best_y, success=True)
Exemple #16
0
def _retry_loop(pid, rgs, fun, weight_bounds, ncon, y_exp,
                store, optimize, num_retries, value_limits):
    """Multi-objective retry worker: random weighting + single-objective run.

    Each iteration draws random objective weights, wraps the MO function
    into a weighted single objective, optimizes it, and stores the result
    if all objective values are within `value_limits`.
    """
    # reinitialize logging config for windows - multi threading fix
    if 'win' in sys.platform and store.logger is not None:
        store.logger = logger()
    lower = store.lower
    wlb = np.array(weight_bounds.lb)
    wub = np.array(weight_bounds.ub)
    while store.get_runs_compare_incr(num_retries):
        try:
            rg = rgs[pid]
            w = rg.uniform(size=len(wub))
            w /= _avg_exp(w, y_exp)  # correct scaling
            w = wlb + w * (wub - wlb)
            wrapper = mo_wrapper(fun, w, ncon, y_exp)
            x, y, evals = optimize(wrapper.eval,
                                   Bounds(store.lower, store.upper), None,
                                   [rg.uniform(0.05, 0.1)] * len(lower), rg,
                                   store)
            objs = wrapper.mo_eval(x)  # retrieve the objective values
            if value_limits is None or all(
                    objs[i] < value_limits[i] for i in range(len(w))):
                store.add_result(y, x, evals, math.inf)
                if store.plot_name is not None:
                    name = store.plot_name + "_moretry_" + str(
                        store.get_count_evals())
                    xs = np.array(store.get_xs())
                    ys = np.array([fun(x) for x in xs])
                    np.savez_compressed(name, xs=xs, ys=ys)
                    plot(name, ncon, xs, ys)
        except Exception as ex:
            print(str(ex))
Exemple #17
0
def minimize_plot(name,
                  optimizer,
                  fun,
                  bounds,
                  value_limit=math.inf,
                  plot_limit=math.inf,
                  num_retries=1024,
                  workers=mp.cpu_count(),
                  logger=logger(),
                  stop_fitness=-math.inf,
                  statistic_num=5000):
    """Run a coordinated retry, store the improvement history, and plot it."""
    time0 = time.perf_counter()  # optimization start time
    name = name + '_' + optimizer.name
    logger.info('optimize ' + name)
    store = Store(fun, bounds, capacity=500, logger=logger,
                  statistic_num=statistic_num)
    ret = retry(store, optimizer.minimize, value_limit, workers, stop_fitness)
    impr = store.get_improvements()
    np.savez_compressed(name, ys=impr)
    # restrict the plot to improvements below plot_limit, if any survive
    filtered = np.array([imp for imp in impr if imp[1] < plot_limit])
    if len(filtered) > 0:
        impr = filtered
    logger.info(name + ' time ' + str(dtime(time0)))
    plot(impr,
         'progress_aret.' + name + '.png',
         label=name,
         xlabel='time in sec',
         ylabel=r'$f$')
    return ret
Exemple #18
0
def minimize_plot(name,
                  fun,
                  nobj,
                  ncon,
                  bounds,
                  popsize=64,
                  max_evaluations=100000,
                  nsga_update=False,
                  pareto_update=0,
                  workers=mp.cpu_count(),
                  logger=logger(),
                  plot_name=None):
    """Run the MO-DE optimizer, store the resulting front, and plot it."""
    if nsga_update:
        variant = 'nsga_update'
    else:
        variant = 'de_update_' + str(pareto_update)
    name = name + '_mode_' + str(popsize) + '_' + variant
    logger.info('optimize ' + name)
    xs, ys = minimize(fun,
                      nobj,
                      ncon,
                      bounds,
                      popsize=popsize,
                      max_evaluations=max_evaluations,
                      nsga_update=nsga_update,
                      pareto_update=pareto_update,
                      workers=workers,
                      logger=logger,
                      plot_name=plot_name)
    np.savez_compressed(name, xs=xs, ys=ys)
    moretry.plot(name, ncon, xs, ys)
Exemple #19
0
def _test_optimizer(opt_name, problem, num_retries = 4000, num = 20, value_limit = 20.0, 
                   log = logger()):
    """Run `num` independent retry runs of the optimizer named `opt_name`.

    The optimizer method is looked up by name on a fresh Optimizer each run;
    results are collected in the per-run `store` (the return value of retry
    was previously assigned to an unused local).
    """
    log.info(problem.name + ' ' + opt_name)
    for _ in range(num):
        store = Store(problem.bounds, logger = log)
        optimizer = Optimizer(store, 0)
        method = getattr(optimizer, opt_name)
        retry(problem.fun, store, method, num_retries, value_limit = value_limit)
Exemple #20
0
def test_gclde_cordinated_retry(problem):
    """Coordinated retry of `problem` with a GCLDE -> CMA optimizer sequence."""
    seq = Sequence([GCLDE_cpp(750),
                    Cma_cpp(750, popsize=13)])
    return advretry.minimize(problem.fun,
                             problem.bounds,
                             logger=logger(),
                             optimizer=seq)
Exemple #21
0
def test_gclde_cordinated_retry(dim=6):
    """Coordinated retry of obj_f_c with a GCLDE -> CMA optimizer sequence."""
    seq = Sequence([GCLDE_cpp(750),
                    Cma_cpp(750, popsize=13)])
    return advretry.minimize(obj_f_c,
                             bounds(dim),
                             logger=logger(),
                             optimizer=seq)
Exemple #22
0
def _rayoptimizer(opt, problem, num, max_time = 1200, log = logger()):
    """Run `num` ray-based retries of `opt` on `problem`, reusing actors.

    Remote minimizer actors created by the first minimize call are reused
    by the following calls and terminated at the end.
    """
    log.info(problem.name + ' ' + opt.name)
    minimizers = None  # remote actors created by minimize will be reused
    for i in range(num):
        ret, minimizers = rayretry.minimize(
            problem.fun, problem.bounds, 100, None, 20000, 0, log,
            optimizer=opt, max_time=max_time, minimizers=minimizers)
        print("solution: ", i + 1, ret.fun, str(ret.x))
    if minimizers is not None:  # num == 0 leaves minimizers unset
        for minimizer in minimizers:
            ray.get(minimizer.terminate.remote())
Exemple #23
0
def _test_optimizer(opt, problem, num_retries=32, num=1):
    """Execute `num` plain minimize runs of optimizer `opt` on `problem`."""
    log = logger()
    log.info(problem.name + ' ' + opt.name)
    for _ in range(num):
        minimize(problem.fun, problem.bounds, math.inf,
                 num_retries, log, optimizer=opt)
Exemple #24
0
 def __init__(self, prob, id, index, num_retries = 64, logger = logger()):
     """Wrap a problem instance for multiretry bookkeeping."""
     self.prob = prob
     self.name = prob.name
     self.fun = prob.fun
     self.id = id
     self.index = index
     self.num_retries = num_retries
     self.retries = 0   # retries performed so far
     self.value = 0     # updated later by the retry loop
     self.ret = None    # last optimization result, if any
     self.store = advretry.Store(prob.bounds, logger = logger)
Exemple #25
0
def test_multiretry(num_retries = 512, 
             keep = 0.7, optimizer = de_cma(1500), logger = logger(), repeat = 50):
    """Filter Tandem variants with multiretry, then keep retrying the best."""
    seqs = Tandem(0).seqs
    problems = [Tandem(i) for i in range(len(seqs))]
    ids = [str(s) for s in seqs]
    for _ in range(100):
        stats = multiretry.minimize(problems, ids, num_retries, keep,
                                    optimizer, logger)
        best = stats[0]
        for _ in range(repeat):
            logger.info("problem " + best.prob.name + ' ' + str(best.id))
            best.retry(optimizer)
Exemple #26
0
def messengerFullLoop(opt, num=1, log=logger()):
    """Run `num` smart-retry plots of `opt` on the full Messenger problem."""
    for i in range(num):
        problem = MessFull()
        log.info(problem.name + ' ' + opt.name)
        if num > 1:
            name = str(i + 1) + ' ' + problem.name
        else:
            name = problem.name
        advretry.minimize_plot(name, opt, problem.fun, problem.bounds,
                               12.0, 12.0, 50000, logger=log)
Exemple #27
0
def test_retry_cma_python(problem, num):
    """Repeat a 2000-fold parallel retry with python CMA-ES `num` times."""
    best = math.inf
    t0 = time.perf_counter()
    for i in range(num):
        ret = retry.minimize(problem.fun,
                             bounds=problem.bounds,
                             num_retries=2000,
                             optimizer=Cma_python(100000),
                             statistic_num=5000,
                             logger=logger())
        if ret.fun < best:
            best = ret.fun
        print("{0}: time = {1:.1f} best = {2:.1f} f(xmin) = {3:.1f}".format(
            i + 1, dtime(t0), best, ret.fun))
Exemple #28
0
def test_advretry(problem, value_limit, num):
    """Repeat a 4000-fold coordinated retry `num` times, printing progress."""
    best = math.inf
    t0 = time.perf_counter()
    for i in range(num):
        ret = advretry.minimize(problem.fun,
                                bounds=problem.bounds,
                                num_retries=4000,
                                value_limit=value_limit,
                                statistic_num=5000,
                                logger=logger())
        if ret.fun < best:
            best = ret.fun
        print("{0}: time = {1:.1f} best = {2:.1f} f(xmin) = {3:.1f}".format(
            i + 1, dtime(t0), best, ret.fun))
Exemple #29
0
 def __call__(self, X):
     """Objective: negated maximal rabbit population after fox killings.

     X encodes, per year, whether to kill a fox (x > 0) and when exactly
     (at time year + x). Relies on module-level `integrator`, `pop0` and
     `dim` - presumably the simulation setup; confirm in the full module.
     """
     ts = []
     for year, x in enumerate(X):
         if x > 0:  # should we kill a fox this year?
             ts.append(year + x)  # when exactly?
     I = integrator()
     I.set_initial_value(pop0, 0)
     for i in range(len(ts)):
         pop = integrate(
             I, ts[i])  # propagate rabbit and fox population to ts[i]
         pop[1] = max(1, pop[1] - 1)  # kill one fox, but keep at least one
         I.set_initial_value(pop, ts[i])
     # value is maximal rabbit population during the following 5 years without fox killings
     y = -max([integrate(I, t)[0] for t in np.linspace(dim, dim + 5, 50)])
     # book keeping and logging
     self.evals.value += 1
     if y < self.best_y.value:
         self.best_y.value = y
         logger().info(
             "nfev = {0}: t = {1:.1f} fval = {2:.3f} fox kill at {3:s} x = {4:s}"
             .format(self.evals.value, dtime(self.t0), y,
                     str([round(t, 2) for t in ts[:-1]]), str(list(X))))
     return y
Exemple #30
0
def test_optimizer(opt,
                   problem,
                   num_retries=120000,
                   num=100,
                   value_limit=10.0,
                   log=logger()):
    """Run `num` independent coordinated-retry minimizations of `problem`.

    Each run is self-contained; the result was previously assigned to an
    unused local and is intentionally not aggregated here.
    """
    log.info(problem.name + ' ' + opt.name)
    for _ in range(num):
        advretry.minimize(problem.fun,
                          problem.bounds,
                          value_limit,
                          num_retries,
                          log,
                          optimizer=opt)