def main(number):
    random.seed(4)
    N_ISLES = number
    FREQ = 5
    pob = int(500 / number)
    islands = [toolbox.population(n=pob) for i in range(N_ISLES)]
    toolbox.unregister("indices")
    toolbox.unregister("individual")
    toolbox.unregister("population")
    toolbox.register("alg_scoop", algorithms.eaSimple, toolbox=toolbox,
                     cxpb=0.8, mutpb=0.2, ngen=5, verbose=False)
    start_time = time.time()
    for i in range(0, 400, FREQ):
        results = futures.map(toolbox.alg_scoop, islands)
        islands = [pop for pop, logbook in results]
        tools.migRing(islands, 15, tools.selBest)
    print("--- %s seconds ---" % (time.time() - start_time))
    return "finished"
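The function above relies on module-level state that is not shown: a toolbox built for a permutation-encoded problem (hence the "indices" initializer that is unregistered before the toolbox is shipped to the workers) and SCOOP's futures.map to distribute the islands. A minimal sketch of that assumed setup follows; IND_SIZE and the initializer choices are illustrative, not taken from the original source.

import random
import time

from deap import algorithms, base, creator, tools
from scoop import futures

IND_SIZE = 100  # hypothetical chromosome length

creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
creator.create("Individual", list, fitness=creator.FitnessMin)

toolbox = base.Toolbox()
# Permutation-style initializers matching the "indices" name used above.
toolbox.register("indices", random.sample, range(IND_SIZE), IND_SIZE)
toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.indices)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
# Mate/mutate/select/evaluate registrations for the actual problem would go here.

The script is then launched through SCOOP, for example python -m scoop script.py, so that futures.map fans each island's eaSimple run out to the worker processes.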
def main():
    start = time.time()
    random.seed(64)
    NISLES = 100
    islands = [toolbox.population(n=300) for i in range(NISLES)]
    # Unregister unpicklable methods before sending the toolbox.
    toolbox.unregister("attr_bool")
    toolbox.unregister("individual")
    toolbox.unregister("population")
    NGEN, FREQ = 40, 5
    toolbox.register("algorithm", algorithms.eaSimple, toolbox=toolbox,
                     cxpb=0.5, mutpb=0.2, ngen=FREQ, verbose=False)
    registration_time = time.time()
    for i in range(0, NGEN, FREQ):
        results = toolbox.map(toolbox.algorithm, islands)
        islands = [pop for pop, logbook in results]
        tools.migRing(islands, 1, tools.selBest)
    end = time.time()
    print("start time:", start)
    print("registration time:", registration_time)
    print("end time:", end)
def parallel_evolution():
    print_infos()
    islands = [toolbox.population(n=POP_SIZE) for i in range(N_ISLES)]
    with Parallel(n_jobs=N_JOBS) as parallel:
        hof = tools.ParetoFront()
        it = 0
        while it == 0 or (it < N_GEN and not stop_cond(islands, STOP_CONDITION)):
            print("\nIteration: " + str(it))
            # Evolve every island for MIGRATION_INTERVAL generations in parallel.
            res = parallel(
                delayed(single_evolver)(pop=island, n_gen=MIGRATION_INTERVAL,
                                        hof=hof, verbose=True)
                for island in islands)
            islands = []
            for pop, logbook, hofi in res:
                hof.update(pop)
                islands.append(pop)
            if N_ISLES > 1:
                tools.migRing(islands, N_MIGRATION, tools.selBest)
            it += MIGRATION_INTERVAL
    return hof
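parallel_evolution assumes joblib's Parallel/delayed plus a single_evolver helper that evolves one island for n_gen generations and returns (population, logbook, hof); neither is shown above. The following is a hypothetical sketch of such a helper built on algorithms.eaSimple: the crossover and mutation probabilities are placeholders, and updates to the hof argument do not propagate back from joblib workers, which is why the caller re-updates the global front itself.

from deap import algorithms, tools
from joblib import Parallel, delayed

def single_evolver(pop, n_gen, hof=None, verbose=False):
    # hof arrives as a copy in the worker process; keep an island-local
    # Pareto front and hand it back instead.
    island_hof = tools.ParetoFront()
    pop, logbook = algorithms.eaSimple(pop, toolbox, cxpb=0.7, mutpb=0.2,
                                       ngen=n_gen, halloffame=island_hof,
                                       verbose=verbose)
    # Matches the (pop, logbook, hofi) unpacking in parallel_evolution().
    return pop, logbook, island_hof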
def multi_islands():
    random.seed(64)
    NISLES = 6
    islands = [toolbox.population(n=300) for i in range(NISLES)]
    # Unregister unpicklable methods before sending the toolbox.
    toolbox.unregister("attr_AP")
    toolbox.unregister("individual")
    toolbox.unregister("population")
    NGEN, FREQ = 50, 5
    toolbox.register("algorithm", algorithms.eaSimple, toolbox=toolbox,
                     cxpb=0.5, mutpb=0.2, ngen=FREQ, verbose=False)
    for i in range(0, NGEN, FREQ):
        results = toolbox.map(toolbox.algorithm, islands)
        islands = [pop for pop, logbook in results]
        tools.migRing(islands, 15, tools.selBest)
    for island in islands:
        best_inds = tools.selBest(island, 1)
        for best_ind in best_inds:
            print("Best individual is:\n\t %s\n\t %s"
                  % (best_ind, best_ind.fitness.values))
            evaluer = SolutionEvaluer()  # renamed from `eval` to avoid shadowing the builtin
            evaluer.plot(best_ind)
        print("")
    return islands
def main():
    population = toolbox.populationCreator(n=POPULATION_SIZE)
    random.seed(64)
    NISLES = 6
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("min", np.min)
    stats.register("avg", np.mean)
    logbook = tools.Logbook()
    logbook.header = "gen", "evals", "min", "avg"
    hof = tools.HallOfFame(HALL_OF_FAME_SIZE)
    NGEN, FREQ = 40, 30
    toolbox.register("algorithm", algorithms.eaSimple, toolbox=toolbox,
                     cxpb=P_CROSSOVER, mutpb=P_MUTATION, ngen=MAX_GENERATIONS,
                     stats=stats, halloffame=hof, verbose=True)
    islands = [population for i in range(NISLES)]
    for i in range(0, MAX_GENERATIONS, FREQ):
        results = toolbox.map(toolbox.algorithm, islands)
        islands = [pop for pop, log in results]
        # Track the first island for statistics and the hall of fame; the
        # comprehension above does not rebind `population` in Python 3.
        population = islands[0]
        tools.migRing(islands, 25, tools.selBest)
        record = stats.compile(population)
        logbook.record(gen=i, evals=len(population), **record)
        hof.update(population)
        best = hof.items[0]
        print(i)
        print("-- Best Ever Individual = ", best)
        print("-- Best Ever Fitness = ", best.fitness.values[0])
        if i == FREQ:
            plt.figure(1)
            tsp.plotData(best)
    minFitnessValues, meanFitnessValues = logbook.select("min", "avg")
    plt.figure(2)
    sns.set_style("whitegrid")
    plt.plot(minFitnessValues, color='red')
    plt.plot(meanFitnessValues, color='green')
    plt.xlabel('Generation')
    plt.ylabel('Min / Average Fitness')
    plt.title('Min and Average fitness over Generations')
    plt.show()
    return islands
def main():
    random.seed(64)
    NISLES = 5
    islands = [toolbox.population(n=300) for i in range(NISLES)]
    # Unregister unpicklable methods before sending the toolbox.
    toolbox.unregister("attr_bool")
    toolbox.unregister("individual")
    toolbox.unregister("population")
    NGEN, FREQ = 40, 5
    toolbox.register("algorithm", algorithms.eaSimple, toolbox=toolbox,
                     cxpb=0.5, mutpb=0.2, ngen=FREQ, verbose=False)
    for i in range(0, NGEN, FREQ):
        results = toolbox.map(toolbox.algorithm, islands)
        islands = [pop for pop, logbook in results]
        tools.migRing(islands, 15, tools.selBest)
    return islands
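The loop above distributes whole eaSimple runs through toolbox.map, so the toolbox needs a parallel map registered and its unpicklable initializers removed (which is what the unregister calls do). A minimal sketch of the assumed One Max–style setup, using SCOOP to provide the map; the operator choices and individual size are illustrative.

import random

from deap import algorithms, base, creator, tools
from scoop import futures

creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)

def eval_one_max(individual):
    return sum(individual),

toolbox = base.Toolbox()
toolbox.register("attr_bool", random.randint, 0, 1)
toolbox.register("individual", tools.initRepeat, creator.Individual,
                 toolbox.attr_bool, 100)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)
toolbox.register("evaluate", eval_one_max)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutFlipBit, indpb=0.05)
toolbox.register("select", tools.selTournament, tournsize=3)
# Each toolbox.algorithm call evolves one island; futures.map spreads the
# islands over the SCOOP workers.
toolbox.register("map", futures.map)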
size = comm.Get_size()
rank = comm.Get_rank()
random.seed(4)
N_ISLES = size
FREQ = 5
if rank == 0:
    pob = int(500 / N_ISLES)
    islands = [toolbox.population(n=pob) for i in range(N_ISLES)]
else:
    islands = None
toolbox.unregister("indices")
toolbox.unregister("individual")
toolbox.unregister("population")
if rank == 0:
    start_time = time.time()
for i in range(0, 400, FREQ):
    # Scatter one island to each rank, evolve it locally, then gather the
    # (population, logbook) pairs back on rank 0 for migration.
    island = comm.scatter(islands, root=0)
    resultsT = algorithms.eaSimple(island, toolbox=toolbox,
                                   cxpb=0.8, mutpb=0.2, ngen=5, verbose=False)
    results = comm.gather(resultsT, root=0)
    if rank == 0:
        islands = [pop for pop, logbook in results]
        tools.migRing(islands, 15, tools.selBest)
if rank == 0:
    print("--- %s seconds ---" % (time.time() - start_time))
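This variant expects every rank to run the same script with comm already defined. A minimal sketch of the assumed mpi4py setup (the toolbox construction matches the other examples and is omitted here):

from mpi4py import MPI

comm = MPI.COMM_WORLD  # one island per MPI rank

The script is then started with one process per island, e.g. mpiexec -n 4 python island_mpi.py (the file name is illustrative).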
start_time = time.perf_counter()
for it in range(max_iter):
    for island in controlIslands:
        island.evolution_step()
    for island in islands:
        island.evolution_step()
    # migrate
    if np.random.rand() < migration_probability:
        populations = [island.get_population() for island in controlIslands]
        tools.migRing(populations, k=3, selection=toolbox.selectMig,
                      replacement=toolbox.selectRepl)
        populations = [island.get_population() for island in islands]
        tools.migRing(populations, k=3, selection=toolbox.selectMig,
                      replacement=toolbox.selectRepl)
    positions = [island.estimate_position()[0] for island in islands]
    mean_position_std = np.std(positions)
    diversity_logger.info("{}, {}".format(it, mean_position_std))
results = [island.get_results() for island in islands]
best_result = min([island.get_avg_fitness() for island in islands])
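The loop above is written against island objects rather than bare DEAP populations, and the original class is not included. A hypothetical sketch of the minimal interface the loop relies on follows (estimate_position and get_results are problem-specific and omitted); the method bodies are placeholders, and the key detail is that get_population must return the island's actual list, since tools.migRing swaps individuals in those lists in place.

import numpy as np
from deap import algorithms

class Island:
    """Hypothetical island wrapper implied by the loop above."""

    def __init__(self, toolbox, population):
        self.toolbox = toolbox
        self.population = population

    def evolution_step(self):
        # One generation: variation, evaluation of changed individuals, selection.
        offspring = algorithms.varAnd(self.population, self.toolbox, cxpb=0.5, mutpb=0.2)
        for ind in offspring:
            if not ind.fitness.valid:
                ind.fitness.values = self.toolbox.evaluate(ind)
        self.population[:] = self.toolbox.select(offspring, len(self.population))

    def get_population(self):
        # Return the real list (not a copy) so migRing's in-place swaps stick.
        return self.population

    def get_avg_fitness(self):
        return np.mean([ind.fitness.values[0] for ind in self.population])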
def __call__(self, fixed_schedule_part, initial_schedule, current_time=0,
             initial_population=None, only_new_pops=False):
    ##==========================
    ## create populations
    ##==========================
    ## TODO: rework this.
    creator.create("FitnessMax", base.Fitness, weights=(1.0,))
    creator.create("Individual", dict, fitness=creator.FitnessMax)

    toolbox = base.Toolbox()
    toolbox.register("evaluate",
                     ga_functions.build_fitness(fixed_schedule_part, current_time))
    toolbox.register("attr_bool",
                     ga_functions.build_initial(fixed_schedule_part, current_time))
    # Structure initializers
    toolbox.register("individual", tools.initIterate, creator.Individual,
                     toolbox.attr_bool)
    toolbox.register("population", tools.initRepeat, list, toolbox.individual)

    ## TODO: replace this with a strategy and a specialized builder.
    if only_new_pops is True:
        ## TODO: replace this magic number with a parameter.
        populations = [toolbox.population(n=POPSIZE) for i in range(3)]
    else:
        ## create populations
        old_pop = initial_population
        newpop = toolbox.population(n=POPSIZE)
        # heft_initial = GAFunctions2.schedule_to_chromosome(initial_schedule)
        # TODO: this is a stub. Rearchitect information flows and entity
        # responsibilities as soon as the first positive results are in.
        heft_initial = initial_schedule
        heft_initial = tools.initIterate(creator.Individual, lambda: heft_initial)
        heft_pop = [ga_functions.mutation(deepcopy(heft_initial)) for i in range(POPSIZE)]
        populations = [old_pop, newpop, heft_pop]

    ## TODO: replace this with a more efficient implementation.
    def quick_save():
        # functools.reduce so this works on Python 3.
        whole_pop = functools.reduce(lambda x, y: x + y, populations)
        ## choose the new "old" population for the next run
        sorted_whole_pop = sorted(whole_pop, key=lambda x: x.fitness.values, reverse=True)
        best = sorted_whole_pop[0]
        new_oldpop = sorted_whole_pop[0:POPSIZE]
        ## construct the final result
        ## TODO: implement a constructor for the final result here.
        result = ((best, new_oldpop, None, None), common_logbook)
        self._save_result(result)
        return result

    ##==========================
    ## run for several migration periods
    ##==========================
    common_logbook = tools.Logbook()
    result = None
    for k in range(MIGRATIONS):
        new_pops = []
        iter_map = {}
        for pop in populations:
            ((best, npop, schedule, stopped_iteration), logbook) = \
                ga_alg(fixed_schedule_part, None, current_time=current_time,
                       initial_population=pop)
            new_pops.append(npop)
            for rec in logbook:
                iter = k * GENERATIONS + rec["iter"]
                mp = iter_map.get(iter, [])
                mp.append({"worst": rec["worst"], "best": rec["best"], "avr": rec["avr"]})
                iter_map[iter] = mp
        for iter, items in iter_map.items():
            best = max(it["best"] for it in items)
            avr = sum(it["avr"] for it in items) / len(items)
            worst = min(it["worst"] for it in items)
            common_logbook.record(iter=iter, worst=worst, best=best, avr=avr)
        populations = new_pops
        migRing(populations, migrCount, emigrant_selection)
        result = quick_save()

    # Merge all populations into one and evaluate it for some time.
    # TODO: test these changes.
    common_pop = functools.reduce(operator.add, populations, [])
    for k in range(MERGED_POP):
        ((best, npop, schedule, stopped_iteration), logbook) = \
            ga_alg(fixed_schedule_part, None, current_time=current_time,
                   initial_population=common_pop)
        common_pop = npop
        for rec in logbook:
            iter = (all_iters_count - MERGED_POP) + rec["iter"]
            common_logbook.record(iter=iter, worst=rec["worst"],
                                  best=rec["best"], avr=rec["avr"])
        result = quick_save()

    ((best, new_oldpop, x1, x2), x3) = result
    result = ((best, new_oldpop,
               ga_functions.build_schedule(best, fixed_schedule_part, current_time), None),
              common_logbook)
    self._save_result(result[0])
    return result
def __call__(self, fixed_schedule_part, initial_schedule, current_time=0,
             initial_population=None, only_new_pops=False):
    ##==========================
    ## create populations
    ##==========================
    ## TODO: rework this.
    creator.create("FitnessMax", base.Fitness, weights=(1.0,))
    creator.create("Individual", dict, fitness=creator.FitnessMax)

    toolbox = base.Toolbox()
    toolbox.register("evaluate",
                     ga_functions.build_fitness(fixed_schedule_part, current_time))
    toolbox.register("attr_bool",
                     ga_functions.build_initial(fixed_schedule_part, current_time))
    # Structure initializers
    toolbox.register("individual", tools.initIterate, creator.Individual,
                     toolbox.attr_bool)
    toolbox.register("population", tools.initRepeat, list, toolbox.individual)

    ## TODO: replace this with a strategy and a specialized builder.
    if only_new_pops is True:
        ## TODO: replace this magic number with a parameter.
        populations = [toolbox.population(n=POPSIZE) for i in range(3)]
    else:
        ## create populations
        old_pop = initial_population
        newpop = toolbox.population(n=POPSIZE)
        # TODO: this is a stub. Rearchitect information flows and entity
        # responsibilities as soon as the first positive results are in.
        heft_initial = initial_schedule if not isinstance(initial_schedule, Schedule) \
            else GAFunctions2.schedule_to_chromosome(initial_schedule, fixed_schedule_part)
        heft_initial = tools.initIterate(creator.Individual, lambda: heft_initial)
        heft_pop = [ga_functions.mutation(deepcopy(heft_initial)) for i in range(POPSIZE)]
        populations = [old_pop, newpop, heft_pop]

    ## TODO: replace this with a more efficient implementation.
    def quick_save():
        # functools.reduce so this works on Python 3.
        whole_pop = functools.reduce(lambda x, y: x + y, populations)
        ## choose the new "old" population for the next run
        sorted_whole_pop = sorted(whole_pop, key=lambda x: x.fitness.values, reverse=True)
        best = sorted_whole_pop[0]
        new_oldpop = sorted_whole_pop[0:POPSIZE]
        ## construct the final result
        ## TODO: implement a constructor for the final result here.
        result = ((best, new_oldpop, None, None), common_logbook)
        self._save_result(result)
        return result

    ##==========================
    ## run for several migration periods
    ##==========================
    common_logbook = tools.Logbook()
    result = None
    for k in range(MIGRATIONS):
        new_pops = []
        iter_map = {}
        for pop in populations:
            ((best, npop, schedule, stopped_iteration), logbook) = \
                ga_alg(fixed_schedule_part, None, current_time=current_time,
                       initial_population=pop)
            new_pops.append(npop)
            # for rec in logbook:
            #     iter = k * GENERATIONS + rec["iter"]
            #     mp = iter_map.get(iter, [])
            #     mp.append({"worst": rec["worst"], "best": rec["best"], "avr": rec["avr"]})
            #     iter_map[iter] = mp
        for iter, items in iter_map.items():
            best = max(it["best"] for it in items)
            avr = sum(it["avr"] for it in items) / len(items)
            worst = min(it["worst"] for it in items)
            common_logbook.record(iter=iter, worst=worst, best=best, avr=avr)
        populations = new_pops
        migRing(populations, migrCount, emigrant_selection)
        result = quick_save()

    # Merge all populations into one and evaluate it for some time.
    # TODO: test these changes.
    common_pop = functools.reduce(operator.add, populations, [])
    for k in range(MERGED_POP):
        ((best, npop, schedule, stopped_iteration), logbook) = \
            ga_alg(fixed_schedule_part, None, current_time=current_time,
                   initial_population=common_pop)
        common_pop = npop
        # for rec in logbook:
        #     iter = (all_iters_count - MERGED_POP) + rec["iter"]
        #     common_logbook.record(iter=iter, worst=rec["worst"],
        #                           best=rec["best"], avr=rec["avr"])
        result = quick_save()

    ((best, new_oldpop, x1, x2), x3) = result
    result = ((best, new_oldpop,
               ga_functions.build_schedule(best, fixed_schedule_part, current_time), None),
              common_logbook)
    self._save_result(result[0])
    return result