def startGA(num_gen):
    """Run the main GA loop for ``num_gen`` generations.

    Operates on the module-level population ``pop`` using the DEAP
    ``toolbox`` and the crossover/mutation probabilities ``probCross`` /
    ``probMut``. The population is replaced in place each generation.
    """
    for g in range(num_gen):
        # Select the next generation individuals
        offspring = toolbox.select(pop, len(pop))
        # Clone the selected individuals.
        # BUG FIX: in Python 3, map() returns a lazy iterator; the extended
        # slices below (offspring[::2]) raised TypeError on it. Materialize
        # the clones into a real list first.
        offspring = list(map(toolbox.clone, offspring))
        # Apply crossover on consecutive offspring pairs
        for child1, child2 in zip(offspring[::2], offspring[1::2]):
            if random.random() < probCross:
                tools.cxBlend(child1, child2, 0.5)
                # Children changed: their cached fitness is now stale
                del child1.fitness.values
                del child2.fitness.values
        # Apply mutation on the offspring
        for mutant in offspring:
            if random.random() < probMut:
                toolbox.mutate(mutant)
                del mutant.fitness.values
        # Evaluate the individuals with an invalid fitness
        try:
            invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
            fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
            for ind, fit in zip(invalid_ind, fitnesses):
                ind.fitness.values = fit
        except TypeError:
            # NOTE(review): this handler originally masked the map() iterator
            # bug fixed above; kept so evaluators that raise TypeError still
            # degrade the same way instead of aborting the run.
            print("-----------------")
        # The population is entirely replaced by the offspring
        pop[:] = offspring
def cxMate(ind1, ind2, low, up, alpha):
    """Crossover procedure for two individuals.

    Applies DEAP's blend crossover in place, then clips each resulting
    individual back into the [low, up] bounds.
    """
    tools.cxBlend(ind1, ind2, alpha)
    for child in (ind1, ind2):
        clipIndividual(child, low, up)
def decoy_cxSim(ind1, ind2):
    """Blend-cross two individuals, then clamp gene 0 into the support
    threshold range and gene 1 into the confidence threshold range.

    Returns the (modified) pair.
    """
    ind1, ind2 = tools.cxBlend(ind1, ind2, 0.5)
    # Per-gene bounds: index 0 = support, index 1 = confidence.
    bounds = (
        (MIN_SUPPORT_THRESHOLD, MAX_SUPPORT_THRESHOLD),
        (MIN_CONF_THRESHOLD, MAX_CONF_THRESHOLD),
    )
    for gene, (lo, hi) in enumerate(bounds):
        ind1[gene] = clamp(ind1[gene], lo, hi)
        ind2[gene] = clamp(ind2[gene], lo, hi)
    return ind1, ind2
def generate_world(population, IndividualClass, FoodClass):
    """Build the next-generation World from the current population.

    Keeps the best individuals, crosses over the middle tier pairwise,
    duplicates the best tier, mutates every weight set, then instantiates
    a fresh World and loads the new weights into its individuals.
    """
    # choose best individual models
    bestpopulaton, middle, worst = choose_best(population, settings.BEST_IND_NUM)
    # Crossover between consecutive pairs of the middle tier.
    # BUG FIX: the original `range(len(middle), 2)` is range(start, stop) —
    # empty whenever len(middle) >= 2 — so crossover never actually ran.
    # Step over even indices instead, stopping one short so that
    # middle[i + 1] is always in bounds (odd trailing element is left as-is).
    for i in range(0, len(middle) - 1, 2):
        middle[i], middle[i + 1] = cxBlend(middle[i], middle[i + 1], settings.ALPHA)
    newpopulation = bestpopulaton + middle + deepcopy(bestpopulaton)
    # mutate every weight set in place
    for weights in newpopulation:
        mutate(weights)
    # drop old model graphs before building the new world's models
    keras.backend.clear_session()
    world = World(IndividualClass, FoodClass, settings.INITIAL_IND_NUM,
                  settings.INITIAL_FOOD_NUM)
    # load the evolved weights into the new population's models
    for ind, weights in zip(world.Population, newpopulation):
        ind.model.set_weights(weights)
    return world
def _mate(ind1, ind2, low, up, blend_prob=0.5):
    """Mate two integer-coded individuals.

    With probability ``blend_prob``: blend crossover, then per gene floor
    (ind1) / ceil (ind2) and clamp into [low[i], up[i]]. Otherwise: uniform
    crossover. Returns the resulting pair.
    """
    if random.random() >= blend_prob:
        return tools.cxUniform(ind1, ind2, indpb=0.5)
    ind1, ind2 = tools.cxBlend(ind1, ind2, alpha=0.5)
    n_genes = min(len(ind1), len(ind2))
    # zip also stops at the shorter of the bound sequences, matching the
    # original behaviour when up/low are shorter than the individuals.
    for idx, hi, lo in zip(range(n_genes), up, low):
        ind1[idx] = min(max(math.floor(ind1[idx]), lo), hi)
        ind2[idx] = min(max(math.ceil(ind2[idx]), lo), hi)
    return ind1, ind2
def f_mate_bit(bit1, bit2, p=None, indpb=0.2):
    """Crossover a single parameter value ('bit') from two parents.

    With probability 1 - indpb the parents' values are returned unchanged.
    Otherwise the crossover depends on p['type']:
      - 'continuous':  blend crossover, clipped into the p['args'] range;
      - 'ordinal':     two independent draws from the p['args'] values that
                       lie between the parents' values;
      - 'categorical': the two values swapped.

    Raises ValueError for any other parameter type.
    """
    if np.random.random() > indpb:
        return bit1, bit2
    if p['type'] == 'continuous':
        # Allow 5% either side
        rng = p['args']
        alpha = 0.05 * (rng[1] - rng[0])
        # BUG FIX: zip() returns a non-subscriptable iterator in Python 3,
        # so the original `zip(*...)[0]` raised TypeError. cxBlend returns
        # the two (one-element) lists — unpack them directly.
        blended1, blended2 = tools.cxBlend([bit1], [bit2], alpha)
        nbit1 = min(max(blended1[0], rng[0]), rng[1])
        nbit2 = min(max(blended2[0], rng[0]), rng[1])
        return nbit1, nbit2
    elif p['type'] == 'ordinal':
        a = [
            aa for aa in p['args']
            if min(bit1, bit2) <= aa <= max(bit1, bit2)
        ]
        return np.random.choice(a), np.random.choice(a)
    elif p['type'] == 'categorical':
        return bit2, bit1
    else:
        raise ValueError(
            'Parameter type should be categorical, ordinal or continuous')
def mutate(individual, indpb):
    """Mutate a suite individual in place and return it.

    Three passes: shuffle the order of the sequences, blend-cross adjacent
    sequence pairs (probability MUTPB each), then shuffle the events inside
    each sequence (probability MUTPB each, indpb per index).
    """
    # shuffle seq
    individual, = tools.mutShuffleIndexes(individual, indpb)
    # crossover inside the suite
    for i in range(1, len(individual), 2):
        if random.random() < MUTPB:
            # Too few sequences to cross meaningfully.
            # BUG FIX: the original repeated this identical guard twice in a
            # row; the second copy was dead code and has been removed.
            if len(individual) <= 2:
                continue
            individual[i - 1], individual[i] = tools.cxBlend(
                individual[i - 1], individual[i], 0.7)
    # shuffle events
    for i in range(len(individual)):
        if random.random() < MUTPB:
            if len(individual) <= 2:
                continue
            # BUG FIX: the original assigned into `list(individual)[i]`,
            # a throwaway shallow copy — it only "worked" because
            # mutShuffleIndexes mutates its argument in place. Mutate and
            # reassign the element itself for clarity and correctness.
            individual[i], = tools.mutShuffleIndexes(individual[i], indpb)
    return individual
# Second y-axis: average individual size over generations, drawn in red.
line2 = ax2.plot(gen, size_avgs, "r-", label="Average Size")
ax2.set_ylabel("Size", color="r")
for tl in ax2.get_yticklabels():
    tl.set_color("r")
# Merge the legend entries of both axes into a single legend on ax1.
lns = line1 + line2
labs = [l.get_label() for l in lns]
ax1.legend(lns, labs, loc="center right")
plt.show()
# Identity checks: DEAP mutation operators return the (mutated) input object.
print(ind2 is mutant)  # True
print(mutant is ind1)  # False
# Clone before mating so the parents are preserved.
child1, child2 = [toolbox.clone(ind) for ind in (ind1, ind2)]
tools.cxBlend(child1, child2, 0.5)
# The children changed, so their inherited fitness values are stale.
del child1.fitness.values
del child2.fitness.values
selected = tools.selBest([child1, child2], 2)
print(child1 in selected)  # True
def evaluate(individual):
    """Toy fitness: (sum of genes, 1 / number of genes)."""
    # Do some hard computing on the individual
    a = sum(individual)
    b = len(individual)
    return a, 1. / b
ind1.fitness.values = evaluate(ind1)
# NOTE: Python 2 script (print statements). Demonstrates the basic DEAP
# workflow: fitness assignment, mutation, crossover, selection, and a full
# eaSimple run.
ind1.fitness.values = evaluate(ind1)
print ind1.fitness.valid
print ind1.fitness
# Mutation (original comment: "突然変異")
mutant = toolbox.clone(ind1)
ind2, = tools.mutGaussian(mutant, mu=0.0, sigma=0.2, indpb=0.2)
# Mutation happened in place: invalidate the stale fitness.
del mutant.fitness.values
print ind2 is mutant
print mutant is ind1
# Crossover (original comment: "交叉")
child1, child2 = [toolbox.clone(ind) for ind in (ind1, ind2)]
tools.cxBlend(child1, child2, 0.5)
del child1.fitness.values
del child2.fitness.values
print ind1, ind2
# Selection (original comment: "選択")
selected = tools.selBest([child1, child2], 2)
print child1 in selected
pop = toolbox.population(n=300)  # generate the initial population (original comment: "初期個体発生")
# Run the canonical simple evolutionary algorithm for 50 generations.
pop, logbook = algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.5, ngen=50, stats=stats, verbose=True)
record = stats.compile(pop)
# Register a seeded-population factory on the toolbox and exercise the basic
# DEAP operators on a few individuals.
toolbox.register("population_seed", initPopulation, pop_size=POP_SIZE, indv=toolbox.individual, num_bars=NUM_BARS)
pop = toolbox.population_seed()
# Freshly created individuals have no fitness values yet.
print(pop[0].fitness.valid)
print(pop[0].fitness)
i1 = pop[0]
def evaluate(individual):
    """Toy single-objective fitness: sum of the genes (as a 1-tuple)."""
    return (sum(individual), )
fitness_i1 = evaluate(i1)
i1.fitness.values = fitness_i1
# Gaussian mutation; mutGaussian returns a 1-tuple, hence the trailing comma.
i2, = tools.mutGaussian(i1, mu=0.0, sigma=0.2, indpb=0.2)
i3 = pop[3]
i4 = pop[4]
print(i3)
# cxBlend mutates both individuals in place.
tools.cxBlend(i3, i4, 0.5)
print(i3)
#very light weight wrapper, that then deals with some of the messy computation for me and forces me to stage the computation correctly
def cxComb(ind1, ind2):
    """Coin-flip combined crossover.

    With probability 1/2 applies one-point crossover, otherwise a blend
    crossover with alpha = 0.0. Both operate in place; the (possibly
    modified) pair is returned.
    """
    if numpy.random.randint(2):
        tools.cxBlend(ind1, ind2, 0.0)
    else:
        tools.cxOnePoint(ind1, ind2)
    return ind1, ind2