def customCrossover(ind1, ind2):
    return tools.cxSimulatedBinaryBounded(
        ind1, ind2, eta=ETA_C,
        low=[limits[j][0] for j in range(10)] + [limits[j][0] for j in range(10)],
        up=[limits[j][1] for j in range(10)] + [limits[j][1] for j in range(10)])
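# A minimal sketch of the surrounding setup that customCrossover() assumes.
# ETA_C and limits are module-level names in the snippet above; the values and
# the toolbox registration below are illustrative assumptions, not the source's.
from deap import base, tools

ETA_C = 20.0
limits = [(0.0, 1.0)] * 10          # (lower, upper) bound per base gene

toolbox = base.Toolbox()
toolbox.register("mate", customCrossover)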
def crossover(ind1, ind2, eta):
    """Performs a simulated binary bounded crossover on two individuals.

    :ind1: first parent
    :ind2: second parent
    :returns: the two offspring as a tuple
    """
    # get the genomes
    genome1 = ind1.get_values()
    genome2 = ind2.get_values()
    # perform the crossover only on the genomes
    genome1, genome2 = tools.cxSimulatedBinaryBounded(
        genome1, genome2,
        eta,   # eta_c: 2.0 == wide search; 5.0 == narrow search
        0.0,   # lower bound
        1.0)   # upper bound
    # write the recombined genomes back into the individuals
    ind1.set_values(genome1)
    ind2.set_values(genome2)
    return (ind1, ind2)
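# Sketch of the individual interface that crossover() above assumes: any object
# exposing get_values()/set_values() over a genome normalised to [0, 1], with
# `tools` imported from deap in scope. SimpleIndividual is a hypothetical
# helper used only for illustration.
class SimpleIndividual:
    def __init__(self, values):
        self._values = list(values)

    def get_values(self):
        return self._values

    def set_values(self, values):
        self._values = list(values)

a = SimpleIndividual([0.1, 0.4, 0.9])
b = SimpleIndividual([0.8, 0.2, 0.5])
a, b = crossover(a, b, eta=2.0)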
def mate(self, mate):
    self_allele, mate_allele = cxSimulatedBinaryBounded(
        self.allele, mate.allele,
        eta=self.cxeta,
        low=-0.5 * self.flexibility,
        up=0.5 * self.flexibility)
    self.allele[:] = [round(n, self.precision) for n in self_allele]
    mate.allele[:] = [round(n, self.precision) for n in mate_allele]
def genetic(evaluate_func, limits, generations=300, popsize=20, elitepercent=.1,
            crossoveredpercent=.4, sbbx_eta=1.0, log=False, callback=None):
    assert elitepercent + crossoveredpercent <= 1.0
    elitesize = int(popsize * elitepercent)
    crossoveredsize = int(popsize * crossoveredpercent)
    # make the crossover pool size even
    crossoveredsize = crossoveredsize - 1 if crossoveredsize & 1 else crossoveredsize
    mutatedsize = popsize - elitesize - crossoveredsize
    assert mutatedsize >= 0

    toolbox.register("individual", tools.initIterate, creator.Individual, get_ind_function(limits))
    #toolbox.register("population", tools.initRepeat, list, toolbox.individual)
    toolbox.register("evaluate", evaluate_func)
    toolbox.register("mutate", get_mutate_func(limits))
    toolbox.register("mate", lambda a, b: tools.cxSimulatedBinaryBounded(
        a, b, sbbx_eta,
        low=[t[0] for t in limits],
        up=[t[1] for t in limits]))
    #toolbox.register("select", tools.selTournament, tournsize=3)
    #toolbox.register("select", tools.selBest)

    pop = [toolbox.individual() for _ in range(popsize)]
    #logbook = tools.Logbook()

    for gen in range(generations):
        update_fitnesses(pop)
        #record = stats.compile(pop)
        #logbook.record(gen=gen, **record)
        best = max(pop, key=lambda x: x.fitness.values[0])
        if log:
            print("Gen {}: best: {} !!! {}".format(gen, best, best.fitness.values[0]))  #record['max']
        if callback is not None:
            #if callback([(t, t.fitness.values[0]) for t in pop]) == True:
            if callback(best.fitness.values[0]) == True:
                break

        elite = tools.selBest(pop, k=elitesize)

        crossovered = list(map(toolbox.clone, tools.selTournament(pop, k=crossoveredsize, tournsize=2)))
        for child1, child2 in zip(crossovered[::2], crossovered[1::2]):
            toolbox.mate(child1, child2)
            del child1.fitness.values
            del child2.fitness.values

        mutated = list(map(toolbox.clone, random.sample(pop, mutatedsize)))
        for mutant in mutated:
            toolbox.mutate(mutant)
            del mutant.fitness.values

        pop[:] = elite + crossovered + mutated

    return best, best.fitness.values[0]  #logbook
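# Hypothetical call to genetic(): a toy problem maximising -sum(x^2), i.e.
# minimising the sphere function. This sketch assumes the module-level toolbox,
# creator.Individual, get_ind_function, get_mutate_func and update_fitnesses
# referenced above are defined elsewhere in the module.
def evaluate(ind):
    return (-sum(x * x for x in ind),)

limits = [(-5.0, 5.0)] * 3
best, best_fitness = genetic(evaluate, limits, generations=50, popsize=30, log=True)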
def my_crossover(self, LL, UU, ind1, ind2):
    t = self.t
    typeOfInput = len(t)
    ind11 = []
    ind22 = []
    for i in range(typeOfInput):
        if t[i] == 'float':
            # bounded SBX on the single float gene
            ind1var1, ind2var1 = tools.cxSimulatedBinaryBounded(
                [ind1[i]], [ind2[i]], low=LL[i], up=UU[i], eta=20.0)
            ind11.append(ind1var1[0])
            ind22.append(ind2var1[0])
        elif t[i] == 'int':
            # uniform crossover on the single integer gene
            ind1var2, ind2var2 = tools.cxUniform([ind1[i]], [ind2[i]], indpb=0.9)
            ind11.append(ind1var2[0])
            ind22.append(ind2var2[0])
        elif t[i] == 'bool':
            # swap the boolean gene with probability 0.5
            toss = random.random()
            if toss > 0.5:
                ind11.append(ind2[i])
                ind22.append(ind1[i])
            else:
                # keep the parents' genes; otherwise this position would be dropped
                ind11.append(ind1[i])
                ind22.append(ind2[i])
    return ind11, ind22
def init_algorithm(self, params):
    toolbox = base.Toolbox()
    ind_size = self.problem.ind_size
    ngen = params['generations']
    nind = params['num_individuals']
    cxpb, mutpb, lb, ub, mu, lam = 0.7, 0.2, -1.0, 1.0, nind, nind

    if hasattr(creator, 'FitnessMin') is False:
        creator.create('FitnessMin', base.Fitness, weights=(-1.0, ))
    if hasattr(creator, 'Individual') is False:
        kw0 = {'typecode': 'd', 'fitness': creator.FitnessMin}
        creator.create('Individual', array.array, **kw0)

    atr = lambda: [random.uniform(lb, ub) for _ in range(ind_size)]
    ind = lambda: tools.initIterate(creator.Individual, atr)
    population = [ind() for _ in range(nind)]

    kw1 = {'low': lb, 'up': ub, 'eta': 20.0, 'indpb': 1.0 / ind_size}
    mut = lambda xs: tools.mutPolynomialBounded(xs, **kw1)
    kw2 = {'low': lb, 'up': ub, 'eta': 20.0}
    crs = lambda i1, i2: tools.cxSimulatedBinaryBounded(i1, i2, **kw2)
    sel = lambda p, n: tools.selTournament(p, n, tournsize=3)

    toolbox.register('evaluate', self.problem.objective_function)
    toolbox.register('mate', crs)
    toolbox.register('mutate', mut)
    toolbox.register('select', sel)

    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register('min', np.min)
    self.hof = tools.HallOfFame(5)

    args = (population, toolbox, mu, lam, cxpb, mutpb, ngen)
    kw3 = {'stats': stats, 'halloffame': self.hof, 'verbose': True}
    self.algorithm = lambda: algorithms.eaMuPlusLambda(*args, **kw3)
def nsga2_1iter(self, current_pop):
    """
    Run one iteration of the NSGA-II optimizer.

    Based on the crossover and mutation probabilities, offspring are
    created and evaluated. Based on the fitness of the offspring and of
    the current population, a new population is created.

    Parameters
    ----------
    current_pop : list
        The initial population.

    Returns
    -------
    new_pop : list
        The updated population.
    """
    # create offspring as clones of the current population
    offspring = [deepcopy(ind) for ind in current_pop]

    # perform crossover
    for ind1, ind2 in zip(offspring[::2], offspring[1::2]):
        if random.random() < self.run_dict['cx prob']:
            # apply the crossover operator (in-place editing of individuals)
            tools.cxSimulatedBinaryBounded(ind1, ind2,
                                           self.run_dict['eta'],
                                           self.space_obj.l_b,
                                           self.space_obj.u_b)
            # invalidate the fitness of the modified individuals
            del ind1.fitness.values
            del ind2.fitness.values

    # perform mutation
    for mutant in offspring:
        if random.random() < self.run_dict['mut prob']:
            # apply the mutation operator (in-place editing of individuals)
            tools.mutPolynomialBounded(mutant,
                                       self.run_dict['eta'],
                                       self.space_obj.l_b,
                                       self.space_obj.u_b,
                                       self.run_dict['mut prob'])
            # invalidate the fitness of the modified individual
            del mutant.fitness.values

    # evaluate the modified individuals
    n_eval = 0
    invalid_indices = []
    invalid_fit_individuals = []
    for i, ind in enumerate(offspring):
        if not ind.fitness.valid:
            invalid_indices.append(i)
            invalid_fit_individuals.append(ind)

    if len(invalid_fit_individuals) > 0:
        # create samples to evaluate
        individuals_to_eval, unc_samples = self.define_samples_to_eval(
            invalid_fit_individuals)

        # evaluate samples
        fitnesses = self.evaluate_samples(individuals_to_eval)
        n_eval += len(individuals_to_eval)

        # assign fitness to the original samples list
        individuals_to_assign = self.assign_fitness_to_population(
            invalid_fit_individuals, fitnesses, unc_samples)

        # construct the offspring list
        for i, ind in zip(invalid_indices, individuals_to_assign):
            offspring[i] = deepcopy(ind)

    # select the next population using the NSGA-II operator
    new_pop = tools.selNSGA2(current_pop + offspring, len(current_pop))

    # update the population and fitness files
    self.append_points_to_file(new_pop, 'population')
    fitness_values = [x.fitness.values for x in new_pop]
    self.append_points_to_file(fitness_values, 'fitness')

    # update the STATUS file
    ite, evals = self.parse_status()
    self.write_status('%8i%8i' % (ite + 1, evals + n_eval))

    return new_pop
def init_algorithm(self, params):
    if params['algorithm_class'] not in ['simple', 'multiobjective']:
        raise ValueError('Non-existent algorithm class.')

    toolbox = base.Toolbox()
    ngen = params['generations']
    nind = params['num_individuals']
    cxpb = 0.5 if params['algorithm_class'] == 'simple' else 0.9
    lb, ub = -1.0, 1.0
    ind_size = self.problem.ind_size

    if nind % 4 != 0:
        raise ValueError('Number of individuals must be a multiple of four')

    if hasattr(creator, 'FitnessMin') is False:
        creator.create('FitnessMin', base.Fitness, weights=(-1.0, -1.0))
    if hasattr(creator, 'Individual') is False:
        creator.create('Individual', array.array, typecode='d',
                       fitness=creator.FitnessMin)

    atr = lambda: [random.uniform(lb, ub) for _ in range(ind_size)]
    ind = lambda: tools.initIterate(creator.Individual, atr)
    population = [ind() for _ in range(nind)]

    if params['algorithm_class'] == 'simple':
        self.hof = tools.HallOfFame(1)
        mut = lambda xs: tools.mutGaussian(xs, mu=0, sigma=1, indpb=0.1)
        crs = tools.cxTwoPoint
        sel = lambda p, n: tools.selTournament(p, n, tournsize=3)
    else:
        self.hof = tools.ParetoFront()
        mut = lambda xs: tools.mutPolynomialBounded(
            xs, low=lb, up=ub, eta=20.0, indpb=1.0 / ind_size)
        crs = lambda ind1, ind2: tools.cxSimulatedBinaryBounded(
            ind1, ind2, low=lb, up=ub, eta=20.0)
        sel = tools.selNSGA2

    toolbox.register('evaluate', self.problem.objective_function)
    toolbox.register('mate', crs)
    toolbox.register('mutate', mut)
    toolbox.register('select', sel)

    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register('avg', np.mean, axis=0)
    stats.register('std', np.std, axis=0)
    stats.register('min', np.min, axis=0)
    stats.register('max', np.max, axis=0)

    args = (population, toolbox)
    kwargs = {'cxpb': cxpb, 'ngen': ngen, 'stats': stats}
    kwargs['halloffame'] = self.hof
    if params['algorithm_class'] == 'simple':
        kwargs['mutpb'] = 0.2
        kwargs['verbose'] = True
        self.algorithm = lambda: algorithms.eaSimple(*args, **kwargs)
    else:
        self.algorithm = lambda: self.multiobjective(*args, **kwargs)
def __init__(self, params: AlgorithmParameters):
    """Initialize the genetic algorithm that will solve the control synthesis problem.

    The params object has the following attributes:

    num_control_params
        The number of control parameters used in a solution.
    generations
        The number of iterations of the genetic algorithm to run.
    """
    assert len(params.lower_bounds) == len(params.upper_bounds)
    self.ind_size = len(params.lower_bounds)
    ngen = params.generations
    nind = params.num_individuals
    cxpb, mutpb, mu, lam = params.crossover_prob, params.mutation_prob, nind, nind

    # Here we are creating two new types: the FitnessMin type and the Individual type.
    # FitnessMin inherits from deap.base.Fitness and has a new weights attribute that is
    # a tuple. For single-objective optimization, the weights attribute has one element;
    # for multi-objective optimization it will have multiple elements. A negative weight
    # indicates this is a minimization problem.
    if hasattr(creator, 'FitnessMin') is False:
        creator.create('FitnessMin', base.Fitness, weights=(-1.0, ))

    # The Individual type inherits from array.array (a list could be used instead) and it
    # needs a fitness attribute to know how to calculate the fitness of the individual.
    # Arrays store only one type of data, set by the typecode parameter; 'd' is for double.
    if hasattr(creator, 'Individual') is False:
        kw0 = {'typecode': 'd', 'fitness': creator.FitnessMin}
        creator.create('Individual', array.array, **kw0)

    # Create the initial list of individuals.
    atr = lambda: [
        random.uniform(lb, ub)
        for lb, ub in zip(params.lower_bounds, params.upper_bounds)
    ]
    ind = lambda: creator.Individual(atr())
    population = [ind() for _ in range(nind)]

    # The toolbox is a container to store partial functions. In particular we want the
    # evaluate, mate, mutate, and select genetic algorithm functions defined here.
    toolbox = base.Toolbox()
    kw1 = {
        'low': params.lower_bounds,
        'up': params.upper_bounds,
        'eta': 20.0,
        'indpb': 1.0 / self.ind_size
    }
    mut = lambda xs: tools.mutPolynomialBounded(xs, **kw1)
    kw2 = {
        'low': params.lower_bounds,
        'up': params.upper_bounds,
        'eta': 20.0
    }
    crs = lambda i1, i2: tools.cxSimulatedBinaryBounded(i1, i2, **kw2)
    sel = lambda p, n: tools.selTournament(p, n, tournsize=3)

    toolbox.register('evaluate', params.cost_function)
    toolbox.register('mate', crs)
    toolbox.register('mutate', mut)
    toolbox.register('select', sel)

    # DEAP provides objects that will record algorithm statistics and keep track of the
    # best solutions.
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register('min', np.min)
    self.hof = tools.HallOfFame(5)

    # Setting up the DEAP library for the genetic algorithm comes down to creating and
    # executing an algorithm function. There are a number of pre-built algorithms that
    # can be used rather than coding your own. The eaMuPlusLambda algorithm requires
    # the parameters that have been generated above.
    args = (population, toolbox, mu, lam, cxpb, mutpb, ngen)
    kw3 = {'stats': stats, 'halloffame': self.hof, 'verbose': True}
    self.algorithm = lambda: algorithms.eaMuPlusLambda(*args, **kw3)
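# Hypothetical driver for the class above. It assumes AlgorithmParameters is a simple
# container with the fields referenced in __init__, and that the enclosing class is
# named ControlSynthesisGA (a placeholder name, not taken from the source).
params = AlgorithmParameters(
    lower_bounds=[-1.0] * 4,
    upper_bounds=[1.0] * 4,
    generations=100,
    num_individuals=40,
    crossover_prob=0.7,
    mutation_prob=0.2,
    cost_function=lambda ind: (sum(x * x for x in ind),),   # DEAP expects a tuple fitness
)
solver = ControlSynthesisGA(params)
final_pop, logbook = solver.algorithm()
best = solver.hof[0]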
def genarate_1(self, parent1, parent2, countFigure):
    self.gen.append(self.mateTypeOrder(parent1, parent2, countFigure))
    # bounded SBX on the second chromosome, with a random eta drawn from [15, 20]
    newones = cxSimulatedBinaryBounded(parent1.gen[1], parent2.gen[1],
                                       uniform(15, 20), 0, 1)
    # keep one of the two offspring at random
    self.gen.append(newones[randint(0, 1)])
    self.generateImg()