def __init__(self, variant="DE/rand/1/exp", CR=0.1, F=0.75, **kwargs):
    """Configure the DE operators from a ``DE/x/y/z`` variant string.

    The "DE" prefix is discarded; the remaining tokens select the parent
    selection scheme, the number of difference vectors and the mutation type.
    """
    set_default_if_none("real", kwargs)
    super().__init__(**kwargs)

    # decompose "DE/<selection>/<n diffs>/<mutation>"
    _, self.var_selection, self.var_n, self.var_mutation = variant.split("/")

    self.selection = RandomSelection()
    self.crossover = DifferentialEvolutionCrossover(weight=F)
    self.mutation = DifferentialEvolutionMutation(self.var_mutation, CR)
    self.func_display_attrs = disp_single_objective
def __init__(self,
             pop_size=100,
             sampling=LatinHypercubeSampling(iterations=100, criterion="maxmin"),
             variant="DE/rand/1/bin",
             CR=0.5,
             F=0.3,
             dither="vector",
             jitter=False,
             **kwargs):
    """
    Parameters
    ----------
    pop_size : {pop_size}
    sampling : {sampling}
    variant : {{DE/(rand|best)/1/(bin/exp)}}
        The different variants of DE to be used. DE/x/y/z where x how to select individuals to be
        perturbed, y the number of difference vectors to be used and z the crossover type. One of
        the most common variant is DE/rand/1/bin.
    F : float
        The weight to be used during the crossover.
    CR : float
        The probability the individual exchanges variable values from the donor vector.
    dither : {{'no', 'scalar', 'vector'}}
        One strategy to introduce adaptive weights (F) during one run. The option allows
        the same dither to be used in one iteration ('scalar') or a different one for
        each individual ('vector').
    jitter : bool
        Another strategy for adaptive weights (F). Here, only a very small value is added or
        subtracted to the weight used for the crossover for each individual.
    """
    # decompose the variant descriptor "DE/<selection>/<n diffs>/<crossover>"
    _, self.var_selection, self.var_n, self.var_mutation = variant.split("/")

    if self.var_mutation == "exp":
        mutation = ExponentialCrossover(CR)
    elif self.var_mutation == "bin":
        mutation = UniformCrossover(CR)
    else:
        # FIX: an unrecognized crossover token previously left `mutation`
        # unbound and crashed below with UnboundLocalError; fail fast with a
        # clear message instead.
        raise Exception("Unknown crossover type. Must be either 'bin' or 'exp'.")

    super().__init__(pop_size=pop_size,
                     sampling=sampling,
                     selection=RandomSelection(),
                     crossover=DifferentialEvolutionCrossover(weight=F, dither=dither, jitter=jitter),
                     mutation=mutation,
                     survival=None,
                     **kwargs)

    self.func_display_attrs = disp_single_objective
def __init__(self,
             variant="DE/rand/1/bin",
             CR=0.5,
             F=0.3,
             dither="vector",
             jitter=False,
             selection=None,
             crossover=None,
             mutation=None,
             **kwargs):
    """
    Build the DE mating operators from a ``DE/x/y/z`` variant string.

    Any operator passed explicitly (`selection`, `crossover`, `mutation`)
    takes precedence over the one derived from `variant`.
    """
    _, sel, n_diff, mut = variant.split("/")
    self.variant = sel
    self.n_diffs = int(n_diff)

    # "x-to-y" style variants (e.g. rand-to-best) need one extra
    # difference vector.
    if "-to-" in self.variant:
        self.n_diffs += 1

    if selection is None:
        selection = DESelection(sel)

    if mutation is None:
        if mut == "exp":
            mutation = ExponentialCrossover(CR)
        elif mut == "bin":
            mutation = BiasedCrossover(CR)
        else:
            # FIX: an unrecognized crossover token previously passed
            # mutation=None on to super().__init__ silently; raise instead.
            raise Exception("Unknown crossover type. Must be either 'bin' or 'exp'.")

    if crossover is None:
        crossover = DifferentialEvolutionCrossover(n_diffs=self.n_diffs,
                                                   weight=F,
                                                   dither=dither,
                                                   jitter=jitter)

    super().__init__(selection, crossover, mutation, **kwargs)
def de(pop_size=100,
       sampling=LatinHypercubeSampling(criterion="maxmin", iterations=100),
       variant="DE/rand+best/1/bin",
       CR=0.5,
       F=0.75,
       **kwargs):
    """
    Convenience factory for a Differential Evolution algorithm.

    Parameters
    ----------
    pop_size : {pop_size}
    sampling : {sampling}
    variant : str
    CR : float
    F : float

    Returns
    -------
    de : :class:`~pymoo.model.algorithm.Algorithm`
        Returns an DifferentialEvolution algorithm object.
    """
    # only the crossover token of "DE/x/y/z" is used here; the unpacking
    # also validates that the variant string has exactly four components
    _, _selection, _n, _mutation = variant.split("/")

    return DifferentialEvolution(pop_size=pop_size,
                                 sampling=sampling,
                                 selection=RandomSelection(),
                                 crossover=DifferentialEvolutionCrossover(weight=F),
                                 mutation=DifferentialEvolutionMutation(_mutation, CR),
                                 **kwargs)
def __init__(self, variant="DE/rand+best/1/bin", CR=0.5, F=0.75, n_replace=None, **kwargs):
    """Fill in DE operator defaults from the variant string; explicitly
    provided kwargs always take precedence via `set_if_none`."""
    _, self.var_selection, self.var_n, self.var_mutation = variant.split("/")

    # default operators, applied only where the caller supplied none
    set_if_none(kwargs, 'pop_size', 200)
    set_if_none(kwargs, 'sampling', LatinHypercubeSampling(criterion="maxmin", iterations=100))
    set_if_none(kwargs, 'crossover', DifferentialEvolutionCrossover(weight=F))
    set_if_none(kwargs, 'selection', RandomSelection())
    set_if_none(kwargs, 'mutation', DifferentialEvolutionMutation(self.var_mutation, CR))
    set_if_none(kwargs, 'survival', None)

    super().__init__(**kwargs)

    # how many population members may be replaced per iteration
    # (None presumably means unlimited — confirm against the caller)
    self.n_replace = n_replace
    self.func_display_attrs = disp_single_objective
class DifferentialEvolution(GeneticAlgorithm):
    # Differential Evolution built on the generic GeneticAlgorithm loop.
    # The variant string "DE/x/y/z" selects parent selection (x), the number
    # of difference vectors (y) and the crossover/mutation scheme (z).

    def __init__(self, variant="DE/rand/1/exp", CR=0.1, F=0.75, **kwargs):
        """Set up the DE operators according to `variant`, `CR` and `F`."""
        set_default_if_none("real", kwargs)
        super().__init__(**kwargs)
        self.selection = RandomSelection()
        self.crossover = DifferentialEvolutionCrossover(weight=F)
        # decompose "DE/<selection>/<n diffs>/<mutation>"
        _, self.var_selection, self.var_n, self.var_mutation, = variant.split(
            "/")
        self.mutation = DifferentialEvolutionMutation(self.var_mutation, CR)
        self.func_display_attrs = disp_single_objective

    def _next(self, pop):
        """Run one DE generation on `pop` and return the updated population."""
        # create offsprings and add it to the data of the algorithm
        if self.var_selection == "rand":
            P = self.selection.do(pop, self.pop_size, self.crossover.n_parents)
        elif self.var_selection == "best":
            # select one parent fewer, then force the current best individual
            # into the first parent slot of every mating
            P = self.selection.do(pop, self.pop_size, self.crossover.n_parents - 1)
            # argmin over the first objective — assumes single-objective F
            best = np.argmin(pop.get("F")[:, 0])
            P = np.hstack([np.full(len(pop), best)[:, None], P])
        else:
            raise Exception("Unknown selection: %s" % self.var_selection)

        self.off = self.crossover.do(self.problem, pop, P)

        # do the mutation by using the offsprings
        # NOTE(review): `pop` is passed here while the offsprings are exposed
        # via `self.off`/`algorithm` — presumably the mutation operator reads
        # them from the algorithm object; confirm against
        # DifferentialEvolutionMutation.
        self.off = self.mutation.do(self.problem, pop, algorithm=self)

        # evaluate the results
        self.evaluator.eval(self.problem, self.off, algorithm=self)

        # replace whenever offspring is better than population member
        # (one-to-one greedy replacement, standard in DE)
        for i in range(len(pop)):
            if self.off[i].F < pop[i].F:
                pop[i] = self.off[i]

        return pop
def __init__(self, variant, CR, F, dither, jitter, **kwargs):
    """
    Configure the DE operators from the ``DE/x/y/z`` `variant` string.

    Defaults are injected with `set_if_none`, so any operator passed
    explicitly through `kwargs` takes precedence.

    Parameters
    ----------
    variant : str
        Variant descriptor "DE/<selection>/<n diffs>/<crossover>".
    CR : float
        Probability used by the exp/bin crossover.
    F : float
        Differential weight forwarded to the DE crossover.
    dither : str
        Dither strategy forwarded to the DE crossover.
    jitter : bool
        Whether jitter is applied to the weight in the DE crossover.
    """
    _, self.var_selection, self.var_n, self.var_mutation = variant.split("/")

    set_if_none(kwargs, 'pop_size', 200)
    set_if_none(kwargs, 'sampling', LatinHypercubeSampling(criterion="maxmin", iterations=100))
    set_if_none(kwargs, 'crossover', DifferentialEvolutionCrossover(weight=F, dither=dither, jitter=jitter))
    set_if_none(kwargs, 'selection', RandomSelection())

    if self.var_mutation == "exp":
        set_if_none(kwargs, 'mutation', ExponentialCrossover(CR))
    elif self.var_mutation == "bin":
        set_if_none(kwargs, 'mutation', UniformCrossover(CR))
    else:
        # FIX: an unrecognized crossover token previously fell through without
        # adding a 'mutation' entry to kwargs at all; fail fast instead.
        raise Exception("Unknown crossover type. Must be either 'bin' or 'exp'.")

    set_if_none(kwargs, 'survival', None)
    super().__init__(**kwargs)
    self.func_display_attrs = disp_single_objective
def do(self, problem, pop, n_offsprings, **kwargs):
    """Pick one operator combination at random, then delegate the actual
    mating to the parent implementation."""
    # NOTE: the four RNG draws must stay in this exact order so that runs
    # with a fixed seed remain reproducible
    self.selection = random.choice(self.selections)
    self.crossover = random.choice(self.crossovers)
    self.mutation = random.choice(self.mutations)
    self.repair = random.choice(self.repairs)

    offspring = super().do(problem, pop, n_offsprings, **kwargs)
    return offspring

selections = [RandomSelection()]

# define all the crossovers to be tried
crossovers = [
    SimulatedBinaryCrossover(10.0),
    SimulatedBinaryCrossover(30.0),
    DifferentialEvolutionCrossover(),
]
# COMMENT out this line to only use the SBX crossover with one eta value
# crossovers = [SimulatedBinaryCrossover(30)]

mutations = [NoMutation(), PolynomialMutation(10.0), PolynomialMutation(30.0)]
repairs = []

ensemble = EnsembleMating(selections, crossovers, mutations, repairs)

problem = Rastrigin(n_var=30)

algorithm = GA(pop_size=100, mating=ensemble, eliminate_duplicates=True)
# NOTE(review): this chunk begins mid-method — the enclosing `do(...)`
# signature is not visible here — and duplicates the ensemble-mating example
# above, adding only the final `res = minimize(...)` call. Left byte-identical
# because it cannot be reformatted without guessing the missing definition;
# consider removing the duplication at the source.
self.selection = random.choice(self.selections) self.crossover = random.choice(self.crossovers) self.mutation = random.choice(self.mutations) self.repair = random.choice(self.repairs) off = super().do(problem, pop, n_offsprings, **kwargs) return off selections = [RandomSelection()] # define all the crossovers to be tried crossovers = [ SimulatedBinaryCrossover(10.0), SimulatedBinaryCrossover(30.0), DifferentialEvolutionCrossover() ] # COMMENT out this line to only use the SBX crossover with one eta value # crossovers = [SimulatedBinaryCrossover(30)] mutations = [NoMutation(), PolynomialMutation(10.0), PolynomialMutation(30.0)] repairs = [] ensemble = EnsembleMating(selections, crossovers, mutations, repairs) problem = Rastrigin(n_var=30) algorithm = GA(pop_size=100, mating=ensemble, eliminate_duplicates=True) res = minimize(problem, algorithm, seed=1, verbose=True)