def generate_solutions(self, n_solutions=1000):
    """Generate a pool of random candidate solutions for this problem.

    Parameters
    ----------
    n_solutions : int, optional
        number of random solutions to draw (default 1000, the value
        previously hard-coded)

    Returns
    -------
    list
        freshly generated solutions for the problem returned by the
        superclass' ``generate_problem``
    """
    problem = super(NRP_Random, self).generate_problem()
    random_generator = RandomGenerator()
    # one independent random draw per requested solution
    return [random_generator.generate(problem) for _ in range(n_solutions)]
def __init__(self, problem, epsilons, population_size=100,
             generator=None, selector=None,
             recency_list_size=50, max_mutation_index=10, **kwargs):
    """Borg MOEA wrapper around an eps-MOEA instance.

    Parameters
    ----------
    problem : the optimization problem
    epsilons : epsilon values for the epsilon-box archive
    population_size : int, optional
    generator : optional; defaults to a fresh RandomGenerator()
    selector : optional; defaults to a fresh TournamentSelector(2)
    recency_list_size : int, optional
        capacity of the recency list used for restart detection
    max_mutation_index : int, optional
        upper bound on the adaptive mutation index
    """
    # Defaults are created here rather than in the signature: a default
    # such as ``generator=RandomGenerator()`` is evaluated once at
    # function-definition time and then shared by every instance.
    if generator is None:
        generator = RandomGenerator()
    if selector is None:
        selector = TournamentSelector(2)

    super(BorgMOEA, self).__init__(
        EpsMOEA(problem, epsilons, population_size, generator, selector,
                **kwargs))
    self.recency_list = deque()
    self.recency_list_size = recency_list_size
    self.restarted_last_check = False
    self.base_mutation_index = 0
    self.max_mutation_index = max_mutation_index

    # overload the variator and iterate method: Borg auto-adaptively
    # chooses among this fixed set of recombination operators
    self.algorithm.variator = Multimethod(self, [
        GAOperator(SBX(), PM()),
        DifferentialEvolution(),
        UM(),
        PCX(),
        UNDX(),
        SPX()
    ])
    self.algorithm.iterate = self.iterate
def __init__(self, problem: Problem, repairer: Repairer, population_size=100,
             generator=None, selector=None, variator=None,
             archive=None, **kwargs):
    """NSGA-II variant that repairs candidate solutions via ``repairer``.

    Parameters
    ----------
    problem : Problem
    repairer : Repairer
        object used to repair (presumably infeasible) solutions;
        stored for use during iteration
    population_size : int, optional
    generator : optional; defaults to a fresh RandomGenerator()
    selector : optional; defaults to a fresh TournamentSelector(2)
    variator : optional
    archive : optional
    """
    # avoid shared stateful default arguments (evaluated once at
    # definition time); construct fresh instances per call instead
    if generator is None:
        generator = RandomGenerator()
    if selector is None:
        selector = TournamentSelector(2)
    super(NSGAII_Repair, self).__init__(problem, population_size, generator,
                                        selector, variator, archive, **kwargs)
    self.repairer = repairer
def __init__(self, problem, population_size=100, generator=None,
             selector=None, variator=None, archive=None,
             selection_method='nbr_dom',  # 'hv_contr' or 'nbr_dom'
             **kwargs):
    """SMS-EMOA: steady-state EMOA with configurable survival selection.

    Parameters
    ----------
    problem : the optimization problem
    population_size : int, optional
    generator : optional; defaults to a fresh RandomGenerator()
    selector : optional; defaults to a fresh TournamentSelector(2)
    variator : optional
    archive : optional
    selection_method : str, optional
        'hv_contr' (hypervolume contribution) or 'nbr_dom'
        (number of dominating points)
    """
    # construct defaults per call; signature defaults like
    # RandomGenerator() would be shared across all instances
    if generator is None:
        generator = RandomGenerator()
    if selector is None:
        selector = TournamentSelector(2)
    super(SMSEMOA, self).__init__(problem, population_size, generator,
                                  **kwargs)
    self.selector = selector
    self.variator = variator
    self.archive = archive
    self.selection_method = selection_method
def __init__(self, problem, epsilons, population_size=100, generator=RandomGenerator(), selector=TournamentSelector(2), variator=None, **kwargs): self.problem = problem # Parameterization taken from # Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed variators = [ GAOperator( SBX(probability=self.sbx_prop, distribution_index=self.sbx_dist), PM(probability=self.pm_p, distribution_index=self.pm_dist)), GAOperator( PCX(nparents=self.pcx_nparents, noffspring=self.pcx_noffspring, eta=self.pcx_eta, zeta=self.pcx_zeta), PM(probability=self.pm_p, distribution_index=self.pm_dist)), GAOperator( DifferentialEvolution(crossover_rate=self.de_rate, step_size=self.de_stepsize), PM(probability=self.pm_p, distribution_index=self.pm_dist)), GAOperator( UNDX(nparents=self.undx_nparents, noffspring=self.undx_noffspring, zeta=self.undx_zeta, eta=self.undx_eta), PM(probability=self.pm_p, distribution_index=self.pm_dist)), GAOperator( SPX(nparents=self.spx_nparents, noffspring=self.spx_noffspring, expansion=self.spx_expansion), PM(probability=self.pm_p, distribution_index=self.pm_dist)), UM(probability=self.um_p) ] variator = Multimethod(self, variators) super(GenerationalBorg, self).__init__( NSGAII(problem, population_size, generator, selector, variator, EpsilonBoxArchive(epsilons), **kwargs))
def __init__(self, problem, local_search, mutator, population_size=100,
             generator=None, fitness_evaluator=None,
             fitness_comparator=None, variator=None, selector=None,
             **kwargs):
    """IBEA variant extended with a local-search step and periodic mutation.

    Parameters
    ----------
    problem : the optimization problem
    local_search : local-search operator applied during iteration
    mutator : mutation operator, applied every ``mutation_every_n_steps``
    population_size : int, optional
    generator : optional; defaults to a fresh RandomGenerator()
    fitness_evaluator : optional; defaults to HypervolumeFitnessEvaluator()
    fitness_comparator : optional; defaults to
        AttributeDominance(fitness_key, False)
    variator : optional
    selector : optional
    """
    # Build defaults per call. Signature defaults such as
    # HypervolumeFitnessEvaluator() are evaluated once at definition
    # time and would be shared by every instance.
    if generator is None:
        generator = RandomGenerator()
    if fitness_evaluator is None:
        fitness_evaluator = HypervolumeFitnessEvaluator()
    if fitness_comparator is None:
        fitness_comparator = AttributeDominance(fitness_key, False)

    super(IBEA, self).__init__(problem, population_size, generator, **kwargs)
    self.fitness_evaluator = fitness_evaluator
    self.fitness_comparator = fitness_comparator
    self.selector = selector
    self.variator = variator
    # apply the extra mutation step once every 3 iterations
    self.mutation_every_n_steps = 3
    self._cur_step = 0
    self.mutator = mutator
    self.local_search = local_search
def __init__(self, problem, epsilons, population_size=100,
             generator=None, selector=None, variator=None, **kwargs):
    """NSGA-II with Borg's auto-adaptive multi-operator recombination
    and an epsilon-box archive.

    Parameters
    ----------
    problem : the optimization problem; must expose ``parameters``
    epsilons : epsilon values for the EpsilonBoxArchive
    population_size : int, optional
    generator : optional; defaults to a fresh RandomGenerator()
    selector : optional; defaults to a fresh TournamentSelector(2)
    variator : ignored — unconditionally replaced by the Multimethod
        built below; kept for signature compatibility
    """
    # fresh default instances per call; defaults in the signature would
    # be evaluated once and shared across every instantiation
    if generator is None:
        generator = RandomGenerator()
    if selector is None:
        selector = TournamentSelector(2)

    L = len(problem.parameters)

    # -------------------------------------------------------------------
    #                           DefaultValue    BorgValue
    # PM   probability          1.0             1.0 / L
    #      distribution index   20              < 100 (20)
    #      source: Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
    #
    # SBX  probability          1.0             > 0.8 (1.0)
    #      distribution index   15              < 100 (15)
    #      source: Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed;
    #              Simulated Binary Crossover for Continuous Search
    #              Space - Deb, Agrawal
    #
    # PCX  nparents             10              3 (10)
    #      noffspring           2               2-15 (2)
    #      eta                  0.1             (0.1)
    #      zeta                 0.1             (0.1)
    #      source: A Computationally Efficient Evolutionary Algorithm
    #              for Real-Parameter Optimization - Deb et al 2002
    #
    # DE   crossover rate       0.1             0.6 (0.1)
    #      step size            0.5             0.6 (0.5)
    #      source: Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
    #
    # UNDX nparents             10              3 (10)
    #      noffspring           2               2 (2)
    #      zeta                 0.5             0.5
    #      eta                  0.35            0.35/sqrt(L) (0.35)
    #      source: Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed;
    #              A Computationally Efficient Evolutionary Algorithm
    #              for Real-Parameter Optimization - Deb et al 2002
    #
    # SPX  nparents             10              L + 1 (10)
    #      noffspring           2               L + 1 (2)
    #      expansion            None            sqrt((L+1)+1) (3.0)
    #      source: Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed;
    #              Multi-parent Recombination with Simplex Crossover
    #              in Real Coded Genetic Algorithms - Tsutsui
    #
    # UM   probability          1               1.0 / L
    #      source: Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
    # -------------------------------------------------------------------

    variators = [
        GAOperator(SBX(probability=1.0, distribution_index=15.0),
                   PM(probability=1.0 / L, distribution_index=20.0)),
        GAOperator(PCX(nparents=3, noffspring=2, eta=0.1, zeta=0.1),
                   PM(probability=1.0 / L, distribution_index=20.0)),
        GAOperator(
            DifferentialEvolution(crossover_rate=0.6, step_size=0.6),
            PM(probability=1.0 / L, distribution_index=20.0)),
        GAOperator(
            UNDX(nparents=3, noffspring=2,
                 zeta=0.5, eta=0.35 / sqrt(L)),
            PM(probability=1.0 / L, distribution_index=20.0)),
        GAOperator(
            SPX(nparents=L + 1, noffspring=L + 1,
                expansion=sqrt(L + 2)),
            PM(probability=1.0 / L, distribution_index=20.0)),
        UM(probability=1 / L)
    ]

    # Multimethod adaptively selects among the operators above
    variator = Multimethod(self, variators)

    super(NSGAIIHybrid, self).__init__(
        NSGAII(problem, population_size, generator, selector, variator,
               EpsilonBoxArchive(epsilons), **kwargs))
def __init__(self, problem, epsilons, population_size=100,
             generator=None, selector=None, variator=None, **kwargs):
    """NSGA-II with Borg's auto-adaptive operator selection and an
    epsilon-box archive.

    Parameters
    ----------
    problem : the optimization problem; must expose ``nvars``
    epsilons : epsilon values for the EpsilonBoxArchive
    population_size : int, optional
    generator : optional; defaults to a fresh RandomGenerator()
    selector : optional; defaults to a fresh TournamentSelector(2)
    variator : ignored — unconditionally replaced by the Multimethod
        built below; kept for signature compatibility
    """
    # fresh default instances per call; defaults in the signature would
    # be evaluated once and shared across every instantiation
    if generator is None:
        generator = RandomGenerator()
    if selector is None:
        selector = TournamentSelector(2)

    # BUG FIX: ``problem.nvars`` is the integer number of decision
    # variables (the sibling implementation calls len() on the
    # ``problem.parameters`` list instead), so ``len(problem.nvars)``
    # raised TypeError.
    L = problem.nvars
    p = 1 / L

    # Parameterization taken from
    # Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
    variators = [
        GAOperator(SBX(probability=1.0, distribution_index=15.0),
                   PM(probability=p, distribution_index=20.0)),
        GAOperator(PCX(nparents=3, noffspring=2, eta=0.1, zeta=0.1),
                   PM(probability=p, distribution_index=20.0)),
        GAOperator(
            DifferentialEvolution(crossover_rate=0.6, step_size=0.6),
            PM(probability=p, distribution_index=20.0)),
        GAOperator(
            UNDX(nparents=3, noffspring=2, zeta=0.5,
                 eta=0.35 / math.sqrt(L)),
            PM(probability=p, distribution_index=20.0)),
        GAOperator(
            SPX(nparents=L + 1, noffspring=L + 1,
                expansion=math.sqrt(L + 2)),
            PM(probability=p, distribution_index=20.0)),
        UM(probability=1 / L)
    ]

    # Multimethod adaptively selects among the operators above
    variator = Multimethod(self, variators)

    super(GenerationalBorg, self).__init__(
        NSGAII(problem, population_size, generator, selector, variator,
               EpsilonBoxArchive(epsilons), **kwargs))


# class GeneAsGenerationalBorg(EpsilonProgressContinuation):
#     '''A generational implementation of the BORG Framework, combined with
#     the GeneAs approach for heterogeneously typed decision variables
#
#     This algorithm adopts Epsilon Progress Continuation, and Auto Adaptive
#     Operator Selection, but embeds them within the NSGAII generational
#     algorithm, rather than the steady state implementation used by the BORG
#     algorithm.
#
#     Note:: limited to RealParameters only.
#
#     '''
#
#     # TODO::
#     # Addressing the limitation to RealParameters is non-trivial. The best
#     # option seems to be to extend MultiMethod. Have a set of GAOperators
#     # for each datatype.
#     # Next, iterate over datatypes and apply the appropriate operator.
#     # Implementing this in platypus is non-trivial. We probably need to do
#     # some dirty hacking to create 'views' on the relevant part of the
#     # solution that is to be modified by the operator
#     #
#     # A possible solution is to create a wrapper class for the operators.
#     # This class would create the 'view' on the solution. This view should
#     # also have a fake problem description because the number of
#     # decision variables is sometimes used by operators. After applying the
#     # operator to the view, we can then take the results and set these on
#     # the actual solution
#     #
#     # Also: How many operators are there for Integers and Subsets?
#
#     def __init__(self, problem, epsilons, population_size=100,
#                  generator=RandomGenerator(), selector=TournamentSelector(2),
#                  variator=None, **kwargs):
#
#         L = len(problem.parameters)
#         p = 1/L
#
#         # Parameterization taken from
#         # Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
#         real_variators = [GAOperator(SBX(probability=1.0,
#                                          distribution_index=15.0),
#                                      PM(probability=p,
#                                         distribution_index=20.0)),
#                           GAOperator(PCX(nparents=3, noffspring=2,
#                                          eta=0.1, zeta=0.1),
#                                      PM(probability=p,
#                                         distribution_index=20.0)),
#                           GAOperator(DifferentialEvolution(
#                                          crossover_rate=0.6,
#                                          step_size=0.6),
#                                      PM(probability=p,
#                                         distribution_index=20.0)),
#                           GAOperator(UNDX(nparents=3, noffspring=2,
#                                           zeta=0.5,
#                                           eta=0.35/math.sqrt(L)),
#                                      PM(probability=p,
#                                         distribution_index=20.0)),
#                           GAOperator(SPX(nparents=L+1, noffspring=L+1,
#                                          expansion=math.sqrt(L+2)),
#                                      PM(probability=p,
#                                         distribution_index=20.0)),
#                           UM(probability=1/L)]
#
#         # TODO
#         integer_variators = []
#         subset_variators = []
#
#         variators = [VariatorWrapper(variator) for variator in real_variators]
#         variator = Multimethod(self, variators)
#
#         super(GenerationalBorg, self).__init__(
#             NSGAII(problem,
#                    population_size,
#                    generator,
#                    selector,
#                    variator,
#                    EpsilonBoxArchive(epsilons),
#                    **kwargs))
#
#
# class VariatorWrapper(object):
#     def __init__(self, actual_variator, indices, problem):
#         '''
#
#         Parameters
#         ----------
#         actual_variator : underlying GAOperator
#         indices : np.array
#                   indices to which the variator should be applied
#         problem : a representation of the problem considering only the
#                   same kind of Parameters
#
#         '''
#         self.variator = actual_variator
#         self.indices = indices
#
#     def evolve(self, parents):
#         fake_parents = [self.create_view[p] for p in parents]
#         fake_children = self.variator.evolve(fake_parents)
#
#         # tricky, no 1 to 1 mapping between parents and children
#         # some methods have 3 parents, one child
#         children = [map_back]
#         pass