def initialize(self):
        """Seed the population via the parent class, then lazily fill defaults."""
        super(IBEA, self).initialize()

        # Assign indicator-based fitness to the freshly generated population.
        self.fitness_evaluator.evaluate(self.population)

        # Fall back to problem-appropriate defaults when the caller gave none.
        if self.selector is None:
            self.selector = TournamentSelector(2, self.fitness_comparator)
        if self.variator is None:
            self.variator = default_variator(self.problem)
# Esempio n. 2
    def __init__(self,
                 problem,
                 epsilons,
                 population_size=100,
                 generator=RandomGenerator(),
                 selector=TournamentSelector(2),
                 recency_list_size=50,
                 max_mutation_index=10,
                 **kwargs):
        """Wrap an EpsMOEA instance and install Borg's adaptive machinery."""
        inner = EpsMOEA(problem, epsilons, population_size, generator,
                        selector, **kwargs)
        super(BorgMOEA, self).__init__(inner)

        # Restart / mutation-scaling bookkeeping.
        self.recency_list = deque()
        self.recency_list_size = recency_list_size
        self.restarted_last_check = False
        self.base_mutation_index = 0
        self.max_mutation_index = max_mutation_index

        # Replace the wrapped algorithm's variator with an auto-adaptive
        # multi-operator and take over its iterate method.
        operators = [
            GAOperator(SBX(), PM()),
            DifferentialEvolution(),
            UM(),
            PCX(),
            UNDX(),
            SPX(),
        ]
        self.algorithm.variator = Multimethod(self, operators)
        self.algorithm.iterate = self.iterate
def get_critical_nodes():
    """Run NSGA-II with Nash dominance on BOCNDP; print and return the best
    solution's objective vector."""
    solver = NSGAII(BOCNDP(),
                    selector=TournamentSelector(dominance=NashDominance()),
                    archive=NashArchive())
    solver.run(100)

    objectives = solver.result[0].objectives
    print(objectives)
    return objectives
def get_critical_nodes():
    """Run NSGA-II with Berge dominance on CNDP; print and return the best
    solution's first objective value."""
    solver = NSGAII(CNDP(),
                    selector=TournamentSelector(dominance=BergeDominance()),
                    archive=BergeArchive())
    solver.run(1000)

    best = solver.result[0].objectives[0]
    print(best)
    return best
# Esempio n. 5
 def __init__(self,
              problem: Problem,
              repairer: Repairer,
              population_size=100,
              generator=RandomGenerator(),
              selector=TournamentSelector(2),
              variator=None,
              archive=None,
              **kwargs):
     """NSGA-II variant that keeps a Repairer for fixing solutions.

     All standard configuration is delegated verbatim to the NSGAII base
     class; only ``repairer`` is new.
     """
     super(NSGAII_Repair,
           self).__init__(problem, population_size, generator, selector,
                          variator, archive, **kwargs)
     # Repair operator; where it is invoked is not visible in this method.
     self.repairer = repairer
# Esempio n. 6
 def __init__(self, problem,
              population_size = 100,
              generator = RandomGenerator(),
              selector = TournamentSelector(2),
              variator = None,
              archive = None,
              selection_method = 'nbr_dom', # hv_contr or nbr_dom
              **kwargs):
     """SMS-EMOA constructor.

     Only problem/population_size/generator are forwarded to the base class;
     selector, variator, archive and selection_method ('nbr_dom' or
     'hv_contr', per the inline comment) are stored directly on the instance.
     """
     super(SMSEMOA, self).__init__(problem, population_size, generator, **kwargs)
     self.selector = selector
     self.variator = variator
     self.archive = archive
     self.selection_method = selection_method
# Esempio n. 7
    def __init__(self,
                 problem,
                 epsilons,
                 population_size=100,
                 generator=RandomGenerator(),
                 selector=TournamentSelector(2),
                 variator=None,
                 **kwargs):
        """Wrap NSGA-II (with an epsilon-box archive) behind an auto-adaptive
        multi-operator variator, following the Borg parameterization.

        NOTE(review): the operator parameters below read ``self.sbx_prop``,
        ``self.pm_p``, ``self.pcx_*`` etc., none of which are assigned in
        this method — presumably class-level attributes or set by a subclass
        before this runs; confirm.

        NOTE(review): the ``variator`` argument is ignored — it is replaced
        by the Multimethod built below; confirm this is intentional.
        """
        self.problem = problem

        # Parameterization taken from
        # Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
        variators = [
            GAOperator(
                SBX(probability=self.sbx_prop,
                    distribution_index=self.sbx_dist),
                PM(probability=self.pm_p, distribution_index=self.pm_dist)),
            GAOperator(
                PCX(nparents=self.pcx_nparents,
                    noffspring=self.pcx_noffspring,
                    eta=self.pcx_eta,
                    zeta=self.pcx_zeta),
                PM(probability=self.pm_p, distribution_index=self.pm_dist)),
            GAOperator(
                DifferentialEvolution(crossover_rate=self.de_rate,
                                      step_size=self.de_stepsize),
                PM(probability=self.pm_p, distribution_index=self.pm_dist)),
            GAOperator(
                UNDX(nparents=self.undx_nparents,
                     noffspring=self.undx_noffspring,
                     zeta=self.undx_zeta,
                     eta=self.undx_eta),
                PM(probability=self.pm_p, distribution_index=self.pm_dist)),
            GAOperator(
                SPX(nparents=self.spx_nparents,
                    noffspring=self.spx_noffspring,
                    expansion=self.spx_expansion),
                PM(probability=self.pm_p, distribution_index=self.pm_dist)),
            UM(probability=self.um_p)
        ]

        # Adaptive operator selection across the portfolio above.
        variator = Multimethod(self, variators)

        super(GenerationalBorg, self).__init__(
            NSGAII(problem, population_size, generator, selector, variator,
                   EpsilonBoxArchive(epsilons), **kwargs))
# Esempio n. 8
def moea(name, solsize, popsize, wscalar_, moea_type, max_gen=float('inf'), timeLimit=float('inf')):
    """Run a binary-encoded MOEA on the knapsack-style problem in ``wscalar_``.

    Parameters
    ----------
    name : str
        Label used only for logging.
    solsize : int
        Population size for the final step (number of solutions returned).
    popsize : int
        Population size during the main optimization loop.
    wscalar_ : object
        Problem wrapper exposing ``N`` (bits), ``M`` (objectives) and ``fobj``.
    moea_type : str
        One of: NSGAII(-2/-4), NSGAIII(-2/-4), SPEA2(-2/-4), SMSdom, SMShv.
    max_gen, timeLimit : float
        Stopping criteria: generation count and wall-clock seconds.

    Returns
    -------
    tuple(list, float)
        Objective values of the final population, and elapsed seconds.

    Raises
    ------
    ValueError
        If ``moea_type`` is not one of the supported identifiers.
    """
    from platypus import HUX, BitFlip, TournamentSelector
    from platypus import Problem, Binary
    from platypus import NSGAII, NSGAIII, SPEA2

    from platyplus.operators import varOr
    from platyplus.algorithms import SMSEMOA

    time_start = time.perf_counter()
    logger.info('Running '+moea_type+' in '+name)

    prMutation = 0.1
    prVariation = 1 - prMutation

    # (renamed from misspelled local ``vartor``)
    variator = varOr(HUX(), BitFlip(1), prVariation, prMutation)

    def evalKnapsack(x):
        # Each decision variable is a 1-bit Binary; unwrap to a flat bit list.
        return wscalar_.fobj([xi[0] for xi in x])

    problem = Problem(wscalar_.N, wscalar_.M)
    problem.types[:] = [Binary(1) for i in range(wscalar_.N)]
    problem.function = evalKnapsack

    if moea_type in ['NSGAII', 'NSGAII-2', 'NSGAII-4']:
        alg = NSGAII(problem, population_size=popsize,
                     selector=TournamentSelector(1),
                     variator=variator)
    elif moea_type in ['NSGAIII', 'NSGAIII-2', 'NSGAIII-4']:
        alg = NSGAIII(problem, divisions_outer=3,
                      population_size=popsize,
                      selector=TournamentSelector(1),
                      variator=variator)
    elif moea_type in ['SPEA2', 'SPEA2-2', 'SPEA2-4']:
        alg = SPEA2(problem, population_size=popsize,
                    selector=TournamentSelector(1),
                    variator=variator)
    elif moea_type in ['SMSdom']:
        alg = SMSEMOA(problem, population_size=popsize,
                      selector=TournamentSelector(1),
                      variator=variator,
                      selection_method='nbr_dom')
    elif moea_type in ['SMShv']:
        alg = SMSEMOA(problem, population_size=popsize,
                      selector=TournamentSelector(1),
                      variator=variator,
                      selection_method='hv_contr')
    else:
        # Previously an unknown moea_type fell through and crashed later with
        # a NameError on ``alg``; fail fast with a clear message instead.
        raise ValueError('unknown moea_type: ' + repr(moea_type))

    gen = 1
    while gen < max_gen and time.perf_counter() - time_start < timeLimit:
        alg.step()
        gen += 1

    # One final step at the requested output population size.
    alg.population_size = solsize
    alg.step()

    moeaSols = [evalKnapsack(s.variables) for s in alg.result]

    moea_time = time.perf_counter() - time_start

    logger.info(moea_type + ' in ' + name + ' finished.')

    return moeaSols, moea_time
# Esempio n. 9
    def __init__(self,
                 problem,
                 epsilons,
                 population_size=100,
                 generator=RandomGenerator(),
                 selector=TournamentSelector(2),
                 variator=None,
                 **kwargs):
        """Wrap NSGA-II (with an epsilon-box archive) with Borg's adaptive
        multi-operator variator; parameter table below gives sources.

        NOTE(review): the ``variator`` argument is ignored — it is replaced
        by the Multimethod constructed below; confirm this is intentional.
        """

        # Number of decision variables; assumes ``problem.parameters`` is a
        # sized collection (ema_workbench-style Problem).
        L = len(problem.parameters)

        # -------------------------------------------------------------------
        #                           DefaultValue            BorgValue
        # PM   probability          1.0                     1.0 / L
        #      distribution index   20                      < 100 (20)
        #      source     Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
        #
        # SBX  probability          1.0                     > 0.8 (1.0)
        #      distribution index   15                      < 100 (15)
        #      source     Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed;
        #                 Simulated Binary Crossover for Continuous Search
        #                 Space - Deb, Agrawal
        #
        # PCX  nparents             10                      3 (10)
        #      noffspring           2                       2-15 (2)
        #      eta                  0.1                     (0.1)
        #      zeta                 0.1                     (0.1)
        #      source     A Computationally Efficient Evolutionary Algorithm
        #                 for Real-Parameter Optimization - Deb et al 2002
        #
        # DE   crossover rate       0.1                     0.6 (0.1)
        #      step size            0.5                     0.6 (0.5)
        #      source     Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
        #
        # UNDX nparents             10                      3 (10)
        #      noffspring           2                       2 (2)
        #      zeta                 0.5                     0.5
        #      eta                  0.35                    0.35/sqrt(L) (0.35)
        #      source     Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed;
        #                 A Computationally Efficient Evolutionary Algorithm
        #                 for Real-Parameter Optimization - Deb et al 2002
        #
        # SPX  nparents             10                      L + 1 (10)
        #      noffspring           2                       L + 1 (2)
        #      expansion            None                    sqrt((L+1)+1) (3.0)
        #      source     Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed;
        #                 Multi-parent Recombination with Simplex Crossover
        #                 in Real Coded Genetic Algorithms - Tsutsui
        #
        # UM   probability          1                       1.0 / L
        #      source     Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
        # -------------------------------------------------------------------

        variators = [
            GAOperator(SBX(probability=1.0, distribution_index=15.0),
                       PM(probability=1.0 / L, distribution_index=20.0)),
            GAOperator(PCX(nparents=3, noffspring=2, eta=0.1, zeta=0.1),
                       PM(probability=1.0 / L, distribution_index=20.0)),
            GAOperator(
                DifferentialEvolution(crossover_rate=0.6, step_size=0.6),
                PM(probability=1.0 / L, distribution_index=20.0)),
            GAOperator(
                UNDX(nparents=3, noffspring=2, zeta=0.5, eta=0.35 / sqrt(L)),
                PM(probability=1.0 / L, distribution_index=20.0)),
            GAOperator(
                SPX(nparents=L + 1, noffspring=L + 1, expansion=sqrt(L + 2)),
                PM(probability=1.0 / L, distribution_index=20.0)),
            # NOTE(review): ``1 / L`` is float division on Python 3, but would
            # truncate to 0 on Python 2 — confirm the targeted interpreter.
            UM(probability=1 / L)
        ]

        # Adaptive operator selection across the portfolio above.
        variator = Multimethod(self, variators)

        super(NSGAIIHybrid, self).__init__(
            NSGAII(problem, population_size, generator, selector, variator,
                   EpsilonBoxArchive(epsilons), **kwargs))
# Esempio n. 10
    def __init__(self,
                 problem,
                 epsilons,
                 population_size=100,
                 generator=RandomGenerator(),
                 selector=TournamentSelector(2),
                 variator=None,
                 **kwargs):
        """Set up a generational Borg-style optimizer.

        Wraps an NSGA-II instance (with an epsilon-box archive) and installs
        an auto-adaptive multi-operator variator, following the
        parameterization from "Borg: An Auto-Adaptive MOEA Framework"
        (Hadka & Reed).

        Parameters
        ----------
        problem : Problem
        epsilons : sequence of float
            Epsilon values for the epsilon-box archive.
        population_size : int
        generator : Generator
        selector : Selector
        variator : ignored
            NOTE(review): the supplied variator is discarded and replaced by
            the adaptive Multimethod below — confirm this is intentional.

        Raises
        ------
        TypeError
            No longer raised for an integer ``problem.nvars`` (see below).
        """
        # ``nvars`` is an int in stock platypus, where the original
        # ``len(problem.nvars)`` raised TypeError. Accept either an int or a
        # sized collection of decision variables.
        nvars = problem.nvars
        L = nvars if isinstance(nvars, int) else len(nvars)
        p = 1 / L  # per-variable mutation probability

        # Parameterization taken from
        # Borg: An Auto-Adaptive MOEA Framework - Hadka, Reed
        variators = [
            GAOperator(SBX(probability=1.0, distribution_index=15.0),
                       PM(probability=p, distribution_index=20.0)),
            GAOperator(PCX(nparents=3, noffspring=2, eta=0.1, zeta=0.1),
                       PM(probability=p, distribution_index=20.0)),
            GAOperator(
                DifferentialEvolution(crossover_rate=0.6, step_size=0.6),
                PM(probability=p, distribution_index=20.0)),
            GAOperator(
                UNDX(nparents=3,
                     noffspring=2,
                     zeta=0.5,
                     eta=0.35 / math.sqrt(L)),
                PM(probability=p, distribution_index=20.0)),
            GAOperator(
                SPX(nparents=L + 1,
                    noffspring=L + 1,
                    expansion=math.sqrt(L + 2)),
                PM(probability=p, distribution_index=20.0)),
            UM(probability=1 / L)
        ]

        # Adaptive operator selection across the portfolio above.
        variator = Multimethod(self, variators)

        super(GenerationalBorg, self).__init__(
            NSGAII(problem, population_size, generator, selector, variator,
                   EpsilonBoxArchive(epsilons), **kwargs))

        # The large commented-out GeneAsGenerationalBorg / VariatorWrapper
        # sketch that used to follow — together with a stray, live code
        # fragment that referenced an undefined ``parents`` variable and
        # therefore raised NameError on every construction — has been
        # removed; recover it from version control if needed.
        # TODO: supporting heterogeneously typed decision variables would
        # require per-type operator sets (an extension of Multimethod).
# Esempio n. 11
# --- Optimization hyper-parameters for the experiment below ---
crossover_extent = 1.0
mutation_probability = 1.0
# The number of function evaluations
# (per-problem step counts; total nfes = population_size * steps)
expected_num_steps = np.array([5000, 5, 6])
nfes = population_size * expected_num_steps

# The number of runs
number_of_runs = 1

# Choose the optimization specifics
#crossover=JoinMatrices2(extent=crossover_extent)
#crossover = GradientDescentJoin()
mutation = DeleteColumn(probability=mutation_probability)
#mutation = NullMutation()
# Binary tournament on the 'fitness' attribute.
# NOTE(review): confirm the meaning of AttributeDominance's second (False)
# argument — not visible from this file.
selector = TournamentSelector(2,
                              AttributeDominance(fitness_key,
                                                 False))  # crossover selector
constraint = OrthogonalityEnforcer()
comparator = ParetoDominance()  # used to find worst candidates

# List the problems to solve
ns = [100, 500, 800]
directories = [
    'data/test_data_n=100_k=10_N=5_density=6.00/',
    'data/test_data_n=500_k=10_N=5_density=6.00/',
    'data/test_data_n=800_k=10_N=5_density=6.00/'
]
out_file_names = ['multi_n100k10d6', 'multi_n500k10d6', 'multi_n800k10d6']
# Five input matrices per problem size (filenames encode size and density).
matrices_list=[['R1_100_1.1800.csv','R2_100_2.0000.csv','R3_100_3.7000.csv','R4_100_2.5000.csv','R5_100_1.9000.csv'], \
          ['R1_500_1.8408.csv','R2_500_2.0232.csv','R3_500_1.8416.csv','R4_500_1.7220.csv','R5_500_2.1224.csv'], \
          ['R1_800_2.0164.csv','R2_800_1.8628.csv','R3_800_1.5189.csv','R4_800_1.9077.csv','R5_800_1.6077.csv']]
# Esempio n. 12
def moea(name,
         solsize,
         popsize,
         wscalar_,
         moea_type,
         max_gen=float('inf'),
         timeLimit=float('inf')):
    """Run a real-valued MOEA on the scalarized problem in ``wscalar_``.

    Parameters
    ----------
    name : str
        Label used only for logging.
    solsize : int
        Population size for the final step (number of solutions returned).
    popsize : int
        Population size during the main loop.
    wscalar_ : object
        Problem wrapper exposing ``xdim``, ``M`` and objective function ``f``.
    moea_type : str
        'NSGAII', 'NSGAIII', 'SPEA2', 'SMSdom' or 'SMShv'.
    max_gen, timeLimit : float
        Stopping criteria: generation count and wall-clock seconds.

    Returns
    -------
    tuple(list, float)
        Objective values of the final population, and elapsed seconds.

    Raises
    ------
    ValueError
        If ``moea_type`` is not one of the supported identifiers.
    """
    from platypus import Problem, TournamentSelector
    from platypus import NSGAII, NSGAIII, SPEA2

    from platyplus.operators import varOr, mutGauss, cxUniform
    from platyplus.types import RealGauss
    from platyplus.algorithms import SMSEMOA

    N = wscalar_.xdim
    M = wscalar_.M

    time_start = time.perf_counter()
    logger.info('Running ' + moea_type + ' in ' + name)

    prMutation = 0.1
    prVariation = 1 - prMutation

    # (renamed from misspelled local ``vartor``)
    variator = varOr(cxUniform(), mutGauss(), prVariation, prMutation)

    def eval_(theta):
        return wscalar_.f(np.array(theta))

    problem = Problem(N, M)
    problem.types[:] = [RealGauss() for i in range(N)]
    problem.function = eval_

    if moea_type == 'NSGAII':
        alg = NSGAII(problem,
                     population_size=popsize,
                     selector=TournamentSelector(1),
                     variator=variator)
    elif moea_type == 'NSGAIII':
        alg = NSGAIII(problem,
                      divisions_outer=3,
                      population_size=popsize,
                      selector=TournamentSelector(1),
                      variator=variator)
    elif moea_type == 'SPEA2':
        alg = SPEA2(problem,
                    population_size=popsize,
                    selector=TournamentSelector(1),
                    variator=variator)
    elif moea_type == 'SMSdom':
        alg = SMSEMOA(problem,
                      population_size=popsize,
                      selector=TournamentSelector(1),
                      variator=variator,
                      selection_method='nbr_dom')
    elif moea_type == 'SMShv':
        alg = SMSEMOA(problem,
                      population_size=popsize,
                      selector=TournamentSelector(1),
                      variator=variator,
                      selection_method='hv_contr')
    else:
        # Previously an unknown moea_type fell through and crashed later
        # with a NameError on ``alg``; fail fast instead.
        raise ValueError('unknown moea_type: ' + repr(moea_type))

    gen = 1
    while gen < max_gen and time.perf_counter() - time_start < timeLimit:
        alg.step()
        gen += 1

    # One final step at the requested output population size.
    alg.population_size = solsize
    alg.step()

    moeaSols = [eval_(s.variables) for s in alg.result]

    moea_time = time.perf_counter() - time_start

    logger.info(moea_type + ' in ' + name + ' finished.')

    return moeaSols, moea_time
class IBEA(AbstractGeneticAlgorithm):
    """Indicator-based EA variant with an explicit mutation step and periodic
    population-wide local search.

    Each iteration breeds offspring via ``variator``, mutates every offspring
    with ``mutator``, merges and truncates the population using
    ``fitness_evaluator`` / ``fitness_comparator``, and every
    ``mutation_every_n_steps`` iterations applies ``local_search`` to the
    whole population. Prints progress diagnostics to stdout.
    """

    def __init__(self,
                 problem,
                 local_search,
                 mutator,
                 population_size=100,
                 generator=RandomGenerator(),
                 fitness_evaluator=HypervolumeFitnessEvaluator(),
                 fitness_comparator=AttributeDominance(fitness_key, False),
                 variator=None,
                 selector=None,
                 **kwargs):
        """Store operators; variator/selector default lazily in initialize()."""
        super(IBEA, self).__init__(problem, population_size, generator,
                                   **kwargs)
        self.fitness_evaluator = fitness_evaluator
        self.fitness_comparator = fitness_comparator
        self.selector = selector
        self.variator = variator
        # Local search runs on every 3rd call to iterate() (including the
        # first one, since _cur_step starts at 0).
        self.mutation_every_n_steps = 3
        self._cur_step = 0
        self.mutator = mutator

        self.local_search = local_search

    def initialize(self):
        """Create the initial population and lazily fill operator defaults."""
        super(IBEA, self).initialize()
        self.fitness_evaluator.evaluate(self.population)

        if self.variator is None:
            self.variator = default_variator(self.problem)

        if self.selector is None:
            self.selector = TournamentSelector(2, self.fitness_comparator)

    def iterate(self):
        """One generation: breed, mutate, truncate, periodically local-search."""
        offspring = []

        print("crossover")
        while len(offspring) < self.population_size:
            parents = self.selector.select(self.variator.arity,
                                           self.population)
            offspring.extend(self.variator.evolve(parents))

        print("mutation")
        offspring = [self.mutator.mutate(x) for x in offspring]

        self.evaluate_all(offspring)

        # Environmental selection: merge, then drop the worst member one at a
        # time until the population is back to its nominal size.
        self.population.extend(offspring)
        self.fitness_evaluator.evaluate(self.population)
        while len(self.population) > self.population_size:
            # self.fitness_evaluator.remove(self.population, self._find_worst())
            ii = self._find_worst()
            print('---' + str(self.population[ii].objectives))
            self.fitness_evaluator.remove(self.population, ii)

        for cand in self.population:
            print('RSE: ' + str(cand.objectives))

        if self._cur_step % self.mutation_every_n_steps == 0:
            print("local search whole population")
            self.population = [
                self.local_search.mutate(x) for x in self.population
            ]
            # Fitness must be recomputed after local search changed members.
            self.fitness_evaluator.evaluate(self.population)
            for cand in self.population:
                print('RSE: ' + str(cand.objectives))
        self._cur_step += 1

    def _find_worst(self):
        """Return the index of the population member judged worst by
        fitness_comparator (tracks the comparison loser through the scan)."""
        index = 0

        for i in range(1, len(self.population)):
            if self.fitness_comparator.compare(self.population[index],
                                               self.population[i]) < 0:
                index = i

        return index
# Esempio n. 14
    def run_algorithm(self):
        """Read the NRP instance from the chosen file, build the problem and
        algorithm from the UI settings, and launch the run on a worker thread.

        Shows a message box and returns early on any validation/parse error.
        """
        file_path = self.lineFilePath.text()
        if file_path == "":
            self.show_simple_error("Please choose file!")
            return
        print(file_path)
        # Reading file to NRP instance
        reader: AbstractFileReader = None
        if self.radioClassicFormat.isChecked():
            reader = ClassicFileReader()
        else:
            reader = CommonFileReader()
        try:
            self.nrp_instance: NRPInstance = reader.read_nrp_instance(
                filename=file_path)
        except RuntimeError as ex:
            # If cycle
            self.show_file_error(ex, str(ex))
            return
        except Exception as ex:
            self.show_file_error(ex)
            return
        # Multi or Single
        nrp_problem: Problem = None
        self.is_last_single = not self.radioMulti.isChecked()
        if self.radioMulti.isChecked():
            nrp_problem = NRP_Problem_MO(self.nrp_instance)
        else:
            nrp_problem = NRP_Problem_SO(self.nrp_instance)

        algorithm: AbstractGeneticAlgorithm = None
        # TODO Move somewhere and add config
        # Crossover probability is 0.8 and mutation probability = 1 / (size of binary vector)
        variator = None
        # TODO try single-point crossover
        # NOTE(review): the comment above promises mutation 1/len, but
        # BitFlip(probability=1) is passed — confirm intended semantics.
        variator = GAOperator(HUX(probability=0.8), BitFlip(probability=1))
        selector = TournamentSelector(5)
        #  Dep or without dep
        if self.radioDependYes.isChecked():
            algorithm = NSGAII_Repair(nrp_problem,
                                      repairer=Repairer(
                                          self.nrp_instance.requirements),
                                      variator=variator,
                                      selector=selector)
        else:
            algorithm = NSGAII(nrp_problem,
                               variator=variator,
                               selector=selector)
        #  Take n runs
        try:
            nruns = int(self.lineNumOfRuns.text())
            if nruns < 1 or nruns > 10000000:
                self.show_simple_error(
                    "Number of runs must be between 1 and 10000000!")
                return
        except ValueError:
            self.show_simple_error("Number of runs must be integer!")
            return

        # Hand off to the thread pool so the UI stays responsive.
        self.wait_start()
        worker = Worker(self.run_and_back, algorithm, nruns)
        self.threadpool.start(worker)
# Esempio n. 15
reshaper = Reshape(m, n, k)


# Renamed from ``eval`` to avoid shadowing the Python builtin.
def evaluate_cost(x):
    """Decode the flat decision vector into (G, Ss) and return [cost]."""
    G, Ss = reshaper.vec2mat(x)

    cost, gradG, gradS = p.cost_grad(G, Ss)
    print("eval_print", cost)
    return [cost]


problem = Problem(n * k + k * k * m, 1)
problem.types[:] = Real(0, 1)
problem.function = evaluate_cost

sel = TournamentSelector(2, AttributeDominance(fitness_key, False))
# sel = SUS(2, AttributeDominance(fitness_key, False))
# algorithm = IBEA(problem, selector=sel,  variator=TestMut(n, k, m, p, probability=1), population_size = 7,)

# Parameters for mutation, crossover and local search.
cross = DifferentialEvolution(1, 0.08)  # crossover_rate=1, step_size=0.08


local_search = AdamLocalSearch(n, k, m, p, probability=1, steps=200)  # probability=1, number of Adam steps = 200
mutation = UniformMutation(0.0004, 0.03)  # probability=0.0004, perturbation=0.03

algorithm = IBEA(problem, mutator=mutation,
                 local_search=local_search,
                 selector=sel,
                 variator=cross,
                 population_size=5)  # population size