Example #1
    def _next(self):

        # do the mating using the total population
        Hm = Population.merge(self.pop, self.da)
        self.off = self.mating.do(self.problem,
                                  Hm,
                                  n_offsprings=self.n_offsprings,
                                  algorithm=self)

        # if the mating could not generate any new offspring (duplicate elimination might make that happen)
        if len(self.off) == 0:
            self.termination.force_termination = True
            return

        # if fewer offspring than desired could be created
        elif len(self.off) < self.n_offsprings:
            if self.verbose:
                print(
                    "WARNING: Mating could not produce the required number of (unique) offsprings!"
                )

        # evaluate the offspring
        self.evaluator.eval(self.problem, self.off, algorithm=self)

        # merge the offsprings with the current population
        self.pop = Population.merge(self.pop, self.off)

        # then do the survival selection
        self.pop, self.da = self.survival.do(self.problem,
                                             self.pop,
                                             self.da,
                                             self.pop_size,
                                             algorithm=self)
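
For reference, a minimal standalone sketch of what Population.merge itself does, built from the same calls the examples on this page already use; the import path is an assumption and differs between pymoo releases (older: pymoo.model.population, newer: pymoo.core.population).

import numpy as np
from pymoo.model.population import Population  # assumed import path; newer pymoo: pymoo.core.population

# two small populations carrying only design variables
a = Population().new("X", np.random.random((5, 3)))
b = Population().new("X", np.random.random((3, 3)))

# merge simply concatenates both populations into a single one
merged = Population.merge(a, b)
print(len(merged), merged.get("X").shape)  # 8 (8, 3)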
Example #2
    def test_update_da(self):
        problem = C1DTLZ3(n_var=12, n_obj=3)
        for i in range(2):
            ca_x = np.loadtxt(
                path_to_test_resources('ctaea', 'c1dtlz3', f'case{i+1}',
                                       'preCA.x'))
            CA = Population.create(ca_x)
            self.evaluator.eval(problem, CA)

            da_x = np.loadtxt(
                path_to_test_resources('ctaea', 'c1dtlz3', f'case{i+1}',
                                       'preDA.x'))
            DA = Population.create(da_x)
            self.evaluator.eval(problem, DA)

            off_x = np.loadtxt(
                path_to_test_resources('ctaea', 'c1dtlz3', f'case{i+1}',
                                       'offspring.x'))
            off = Population.create(off_x)
            self.evaluator.eval(problem, off)

            survival = CADASurvival(self.ref_dirs)
            mixed = Population.merge(CA, off)
            survival.ideal_point = np.min(np.vstack(
                (DA.get("F"), mixed.get("F"))),
                                          axis=0)

            post_ca_x = np.loadtxt(
                path_to_test_resources('ctaea', 'c1dtlz3', f'case{i+1}',
                                       'postCA.x'))
            CA = Population.create(post_ca_x)
            self.evaluator.eval(problem, CA)

            Hd = Population.merge(DA, off)
            pDA = survival._updateDA(CA, Hd, 91)

            true_S1 = [
                151, 35, 6, 63, 67, 24, 178, 106, 134, 172, 148, 159, 41, 173,
                145, 77, 62, 40, 127, 61, 130, 27, 171, 115, 52, 176, 22, 75,
                55, 87, 36, 149, 154, 47, 78, 170, 90, 15, 53, 175, 179, 165,
                56, 89, 132, 82, 141, 39, 32, 25, 131, 14, 72, 65, 177, 140,
                66, 143, 34, 81, 103, 99, 147, 168, 51, 26, 70, 94, 54, 97,
                158, 107, 29, 120, 50, 108, 157, 11, 85, 174, 80, 0, 95, 13,
                142, 101, 156, 19, 8, 98, 20
            ]

            true_S2 = [
                78, 173, 59, 21, 101, 52, 36, 94, 17, 20, 37, 96, 90, 129, 150,
                136, 162, 70, 146, 75, 138, 154, 65, 179, 98, 32, 97, 11, 26,
                107, 12, 128, 95, 170, 24, 171, 40, 180, 14, 44, 49, 43, 130,
                23, 60, 79, 148, 62, 87, 56, 157, 73, 104, 45, 177, 74, 15,
                152, 164, 28, 80, 113, 41, 33, 158, 57, 77, 34, 114, 118, 18,
                54, 53, 145, 93, 115, 121, 174, 142, 39, 13, 105, 10, 69, 120,
                55, 6, 153, 91, 137, 46
            ]
            if i == 0:
                assert np.all(pDA == Hd[true_S1])
            else:
                assert np.all(pDA == Hd[true_S2])
Example #3
    def _solve(self, problem, evaluator):

        self._initialize(problem)

        # create the population according to the sampling strategy
        pop = Population()
        if isinstance(self.sampling, np.ndarray):
            pop.X = self.sampling
        else:
            pop.X = self.sampling.sample(problem, self.pop_size, self)
        pop.F, pop.G = evaluator.eval(problem, pop.X)
        pop = self.survival.do(pop, self.pop_size, self)

        # setup initial generation
        n_gen = 0

        # while there are function evaluations left
        while evaluator.has_next():

            # increase the generation and do printing and callback
            self._do_each_generation(n_gen, evaluator, pop)
            n_gen += 1

            # initialize selection and offspring methods
            off = Population()
            off.X = np.full((self.pop_size, problem.n_var), np.inf)
            self.selection.set_population(pop, self)

            n_off = 0
            n_parents = self.crossover.n_parents
            n_children = self.crossover.n_children

            while n_off < self.pop_size:
                parents = self.selection.next(n_parents)
                X = self.crossover.do(problem, pop.X[parents, :], self)

                off.X[n_off:min(n_off + n_children, self.pop_size)] = X
                n_off = n_off + X.shape[0]

            off.X = self.mutation.do(problem, off.X)
            off.F, off.G = evaluator.eval(problem, off.X)

            # merge the population
            pop.merge(off)

            # eliminate all duplicates in the population
            if self.eliminate_duplicates:
                # pop.filter(unique_rows(pop.F))
                pop.filter(unique_rows(pop.X))

            # truncate the population
            pop = self.survival.do(pop, self.pop_size, self)

        self._do_each_generation(n_gen, evaluator, pop)
        return pop.X, pop.F, pop.G
Example #4
def crossover(crossover, a, b, c=None, xl=0, xu=1, type_var=np.double, **kwargs):
    n = a.shape[0]
    _pop = Population.merge(Population().new("X", a), Population().new("X", b))
    _P = np.column_stack([np.arange(n), np.arange(n) + n])

    if c is not None:
        _pop = Population.merge(_pop, Population().new("X", c))
        _P = np.column_stack([_P, np.arange(n) + 2 * n])

    problem = get_problem_func(a.shape[1], xl, xu, type_var)(**kwargs)
    return crossover.do(problem, _pop, _P, **kwargs).get("X")
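
To see what the column_stack above produces, here is the same index construction in plain numpy (no pymoo required): each row of _P pairs the i-th individual taken from a with the i-th individual taken from b in the merged population.

import numpy as np

n = 4
# rows 0..n-1 of the merged population come from `a`, rows n..2n-1 from `b`
P = np.column_stack([np.arange(n), np.arange(n) + n])
print(P)
# [[0 4]
#  [1 5]
#  [2 6]
#  [3 7]]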
Example #5
    def _solve(self, pop):
        # generate direction vectors by random sampling
        ref_dirs = np.random.random((self.batch_size, self.n_obj))
        ref_dirs /= np.expand_dims(np.sum(ref_dirs, axis=1), 1)

        algo = MOEAD_algo(ref_dirs=ref_dirs, n_neighbors=len(ref_dirs), eliminate_duplicates=False)
        repair, crossover, mutation = algo.mating.repair, algo.mating.crossover, algo.mating.mutation

        if isinstance(algo.decomposition, str):
            decomp = algo.decomposition
            if decomp == 'auto':
                if self.n_obj <= 2:
                    decomp = 'tchebi'
                else:
                    decomp = 'pbi'
            decomposition = get_decomposition(decomp)
        else:
            decomposition = algo.decomposition

        ideal_point = np.min(pop.get('F'), axis=0)

        # find the optimal individual for each reference direction
        opt_pop = Population(0, individual=Individual())
        for i in range(self.batch_size):
            N = algo.neighbors[i, :]
            FV = decomposition.do(pop.get("F"), weights=ref_dirs[i], ideal_point=ideal_point)
            opt_pop = Population.merge(opt_pop, pop[np.argmin(FV)])

        all_off = Population(0, individual=Individual())
        for i in np.random.permutation(self.batch_size):
            N = algo.neighbors[i, :]

            if self.batch_size > 1:
                if np.random.random() < algo.prob_neighbor_mating:
                    parents = N[np.random.permutation(algo.n_neighbors)][:crossover.n_parents]
                else:
                    parents = np.random.permutation(algo.pop_size)[:crossover.n_parents]

                # do recombination and create an offspring
                off = crossover.do(self.real_problem, opt_pop, parents[None, :])
            else:
                off = opt_pop[N].copy()
                
            off = mutation.do(self.real_problem, off)
            off = off[np.random.randint(0, len(off))]

            # repair first in case it is necessary
            if repair:
                off = algo.repair.do(self.real_problem, off, algorithm=algo)

            all_off = Population.merge(all_off, off)
        
        return all_off
Example #6
    def _exploration_move(self, center, opt=None):
        if opt is None:
            opt = center

        def step(x, delta, k):

            # copy the current point; the scaled delta will be added to it below
            X = np.copy(x)

            # the delta for variable k
            eps = delta[k]

            # if the problem has bounds, scale the delta by the variable's range
            if self.problem.has_bounds():
                xl, xu = self.problem.bounds()
                eps *= (xu[k] - xl[k])

            # now add to the current solution
            X[k] = X[k] + eps

            # repair the point if it is out of bounds
            X = set_to_bounds_if_outside_by_problem(self.problem, X)

            # return the new solution as individual
            mutant = pop_from_array_or_individual(X)[0]

            return mutant

        for k in range(self.problem.n_var):

            # create the individual and evaluate it
            mutant = step(center.X, self.explr_delta, k)
            self.evaluator.eval(self.problem, mutant, algorithm=self)
            self.pop = Population.merge(self.pop, mutant)

            if is_better(mutant, opt):
                center, opt = mutant, mutant

            else:

                # inverse the sign of the delta
                self.explr_delta[k] = -self.explr_delta[k]

                # now try the other sign if there was no improvement
                mutant = step(center.X, self.explr_delta, k)
                self.evaluator.eval(self.problem, mutant, algorithm=self)
                self.pop = Population.merge(self.pop, mutant)

                if is_better(mutant, opt):
                    center, opt = mutant, mutant

        return opt
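
The step helper above scales the per-variable delta by that variable's range before adding it; below is a standalone numpy illustration of the same arithmetic (bounds, delta and k are made up, and np.clip stands in for the bounds repair call).

import numpy as np

x = np.array([0.5, 0.5, 0.5])
xl, xu = np.zeros(3), np.array([1.0, 10.0, 100.0])
delta, k = 0.25, 1

eps = delta * (xu[k] - xl[k])                     # 0.25 of the k-th variable's range -> 2.5
x_new = np.copy(x)
x_new[k] = np.clip(x_new[k] + eps, xl[k], xu[k])  # repair if the step leaves the bounds
print(x_new)                                      # [0.5 3.  0.5]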
Example #7
    def _next(self):
        # the offspring population to finally evaluate and attach to the population
        off = Population()

        # find the potential optimal solution in the current population
        potential_optimal = self._potential_optimal()

        # for each of those solutions execute the division move
        for current in potential_optimal:

            # find the dimension with the largest normalized range
            nxl, nxu = norm_bounds(current, self.problem)
            k = np.argmax(nxu - nxl)

            # the delta value to be used to get left and right - this is one sixth of the range
            xl, xu = current.get("xl"), current.get("xu")

            delta = (xu[k] - xl[k]) / 6

            # print(current.X, delta, k, xl, xu)

            # create the left individual
            left_x = np.copy(current.X)
            left_x[k] = xl[k] + delta
            left = Individual(X=left_x)

            # create the right individual
            right_x = np.copy(current.X)
            right_x[k] = xu[k] - delta
            right = Individual(X=right_x)

            # update the boundaries for all the points accordingly
            for ind in [current, left, right]:
                update_bounds(ind, xl, xu, k, delta)

            # create the offspring population, evaluate and attach to current population
            _off = Population.create(left, right)
            _off.set("depth", current.get("depth") + 1)

            off = Population.merge(off, _off)

        # evaluate the offsprings
        self.evaluator.eval(self.problem, off, algorithm=self)

        # print(off.get("X"))

        # add the offsprings to the population
        self.pop = Population.merge(self.pop, off)
Example #8
    def _step(self):
        pop = self.pop
        X = pop.get("X")
        F = pop.get("F")

        #Levy Flight
        #pick the best one from random optimum nests (least infeasible or PF members)
        best = self.opt[np.random.randint(len(self.opt), size=len(X))]
        G_X = np.array([best_nest.get("X") for best_nest in best])

        step_size = self._get_global_step_size(X)
        _X = X + np.random.rand(*X.shape) * step_size * (G_X - X)
        _X = set_to_bounds_if_outside_by_problem(self.problem, _X)

        #Evaluate
        off = Population(len(_X)).set("X", _X)
        self.evaluator.eval(self.problem, off, algorithm=self)

        #Local Random Walk
        _X = off.get("X")
        dir_vec = self._get_local_directional_vector(X)
        _X = _X + dir_vec
        _X = set_to_bounds_if_outside_by_problem(self.problem, _X)
        off = Population(len(_X)).set("X", _X)
        self.evaluator.eval(self.problem, off, algorithm=self)

        #append offspring to population and then sort for elitism (survival)
        self.pop = Population.merge(pop, off)
        self.pop = self.survival.do(self.problem,
                                    self.pop,
                                    self.pop_size,
                                    algorithm=self)
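
The Levy-flight step above pulls every nest a random fraction of step_size toward one of the best nests; here is a pymoo-free numpy sketch of that update (the unit-box bounds and the fixed step size are assumptions, np.clip mimics the bounds repair).

import numpy as np

X = np.random.random((5, 2))       # current nests
G_X = np.zeros((1, 2))             # assume the best nest sits at the origin
step_size = 0.1

X_new = X + np.random.rand(*X.shape) * step_size * (G_X - X)
X_new = np.clip(X_new, 0.0, 1.0)   # keep the walk inside the unit box
print(X_new)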
Example #9
    def test_association(self):
        problem = C1DTLZ3(n_var=12, n_obj=3)
        ca_x = np.loadtxt(path_to_test_resources('ctaea', 'c1dtlz3', 'case3', 'preCA.x'))
        CA = Population.create(ca_x)
        self.evaluator.eval(problem, CA)

        da_x = np.loadtxt(path_to_test_resources('ctaea', 'c1dtlz3', 'case3', 'preDA.x'))
        DA = Population.create(da_x)
        self.evaluator.eval(problem, DA)

        off_x = np.loadtxt(path_to_test_resources('ctaea', 'c1dtlz3', 'case3', 'offspring.x'))
        off = Population.create(off_x)
        self.evaluator.eval(problem, off)

        true_assoc = np.loadtxt(path_to_test_resources('ctaea', 'c1dtlz3', 'case3', 'feasible_rank0.txt'))
        true_niche = true_assoc[:, 1]
        true_id = true_assoc[:, 0]
        sorted_id = np.argsort(true_id)

        survival = CADASurvival(self.ref_dirs)
        mixed = Population.merge(CA, off)
        survival.ideal_point = np.min(np.vstack((DA.get("F"), mixed.get("F"))), axis=0)

        fronts = NonDominatedSorting().do(mixed.get("F"), n_stop_if_ranked=len(self.ref_dirs))
        I = np.concatenate(fronts)
        niche, _ = survival._associate(mixed[I])
        sorted_I = np.argsort(I)

        assert (niche[sorted_I] == true_niche[sorted_id]).all()
Example #10
    def _initialize(self):
        super()._initialize()
        self.alpha, self.beta, self.gamma, self.delta = self.func_params(
            self.problem)

        # the corresponding x values of the provided or best found solution
        x0 = self.x0.X

        # if lower and upper bounds are given, take 5% of the range
        if self.problem.has_bounds():
            self.simplex_scaling = 0.05 * (self.problem.xu - self.problem.xl)

        # if no bounds are given, do it based on x0 - MATLAB procedure
        else:
            self.simplex_scaling = 0.05 * self.x0.X
            # some value needs to be added if x0 is zero
            self.simplex_scaling[self.simplex_scaling == 0] = 0.00025

        # initialize the simplex
        simplex = pop_from_array_or_individual(self.initialize_simplex(x0))
        self.evaluator.eval(self.problem, simplex, algorithm=self)

        # make the simplex the current population and sort it by fitness
        pop = Population.merge(self.opt, simplex)
        self.pop = FitnessSurvival().do(self.problem, pop, len(pop))
Example #11
    def _do(self, problem, pop, n_offsprings, parents=None, **kwargs):
        rnd = np.random.random(n_offsprings)
        n_neighbors = (rnd <= self.bias).sum()

        other = super()._do(problem, pop, n_offsprings - n_neighbors, parents,
                            **kwargs)

        N = []

        cand = TournamentSelection(comp_by_rank).do(pop,
                                                    n_neighbors,
                                                    n_parents=1)[:, 0]
        for k in cand:
            N.append(pop[k])

            n_cand_neighbors = pop[k].get("neighbors")
            rnd = np.random.permutation(
                len(n_cand_neighbors))[:self.crossover.n_parents - 1]
            N.extend(n_cand_neighbors[rnd])

        parents = np.reshape(np.arange(len(N)), (-1, self.crossover.n_parents))
        N = Population.create(*N)

        bias = super()._do(problem, N, n_neighbors, parents, **kwargs)

        return Population.merge(bias, other)
Example #12
    def _step(self):
        selection, crossover, mutation = self.mating.selection, self.mating.crossover, self.mating.mutation

        # retrieve the current population
        pop = self.pop

        # get the vectors from the population
        F, CV, feasible = pop.get("F", "CV", "feasible")
        F = parameter_less(F, CV)

        # create offspring and add them to the data of the algorithm
        P = selection.do(pop, self.pop_size, crossover.n_parents)

        if self.var_selection == "best":
            P[:, 0] = np.argmin(F[:, 0])
        elif self.var_selection == "rand+best":
            P[np.random.random(len(pop)) < 0.3, 0] = np.argmin(F[:, 0])

        # do the first crossover which is the actual DE operation
        off = crossover.do(self.problem, pop, P, algorithm=self)

        # then do the mutation (which is actually a crossover between old and new individual)
        _pop = Population.merge(self.pop, off)
        _P = np.column_stack(
            [np.arange(len(pop)),
             np.arange(len(pop)) + len(pop)])
        off = mutation.do(self.problem, _pop, _P,
                          algorithm=self)[:len(self.pop)]

        # bounds back if something is out of bounds
        off = BounceBackOutOfBoundsRepair().do(self.problem, off)

        return off
Example #13
    def _step(self):
        pop = self.pop
        X = pop.get("X")
        F = pop.get("F")

        #Levy Flight
        best = self.opt
        G_X = best.get("X")

        step_size = self._get_global_step_size(X)
        _X = X + np.random.rand(*X.shape) * step_size * (G_X - X)
        _X = set_to_bounds_if_outside_by_problem(self.problem, _X)

        #Evaluate
        off = Population(len(pop)).set("X", _X)
        self.evaluator.eval(self.problem, off, algorithm=self)

        # replace the worse individuals in pop with better offspring, index by index;
        # this replacement also considers lower constraint violation,
        # which the original paper does not have
        ImprovementReplacement().do(self.problem, pop, off, inplace=True)

        #Local Random Walk
        dir_vec = self._get_local_directional_vector(X)
        _X = X + dir_vec
        _X = set_to_bounds_if_outside_by_problem(self.problem, _X)
        off = Population(len(pop)).set("X", _X)
        self.evaluator.eval(self.problem, off, algorithm=self)

        #append offspring to population and then sort for elitism (survival)
        self.pop = Population.merge(pop, off)
        self.pop = self.survival.do(self.problem,
                                    self.pop,
                                    self.pop_size,
                                    algorithm=self)
Example #14
    def test_restricted_mating_selection(self):
        np.random.seed(200)
        selection = RestrictedMating(func_comp=comp_by_cv_dom_then_random)

        problem = C3DTLZ4(n_var=12, n_obj=3)
        ca_x = np.loadtxt(path_to_test_resources('ctaea', 'c3dtlz4', 'case2', 'preCA.x'))
        CA = Population.create(ca_x)
        self.evaluator.eval(problem, CA)

        da_x = np.loadtxt(path_to_test_resources('ctaea', 'c3dtlz4', 'case2', 'preDA.x'))
        DA = Population.create(da_x)
        self.evaluator.eval(problem, DA)

        Hm = Population.merge(CA, DA)
        n_pop = len(CA)

        _, rank = NonDominatedSorting().do(Hm.get('F'), return_rank=True)

        Pc = (rank[:n_pop] == 0).sum()/len(Hm)
        Pd = (rank[n_pop:] == 0).sum()/len(Hm)

        P = selection.do(Hm, len(CA))

        assert P.shape == (91, 2)
        if Pc > Pd:
            assert (P[:, 0] < n_pop).all()
        else:
            assert (P[:, 0] >= n_pop).all()
        assert (P[:, 1] >= n_pop).any()
        assert (P[:, 1] < n_pop).any()
Example #15
    def _update(self):
        D = self.D
        ind = Individual(X=np.copy(D["X"]),
                         F=np.copy(D["F"]),
                         G=np.copy(-D["G"]))
        pop = Population.merge(self.pop, Population.create(ind))
        set_cv(pop)
        self.pop = pop
Example #16
    def _next(self):
        pop = self.pop
        elites = np.where(pop.get("type") == "elite")[0]

        # actually do the mating given the elite selection and biased crossover
        off = self.mating.do(self.problem, pop, n_offsprings=self.n_offsprings, algorithm=self)

        # create random mutants to fill up the population
        mutants = FloatRandomSampling().do(self.problem, self.n_mutants, algorithm=self)

        # evaluate all the new solutions
        to_evaluate = Population.merge(off, mutants)
        self.evaluator.eval(self.problem, to_evaluate, algorithm=self)

        # finally merge everything together and sort by fitness
        pop = Population.merge(pop[elites], to_evaluate)

        # then do the survival selection - set the elites for the next round
        self.pop = self.survival.do(self.problem, pop, len(pop), algorithm=self)
Example #17
    def do(self, _, pop, da, n_survive, **kwargs):
        # Offspring are last of merged population
        off = pop[-n_survive:]
        # Update ideal point
        self.ideal_point = np.min(np.vstack((self.ideal_point, off.get("F"))), axis=0)
        # Update CA
        pop = self._updateCA(pop, n_survive)
        # Update DA
        Hd = Population.merge(da, off)
        da = self._updateDA(pop, Hd, n_survive)
        return pop, da
Example #18
    def _do(self, problem, pop, n_survive, out=None, algorithm=None, **kwargs):
        X, F = pop.get("X", "F")
        if F.shape[1] != 1:
            raise ValueError(
                "FitnessSurvival can only used for single objective single!")

        n_neighbors = 5

        # calculate the normalized euclidean distances from each solution to another
        D = norm_eucl_dist(problem, X, X, fill_diag_with_inf=True)

        # set the neighborhood for each individual
        for k, individual in enumerate(pop):

            # the neighbors in the current population
            neighbors = pop[D[k].argsort()[:n_neighbors]]

            # get the neighbors of the current individual and merge
            N = individual.get("neighbors")
            if N is not None:
                rec = []
                h = set()
                for n in N:
                    for entry in n.get("neighbors"):
                        if entry not in h:
                            rec.append(entry)
                            h.add(entry)

                neighbors = Population.merge(neighbors, rec)

            # keep only the closest solutions to the individual
            _D = norm_eucl_dist(problem, individual.X[None, :],
                                neighbors.get("X"))[0]

            # find only the closest neighbors
            closest = _D.argsort()[:n_neighbors]

            individual.set("crowding", _D[closest].mean())
            individual.set("neighbors", neighbors[closest])

        best = F[:, 0].argmin()
        print(F[best], pop[best].get("crowding"))

        # plt.scatter(F[:, 0], pop.get("crowding"))
        # plt.show()

        pop.set("_F", pop.get("F"))
        pop.set("F", np.column_stack([F, -pop.get("crowding")]))
        pop = RankAndCrowdingSurvival().do(problem, pop, n_survive)
        pop.set("F", pop.get("_F"))

        return pop
Example #19
    def _do(self, problem, pop, n_offsprings, parents=None, **kwargs):

        P = self.selection.do(pop, len(pop), self.crossover.n_parents)

        # do the first crossover which is the actual DE operation
        off = self.crossover.do(problem, pop, P, algorithm=self)

        # then do the mutation (which is actually a crossover between old and new individual)
        _pop = Population.merge(pop, off)
        _P = np.column_stack(
            [np.arange(len(pop)),
             np.arange(len(pop)) + len(pop)])
        off = self.mutation.do(problem, _pop, _P, algorithm=self)[:len(pop)]

        return off
Example #20
    def _next(self):
        selection, crossover, mutation = self.mating.selection, self.mating.crossover, self.mating.mutation

        # retrieve the current population
        pop = self.pop

        # get the vectors from the population
        F, CV, feasible = pop.get("F", "CV", "feasible")
        F = parameter_less(F, CV)

        # create offspring and add them to the data of the algorithm
        if self.var_selection == "rand":
            P = selection.do(pop, self.pop_size, crossover.n_parents)

        elif self.var_selection == "best":
            best = np.argmin(F[:, 0])
            P = selection.do(pop, self.pop_size, crossover.n_parents - 1)
            P = np.column_stack([np.full(len(pop), best), P])

        elif self.var_selection == "rand+best":
            best = np.argmin(F[:, 0])
            P = selection.do(pop, self.pop_size, crossover.n_parents)
            use_best = np.random.random(len(pop)) < 0.3
            P[use_best, 0] = best

        else:
            raise Exception("Unknown selection: %s" % self.var_selection)

        # do the first crossover which is the actual DE operation
        self.off = crossover.do(self.problem, pop, P, algorithm=self)

        # then do the mutation (which is actually a crossover between old and new individual)
        _pop = Population.merge(self.pop, self.off)
        _P = np.column_stack(
            [np.arange(len(pop)),
             np.arange(len(pop)) + len(pop)])
        self.off = mutation.do(self.problem, _pop, _P,
                               algorithm=self)[:len(self.pop)]

        # bounds back if something is out of bounds
        self.off = BounceBackOutOfBoundsRepair().do(self.problem, self.off)

        # evaluate the results
        self.evaluator.eval(self.problem, self.off, algorithm=self)

        # replace the individuals that have improved
        self.pop = ImprovementReplacement().do(self.problem, self.pop,
                                               self.off)
Example #21
    def do(self, problem, pop, n_survive, return_indices=False, **kwargs):

        # make sure the population has at least one individual
        if len(pop) == 0:
            return pop

        # if the split should be done beforehand
        if self.filter_infeasible and problem.n_constr > 0:
            feasible, infeasible = split_by_feasibility(
                pop, sort_infeasbible_by_cv=True)

            # initialize the feasible and infeasible population
            feas_pop, infeas_pop = Population(), Population()

            # if no feasible solution was found at all - which means at least one infeasible exists
            if len(feasible) == 0:
                infeas_pop = self.cv_survival.do(problem, pop[infeasible],
                                                 n_survive)

            # if there are feasible solutions in the population
            else:

                # if feasible solutions do exist
                if len(feasible) > 0:
                    feas_pop = self._do(problem, pop[feasible],
                                        min(len(feasible), n_survive),
                                        **kwargs)

                # calculate how many individuals are still remaining to be filled up with infeasible ones
                n_remaining = n_survive - len(feas_pop)

                # if infeasible solutions needs to be added
                if n_remaining > 0:
                    infeas_pop = self.cv_survival.do(problem, pop[infeasible],
                                                     n_remaining)

            survivors = Population.merge(feas_pop, infeas_pop)

        else:
            survivors = self._do(problem, pop, n_survive, **kwargs)

        if return_indices:
            H = {}
            for k, ind in enumerate(pop):
                H[ind] = k
            return [H[survivor] for survivor in survivors]
        else:
            return survivors
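
The split_by_feasibility call above separates the population by constraint violation before running the two survivals; the following plain numpy sketch only mimics that idea and is not the pymoo implementation.

import numpy as np

CV = np.array([0.0, 0.3, 0.0, 1.2, 0.0])             # constraint violation per individual
feasible = np.where(CV <= 0)[0]                      # indices without any violation
infeasible = np.where(CV > 0)[0]
infeasible = infeasible[np.argsort(CV[infeasible])]  # sort the infeasible ones by violation
print(feasible, infeasible)                          # [0 2 4] [1 3]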
Example #22
    def _next(self):
        # do the mating using the current population
        self.off = self._mating(self.pop)

        ################################################################
        # Add a local optimization here
        ################################################################

        # evaluate the offspring
        self.evaluator.eval(self.problem, self.off, algorithm=self)

        # merge the offsprings with the current population
        self.pop = Population.merge(self.pop, self.off)

        # then do the survival selection
        self.pop = self.survival.do(self.problem,
                                    self.pop,
                                    self.pop_size,
                                    algorithm=self)
Example #23
    def test_update(self):
        problem = C3DTLZ4(n_var=12, n_obj=3)
        ca_x = np.loadtxt(
            path_to_test_resources('ctaea', 'c3dtlz4', 'case2', 'preCA.x'))
        CA = Population.create(ca_x)
        self.evaluator.eval(problem, CA)

        da_x = np.loadtxt(
            path_to_test_resources('ctaea', 'c3dtlz4', 'case2', 'preDA.x'))
        DA = Population.create(da_x)
        self.evaluator.eval(problem, DA)

        off_x = np.loadtxt(
            path_to_test_resources('ctaea', 'c3dtlz4', 'case2', 'offspring.x'))
        off = Population.create(off_x)
        self.evaluator.eval(problem, off)

        post_ca_x = np.loadtxt(
            path_to_test_resources('ctaea', 'c3dtlz4', 'case2', 'postCA.x'))
        true_pCA = Population.create(post_ca_x)
        self.evaluator.eval(problem, true_pCA)

        post_da_x = np.loadtxt(
            path_to_test_resources('ctaea', 'c3dtlz4', 'case2', 'postDA.x'))
        true_pDA = Population.create(post_da_x)
        self.evaluator.eval(problem, true_pDA)

        survival = CADASurvival(self.ref_dirs)
        mixed = Population.merge(CA, off)
        survival.ideal_point = np.array([0., 0., 0.])

        pCA, pDA = survival.do(problem, mixed, DA, len(self.ref_dirs))

        pCA_X = set([tuple(x) for x in pCA.get("X")])
        tpCA_X = set([tuple(x) for x in true_pCA.get("X")])

        pDA_X = set([tuple(x) for x in pDA.get("X")])
        tpDA_X = set([tuple(x) for x in true_pDA.get("X")])

        assert pCA_X == tpCA_X
        assert pDA_X == tpDA_X
Example #24
    def ask(self):

        # if the initial population has not been generated yet
        if self.get_population() is None:

            self.algorithm.initialize(self.problem)

            # deactivate the survival because no values have been set yet
            survival = self.algorithm.survival
            self.algorithm.survival = None

            self.problem._evaluate = types.MethodType(evaluate_to_nan, self.problem)
            self.algorithm._initialize()

            # activate the survival for the further runs
            self.algorithm.survival = survival

            return self.get_population().get("X")

        # usually the case - create the next output
        else:

            # if offspring from the previous ask already exist, merge them into the population
            if self.get_offsprings() is not None:
                self.set_population(Population.merge(self.get_population(), self.get_offsprings()))

            # execute a survival of the algorithm
            survivors = self.algorithm.survival.do(self.problem, self.get_population(),
                                                   self.algorithm.pop_size, algorithm=self.algorithm)
            self.set_population(survivors)

            # execute the mating using the population
            off = self.algorithm.mating.do(self.algorithm.problem, self.get_population(),
                                           n_offsprings=self.algorithm.n_offsprings, algorithm=self.algorithm)

            # execute the fake evaluation of the individuals
            self.problem._evaluate = types.MethodType(evaluate_to_nan, self.problem)
            self.algorithm.evaluator.eval(self.problem, off, algorithm=self.algorithm)
            self.set_offsprings(off)

            return off.get("X")
Example #25
    def do(self, problem, pop, n_offsprings, **kwargs):

        # the population object to be used
        off = pop.new()

        # infill counter - counts how often the mating needs to be done to fill up n_offsprings
        n_infills = 0

        # iterate until enough offsprings are created
        while len(off) < n_offsprings:

            # how many offsprings are remaining to be created
            n_remaining = n_offsprings - len(off)

            # do the mating
            _off = self._do(problem, pop, n_remaining, **kwargs)

            # repair the individuals if necessary - disabled if repair is NoRepair
            _off = self.repair.do(problem, _off, **kwargs)

            # eliminate the duplicates - disabled if NoDuplicateElimination is used
            _off = self.eliminate_duplicates.do(_off, pop, off)

            # if more offspring than necessary were created - truncate them
            if len(off) + len(_off) > n_offsprings:
                # IMPORTANT: Interestingly, this makes a difference in performance
                n_remaining = n_offsprings - len(off)
                _off = _off[:n_remaining]

            # add to the offsprings and increase the mating counter
            off = Population.merge(off, _off)
            n_infills += 1

            # stop if no new offspring could be generated within a pre-specified number of attempts
            if n_infills > self.n_max_iterations:
                break

        return off
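
The while loop above is a generic infill pattern: keep mating until n_offsprings unique solutions have been collected or a retry limit is reached. Below is a stripped-down, pymoo-free sketch of the same control flow; the function name and the duplicate test are illustrative only.

import random

def infill(create_batch, is_duplicate, n_offsprings, n_max_iterations=100):
    off, n_infills = [], 0
    while len(off) < n_offsprings:
        n_remaining = n_offsprings - len(off)
        for x in create_batch(n_remaining):
            # keep only new, non-duplicate solutions and never exceed the requested number
            if len(off) < n_offsprings and not is_duplicate(x, off):
                off.append(x)
        n_infills += 1
        # give up if nothing new can be produced within the allowed number of attempts
        if n_infills > n_max_iterations:
            break
    return off

# e.g. sample random digits while rejecting exact duplicates
off = infill(lambda n: [random.randint(0, 9) for _ in range(n)],
             lambda x, seen: x in seen,
             n_offsprings=5)
print(off)  # five distinct digits, e.g. [3, 7, 0, 9, 2]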
Example #26
    def _do(self, problem, pop, n_survive, algorithm=None, **kwargs):

        if algorithm is None:
            raise Exception("Algorithm object needs to be passed to determine the current generation!")

        # get the generations of the population
        gen = pop.get("n_gen")

        # by default fill with the current generation if unknown (should not happen)
        for k in range(len(gen)):
            if gen[k] is None:
                gen[k] = algorithm.n_gen

        # get the age of each individual
        age = algorithm.n_gen - gen

        # define which individuals are too old and which are young enough
        too_old = age > self.n_max_age
        young_enough = ~too_old

        # initialize the survivors
        survivors = Population()

        # do the survival with individuals being young enough
        if young_enough.sum() > 0:
            survivors = self.survival.do(problem, pop[young_enough], n_survive, algorithm=algorithm, **kwargs)

        n_remaining = n_survive - len(survivors)

        # if really necessary fill up with individuals which are actually too old
        if n_remaining > 0:
            fill_up = pop[too_old]
            fill_up = fill_up[fill_up.get("n_gen").argsort()]

            survivors = Population.merge(survivors, fill_up[:n_remaining])

        return survivors
Example #27
    def fun(x, n):
        global pop
        ind = Individual(X=np.copy(x))
        Evaluator().eval(problem, ind)
        pop = Population.merge(pop, ind)
        return ind.F[0]
Example #28
    def _set_optimum(self):
        val = self.pop
        if self.opt is not None:
            val = Population.merge(val, self.opt)
        self.opt = filter_optimum(val, least_infeasible=True)
Example #29
    def _next(self):

        self.framework.train(x=self.archive["x"],
                             f=self.archive["f"],
                             g=self.archive["g"])
        out_pop = Population(0, individual=Individual())
        for fr in self.framework.best_frameworks:
            self.samoo_problem.framework = fr
            for lf_algorithm in self.lf_algorithm_list:

                if fr.type == 2:
                    if lf_algorithm in self.simultaneous_algorithm:

                        res = lf_minimize(problem=self.samoo_problem,
                                          method=lf_algorithm,
                                          method_args={
                                              'pop_size': self.pop_size_lf,
                                              'ref_dirs': self.ref_dirs
                                          },
                                          termination=('n_gen', self.n_gen_lf),
                                          pf=self.pf,
                                          save_history=False,
                                          disp=False)

                        if self.pop_size_per_algorithm < len(res.pop):
                            res.pop = framework_candidate_select(
                                fr.framework_id,
                                ref_dirs=self.ref_dirs,
                                pop=res.pop,
                                n_select=self.pop_size_per_algorithm)
                        out_pop = out_pop.merge(res.pop)

                elif fr.type == 1:
                    if lf_algorithm in self.generative_algorithm:
                        # if fr.framework_id in ['11', '21']:
                        #    fr.train(x=self.archive["x"], f=self.archive["f"], g=self.archive["g"])
                        for i in range(len(self.ref_dirs)):
                            self.cur_ref_no = i
                            fr.set_current_reference(self.cur_ref_no)

                            if fr.framework_id in ['31', '41', '5']:
                                fr.train(x=self.archive["x"],
                                         f=self.archive["f"],
                                         g=self.archive["g"])

                            res = lf_minimize(problem=self.samoo_problem,
                                              method=lf_algorithm,
                                              method_args={
                                                  'pop_size': self.pop_size_lf,
                                                  'ref_dirs': self.ref_dirs
                                              },
                                              termination=('n_gen',
                                                           self.n_gen_lf),
                                              pf=self.pf,
                                              save_history=False,
                                              disp=False)

                            if np.any(res.pop.get("CV") <= 0):
                                I = res.pop.get("CV") <= 0
                                res.pop = res.pop[I.flatten()]
                                ind = res.pop[np.argmin(res.pop.get("F"))]
                            else:
                                ind = res.pop[np.argmin(res.pop.get("CV"))]

                            # # out_pop.append(ind)
                            # if len(out_pop) == 0:
                            #     out_pop = Population(1, individual=ind)
                            # else:
                            out_pop = out_pop.merge(
                                Population(1, individual=ind))

        # if self.pop_size_per_epoch < len(out_pop):
        #     out_pop = self.candidate_select(ref_dirs=self.ref_dirs, pop=out_pop)

        return out_pop.get("X")
Example #30
    def _update(self):
        D = self.D
        ind = Individual(X=np.copy(D["X"]), F=np.copy(D["F"]), G=np.copy(-D["G"]))
        ind.CV = Problem.calc_constraint_violation(ind.G[None, :])[0]
        ind.feasible = (ind.CV <= 0)
        self.pop = Population.merge(self.pop, Population.create(ind))
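
The calc_constraint_violation call above condenses the constraint values G into a single CV number; under the usual pymoo convention that g <= 0 means a constraint is satisfied, this boils down to summing the positive parts of G. A plain numpy illustration (not the library routine itself):

import numpy as np

G = np.array([-0.2, 0.5, 1.0])   # g <= 0 is satisfied, g > 0 is violated
CV = np.sum(np.maximum(G, 0.0))  # only violated constraints contribute
print(CV, CV <= 0)               # 1.5 False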