Code example #1
0
    def _next(self, pop):
        """Perform one DE generation on ``pop``: parent selection, differential
        crossover, mutation, bounce-back boundary repair, evaluation and
        one-to-one replacement.

        Returns the population with improved individuals replaced in place.
        """

        # get the objective/constraint vectors from the population
        F, CV, feasible = pop.get("F", "CV", "feasible")
        F = parameter_less(F, CV)

        # build the parent-index matrix P according to the selection variant
        if self.var_selection == "rand":
            P = self.selection.do(pop, self.pop_size, self.crossover.n_parents)

        elif self.var_selection == "best":
            # force the best individual to be the first parent of every mating
            best = np.argmin(F[:, 0])
            P = self.selection.do(pop, self.pop_size,
                                  self.crossover.n_parents - 1)
            P = np.column_stack([np.full(len(pop), best), P])

        elif self.var_selection == "rand+best":
            best = np.argmin(F[:, 0])
            P = self.selection.do(pop, self.pop_size, self.crossover.n_parents)
            # FIX: stdlib `random.random()` takes no size argument — use numpy
            # to draw one uniform sample per individual (as the siblings do)
            use_best = np.random.random(len(pop)) < 0.3
            P[use_best, 0] = best

        else:
            raise Exception("Unknown selection: %s" % self.var_selection)

        self.off = self.crossover.do(self.problem, pop, P)

        # do the mutation by using the offsprings
        self.off = self.mutation.do(self.problem, self.off, algorithm=self)

        # bring back to bounds if violated through crossover - bounce back strategy
        X = self.off.get("X")
        xl = np.repeat(self.problem.xl[None, :], X.shape[0], axis=0)
        xu = np.repeat(self.problem.xu[None, :], X.shape[0], axis=0)

        # mirror the amount of violation back into the feasible space
        X[X < xl] = (xl + (xl - X))[X < xl]
        X[X > xu] = (xu - (X - xu))[X > xu]
        self.off.set("X", X)

        # evaluate the offsprings
        self.evaluator.eval(self.problem, self.off, algorithm=self)

        _F, _CV, _feasible = self.off.get("F", "CV", "feasible")
        _F = parameter_less(_F, _CV)

        # find the individuals which are indeed better
        is_better = np.where((_F <= F)[:, 0])[0]

        # truncate the replacements if desired
        if self.n_replace is not None and self.n_replace < len(is_better):
            # FIX: `random.perm` does not exist — numpy's permutation picks a
            # random subset of the improving individuals
            is_better = is_better[np.random.permutation(len(is_better))[:self.n_replace]]

        # replace the individuals in the population
        pop[is_better] = self.off[is_better]

        return pop
Code example #2
0
    def _next(self):
        """Run a single DE iteration on the algorithm's current population."""

        pop = self.pop

        # fitness with the parameter-less constraint handling applied
        F, CV, feasible = pop.get("F", "CV", "feasible")
        F = parameter_less(F, CV)

        n = len(pop)

        # build the parent indices according to the configured variant
        if self.var_selection == "rand":
            P = self.selection.do(pop, self.pop_size, self.crossover.n_parents)
        elif self.var_selection == "best":
            # the best individual becomes the first parent of every mating
            best = np.argmin(F[:, 0])
            P = self.selection.do(pop, self.pop_size,
                                  self.crossover.n_parents - 1)
            P = np.column_stack([np.full(n, best), P])
        elif self.var_selection == "rand+best":
            # ~30% of the matings get the best individual as first parent
            best = np.argmin(F[:, 0])
            P = self.selection.do(pop, self.pop_size, self.crossover.n_parents)
            use_best = np.random.random(n) < 0.3
            P[use_best, 0] = best
        else:
            raise Exception("Unknown selection: %s" % self.var_selection)

        # the differential crossover is the actual DE operation
        self.off = self.crossover.do(self.problem, pop, P, algorithm=self)

        # the "mutation" recombines each parent with its trial vector:
        # parent i sits at index i, its offspring at index n + i
        _pop = self.off.new().merge(self.pop).merge(self.off)
        _P = np.column_stack([np.arange(n), np.arange(n) + n])
        self.off = self.mutation.do(self.problem, _pop, _P,
                                    algorithm=self)[:len(self.pop)]

        # repair variables that left the feasible box
        self.off = BoundsBackRepair().do(self.problem, self.off)

        # evaluate the offsprings
        self.evaluator.eval(self.problem, self.off, algorithm=self)

        _F, _CV, _feasible = self.off.get("F", "CV", "feasible")
        _F = parameter_less(_F, _CV)

        # keep every offspring that is at least as good as its parent
        is_better = np.where((_F <= F)[:, 0])[0]
        pop[is_better] = self.off[is_better]

        # write the (partially replaced) population back
        self.pop = pop
Code example #3
0
    def _do(self, pop, n_select, n_parents, **kwargs):
        """Build the DE parent-index matrix for the configured variant.

        Starts from a fully random assignment and overwrites columns
        depending on ``self.variant``.
        """
        variant = self.variant

        # start from purely random parents for every mating
        P = RandomSelection().do(pop, n_select, n_parents)

        # rank the population by parameter-less fitness
        F, CV = pop.get("F", "CV")
        fitness = parameter_less(F, CV)[:, 0]
        sorted_by_fitness = fitness.argsort()
        best = sorted_by_fitness[0]

        current = np.arange(len(pop))

        if variant == "best":
            P[:, 0] = best
        elif variant == "current-to-best":
            P[:, 0] = current
            P[:, 1] = best
            P[:, 2] = current
        elif variant == "current-to-rand":
            P[:, 0] = current
            P[:, 2] = current
        elif variant == "rand-to-best":
            P[:, 1] = best
            P[:, 2] = current
        elif variant == "current-to-pbest":
            # draw the second parent from the top ~10% of the population
            n_pbest = int(np.ceil(0.1 * len(pop)))
            pbest = sorted_by_fitness[:n_pbest]
            P[:, 0] = current
            P[:, 1] = np.random.choice(pbest, len(pop))
            P[:, 2] = current
        # NOTE(review): any other variant (e.g. "rand") falls through and
        # keeps the purely random assignment — presumably intentional.

        return P
Code example #4
0
File: so_de.py  Project: simplecellzg/pymoo
    def _step(self):
        """Create the offspring population for one DE generation.

        Returns the repaired (not yet evaluated) offspring population; the
        caller is responsible for evaluation and survival selection.
        """
        selection, crossover, mutation = self.mating.selection, self.mating.crossover, self.mating.mutation

        # retrieve the current population
        pop = self.pop

        # get the vectors from the population
        F, CV, feasible = pop.get("F", "CV", "feasible")
        F = parameter_less(F, CV)

        # random parent indices; possibly overridden by the variant below
        P = selection.do(pop, self.pop_size, crossover.n_parents)

        if self.var_selection == "best":
            P[:, 0] = np.argmin(F[:, 0])
        elif self.var_selection == "rand+best":
            # for ~30% of the matings use the best individual as first parent
            P[np.random.random(len(pop)) < 0.3, 0] = np.argmin(F[:, 0])
        elif self.var_selection != "rand":
            # FIX: unknown values previously fell through and silently behaved
            # like "rand"; fail loudly, consistent with the sibling versions
            raise Exception("Unknown selection: %s" % self.var_selection)

        # do the first crossover which is the actual DE operation
        off = crossover.do(self.problem, pop, P, algorithm=self)

        # then do the mutation (a crossover between the old and new individual);
        # parent i is paired with its offspring at index len(pop) + i
        _pop = Population.merge(self.pop, off)
        _P = np.column_stack(
            [np.arange(len(pop)),
             np.arange(len(pop)) + len(pop)])
        off = mutation.do(self.problem, _pop, _P,
                          algorithm=self)[:len(self.pop)]

        # bounds back if something is out of bounds
        off = BounceBackOutOfBoundsRepair().do(self.problem, off)

        return off
Code example #5
0
    def _next(self):
        """One DE generation: mating, boundary repair, evaluation and
        improvement-based replacement, operating in place on ``self.pop``.
        """
        selection, crossover, mutation = self.mating.selection, self.mating.crossover, self.mating.mutation

        # retrieve the current population
        pop = self.pop

        # objective/constraint vectors with parameter-less handling applied
        F, CV, feasible = pop.get("F", "CV", "feasible")
        F = parameter_less(F, CV)

        # build the parent indices according to the selection variant
        if self.var_selection == "rand":
            P = selection.do(pop, self.pop_size, crossover.n_parents)

        elif self.var_selection == "best":
            # the best individual is always the first parent of each mating
            best = np.argmin(F[:, 0])
            P = selection.do(pop, self.pop_size, crossover.n_parents - 1)
            P = np.column_stack([np.full(len(pop), best), P])

        elif self.var_selection == "rand+best":
            # ~30% of the matings use the best individual as first parent
            best = np.argmin(F[:, 0])
            P = selection.do(pop, self.pop_size, crossover.n_parents)
            use_best = np.random.random(len(pop)) < 0.3
            P[use_best, 0] = best

        else:
            raise Exception("Unknown selection: %s" % self.var_selection)

        # do the first crossover which is the actual DE operation
        self.off = crossover.do(self.problem, pop, P, algorithm=self)

        # then do the mutation (effectively a crossover between each parent
        # and its trial vector): parent i at index i, offspring at len(pop)+i
        _pop = Population.merge(self.pop, self.off)
        _P = np.column_stack(
            [np.arange(len(pop)),
             np.arange(len(pop)) + len(pop)])
        self.off = mutation.do(self.problem, _pop, _P,
                               algorithm=self)[:len(self.pop)]

        # bounds back if something is out of bounds
        self.off = BounceBackOutOfBoundsRepair().do(self.problem, self.off)

        # evaluate the offsprings
        self.evaluator.eval(self.problem, self.off, algorithm=self)

        # replace only the individuals that have improved
        self.pop = ImprovementReplacement().do(self.problem, self.pop,
                                               self.off)