Example 1
def default_termination(problem):
    if problem.n_obj > 1:
        termination = MultiObjectiveDefaultTermination()
    else:
        termination = SingleObjectiveDefaultTermination()

    return termination
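
The helper simply dispatches on the number of objectives. A minimal usage sketch, assuming the factory and termination imports that appear verbatim in the later examples:

from pymoo.factory import get_problem
from pymoo.util.termination.default import MultiObjectiveDefaultTermination, SingleObjectiveDefaultTermination

# single-objective problem (n_obj == 1) vs. multi-objective problem (n_obj == 2)
assert isinstance(default_termination(get_problem("rastrigin")), SingleObjectiveDefaultTermination)
assert isinstance(default_termination(get_problem("zdt1")), MultiObjectiveDefaultTermination)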
Example 2
class NelderAndMeadTermination2(Termination):
    def __init__(self, **kwargs):
        super().__init__()
        self.default = SingleObjectiveDefaultTermination(**kwargs)

    def _do_continue(self, algorithm):
        do_continue = self.default.do_continue(algorithm)

        # if the default says do not continue just follow that
        if not do_continue:
            return False

        # additionally check for degenerated simplex
        else:
            X = algorithm.pop.get("X")

            # degenerated simplex - get all edges and minimum and maximum length
            D = vectorized_cdist(X, X)
            val = D[np.triu_indices(len(X), 1)]
            min_e, max_e = val.min(), val.max()

            # degenerated if either the maximum edge length is very small or the min/max ratio (almost) vanishes
            is_degenerated = max_e < 1e-16 or min_e / max_e < 1e-16

            return not is_degenerated
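
A sketch of plugging this criterion into a run; the module path for NelderMead is an assumption in the style of the imports used elsewhere on this page, and any keyword arguments are forwarded to the wrapped SingleObjectiveDefaultTermination:

from pymoo.algorithms.so_nelder_mead import NelderMead  # assumed module path
from pymoo.factory import get_problem
from pymoo.optimize import minimize

res = minimize(get_problem("sphere"),
               NelderMead(),
               NelderAndMeadTermination2(n_max_gen=1000),  # kwargs reach the default criterion
               seed=1)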
Example 3
def minimize(problem, algorithm, termination=None, **kwargs):
    """

    Minimization of a function of one or more variables, objectives and constraints.

    This is a convenience function to execute several algorithms with default settings that have turned
    out to work well on test problems. However, evolutionary computation builds on the idea of customizing a
    meta-algorithm, so configuring the algorithm through the object-oriented interface is recommended to
    improve convergence.

    Parameters
    ----------

    problem : :class:`~pymoo.model.problem.Problem`
        A problem object which is defined using pymoo.

    algorithm : :class:`~pymoo.model.algorithm.Algorithm`
        The algorithm object that should be used for the optimization.

    termination : :class:`~pymoo.model.termination.Termination` or tuple
        The termination criterion that is used to stop the algorithm.


    Returns
    -------
    res : :class:`~pymoo.model.result.Result`
        The optimization result represented as an object.

    """

    # create a copy of the algorithm object to ensure no side-effects
    algorithm = copy.deepcopy(algorithm)

    # set the termination criterion and store it in the algorithm object
    if termination is not None and not isinstance(termination, Termination):
        if isinstance(termination, str):
            termination = get_termination(termination)
        else:
            termination = get_termination(*termination)

    # initialize the method given a problem
    algorithm.initialize(problem, termination=termination, **kwargs)

    if algorithm.termination is None:
        if problem.n_obj > 1:
            algorithm.termination = MultiObjectiveDefaultTermination()
        else:
            algorithm.termination = SingleObjectiveDefaultTermination()

    # actually execute the algorithm
    res = algorithm.solve()

    # store the copied algorithm in the result object
    res.algorithm = algorithm

    return res
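
Given the branching above, the termination argument can be a Termination object, a string, a tuple unpacked into get_termination, or None (in which case the algorithm's default is attached after initialization). A short sketch using imports that appear in the later examples:

from pymoo.algorithms.so_genetic_algorithm import GA
from pymoo.factory import get_problem

res = minimize(get_problem("sphere"), GA(), ("n_gen", 100), seed=1)  # tuple -> get_termination("n_gen", 100)
res = minimize(get_problem("sphere"), GA(), seed=1)                  # None -> SingleObjectiveDefaultTermination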
Example 4
    def __init__(self,
                 n_elites=200,
                 n_offsprings=700,
                 n_mutants=100,
                 bias=0.7,
                 sampling=FloatRandomSampling(),
                 survival=None,
                 display=SingleObjectiveDisplay(),
                 eliminate_duplicates=False,
                 **kwargs
                 ):
        """


        Parameters
        ----------

        n_elites : int
            Number of elite individuals

        n_offsprings : int
            Number of offsprings to be generated through mating of an elite and a non-elite individual

        n_mutants : int
            Number of mutants to be introduced into the population each generation

        bias : float
            Bias of an offspring inheriting the allele of its elite parent

        eliminate_duplicates : bool or class
            The duplicate elimination is more important if a decoding is used. The duplicate check has to be
            performed on the decoded variable and not on the real values. Therefore, we recommend passing
            a DuplicateElimination object.
            If eliminate_duplicates is simply set to `True`, then duplicates are filtered out whenever the
            objective values are equal.

        """

        if survival is None:
            survival = EliteSurvival(n_elites, eliminate_duplicates=eliminate_duplicates)

        super().__init__(pop_size=n_elites + n_offsprings + n_mutants,
                         n_offsprings=n_offsprings,
                         sampling=sampling,
                         selection=EliteBiasedSelection(),
                         crossover=BinomialCrossover(bias, prob=1.0),
                         mutation=NoMutation(),
                         survival=survival,
                         display=display,
                         eliminate_duplicates=True,
                         advance_after_initial_infill=True,
                         **kwargs)

        self.n_elites = n_elites
        self.n_mutants = n_mutants
        self.bias = bias
        self.default_termination = SingleObjectiveDefaultTermination()
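
A construction sketch: the overall population size is n_elites + n_offsprings + n_mutants (here 1000). The BRKGA module path is an assumption in the style of the other imports on this page:

from pymoo.algorithms.so_brkga import BRKGA  # assumed module path
from pymoo.factory import get_problem
from pymoo.optimize import minimize

algorithm = BRKGA(n_elites=200, n_offsprings=700, n_mutants=100, bias=0.7)
res = minimize(get_problem("rastrigin"), algorithm, seed=1)  # falls back to the default termination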
Example 5
    def __init__(self,
                 pop_size=100,
                 sampling=LatinHypercubeSampling(),
                 variant="DE/rand/1/bin",
                 CR=0.5,
                 F=0.3,
                 dither="vector",
                 jitter=False,
                 display=SingleObjectiveDisplay(),
                 **kwargs):
        """

        Parameters
        ----------

        pop_size : {pop_size}

        sampling : {sampling}

        variant : {{DE/(rand|best)/1/(bin/exp)}}
         The DE variant to be used, written as DE/x/y/z, where x defines how the individuals to be perturbed
         are selected, y is the number of difference vectors used, and z is the crossover type. One of the
         most common variants is DE/rand/1/bin.

        F : float
         The weight to be used during the crossover.

        CR : float
         The probability the individual exchanges variable values from the donor vector.

        dither : {{'no', 'scalar', 'vector'}}
         A strategy to adapt the weight (F) during a run. The option either applies the
         same dither in one iteration ('scalar') or a different one for each
         individual ('vector').

        jitter : bool
         Another strategy for adaptive weights (F). Here, only a very small value is added to or
         subtracted from the weight used for the crossover for each individual.

        """

        mating = DifferentialEvolutionMating(variant=variant,
                                             CR=CR,
                                             F=F,
                                             dither=dither,
                                             jitter=jitter)

        super().__init__(pop_size=pop_size,
                         sampling=sampling,
                         mating=mating,
                         survival=None,
                         display=display,
                         **kwargs)

        self.default_termination = SingleObjectiveDefaultTermination()
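
A usage sketch (DE module path assumed): with dither="vector" a different weight F is drawn for each individual in every generation:

from pymoo.algorithms.so_de import DE  # assumed module path
from pymoo.factory import get_problem
from pymoo.optimize import minimize

algorithm = DE(pop_size=100, variant="DE/rand/1/bin", CR=0.5, F=0.3, dither="vector")
res = minimize(get_problem("ackley", n_var=10), algorithm, seed=1)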
Example 6
    def __init__(self,
                 display=CSDisplay(),
                 sampling=FloatRandomSampling(),
                 survival=FitnessSurvival(),
                 eliminate_duplicates=DefaultDuplicateElimination(),
                 termination=SingleObjectiveDefaultTermination(),
                 pop_size=100,
                 beta=1.5,
                 alfa=0.01,
                 pa=0.35,
                 **kwargs):
        """

        Parameters
        ----------
        display : {display}
        sampling : {sampling}
        survival : {survival}
        eliminate_duplicates : This does not exist in the original paper/book.
            Without it, the solutions might become too biased towards the current global best solution,
            because the global random walk uses the global best solution as the reference.

        termination : {termination}

        pop_size : The number of nests (solutions)

        beta : The input parameter of Mantegna's algorithm used to simulate
            sampling from a Levy distribution

        alfa : The step size scaling factor, usually
            0.01, so that the step size is scaled down to O(L/100), where L is
            the scale (range of bounds) of the problem.

        pa   : The switch probability; a fraction pa of the nests will be
            abandoned in every iteration
        """

        super().__init__(**kwargs)

        self.initialization = Initialization(sampling)
        self.survival = survival
        self.display = display
        self.pop_size = pop_size
        self.default_termination = termination
        self.eliminate_duplicates = eliminate_duplicates

        # the scale will be multiplied by the problem scale after the problem is given in setup
        self.alfa = alfa
        self.scale = alfa
        self.pa = pa
        self.beta = beta
        a = math.gamma(1. + beta) * math.sin(math.pi * beta / 2.)
        b = beta * math.gamma((1. + beta) / 2.) * 2**((beta - 1.) / 2)
        self.sig = (a / b)**(1. / (2 * beta))
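
The closing lines precompute the sigma of Mantegna's algorithm. For reference, a minimal sketch of how such a sigma typically yields a Levy-distributed step (the function name is illustrative, not part of the class):

import numpy as np

def mantegna_levy_step(sig, beta, size):
    # step = u / |v|^(1/beta) with u ~ N(0, sig^2) and v ~ N(0, 1)
    u = np.random.normal(0, sig, size)
    v = np.random.normal(0, 1, size)
    return u / np.abs(v) ** (1.0 / beta)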
Example 7
    def __init__(self,
                 pop_size=200,
                 n_parallel=10,
                 sampling=LatinHypercubeSampling(),
                 display=SingleObjectiveDisplay(),
                 repair=None,
                 individual=Individual(),
                 **kwargs):
        """

        Parameters
        ----------
        pop_size : {pop_size}
        sampling : {sampling}
        n_parallel : int
            The number of local searches run in parallel (the population is split evenly among them).

        """

        super().__init__(display=display, **kwargs)

        self.initialization = Initialization(sampling,
                                             individual=individual,
                                             repair=repair)

        self.pop_size = pop_size
        self.n_parallel = n_parallel
        self.each_pop_size = pop_size // n_parallel

        self.solvers = None
        self.niches = []

        def cmaes(problem, x):
            solver = CMAES(x0=x, tolfun=1e-11, tolx=1e-3, restarts=0)
            solver.initialize(problem)
            solver.next()
            return solver

        def nelder_mead(problem, x):
            solver = NelderMead(X=x)
            solver.initialize(problem)
            solver._initialize()
            solver.n_gen = 1
            solver.next()
            return solver

        self.func_create_solver = nelder_mead

        self.default_termination = SingleObjectiveDefaultTermination()
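
A quick arithmetic check of the partitioning above (note that self.func_create_solver could just as well be assigned the cmaes helper instead):

pop_size, n_parallel = 200, 10
each_pop_size = pop_size // n_parallel  # 20 individuals per local solver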
Example 8
class PatternSearchTermination(Termination):
    def __init__(self, eps=1e-5, **kwargs):
        super().__init__()
        self.default = SingleObjectiveDefaultTermination(**kwargs)
        self.eps = eps

    def do_continue(self, algorithm):
        decision_default = self.default.do_continue(algorithm)
        delta = np.max(np.abs(algorithm.explr_delta))
        if delta < self.eps:
            return decision_default
        else:
            return True
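
A sketch of attaching the combined criterion to a run: the search continues as long as the exploration delta stays above eps; once it falls below, the decision is deferred to the wrapped default criterion. The PatternSearch module path is an assumption:

from pymoo.algorithms.so_pattern_search import PatternSearch  # assumed module path
from pymoo.factory import get_problem
from pymoo.optimize import minimize

res = minimize(get_problem("himmelblau"),
               PatternSearch(),
               PatternSearchTermination(eps=1e-5, n_max_gen=1000),
               seed=1)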
Example 9
    def __init__(self,
                 n_offsprings=200,
                 pop_size=None,
                 rule=1.0 / 7.0,
                 phi=1.0,
                 gamma=0.85,
                 sampling=FloatRandomSampling(),
                 survival=FitnessSurvival(),
                 display=SingleObjectiveDisplay(),
                 **kwargs):
        """
        Evolutionary Strategy (ES)

        Parameters
        ----------
        n_offsprings : int
            The number of individuals created in each iteration.
        pop_size : int
            The number of individuals which are surviving from the offspring population (non-elitist)
        rule : float
            The ratio of surviving individuals. It is used to calculate either `n_offsprings` or `pop_size` automatically if only one of them is provided.
        phi : float
            Expected rate of convergence (usually 1.0).
        gamma : float
            If not `None`, some individuals are created using the differentials with this as a length scale.
        sampling : object
            The sampling method for creating the initial population.
        """

        if pop_size is None and n_offsprings is not None:
            pop_size = int(np.math.ceil(n_offsprings * rule))
        elif n_offsprings is None and pop_size is not None:
            # derive the number of offsprings from the surviving population
            n_offsprings = int(np.math.floor(pop_size / rule))

        assert pop_size is not None and n_offsprings is not None, "You have to provide at least pop_size or n_offsprings."
        assert n_offsprings >= 2 * pop_size, "The number of offsprings should be at least double the population size."

        super().__init__(pop_size=pop_size,
                         n_offsprings=n_offsprings,
                         sampling=sampling,
                         survival=survival,
                         display=display,
                         advance_after_initial_infill=True,
                         **kwargs)

        self.default_termination = SingleObjectiveDefaultTermination()
        self.phi = phi
        self.gamma = gamma

        self.tau, self.taup, self.sigma_max = None, None, None
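
A worked example of the survivor rule: with the defaults n_offsprings=200 and rule=1/7, ceil(200/7) = 29 parents survive, which satisfies the (mu, lambda) check n_offsprings >= 2 * pop_size:

import math

n_offsprings, rule = 200, 1.0 / 7.0
pop_size = math.ceil(n_offsprings * rule)  # 29
assert n_offsprings >= 2 * pop_size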
Example 10
    def __init__(self,
                 n_elites=20,
                 n_offsprings=70,
                 n_mutants=10,
                 bias=0.7,
                 sampling=FloatRandomSampling(),
                 survival=None,
                 display=SingleObjectiveDisplay(),
                 eliminate_duplicates=False,
                 **kwargs):
        """


        Parameters
        ----------

        n_elites : int
            Number of elite individuals

        n_offsprings : int
            Number of offsprings to be generated through mating of an elite and a non-elite individual

        n_mutants : int
            Number of mutants introduced into the population at each generation

        bias : float
            Probability that an offspring inherits the allele of its elite parent

        """

        if survival is None:
            survival = EliteSurvival(n_elites,
                                     eliminate_duplicates=eliminate_duplicates)

        super().__init__(pop_size=n_elites + n_offsprings + n_mutants,
                         n_offsprings=n_offsprings,
                         sampling=sampling,
                         selection=EliteBiasedSelection(),
                         crossover=BiasedCrossover(bias, prob=1.0),
                         mutation=NoMutation(),
                         survival=survival,
                         display=display,
                         eliminate_duplicates=True,
                         **kwargs)

        self.n_elites = n_elites
        self.n_mutants = n_mutants
        self.bias = bias
        self.default_termination = SingleObjectiveDefaultTermination()
Example 11
    def __init__(self,
                 X,
                 dX=None,
                 objective=0,
                 display=SingleObjectiveDisplay(),
                 **kwargs) -> None:
        super().__init__(display=display, **kwargs)

        self.objective = objective
        self.n_restarts = 0
        self.default_termination = SingleObjectiveDefaultTermination()

        self.X, self.dX = X, dX
        self.F, self.CV = None, None

        if self.X.ndim == 1:
            self.X = np.atleast_2d(X)
Example 12
    def __init__(self,
                 pop_size=100,
                 norm_niche_size=0.05,
                 norm_by_dim=False,
                 return_all_opt=False,
                 display=NicheDisplay(),
                 **kwargs):
        """

        Parameters
        ----------
        norm_niche_size : float
            The radius within which the clearing shall be performed. The clearing is performed in the normalized
            design space, e.g. 0.05 clears all solutions with a normalized Euclidean distance of less than 5%.
        pop_size : {pop_size}
        sampling : {sampling}
        selection : {selection}
        crossover : {crossover}
        mutation : {mutation}
        eliminate_duplicates : {eliminate_duplicates}
        n_offsprings : {n_offsprings}

        """

        surv = kwargs.get("survival")
        if surv is None:
            surv = EpsilonClearingSurvival(norm_niche_size,
                                           n_max_each_iter=None,
                                           norm_by_dim=norm_by_dim)

        selection = kwargs.get("selection")
        if selection is None:
            selection = TournamentSelection(comp_by_cv_and_clearing_fitness)

        super().__init__(pop_size=pop_size,
                         selection=selection,
                         survival=surv,
                         display=display,
                         **kwargs)

        # self.default_termination = NicheTermination()
        self.default_termination = SingleObjectiveDefaultTermination()

        # whether all solutions of rank one after clearing, or just the best one, should be considered optimal
        self.return_all_opt = return_all_opt
Example 13
    def __init__(self,
                 pop_size=25,
                 n_offsprings=None,
                 sampling=LHS(),
                 termination=SingleObjectiveDefaultTermination(),
                 display=SingleObjectiveDisplay(),
                 beta=1.5,
                 alpha=0.01,
                 pa=0.1,
                 **kwargs):
        """

        Parameters
        ----------

        sampling : {sampling}

        termination : {termination}

        pop_size : int
         The number of nests to be used

        beta : float
            The input parameter of Mantegna's algorithm used to simulate
            sampling from a Levy distribution

        alpha : float
            The step size scaling factor, usually 0.01.

        pa : float
            The switch probability; a fraction pa of the nests will be abandoned in every iteration
        """
        mating = kwargs.get("mating")
        if mating is None:
            mating = LevyFlights(alpha, beta)

        super().__init__(pop_size=pop_size,
                         n_offsprings=n_offsprings,
                         sampling=sampling,
                         mating=mating,
                         termination=termination,
                         display=display,
                         **kwargs)

        self.pa = pa
Example 14
    def __init__(self,
                 pop_size=100,
                 sampling=FloatRandomSampling(),
                 selection=RandomSelection(),
                 crossover=SimulatedBinaryCrossover(prob=0.9, eta=3),
                 mutation=PolynomialMutation(prob=None, eta=5),
                 eliminate_duplicates=True,
                 n_offsprings=None,
                 display=SingleObjectiveDisplay(),
                 **kwargs):
        """

        Parameters
        ----------
        pop_size : {pop_size}
        sampling : {sampling}
        selection : {selection}
        crossover : {crossover}
        mutation : {mutation}
        eliminate_duplicates : {eliminate_duplicates}
        n_offsprings : {n_offsprings}

        """

        super().__init__(pop_size=pop_size,
                         sampling=sampling,
                         selection=selection,
                         crossover=crossover,
                         mutation=mutation,
                         survival=NichingSurvival(),
                         eliminate_duplicates=eliminate_duplicates,
                         n_offsprings=n_offsprings,
                         display=display,
                         **kwargs)

        # self.mating = NeighborBiasedMating(selection,
        #                                    crossover,
        #                                    mutation,
        #                                    repair=self.mating.repair,
        #                                    eliminate_duplicates=self.mating.eliminate_duplicates,
        #                                    n_max_iterations=self.mating.n_max_iterations)

        self.default_termination = SingleObjectiveDefaultTermination()
Example 15
    def __init__(
            self,
            pop_size=100,
            sampling=FloatRandomSampling(),
            selection=TournamentSelection(func_comp=comp_by_cv_and_fitness),
            crossover=SimulatedBinaryCrossover(prob=0.9, eta=3),
            mutation=PolynomialMutation(prob=None, eta=5),
            survival=FitnessSurvival(),
            eliminate_duplicates=True,
            n_offsprings=None,
            display=SingleObjectiveDisplay(),
            **kwargs):
        """

        Parameters
        ----------
        pop_size : {pop_size}
        sampling : {sampling}
        selection : {selection}
        crossover : {crossover}
        mutation : {mutation}
        eliminate_duplicates : {eliminate_duplicates}
        n_offsprings : {n_offsprings}

        """

        super().__init__(pop_size=pop_size,
                         sampling=sampling,
                         selection=selection,
                         crossover=crossover,
                         mutation=mutation,
                         survival=survival,
                         eliminate_duplicates=eliminate_duplicates,
                         n_offsprings=n_offsprings,
                         display=display,
                         **kwargs)

        self.default_termination = SingleObjectiveDefaultTermination()
Example 16
    def __init__(self,
                 pop_size=20,
                 w=0.9,
                 c1=2.0,
                 c2=2.0,
                 sampling=LatinHypercubeSampling(),
                 adaptive=True,
                 pertube_best=True,
                 display=PSODisplay(),
                 repair=None,
                 individual=Individual(),
                 **kwargs):
        """

        Parameters
        ----------
        pop_size : {pop_size}
        sampling : {sampling}

        """

        super().__init__(display=display, **kwargs)

        self.initialization = Initialization(sampling,
                                             individual=individual,
                                             repair=repair)

        self.pop_size = pop_size
        self.adaptive = adaptive
        self.pertube_best = pertube_best
        self.default_termination = SingleObjectiveDefaultTermination()
        self.V_max = None

        self.w = w
        self.c1 = c1
        self.c2 = c2
Example 17
    def __init__(self,
                 pop_size=100,
                 sampling=FloatRandomSampling(),
                 selection=DES("rand"),
                 crossover=DEX(CR=0.7),
                 mutation=PM(eta=20),
                 survival=NichingSurvival(0.1, 0.01, 5, 4),
                 eliminate_duplicates=True,
                 n_offsprings=None,
                 display=SingleObjectiveDisplay(),
                 **kwargs):

        super().__init__(pop_size=pop_size,
                         sampling=sampling,
                         selection=selection,
                         crossover=crossover,
                         mutation=mutation,
                         survival=survival,
                         eliminate_duplicates=eliminate_duplicates,
                         n_offsprings=n_offsprings,
                         display=display,
                         **kwargs)

        self.default_termination = SingleObjectiveDefaultTermination()
Example 18
def minimize(problem, algorithm, termination=None, **kwargs):
    """

    Minimization of a function of one or more variables, objectives and constraints.

    This is a convenience function to execute several algorithms with default settings that have turned
    out to work well on test problems. However, evolutionary computation builds on the idea of customizing a
    meta-algorithm, so configuring the algorithm through the object-oriented interface is recommended to
    improve convergence.

    Parameters
    ----------

    problem : :class:`~pymoo.model.problem.Problem`
        A problem object which is defined using pymoo.

    algorithm : :class:`~pymoo.model.algorithm.Algorithm`
        The algorithm object that should be used for the optimization.

    termination : :class:`~pymoo.model.termination.Termination` or tuple
        The termination criterion that is used to stop the algorithm.

    seed : integer
        The random seed to be used.

    verbose : bool
        Whether output should be printed or not.

    display : :class:`~pymoo.util.display.Display`
        Each algorithm has a default display object for printouts. However, it can be overwritten if desired.

    callback : :class:`~pymoo.model.callback.Callback`
        A callback object which is called each iteration of the algorithm.

    save_history : bool
        Whether the history should be stored or not.

    Returns
    -------
    res : :class:`~pymoo.model.result.Result`
        The optimization result represented as an object.

    """

    # create a copy of the algorithm object to ensure no side-effects
    algorithm = copy.deepcopy(algorithm)

    # get the termination if provided as a tuple - create an object
    if termination is not None and not isinstance(termination, Termination):
        if isinstance(termination, str):
            termination = get_termination(termination)
        else:
            termination = get_termination(*termination)

    # initialize the algorithm object given a problem
    algorithm.initialize(problem, termination=termination, **kwargs)

    # if no termination could be found add the default termination either for single or multi objective
    if algorithm.termination is None:
        if problem.n_obj > 1:
            algorithm.termination = MultiObjectiveDefaultTermination()
        else:
            algorithm.termination = SingleObjectiveDefaultTermination()

    # actually execute the algorithm
    res = algorithm.solve()

    # store the deep copied algorithm in the result object
    res.algorithm = algorithm

    return res
Example 19
    def __init__(self, delta=0.05, **kwargs):
        super().__init__(**kwargs)
        self.default_termination = SingleObjectiveDefaultTermination()
        self.alpha = delta
        self.point = None
Example 20
            pop[k].set("X", _x)

        return pop


algorithm = GA(
    pop_size=20,
    sampling=RandomPermutationSampling(),
    mutation=InversionMutation(),
    # crossover=EdgeRecombinationCrossover(),
    crossover=OrderCrossover(),
    repair=StartFromZeroRepair(),
    eliminate_duplicates=True)

# if the algorithm does not improve over the last 200 generations it will terminate (the maximum number of generations is disabled)
termination = SingleObjectiveDefaultTermination(n_last=200, n_max_gen=np.inf)

res = minimize(problem, algorithm, termination, seed=1, verbose=False)

print(res.F)
print(res.algorithm.evaluator.n_eval)

visualize(problem, res.X)


class PathVisualization(Callback):
    def __init__(self):
        super().__init__()
        self.vid = Video(File("tsp.mp4"))

    def notify(self, algorithm):
Example 21
    def __init__(self,
                 pop_size=100,
                 n_offsprings=None,
                 sampling=LHS(),
                 variant="DE/best/1/bin",
                 CR=0.5,
                 F=None,
                 dither="vector",
                 jitter=False,
                 mutation=NoMutation(),
                 survival=None,
                 display=SingleObjectiveDisplay(),
                 **kwargs):
        """

        Parameters
        ----------

        pop_size : {pop_size}

        sampling : {sampling}

        variant : {{DE/(rand|best)/1/(bin/exp)}}
         The DE variant to be used, written as DE/x/y/z, where x defines how the individuals to be perturbed
         are selected, y is the number of difference vectors used, and z is the crossover type. One of the
         most common variants is DE/rand/1/bin.

        F : float
         The F to be used during the crossover.

        CR : float
         The probability the individual exchanges variable values from the donor vector.

        dither : {{'no', 'scalar', 'vector'}}
         A strategy to adapt the weight (F) during a run. The option either applies the
         same dither in one iteration ('scalar') or a different one for each
         individual ('vector').

        jitter : bool
         Another strategy for adaptive weights (F). Here, only a very small value is added to or
         subtracted from the F used for the crossover for each individual.

        """

        # parse the information from the string
        _, sel, n_diff, mut = variant.split("/")
        n_diffs = int(n_diff)
        if "-to-" in variant:
            n_diffs += 1

        selection = DES(sel)

        crossover = DEX(prob=1.0,
                        n_diffs=n_diffs,
                        F=F,
                        CR=CR,
                        variant=mut,
                        dither=dither,
                        jitter=jitter)

        super().__init__(pop_size=pop_size,
                         n_offsprings=n_offsprings,
                         sampling=sampling,
                         selection=selection,
                         crossover=crossover,
                         mutation=mutation,
                         survival=survival,
                         display=display,
                         **kwargs)

        self.default_termination = SingleObjectiveDefaultTermination()
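
For reference, what the variant parsing at the top of the constructor yields for the default string (plain Python):

_, sel, n_diff, mut = "DE/best/1/bin".split("/")
# sel == "best" (selection), n_diff == "1" (number of difference vectors), mut == "bin" (crossover)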
Example 22
from pymoo.algorithms.so_genetic_algorithm import GA
from pymoo.operators.crossover.order_crossover import OrderCrossover
from pymoo.operators.mutation.inversion_mutation import InversionMutation
from pymoo.operators.sampling.random_permutation_sampling import RandomPermutationSampling
from pymoo.optimize import minimize
from pymoo.problems.single.flowshop_scheduling import visualize, create_random_flowshop_problem
from pymoo.util.termination.default import SingleObjectiveDefaultTermination

problem = create_random_flowshop_problem(n_machines=5, n_jobs=10, seed=1)

algorithm = GA(pop_size=20,
               eliminate_duplicates=True,
               sampling=RandomPermutationSampling(),
               mutation=InversionMutation(),
               crossover=OrderCrossover())

# if the algorithm does not improve over the last 50 generations it will terminate (at the latest after 10000 generations)
termination = SingleObjectiveDefaultTermination(n_last=50, n_max_gen=10000)

res = minimize(problem, algorithm, termination, seed=1, verbose=True)

visualize(problem, res.X)
Example 23
    def __init__(self,
                 pop_size=25,
                 sampling=LatinHypercubeSampling(),
                 w=0.9,
                 c1=2.0,
                 c2=2.0,
                 adaptive=True,
                 initial_velocity="random",
                 max_velocity_rate=0.20,
                 pertube_best=True,
                 display=PSODisplay(),
                 **kwargs):
        """

        Parameters
        ----------
        pop_size : The size of the swarm being used.

        sampling : {sampling}

        adaptive : bool
            Whether w, c1, and c2 are changed dynamically over time. The update uses the spread from the global
            optimum to determine suitable values.

        w : float
            The inertia weight to be used in each iteration for the velocity update. This can be interpreted
            as the momentum term regarding the velocity. If `adaptive=True` this is only the
            initially used value.

        c1 : float
            The cognitive impact (personal best) during the velocity update. If `adaptive=True` this is only the
            initially used value.
        c2 : float
            The social impact (global best) during the velocity update. If `adaptive=True` this is only the
            initially used value.

        initial_velocity : str - ('random', or 'zero')
            How the initial velocity of each particle should be assigned. Either 'random' which creates a
            random velocity vector or 'zero' which makes the particles start to find the direction through the
            velocity update equation.


        max_velocity_rate : float
            The maximum velocity rate. It is determined variable-wise (not vector-wise). A rate is used here
            since the value is normalized with respect to the `xl` and `xu` defined in the problem.

        pertube_best : bool
            Some studies have proposed mutating the global best because it has been found to improve convergence.
            This means the population size is reduced by one particle, and one additional function evaluation is
            spent to mutate the best solution found so far.

        """

        super().__init__(display=display, **kwargs)

        self.initialization = Initialization(sampling)

        self.pop_size = pop_size
        self.adaptive = adaptive
        self.pertube_best = pertube_best
        self.default_termination = SingleObjectiveDefaultTermination()
        self.V_max = None
        self.initial_velocity = initial_velocity
        self.max_velocity_rate = max_velocity_rate

        self.w = w
        self.c1 = c1
        self.c2 = c2
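
A usage sketch (PSO module path assumed): with initial_velocity="zero" the particles pick up their direction purely through the velocity update equation:

from pymoo.algorithms.so_pso import PSO  # assumed module path
from pymoo.factory import get_problem
from pymoo.optimize import minimize

algorithm = PSO(pop_size=25, adaptive=True, initial_velocity="zero")
res = minimize(get_problem("rastrigin"), algorithm, seed=1)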
Example 24
    def __init__(self, eps=1e-5, **kwargs):
        super().__init__()
        self.default = SingleObjectiveDefaultTermination(**kwargs)
        self.eps = eps
Example 25
from pymoo.algorithms.so_genetic_algorithm import GA
from pymoo.factory import get_problem
from pymoo.optimize import minimize
from pymoo.util.termination.default import SingleObjectiveDefaultTermination

problem = get_problem("g05")
algorithm = GA(pop_size=100)
termination = SingleObjectiveDefaultTermination()

res = minimize(problem,
               algorithm,
               termination,
               return_least_infeasible=True,
               pf=None,
               seed=1,
               verbose=True)

print("n_gen: ", res.algorithm.n_gen)
print("CV: ", res.CV[0])
print("F: ", res.F[0])

Example 26
from pymoo.algorithms.so_genetic_algorithm import GA
from pymoo.factory import get_problem
from pymoo.optimize import minimize
from pymoo.util.termination.default import SingleObjectiveDefaultTermination
from pymoo.visualization.scatter import Scatter

problem = get_problem("rastrigin")

termination = SingleObjectiveDefaultTermination(x_tol=1e-100,
                                                cv_tol=1e-6,
                                                f_tol=1e-6,
                                                nth_gen=5,
                                                n_last=20,
                                                n_max_gen=5)

algorithm = GA(pop_size=100, eliminate_duplicates=True)

res = minimize(problem, algorithm, termination, seed=1, verbose=True)

plot = Scatter()
plot.add(problem.pareto_front(), plot_type="line", color="black", alpha=0.7)
plot.add(res.F, color="red")
plot.show()