Example #1
def setUp(self):
    self.D, self.nFES, self.nGEN = 10, 10, 10
    self.t = StoppingTask(D=self.D,
                          nFES=self.nFES,
                          nGEN=self.nGEN,
                          refValue=1,
                          benchmark=MyBenchmark())

def test_BA_iters_to_fes(self):
    task = StoppingTask(D=10,
                        nGEN=1000,
                        optType=OptimizationType.MINIMIZATION,
                        benchmark=Sphere())
    algo = BatAlgorithm(NP=10)
    algo.runTask(task)
    evals = task.evals()
    self.assertEqual(10000, evals)

def test_DE_iters_fine(self):
    task = StoppingTask(D=10,
                        nGEN=1000,
                        optType=OptimizationType.MINIMIZATION,
                        benchmark=Sphere())
    algo = DifferentialEvolution(NP=40, CR=0.9, F=0.5)
    algo.runTask(task)
    iters = task.iters()
    self.assertEqual(1000, iters)

def test_FA_evals_fine(self):
    task = StoppingTask(D=10,
                        nFES=1000,
                        optType=OptimizationType.MINIMIZATION,
                        benchmark=Sphere())
    algo = FireflyAlgorithm(NP=25)
    algo.runTask(task)
    evals = task.evals()
    self.assertEqual(1000, evals)

def test_FA_iters_fine(self):
    task = StoppingTask(D=10,
                        nGEN=1000,
                        optType=OptimizationType.MINIMIZATION,
                        benchmark=Sphere())
    algo = FireflyAlgorithm(NP=25)
    algo.runTask(task)
    iters = task.iters()
    self.assertEqual(1000, iters)
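
# A minimal harness for the test fragments above (a sketch: the import
# paths follow the NiaPy 2.x pre-release API used throughout these
# examples, and MyBenchmark stands for whatever custom benchmark the
# original test module defines).
import unittest

from NiaPy.task import StoppingTask, OptimizationType
from NiaPy.benchmarks import Sphere
from NiaPy.algorithms.basic import BatAlgorithm, DifferentialEvolution, FireflyAlgorithm


class StoppingTaskTestCase(unittest.TestCase):
    pass  # the setUp and test_* methods above belong in this class body


if __name__ == '__main__':
    unittest.main()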
Example #6
    def run(self,
            fitness_name,
            pipeline_population_size,
            inner_population_size,
            number_of_pipeline_evaluations,
            number_of_inner_evaluations,
            optimization_algorithm,
            inner_optimization_algorithm=None):
        r"""Run classification pipeline optimization process.

        Arguments:
            fitness_name (str): Name of the fitness class to use as a function.
            pipeline_population_size (uint): Number of pipeline individuals in the optimization process.
            inner_population_size (uint): Number of individuals in the hyperparameter optimization process.
            number_of_pipeline_evaluations (uint): Number of maximum evaluations.
            number_of_inner_evaluations (uint): Number of maximum inner evaluations.
            optimization_algorithm (str): Name of the optimization algorithm to use.
            inner_optimization_algorithm (Optional[str]): Name of the inner optimization algorithm to use. Defaults to the optimization_algorithm argument.
        
        Returns:
            Pipeline: Best pipeline found in the optimization process.
        """

        algo = self.__niapy_algorithm_utility.get_algorithm(
            optimization_algorithm)
        algo.NP = pipeline_population_size

        features = self.__data.get_x()

        if self.__imputer is not None:
            features, self.__imputers = impute_features(
                features, self.__imputer)

        if self.__categorical_features_encoder is not None:
            features, self.__categorical_features_encoders = encode_categorical_features(
                features, self.__categorical_features_encoder)

        self.__data.set_x(features)

        benchmark = _PipelineOptimizerBenchmarkV2(
            self, fitness_name, inner_population_size,
            number_of_inner_evaluations,
            inner_optimization_algorithm if inner_optimization_algorithm
            is not None else optimization_algorithm)
        task = StoppingTask(D=3,
                            nFES=number_of_pipeline_evaluations,
                            benchmark=benchmark)
        algo.run(task)

        pipeline = benchmark.get_pipeline()
        if pipeline is not None:
            pipeline.set_categorical_features_encoders(
                self.__categorical_features_encoders)
            pipeline.set_imputers(self.__imputers)

        return pipeline
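
# A hypothetical call site for run() (only the signature comes from the
# docstring above; the optimizer instance and all argument values are
# illustrative, not taken from the original source):
best_pipeline = optimizer.run(
    fitness_name='Accuracy',
    pipeline_population_size=15,
    inner_population_size=15,
    number_of_pipeline_evaluations=100,
    number_of_inner_evaluations=100,
    optimization_algorithm='ParticleSwarmAlgorithm')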
Example #7
    def benchmark_factory(self, name):
        r"""Create optimization task.

        Args:
                name (str): Benchmark name.

        Returns:
                Task: Optimization task to use.

        """
        return StoppingTask(D=self.D, nFES=self.nFES, optType=OptimizationType.MINIMIZATION, benchmark=name)
Example #8
    def optimize(self, x, y, population_size, number_of_evaluations, optimization_algorithm, fitness_function):
        r"""Optimize pipeline's hyperparameters.

        Arguments:
            x (pandas.core.frame.DataFrame): n samples to classify.
            y (pandas.core.series.Series): n classes of the samples in the x array.
            population_size (uint): Number of individuals in the optimization process.
            number_of_evaluations (uint): Number of maximum evaluations.
            optimization_algorithm (str): Name of the optimization algorithm to use.
            fitness_function (str): Name of the fitness function to use.
        
        Returns:
            float: Best fitness value found in optimization process.
        """

        if self.__imputers is not None:
            for key in self.__imputers:
                x.loc[:, key] = self.__imputers[key].transform(x[[key]])

        if self.__categorical_features_encoders is not None:
            to_drop = []
            enc_features = pd.DataFrame()
            cols = [col for col in x.columns if x[col].dtype != np.dtype('float64') and x[col].dtype != np.dtype('int64')]
            for c in cols:
                self.__categorical_features_encoders[c].fit(x[[c]])
                tr = self.__categorical_features_encoders[c].transform(x[[c]])
                to_drop.append(c)
                enc_features = pd.concat([enc_features, tr], axis=1)
            x = x.drop(to_drop, axis=1)
            x = pd.concat([x, enc_features], axis=1)

        D = 0
        if self.__feature_selection_algorithm is not None:
            D += len(self.__feature_selection_algorithm.get_params_dict().keys())
        if self.__feature_transform_algorithm is not None:
            D += len(self.__feature_transform_algorithm.get_params_dict().keys())

        D += len(self.__classifier.get_params_dict().keys())

        algo = self.__niapy_algorithm_utility.get_algorithm(optimization_algorithm)
        algo.NP = population_size

        task = StoppingTask(
            D=D,
            nFES=number_of_evaluations,
            benchmark=_PipelineBenchmark(x, y, self, population_size, fitness_function)
            )
        best = algo.run(task)
        return best[1]
    def select_features(self, x, y, **kwargs):
        r"""Perform the feature selection process.

        Arguments:
            x (pandas.core.frame.DataFrame): Array of original features.
            y (pandas.core.series.Series): Expected classifier results.

        Returns:
            numpy.ndarray[bool]: Mask of selected features.
        """
        num_features = x.shape[1]
        benchmark = _FeatureSelectionThresholdBenchmark(x, y)
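        # D is num_features + 1: one gene per feature plus one extra gene,
        # presumably a selection threshold (inferred from the benchmark's
        # name; not stated in the original source)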
        task = StoppingTask(D=num_features+1, nFES=1000, benchmark=benchmark)
        best = self.__pso.run(task)
        return self.__final_output(benchmark.get_best_solution())
Example #10
    def optimize(self):
        """
        The main optimization procedure
        Should return the best arm as and array/ list
        :return:

        THe instance to be optimized is the self.objective value
        """
        try:

            class MyBenchmark(Benchmark):
                def __init__(self,
                             benchmark_objective,
                             Lower=self.lowerBound,
                             Upper=self.higherBound):
                    self.benchmark_objective = benchmark_objective
                    Benchmark.__init__(self, Lower, Upper)

                def function(self):
                    bench = self.benchmark_objective

                    def evaluate(D, sol):
                        val = bench(sol)
                        return val

                    return evaluate

            # run the random search algorithm
            task = StoppingTask(D=len(self.x0),
                                nFES=self.maxfeval,
                                benchmark=MyBenchmark(
                                    benchmark_objective=self.objective,
                                    Lower=self.lowerBound,
                                    Upper=self.higherBound))
            best = self.algo.run(task)
            # Some classes in NiaPy return different formats for the best result
            xopt = []
            for xi in best[0]:
                xopt.append(xi)
            success = True
            best_arm = convertToArray(xopt)
        except Exception:
            logger.warning('Optimization for ' + str(self.algorithm_name) +
                           ' failed.')
            best_arm = NAN
            success = False
        return best_arm, success, self.objective
def Krill_Herd_Optimization_4(matrix):
    task = StoppingTask(D=6305, nGEN=1, benchmark=Qing())
    scaler = StandardScaler()
    matrix = scaler.fit_transform(matrix)
    # matrix = np.transpose(matrix)
    kh = KrillHerdV4(NP=3001)

    KH, KH_f, d = initPopulation_KH(task, matrix, kh)
    xb, fxb = getBest_algo(KH, KH_f)
    print(xb)
    print(fxb)
    xb, fxb = run_algo(kh, task, KH, KH_f, xb, fxb, d['W_n'], d['W_f'], d['N'], d['F'])
    return xb, fxb
Example #12
    def _run(X, y, train_index, val_index, random_seed, optimizer, evaluator,
             benchmark, optimizer_settings):
        opt_settings = es.get_args(optimizer)
        opt_settings.update(optimizer_settings)
        benchm = benchmark(X=X,
                           y=y,
                           train_indices=train_index,
                           valid_indices=val_index,
                           random_seed=random_seed,
                           evaluator=evaluator)
        task = StoppingTask(D=X.shape[1] + 1,
                            nGEN=opt_settings.pop('nGEN', 960),
                            optType=OptimizationType.MINIMIZATION,
                            benchmark=benchm)

        evo = optimizer(seed=random_seed, **opt_settings)
        r = evo.run(task=task)
        if isinstance(r[0], np.ndarray):
            return benchmark.to_phenotype(r[0], benchm.split), r[1]
        else:
            return benchmark.to_phenotype(r[0].x, benchm.split), r[1]
Example #13
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

import random
from NiaPy.algorithms.basic import ParticleSwarmAlgorithm
from NiaPy.task import StoppingTask, OptimizationType
from NiaPy.benchmarks import Sphere

# we will run ParticleSwarmAlgorithm for 5 independent runs
for i in range(5):
    task = StoppingTask(D=10, nFES=1000, optType=OptimizationType.MAXIMIZATION, benchmark=Sphere())
    algo = ParticleSwarmAlgorithm(NP=40, C1=2.0, C2=2.0, w=0.7, vMin=-4, vMax=4)
    best = algo.run(task=task)
    print(best)
Example #14
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.basic import GreyWolfOptimizer
from NiaPy.task import StoppingTask

# we will run 10 repetitions of Grey Wolf Optimizer against Pinter benchmark function
for i in range(10):
    task = StoppingTask(D=10, nFES=1000, benchmark='pinter')
    algorithm = GreyWolfOptimizer(NP=20)
    best = algorithm.run(task)
    print(best)
Example #15
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.benchmarks import Sphere
from NiaPy.task import StoppingTask
from NiaPy.algorithms.basic import BeesAlgorithm


# we will run Bees Algorithm for 5 independent runs
for i in range(5):
    task = StoppingTask(D=20, nGEN=2, benchmark=Sphere())
    algo = BeesAlgorithm(NP=50, m=20, e=10, nep=20, nsp=15, ngh=7)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))
Example #16
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

import random
import logging
from NiaPy.task import StoppingTask
from NiaPy.algorithms.basic import DifferentialEvolution
from NiaPy.benchmarks import Griewank, Sphere

# 1 Number of function evaluations (nFES) as a stopping criterion
for i in range(10):
    task = StoppingTask(D=10, nFES=10000, benchmark=Sphere())
    algo = DifferentialEvolution(NP=40, CR=0.9, F=0.5)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))

print('---------------------------------------')

# 2 Number of generations (iterations, nGEN) as a stopping criterion
for i in range(10):
    task = StoppingTask(D=10, nGEN=1000, benchmark=Sphere())
    algo = DifferentialEvolution(NP=40, CR=0.9, F=0.5)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))

print('---------------------------------------')
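
# 3 Reference fitness value as a stopping criterion: a sketch based on the
# refValue argument used in Example #1, assuming the run stops once the
# reference value is reached (nFES acts as a safety cap)
for i in range(10):
    task = StoppingTask(D=10, nFES=10000, refValue=1e-4, benchmark=Sphere())
    algo = DifferentialEvolution(NP=40, CR=0.9, F=0.5)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))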
Example #17
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

import random
from NiaPy.algorithms.basic import ComprehensiveLearningParticleSwarmOptimizer
from NiaPy.benchmarks import Sphere
from NiaPy.task import StoppingTask

# we will run the Comprehensive Learning Particle Swarm Optimizer for 5 independent runs
algo = ComprehensiveLearningParticleSwarmOptimizer(NP=50,
                                                   C1=.3,
                                                   C2=1.0,
                                                   m=5,
                                                   w=0.86,
                                                   vMin=-2,
                                                   vMax=2)
for i in range(5):
    task = StoppingTask(D=25, nFES=20000, benchmark=Sphere())
    best = algo.run(task=task)
    print('%s -> %f' % (best[0], best[1]))
print(algo.getParameters())

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
Example #18
    def run_v1(self, fitness_name, population_size, number_of_evaluations,
               optimization_algorithm):
        r"""Run classification pipeline optimization process according to the original NiaAML paper.

        Reference:
            Fister, Iztok, Milan Zorman, and Dušan Fister. "Continuous Optimizers for Automatic Design and Evaluation of Classification Pipelines." Frontier Applications of Nature Inspired Computation. Springer, Singapore, 2020. 281-301.

        Arguments:
            fitness_name (str): Name of the fitness class to use as a function.
            population_size (uint): Number of individuals in the optimization process.
            number_of_evaluations (uint): Number of maximum evaluations.
            optimization_algorithm (str): Name of the optimization algorithm to use.
        
        Returns:
            Pipeline: Best pipeline found in the optimization process.
        """

        algo = self.__niapy_algorithm_utility.get_algorithm(
            optimization_algorithm)
        algo.NP = population_size

        features = self.__data.get_x()

        if self.__imputer is not None:
            features, self.__imputers = impute_features(
                features, self.__imputer)

        if self.__categorical_features_encoder is not None:
            features, self.__categorical_features_encoders = encode_categorical_features(
                features, self.__categorical_features_encoder)

        self.__data.set_x(features)

        D = 3
        factories = [(self.__feature_selection_algorithms,
                      FeatureSelectionAlgorithmFactory()),
                     (self.__feature_transform_algorithms,
                      FeatureTransformAlgorithmFactory()),
                     (self.__classifiers, ClassifierFactory())]
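        # D grows by the largest hyperparameter count among each component
        # type's candidates, so a single genotype can encode any choice of
        # component (a reading of the loop below, not a comment from the
        # original source)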

        for f in factories:
            m = 0
            if f[0] is not None:
                for e in f[0]:
                    if e is not None:
                        el = f[1].get_result(e)
                        params = el.get_params_dict()
                        if params is not None and len(params) > m:
                            m = len(params)
            D += m

        benchmark = _PipelineOptimizerBenchmarkV1(self, fitness_name)
        task = StoppingTask(D=D,
                            nFES=number_of_evaluations,
                            benchmark=benchmark)
        algo.run(task)

        pipeline = benchmark.get_pipeline()
        if pipeline is not None:
            pipeline.set_categorical_features_encoders(
                self.__categorical_features_encoders)
            pipeline.set_imputers(self.__imputers)

        return pipeline
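
# A hypothetical call site for run_v1() (only the signature comes from the
# docstring above; the optimizer instance and all argument values are
# illustrative):
best_pipeline = optimizer.run_v1(
    fitness_name='Accuracy',
    population_size=15,
    number_of_evaluations=100,
    optimization_algorithm='DifferentialEvolution')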
Example #19
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys

sys.path.append('../')
# End of fix

from NiaPy.algorithms.modified import HybridSelfAdaptiveBatAlgorithm
from NiaPy.task import StoppingTask
from NiaPy.benchmarks import Griewank

# we will run the Hybrid Self-Adaptive Bat Algorithm for 5 independent runs
algo = HybridSelfAdaptiveBatAlgorithm(NP=50)
for i in range(5):
    task = StoppingTask(D=10,
                        nGEN=10000,
                        benchmark=Griewank(Upper=600, Lower=-600))
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))
print(algo.getParameters())

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
Example #20
from NiaPy.task import StoppingTask, OptimizationType
from NiaPy.benchmarks import Benchmark
from NiaPy.algorithms.basic import GreyWolfOptimizer

# our custom benchmark class
class MyBenchmark(Benchmark):
    def __init__(self):
        Benchmark.__init__(self, -10, 10)

    def function(self):
        def evaluate(D, sol):
            val = 0.0
            for i in range(D):
                val += sol[i] ** 2
            return val
        return evaluate


# we will run 10 repetitions of Grey Wolf Optimizer against our custom MyBenchmark benchmark function
for i in range(10):
    task = StoppingTask(D=20, nGEN=100, optType=OptimizationType.MINIMIZATION, benchmark=MyBenchmark())

    # parameter is population size
    algo = GreyWolfOptimizer(NP=20)

    # running algorithm returns best found minimum
    best = algo.run(task)

    # printing best minimum
    print(best[-1])
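
# The evaluate callback in MyBenchmark can equivalently be written with
# numpy (a vectorized sketch; same result as the explicit loop above):
import numpy as np

def evaluate_np(D, sol):
    # sum of squares over the solution vector
    return float(np.sum(np.asarray(sol) ** 2))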
Example #21
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.basic import CovarianceMatrixAdaptionEvolutionStrategy
from NiaPy.task import StoppingTask, OptimizationType
from NiaPy.benchmarks import Sphere

# we will run CMA-ES for 5 independent runs
for i in range(5):
    task = StoppingTask(D=10, nFES=1000, optType=OptimizationType.MINIMIZATION, logger=True, benchmark=Sphere())
    algo = CovarianceMatrixAdaptionEvolutionStrategy(NP=20)
    best = algo.run(task=task)
    print('%s -> %s' % (best[0], best[1]))

Example #22
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.modified import SelfAdaptiveDifferentialEvolution
from NiaPy.task import StoppingTask
from NiaPy.benchmarks import Griewank

# we will run jDE algorithm for 5 independent runs
algo = SelfAdaptiveDifferentialEvolution(NP=40, F=0.5, F_l=0.0, F_u=2.0, Tao1=0.9, CR=0.5, Tao2=0.45)
for i in range(5):
	task = StoppingTask(D=10, nFES=10000, benchmark=Griewank(Lower=-600, Upper=600), logger=True)
	best = algo.run(task)
	print('%s -> %s' % (best[0], best[1]))
print(algo.getParameters())

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3

Example #23
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.modified import ParameterFreeBatAlgorithm

from NiaPy.task import StoppingTask
from NiaPy.benchmarks import Sphere

algo = ParameterFreeBatAlgorithm()

for i in range(10):
	task = StoppingTask(D=10, nFES=10000, benchmark=Sphere(Upper=5.12, Lower=-5.12))
	best = algo.run(task)
	print('%s -> %s' % (best[0], best[1]))
print(algo.getParameters())
Example #24
def setUp(self):
    self.D = 20
    self.x, self.task = rnd.uniform(-100, 100, self.D), StoppingTask(
        D=self.D, nFES=230, nGEN=inf, benchmark=MyBenchmark())
    self.s1, self.s2, self.s3 = Individual(x=self.x, e=False), Individual(
        task=self.task, rand=rnd), Individual(task=self.task)
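
# The fragment above assumes numpy's random module (rnd), numpy's inf, and
# NiaPy's Individual; the imports would look roughly like this (the
# Individual module path is a best guess for this NiaPy 2.x pre-release):
from numpy import random as rnd, inf
from NiaPy.task import StoppingTask
from NiaPy.algorithms.algorithm import Individual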
Example #25
    def _run(X, y, train_index, val_index, random_seed, optimizer, evaluator,
             benchmark, optimizer_settings, nGENs, continue_opt, cut_type,
             cutting_perc, j):
        opt_settings = es.get_args(optimizer)
        opt_settings.update(optimizer_settings)
        X1 = X
        cuted = []  # Which genes (features) were cut
        fitnesses = []  # Fitness values after every cutting
        xb, fxb, benchm = None, -1, None
        pop, fpop = None, None
        for nGENp in nGENs:  # Every interval before cutting
            benchm = benchmark(X=X1,
                               y=y,
                               train_indices=train_index,
                               valid_indices=val_index,
                               random_seed=random_seed,
                               evaluator=evaluator)
            task = StoppingTask(D=X1.shape[1],
                                nGEN=nGENp,
                                optType=OptimizationType.MINIMIZATION,
                                benchmark=benchm)

            evo = optimizer(seed=random_seed, **opt_settings)

            # Start a new optimization run, continuing from pop and its fitness values.
            pop, fpop, xb, fxb = DynFeatureSelection.runTask(
                evo, task, starting_pop=pop, starting_fpop=fpop)

            if not isinstance(xb, np.ndarray):
                xb = xb.x
            xb = np.copy(xb)

            # Cut genotype. Four different strategies
            if cut_type == 'diff':
                # Best solutions vote for the features most common among them,
                # and worst solutions do the same. The difference between the
                # votes decides which features are cut: those more common in
                # the worst solutions and least common in the best ones.
                idx = DynFeatureSelection.cut_n_vote_diff(pop,
                                                          fpop,
                                                          cutting_perc,
                                                          benchm,
                                                          n=25)
            elif cut_type == 'vote_all':
                # Every solution votes for the features most common among them. Similar to best_vote_worst, but every solution votes.
                idx = DynFeatureSelection.cut_all_vote_for_worst(
                    pop, fpop, cutting_perc, benchm)
            elif cut_type == 'best_vote_worst':
                # Best solutions vote for the features most common among them. The least common features are cut.
                idx = DynFeatureSelection.cut_n_vote(pop,
                                                     fpop,
                                                     cutting_perc,
                                                     benchm,
                                                     n=50)
            elif cut_type == 'worst_vote_best':
                # Worst solutions vote for the features most common among them. The most common features are cut.
                idx = DynFeatureSelection.cut_n_vote(pop,
                                                     fpop,
                                                     cutting_perc,
                                                     benchm,
                                                     n=-50)
            cuted.append(idx)  # Log which genes were cut
            fitnesses.append(fxb)  # Log the fitness value after the cutting

            X1 = np.delete(X1, idx,
                           axis=1)  # Delete cut columns (features) from the dataset

            # If we want the optimization to continue on the genes that were not cut
            if continue_opt:
                if isinstance(pop[0], np.ndarray):  # if the population is an ndarray
                    pop = np.delete(pop, idx, 1)
                else:  # if the population is a list of individuals
                    for ind in pop:
                        ind.x = np.delete(ind.x, idx)
            else:  # If we want the solutions reset (random reinitialization) after cutting
                pop = None
                fpop = None

        # Transform the best solution into its phenotype (feature-selection mask)
        xb = benchmark.to_phenotype(xb, benchm.split)

        # Re-insert the cut features into the mask as not selected
        for i in range(len(cuted) - 2, -1, -1):
            cut = np.sort(cuted[i])
            for c in cut:
                xb = np.insert(xb, c, False)

        return xb, fxb
Example #26
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.basic import CovarianceMatrixAdaptionEvolutionStrategy
from NiaPy.benchmarks import Sphere
from NiaPy.task import StoppingTask

# we will run CMA-ES for 5 independent runs
for i in range(5):
    task = StoppingTask(D=10, nFES=1000, logger=True, benchmark=Sphere())
    algo = CovarianceMatrixAdaptionEvolutionStrategy(NP=20)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))

Example #27
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.basic import KrillHerdV2
from NiaPy.task import StoppingTask
from NiaPy.benchmarks import Sphere

# we will run the Krill Herd Algorithm (V2) for 5 independent runs
for i in range(5):
    task = StoppingTask(D=10, nGEN=50, benchmark=Sphere())
    algo = KrillHerdV2(NP=70, Ainit=0.1, Afinal=0.9)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
Example #28
def setUp(self):
    self.D = 6
    self.Lower, self.Upper = [2, 1, 1], [10, 10, 2]
    self.task = StoppingTask(Lower=self.Lower, Upper=self.Upper, D=self.D)
Example #29
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

import random
from NiaPy.algorithms.basic import AgingNpMultiMutationDifferentialEvolution
from NiaPy.algorithms.basic.de import CrossCurr2Best1, CrossBest2
from NiaPy.task import StoppingTask
from NiaPy.benchmarks import Sphere

# we will run Differential Evolution for 5 independent runs
for i in range(5):
    task = StoppingTask(D=10, nFES=5000, benchmark=Sphere())
    algo = AgingNpMultiMutationDifferentialEvolution(NP=10, F=0.2, CR=0.65, strategies=(CrossCurr2Best1, CrossBest2), delta_np=0.05, omega=0.9)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
Example #30
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.basic import GreyWolfOptimizer
from NiaPy.task import StoppingTask, OptimizationType
from NiaPy.benchmarks import Pinter

# initialize Pinter benchmark with custom bounds
pinterCustom = Pinter(-5, 5)

# we will run 10 repetitions of Grey Wolf Optimizer against Pinter benchmark function
for i in range(10):
    # first parameter is the dimension of the problem
    # second parameter is the number of generations (nGEN)
    # third parameter is the optimization type
    # fourth parameter is the benchmark function
    task = StoppingTask(D=20,
                        nGEN=100,
                        optType=OptimizationType.MINIMIZATION,
                        benchmark=pinterCustom)

    # parameter is population size
    algo = GreyWolfOptimizer(NP=20)

    # running algorithm returns best found minimum
    best = algo.run(task)

    # printing best minimum
    print(best[-1])