def test_BA_iters_to_fes(self):
    task = Task(D=10, nGEN=1000, optType=OptimizationType.MINIMIZATION, benchmark=Sphere())
    algo = BatAlgorithm(task=task, NP=10)
    algo.run()
    evals = algo.task.evals()
    # 10 evaluations for the initial population plus 10 per generation over 1000 generations
    self.assertEqual(evals, 10010)
def test_BA_iters_fine(self):
    task = Task(D=10, nGEN=1000, optType=OptimizationType.MINIMIZATION, benchmark=Sphere())
    algo = BatAlgorithm(task=task, NP=25)
    algo.run()
    iters = algo.task.iters()
    self.assertEqual(iters, 1000)
def test_BA_evals_fine(self):
    task = Task(D=10, nFES=1000, optType=OptimizationType.MINIMIZATION, benchmark=Sphere())
    algo = BatAlgorithm(task=task, NP=25)
    algo.run()
    evals = algo.task.evals()
    self.assertEqual(evals, 1000)
class BATestCase(TestCase):
    def setUp(self):
        self.ba_custom = BatAlgorithm(10, 20, 10000, 0.5, 0.5, 0.0, 2.0, MyBenchmark())
        self.ba_griewank = BatAlgorithm(10, 40, 10000, 0.5, 0.5, 0.0, 2.0, 'griewank')

    def test_custom_works_fine(self):
        self.assertTrue(self.ba_custom.run())

    def test_griewank_works_fine(self):
        self.assertTrue(self.ba_griewank.run())
def optimize(bench, algo):
    average_mfo = 0
    average_de = 0
    average_abc = 0
    average_pso = 0
    average_ba = 0
    average_fa = 0
    average_ga = 0

    for i in np.arange(epoch):
        mfo = MothFlameOptimizer(D=dim, NP=pop, nGEN=maxIter, benchmark=bench)
        de = DifferentialEvolution(D=dim, NP=pop, nGEN=maxIter, benchmark=bench)
        abc = ArtificialBeeColonyAlgorithm(D=dim, NP=pop, nFES=maxIter, benchmark=bench)
        pso = ParticleSwarmAlgorithm(D=dim, NP=pop, nGEN=maxIter, benchmark=bench)
        ba = BatAlgorithm(D=dim, NP=pop, nFES=maxIter, benchmark=bench)
        fa = FireflyAlgorithm(D=dim, NP=pop, nFES=maxIter, benchmark=bench)
        ga = GeneticAlgorithm(D=dim, NP=pop, nFES=maxIter, benchmark=bench)

        gen, best_de = de.run()
        gen, best_mfo = mfo.run()
        gen, best_abc = abc.run()
        gen, best_pso = pso.run()
        gen, best_ba = ba.run()
        gen, best_fa = fa.run()
        gen, best_ga = ga.run()

        # accumulate each algorithm's best fitness; dividing by epoch yields the mean over all runs
        average_de += best_de / epoch
        average_mfo += best_mfo / epoch
        average_abc += best_abc / epoch
        average_pso += best_pso / epoch
        average_ba += best_ba / epoch
        average_fa += best_fa / epoch
        average_ga += best_ga / epoch

    print(algo, ': DE Average of Bests over', epoch, 'runs:', average_de)
    print(algo, ': MFO Average of Bests over', epoch, 'runs:', average_mfo)
    print(algo, ': ABC Average of Bests over', epoch, 'runs:', average_abc)
    print(algo, ': PSO Average of Bests over', epoch, 'runs:', average_pso)
    print(algo, ': BA Average of Bests over', epoch, 'runs:', average_ba)
    print(algo, ': FA Average of Bests over', epoch, 'runs:', average_fa)
    print(algo, ': GA Average of Bests over', epoch, 'runs:', average_ga)

    return [average_de, average_mfo, average_abc, average_pso, average_ba, average_fa, average_ga]
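# A minimal usage sketch for optimize() above (an assumption, not part of the original):
# the function relies on the module-level names dim, pop, maxIter and epoch, on numpy
# imported as np, and on the NiaPy algorithm classes it constructs being imported.
import numpy as np
from NiaPy.benchmarks import Griewank

dim = 10        # problem dimensionality
pop = 25        # population size
maxIter = 1000  # budget passed as nGEN/nFES above
epoch = 30      # number of independent runs averaged by optimize()

averages = optimize(Griewank(), 'Griewank')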
class BATestCase(TestCase):
    def setUp(self):
        self.ba_custom = BatAlgorithm(D=10, NP=20, nFES=1000, A=0.5, r=0.5, Qmin=0.0, Qmax=2.0, benchmark=MyBenchmark())
        self.ba_griewank = BatAlgorithm(NP=10, D=40, nFES=1000, A=0.5, r=0.5, Qmin=0.0, Qmax=2.0, benchmark='griewank')

    def test_custom_works_fine(self):
        self.assertTrue(self.ba_custom.run())

    def test_griewank_works_fine(self):
        self.assertTrue(self.ba_griewank.run())
# encoding=utf8
# This is a temporary fix to import the module from the parent folder.
# It will be removed when the package is published on PyPI.
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.basic import BatAlgorithm
from NiaPy.util import StoppingTask, OptimizationType
from NiaPy.benchmarks import Sphere

# we will run the Bat Algorithm for 5 independent runs
for i in range(5):
    task = StoppingTask(D=10, nGEN=1000, optType=OptimizationType.MINIMIZATION, benchmark=Sphere())
    algo = BatAlgorithm(NP=40, A=0.5, r=0.5, Qmin=0.0, Qmax=2.0)
    best = algo.run(task=task)
    print('%s -> %s' % (best[0], best[1]))
        return evaluate

# example using the custom benchmark "MyBenchmark"
logger.info('Running with custom MyBenchmark...')
for i in range(10):
    Algorithm = BatAlgorithm(D=10, NP=40, nFES=10000, A=0.5, r=0.5, Qmin=0.0, Qmax=2.0, benchmark=MyBenchmark())
    Best = Algorithm.run()
    logger.info(Best)

# example using a predefined benchmark function
# available benchmarks are:
# - griewank
# - rastrigin
# - rosenbrock
# - sphere
logger.info('Running with default Griewank benchmark...')
griewank = Griewank()
for i in range(10):
    Algorithm = BatAlgorithm(D=10, NP=40,
                             nFES=10000, A=0.5, r=0.5, Qmin=0.0, Qmax=2.0, benchmark=griewank)
    Best = Algorithm.run()
    logger.info(Best)
class BatAlgorithm(FeatureSelectionAlgorithm):
    r"""Implementation of feature selection using the BA algorithm.

    Date:
        2020

    Author:
        Luka Pečnik

    Reference:
        The implementation is adapted according to the following article:
        D. Fister, I. Fister, T. Jagrič, I. Fister Jr., J. Brest. A novel self-adaptive differential evolution for feature selection using threshold mechanism. In: Proceedings of the 2018 IEEE Symposium on Computational Intelligence (SSCI 2018), pp. 17-24, 2018.

    Reference URL:
        http://iztok-jr-fister.eu/static/publications/236.pdf

    License:
        MIT

    See Also:
        * :class:`niaaml.preprocessing.feature_selection.feature_selection_algorithm.FeatureSelectionAlgorithm`
    """
    Name = 'Bat Algorithm'

    def __init__(self, **kwargs):
        r"""Initialize BA feature selection algorithm.
        """
        self._params = dict(
            A=ParameterDefinition(MinMax(0.5, 1.0), param_type=float),
            r=ParameterDefinition(MinMax(0.0, 0.5), param_type=float),
            Qmin=ParameterDefinition(MinMax(0.0, 1.0), param_type=float),
            Qmax=ParameterDefinition(MinMax(1.0, 2.0), param_type=float)
        )
        self.__ba = BA(NP=10)

    def set_parameters(self, **kwargs):
        r"""Set the parameters/arguments of the algorithm.
        """
        kwargs['NP'] = self.__ba.NP
        self.__ba.setParameters(**kwargs)

    def __final_output(self, sol):
        r"""Calculate the final array of features.

        Arguments:
            sol (numpy.ndarray[float]): Individual of population / possible solution.

        Returns:
            numpy.ndarray[bool]: Mask of selected features.
        """
        selected = numpy.ones(sol.shape[0] - 1, dtype=bool)
        threshold = sol[sol.shape[0] - 1]
        for i in range(sol.shape[0] - 1):
            if sol[i] < threshold:
                selected[i] = False
        return selected

    def select_features(self, x, y, **kwargs):
        r"""Perform the feature selection process.

        Arguments:
            x (pandas.core.frame.DataFrame): Array of original features.
            y (pandas.core.series.Series): Expected classifier results.

        Returns:
            numpy.ndarray[bool]: Mask of selected features.
        """
        num_features = x.shape[1]
        benchmark = _FeatureSelectionThresholdBenchmark(x, y)
        task = StoppingTask(D=num_features + 1, nFES=1000, benchmark=benchmark)
        best = self.__ba.run(task)
        return self.__final_output(benchmark.get_best_solution())

    def to_string(self):
        r"""User friendly representation of the object.

        Returns:
            str: User friendly representation of the object.
        """
        return FeatureSelectionAlgorithm.to_string(self).format(
            name=self.Name,
            args=self._parameters_to_string(self.__ba.getParameters())
        )
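# A minimal usage sketch for the feature selection wrapper above (illustrative only):
# it assumes the niaaml imports used by the class (FeatureSelectionAlgorithm,
# ParameterDefinition, MinMax, StoppingTask, _FeatureSelectionThresholdBenchmark)
# and NiaPy's bat algorithm imported as BA are available; the toy data below are made up.
import numpy
import pandas as pd

x = pd.DataFrame(numpy.random.rand(50, 5), columns=['f1', 'f2', 'f3', 'f4', 'f5'])
y = pd.Series(numpy.random.randint(0, 2, 50))

selector = BatAlgorithm()               # the feature selection class defined above
mask = selector.select_features(x, y)   # boolean mask over the 5 feature columns
print(x.loc[:, mask].columns.tolist())  # names of the selected features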