def test_custom_works_fine(self):
    """Two identically-configured jDE instances on a custom benchmark must agree."""
    def make_algorithm():
        # Each call builds a fresh algorithm with its own benchmark instance,
        # mirroring the original pairwise-reproducibility setup.
        return SelfAdaptiveDifferentialEvolution(
            D=self.D, NP=40, nFES=self.nFES, nGEN=self.nGEN,
            F=0.5, F_l=0.0, F_u=2.0, Tao1=0.9, CR=0.1, Tao2=0.45,
            benchmark=MyBenchmark(), seed=self.seed)
    AlgorithmTestCase.algorithm_run_test(self, make_algorithm(), make_algorithm())
def test_griewank_works_fine(self):
    """Two identically-configured jDE instances on the Griewank benchmark must agree."""
    def make_algorithm():
        # Fresh instance per call so the two runs are fully independent.
        return SelfAdaptiveDifferentialEvolution(
            D=self.D, NP=40, nFES=self.nFES, nGEN=self.nGEN,
            F=0.5, F_l=0.0, F_u=2.0, Tao1=0.9, CR=0.1, Tao2=0.45,
            benchmark='griewank', seed=self.seed)
    AlgorithmTestCase.algorithm_run_test(self, make_algorithm(), make_algorithm())
def __init__(self, **kwargs):
    r"""Initialize the jDEFSTH (self-adaptive DE) feature selection algorithm.

    Note:
        The original docstring said "GWO feature selection algorithm" — a
        copy-paste error; this class wraps jDE, not Grey Wolf Optimizer.
        Keyword arguments are accepted for interface compatibility but unused.
    """
    super(jDEFSTH, self).__init__()
    # Inner optimizer: self-adaptive differential evolution (jDE) with
    # F adapted within [F_l, F_u] and adaptation probabilities Tao1/Tao2.
    self.__jdefsth = SelfAdaptiveDifferentialEvolution(NP=10, F=0.5, F_l=0.0, F_u=2.0, Tao1=0.9, CR=0.5, Tao2=0.45)
def test_griewank_works_fine(self):
    """Run the paired-reproducibility check with default benchmark settings."""
    settings = {
        'NP': 40, 'F': 0.5, 'F_l': 0.0, 'F_u': 2.0,
        'Tao1': 0.9, 'CR': 0.1, 'Tao2': 0.45, 'seed': self.seed,
    }
    first = SelfAdaptiveDifferentialEvolution(**settings)
    second = SelfAdaptiveDifferentialEvolution(**settings)
    AlgorithmTestCase.algorithm_run_test(self, first, second)
def test_custom_works_fine(self):
    """Run the paired-reproducibility check against a custom benchmark."""
    settings = {
        'NP': 40, 'F': 0.5, 'F_l': 0.0, 'F_u': 2.0,
        'Tao1': 0.9, 'CR': 0.1, 'Tao2': 0.45, 'seed': self.seed,
    }
    first = SelfAdaptiveDifferentialEvolution(**settings)
    second = SelfAdaptiveDifferentialEvolution(**settings)
    AlgorithmTestCase.test_algorithm_run(self, first, second, MyBenchmark())
def test_typeParameters(self):
    """Validate the parameter-checking predicates returned by typeParameters."""
    d = SelfAdaptiveDifferentialEvolution.typeParameters()
    # Scaling-factor bounds: predicate accepts strictly positive values only.
    for key in ('F_l', 'F_u'):
        self.assertTrue(d[key](10))
        self.assertFalse(d[key](-10))
        self.assertFalse(d[key](-0))
    # Adaptation probabilities: predicate accepts values inside [0, 1] only.
    for key in ('Tao1', 'Tao2'):
        self.assertTrue(d[key](0.32))
        self.assertFalse(d[key](-1.123))
        self.assertFalse(d[key](1.123))
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix

from NiaPy.algorithms.modified import SelfAdaptiveDifferentialEvolution
from NiaPy.task import StoppingTask
from NiaPy.benchmarks import Griewank

# Run the jDE algorithm for 5 independent runs on Griewank in [-600, 600].
algo = SelfAdaptiveDifferentialEvolution(NP=40, F=0.5, F_l=0.0, F_u=2.0, Tao1=0.9, CR=0.5, Tao2=0.45)
for run in range(5):
    # A fresh task per run so each run gets its own evaluation budget.
    task = StoppingTask(D=10, nFES=10000, benchmark=Griewank(Lower=-600, Upper=600), logger=True)
    best = algo.run(task)
    print('%s -> %s' % (best[0], best[1]))
print(algo.getParameters())

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
# encoding=utf8
# This is temporary fix to import module from parent folder
# It will be removed when package is published on PyPI
import sys
sys.path.append('../')
# End of fix
import random
from NiaPy.algorithms.modified import SelfAdaptiveDifferentialEvolution
from NiaPy.util import StoppingTask, OptimizationType
from NiaPy.benchmarks import Sphere

# Run the jDE algorithm for 5 independent runs on the Sphere benchmark.
for run in range(5):
    task = StoppingTask(D=10, nFES=100, optType=OptimizationType.MINIMIZATION, benchmark=Sphere())
    # A fresh algorithm instance per run keeps the runs independent.
    algo = SelfAdaptiveDifferentialEvolution(NP=40, F=0.5, F_l=0.0, F_u=2.0, Tao1=0.9, CR=0.1, Tao2=0.45)
    best = algo.run(task=task)
    print(best)
class jDEFSTH(FeatureSelectionAlgorithm):
    r"""Implementation of self-adaptive differential evolution for feature selection using threshold mechanism.

    Date:
        2020

    Author:
        Iztok Fister Jr.

    Reference:
        D. Fister, I. Fister, T. Jagrič, I. Fister Jr., J. Brest.
        A novel self-adaptive differential evolution for feature selection using threshold mechanism.
        In: Proceedings of the 2018 IEEE Symposium on Computational Intelligence (SSCI 2018), pp. 17-24, 2018.

    Reference URL:
        http://iztok-jr-fister.eu/static/publications/236.pdf

    License:
        MIT

    See Also:
        * :class:`niaaml.preprocessing.feature_selection.feature_selection_algorithm.FeatureSelectionAlgorithm`
    """
    Name = 'Self-Adaptive Differential Evolution'

    def __init__(self, **kwargs):
        r"""Initialize the jDEFSTH feature selection algorithm.

        Note:
            Keyword arguments are accepted for interface compatibility but unused.
        """
        super(jDEFSTH, self).__init__()
        # Inner optimizer: jDE with F adapted in [F_l, F_u] and adaptation
        # probabilities Tao1/Tao2 for F and CR respectively.
        self.__jdefsth = SelfAdaptiveDifferentialEvolution(NP=10, F=0.5, F_l=0.0, F_u=2.0, Tao1=0.9, CR=0.5, Tao2=0.45)

    def __final_output(self, sol):
        r"""Calculate final array of features.

        Arguments:
            sol (numpy.ndarray[float]): Individual of population / possible solution.

        Returns:
            numpy.ndarray[bool]: Mask of selected features.
        """
        # The last component encodes the selection threshold; a feature is
        # kept exactly when its weight is not below that threshold.
        threshold = sol[-1]
        return sol[:-1] >= threshold

    def select_features(self, x, y, **kwargs):
        r"""Perform the feature selection process.

        Arguments:
            x (pandas.core.frame.DataFrame): Array of original features.
            y (pandas.core.series.Series): Expected classifier results.

        Returns:
            numpy.ndarray[bool]: Mask of selected features.
        """
        num_features = x.shape[1]
        benchmark = _FeatureSelectionThresholdBenchmark(x, y)
        # One extra dimension carries the adaptive selection threshold.
        task = StoppingTask(D=num_features + 1, nFES=1000, benchmark=benchmark)
        # Return value of run() is unused; the benchmark records the best solution.
        self.__jdefsth.run(task)
        return self.__final_output(benchmark.get_best_solution())

    def to_string(self):
        r"""User friendly representation of the object.

        Returns:
            str: User friendly representation of the object.
        """
        return FeatureSelectionAlgorithm.to_string(self).format(
            name=self.Name,
            args=self._parameters_to_string(self.__jdefsth.getParameters()))