optFunc=MinMB, **kn): NP = 120 task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc()) algo = alg(seed=seed[0], task=task, Np=NP) best = algo.run() logger.info('%s %s' % (best[0], best[1])) input('Press [enter] to continue') def getOptType(otype): if otype == OptimizationType.MINIMIZATION: return MinMB elif otype == OptimizationType.MAXIMIZATION: return MaxMB else: return None if __name__ == '__main__': pargs, algo = getDictArgs(sys.argv[1:]), Runner.getAlgorithm('DE') optFunc = getOptType(pargs['optType']) if not pargs['runType']: simple_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'log': logging_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'plot': plot_example(algo, optFunc=optFunc, **pargs) # vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
Ackley, Griewank, Sphere, HappyCat ) """Example demonstrating the use of NiaPy Runner.""" runner = Runner( D=40, nFES=100, nRuns=2, useAlgorithms=[ GreyWolfOptimizer(), "FlowerPollinationAlgorithm", ParticleSwarmAlgorithm(), "HybridBatAlgorithm", "SimulatedAnnealing", "CuckooSearch"], useBenchmarks=[ Ackley(), Griewank(), Sphere(), HappyCat(), "rastrigin"] ) print(runner.run(verbose=True))
func = getCecBench(cec, D)
task = TaskConvPrint(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum))
algo = alg(seed=seed[0], task=task)
best = algo.run()
logger.info('%s %s' % (best[0], best[1]))
# NOTE(review): the statements above are the tail of a logging-style example
# whose `def` header lies before this view.


def plot_example(alg, cec, fnum=1, D=10, nFES=50000, nGEN=5000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, wout=False, sr=[-100, 100], **kwu):
	"""Run `alg` once on a CEC benchmark and plot its convergence.

	The list defaults (`seed`, `sr`) are only read, never mutated, so the
	shared-mutable-default pitfall does not bite here.
	"""
	func = getCecBench(cec, D)
	task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc(func, sr[0], sr[1], fnum))
	algo = alg(seed=seed[0], task=task)
	best = algo.run()
	logger.info('%s %s' % (best[0], best[1]))
	input('Press [enter] to continue')


def getOptType(otype):
	"""Map an OptimizationType value to the matching benchmark wrapper class."""
	if otype == OptimizationType.MINIMIZATION:
		return MinMB
	if otype == OptimizationType.MAXIMIZATION:
		return MaxMB
	return None


if __name__ == '__main__':
	pargs = getDictArgs(sys.argv[1:])
	# Evaluation budget scales with dimensionality and the CEC edition's
	# maximum-FES rule, optionally reduced by the `reduc` factor.
	pargs['nFES'] = round(pargs['D'] * getMaxFES(pargs['cec']) * pargs['reduc'])
	algo = Runner.getAlgorithm(pargs['algo'])
	optFunc = getOptType(pargs['optType'])
	if not pargs['runType']:
		simple_example(algo, optFunc=optFunc, **pargs)
	elif pargs['runType'] == 'log':
		logging_example(algo, optFunc=optFunc, **pargs)
	elif pargs['runType'] == 'plot':
		plot_example(algo, optFunc=optFunc, **pargs)
	else:
		# Unknown run type: fall back to the plain run.
		simple_example(algo, optFunc=optFunc, **pargs)

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
def simple_example(alg, runs=2, D=10, nFES=50000, nGEN=10000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, benchmark='ackley', **kn):
	"""Run the supplied algorithm class `runs` times and log each best result.

	Fix: the original ignored its `alg` and `D` parameters and always
	instantiated `Runner.getAlgorithm('GA')` with a hard-coded D=10; it now
	uses the class and dimension it was given, like the sibling examples.
	The extra constructor keywords (A, r, Qmin, Qmax) are kept for
	backward compatibility with the original call.
	"""
	for i in range(runs):
		# Cycle through the supplied seeds so every run is reproducible.
		algo = alg(D=D, NP=55, nFES=nFES, nGEN=nGEN, A=0.5, r=0.5, Qmin=0.0, Qmax=2.0, benchmark=benchmark, seed=seed[i % len(seed)])
		best = algo.run()
		logger.info('%s \n %s %s' % (algo.task.unused_evals(), best[0], best[1]))
def evaluate(D, sol): val = 0.0 for i in range(D): val = val + sol[i] * sol[i] return val return evaluate # example using custom benchmark "MyBenchmark" logger.info('Running with custom MyBenchmark...') for i in range(10): Algorithm = Runner.getAlgorithm('BA')(D=10, NP=40, nFES=50000, A=0.5, r=0.5, Qmin=0.0, Qmax=2.0, benchmark=MyBenchmark()) Best = Algorithm.run() logger.info(Best) # example using predifined benchmark function # available benchmarks are: # - griewank # - rastrigin # - rosenbrock # - sphere logger.info('Running with default Griewank benchmark...') griewank = Griewank()
optFunc=MinMB, **kn): task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc()) algo = alg(seed=seed[0], task=task) best = algo.run() logger.info('%s %s' % (best[0], best[1])) input('Press [enter] to continue') def getOptType(otype): if otype == OptimizationType.MINIMIZATION: return MinMB elif otype == OptimizationType.MAXIMIZATION: return MaxMB else: return None if __name__ == '__main__': pargs, algo = getDictArgs( sys.argv[1:]), Runner.getAlgorithm('EnhancedFireworksAlgorithm') optFunc = getOptType(pargs['optType']) if not pargs['runType']: simple_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'log': logging_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'plot': plot_example(algo, optFunc=optFunc, **pargs) # vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
optFunc=MinMB, **kn): task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc()) algo = alg(seed=seed, task=task) best = algo.run() logger.info('%s %s' % (best[0], best[1])) input('Press [enter] to continue') def getOptType(otype): if otype == OptimizationType.MINIMIZATION: return MinMB elif otype == OptimizationType.MAXIMIZATION: return MaxMB else: return None if __name__ == '__main__': pargs, algo = getDictArgs(sys.argv[1:]), Runner.getAlgorithm( 'CovarianceMaatrixAdaptionEvolutionStrategy') optFunc = getOptType(pargs['optType']) if not pargs['runType']: simple_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'log': logging_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'plot': plot_example(algo, optFunc=optFunc, **pargs) # vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
algo = alg(seed=seed[i % len(seed)], task=task)
best = algo.run()
logger.info('%s %s' % (best[0], best[1]))
# NOTE(review): the statements above are the tail of a per-run loop in an
# example function whose `def` header lies before this view.


def logging_example(alg, D=10, nFES=50000, nGEN=100000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, **kn):
	"""Run `alg` once, logging convergence as the task improves."""
	task = TaskConvPrint(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc())
	algo = alg(seed=seed[0], task=task)
	best = algo.run()
	logger.info('%s %s' % (best[0], best[1]))


def plot_example(alg, D=10, nFES=50000, nGEN=100000, seed=[None], optType=OptimizationType.MINIMIZATION, optFunc=MinMB, **kn):
	"""Run `alg` once and plot its convergence curve."""
	task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc())
	algo = alg(seed=seed[0], task=task)
	best = algo.run()
	logger.info('%s %s' % (best[0], best[1]))
	input('Press [enter] to continue')


def getOptType(otype):
	"""Map an OptimizationType value to the matching benchmark wrapper class."""
	if otype == OptimizationType.MINIMIZATION:
		return MinMB
	if otype == OptimizationType.MAXIMIZATION:
		return MaxMB
	return None


if __name__ == '__main__':
	pargs, algo = getDictArgs(sys.argv[1:]), Runner.getAlgorithm('DynamicFireworksAlgorithmGauss')
	optFunc = getOptType(pargs['optType'])
	# Dispatch on the requested run mode (default: plain run).
	if not pargs['runType']:
		simple_example(algo, optFunc=optFunc, **pargs)
	elif pargs['runType'] == 'log':
		logging_example(algo, optFunc=optFunc, **pargs)
	elif pargs['runType'] == 'plot':
		plot_example(algo, optFunc=optFunc, **pargs)

# vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3
optFunc=MinMB, **kn): task = TaskConvPlot(D=D, nFES=nFES, nGEN=nGEN, optType=optType, benchmark=optFunc()) algo = alg(seed=seed[0], task=task) best = algo.run() logger.info('%s %s' % (best[0], best[1])) input('Press [enter] to continue') def getOptType(otype): if otype == OptimizationType.MINIMIZATION: return MinMB elif otype == OptimizationType.MAXIMIZATION: return MaxMB else: return None if __name__ == '__main__': pargs, algo = getDictArgs(sys.argv[1:]), Runner.getAlgorithm( 'DynNPSelfAdaptiveDifferentialEvolutionAlgorithm') optFunc = getOptType(pargs['optType']) if not pargs['runType']: simple_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'log': logging_example(algo, optFunc=optFunc, **pargs) elif pargs['runType'] == 'plot': plot_example(algo, optFunc=optFunc, **pargs) # vim: tabstop=3 noexpandtab shiftwidth=3 softtabstop=3