import numpy as np

import sampling   # MO-ASMO project modules, assumed importable
import gp
import NSGA2


def onestep(nInput, nOutput, xlb, xub, pct, Xinit, Yinit,
            pop=100, gen=100, crossover_rate=0.9, mu=20, mum=20):
    """
    Multi-Objective Adaptive Surrogate Modelling-based Optimization,
    one-step mode for offline optimization.
    Does NOT call the model evaluation function.

    nInput: number of model inputs
    nOutput: number of output objectives
    xlb: lower bound of input
    xub: upper bound of input
    pct: percentage of resampled points in each iteration
    Xinit, Yinit: initial samples for surrogate model construction

    Options for the embedded NSGA-II of MO-ASMO:
    pop: population size
    gen: number of generations
    crossover_rate: ratio of crossover in each generation
    mu: distribution index for crossover
    mum: distribution index for mutation
    """
    N_resample = int(pop * pct)
    x = Xinit.copy()
    y = Yinit.copy()
    # fit a Gaussian-process surrogate (Matern kernel) to the current samples
    sm = gp.GPR_Matern(x, y, nInput, nOutput, x.shape[0], xlb, xub)
    # search the surrogate Pareto front with NSGA-II
    bestx_sm, besty_sm, x_sm, y_sm = \
        NSGA2.optimization(sm, nInput, nOutput, xlb, xub,
                           pop, gen, crossover_rate, mu, mum)
    # propose the N_resample least crowded points for the next round of model runs
    D = NSGA2.crowding_distance(besty_sm)
    idxr = D.argsort()[::-1][:N_resample]
    x_resample = bestx_sm[idxr, :]
    return x_resample
def optimization(model, nInput, nOutput, xlb, xub, niter, pct,
                 Xinit=None, Yinit=None, pop=100, gen=100,
                 crossover_rate=0.9, mu=20, mum=20):
    """
    Multi-Objective Adaptive Surrogate Modelling-based Optimization.

    model: the model to be evaluated; must expose evaluate(x)
    nInput: number of model inputs
    nOutput: number of output objectives
    xlb: lower bound of input
    xub: upper bound of input
    niter: number of iterations
    pct: percentage of resampled points in each iteration
    Xinit, Yinit: initial samples for surrogate model construction

    Options for the embedded NSGA-II of MO-ASMO:
    pop: population size
    gen: number of generations
    crossover_rate: ratio of crossover in each generation
    mu: distribution index for crossover
    mum: distribution index for mutation
    """
    N_resample = int(pop * pct)
    if Xinit is None and Yinit is None:
        # no initial design supplied: generate an initial sample in [0, 1]^nInput
        # with sampling.glp, rescale it to [xlb, xub], and evaluate the model on it
        Ninit = nInput * 10
        Xinit = sampling.glp(Ninit, nInput)
        for i in range(Ninit):
            Xinit[i, :] = Xinit[i, :] * (xub - xlb) + xlb
        Yinit = np.zeros((Ninit, nOutput))
        for i in range(Ninit):
            Yinit[i, :] = model.evaluate(Xinit[i, :])
    else:
        Ninit = Xinit.shape[0]
    icall = Ninit
    x = Xinit.copy()
    y = Yinit.copy()
    for i in range(niter):
        print('Surrogate Opt loop: %d' % i)
        # fit the GP surrogate to every sample evaluated so far
        sm = gp.GPR_Matern(x, y, nInput, nOutput, x.shape[0], xlb, xub)
        # optimize the surrogate with NSGA-II
        bestx_sm, besty_sm, x_sm, y_sm = \
            NSGA2.optimization(sm, nInput, nOutput, xlb, xub,
                               pop, gen, crossover_rate, mu, mum)
        # resample the N_resample least crowded points of the surrogate front
        D = NSGA2.crowding_distance(besty_sm)
        idxr = D.argsort()[::-1][:N_resample]
        x_resample = bestx_sm[idxr, :]
        # evaluate them with the true model and augment the training data
        y_resample = np.zeros((N_resample, nOutput))
        for j in range(N_resample):
            y_resample[j, :] = model.evaluate(x_resample[j, :])
        icall += N_resample
        x = np.vstack((x, x_resample))
        y = np.vstack((y, y_resample))
    # non-dominated sorting of all evaluated points; rank 0 is the Pareto front
    xtmp = x.copy()
    ytmp = y.copy()
    xtmp, ytmp, rank, crowd = NSGA2.sortMO(xtmp, ytmp, nInput, nOutput)
    idxp = (rank == 0)
    bestx = xtmp[idxp, :]
    besty = ytmp[idxp, :]
    return bestx, besty, x, y
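# A minimal usage sketch (an assumption, not part of the module above): MO-ASMO
# expects a model object exposing evaluate(x) -> array of nOutput objective
# values.  The ZDT1-style toy problem and all parameter values below are
# illustrative only.
class _ToyZDT1:
    """Bi-objective toy problem on [0, 1]^nInput."""
    def evaluate(self, x):
        f1 = x[0]
        g = 1.0 + 9.0 * np.sum(x[1:]) / (len(x) - 1)
        f2 = g * (1.0 - np.sqrt(f1 / g))
        return np.array([f1, f2])


if __name__ == '__main__':
    nInput, nOutput = 5, 2
    xlb = np.zeros(nInput)
    xub = np.ones(nInput)
    # full adaptive loop: 5 outer iterations, resampling 10% of the NSGA-II population
    bestx, besty, x_all, y_all = optimization(_ToyZDT1(), nInput, nOutput,
                                              xlb, xub, niter=5, pct=0.1)
    # offline one-step mode: propose the next batch without calling the model
    x_next = onestep(nInput, nOutput, xlb, xub, 0.1, x_all, y_all)
    print(bestx.shape, x_next.shape)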
import numpy as np
import matplotlib.pyplot as plt

# NSGA2, fitness and function are assumed to be defined or imported elsewhere
# in this module


def main():
    nIter = 50
    nChr = 3
    nPop = 100
    pc = 0.6
    pm = 0.1
    etaC = 1
    etaM = 1
    func = function
    lb = -2
    rb = 2
    paretoPops, paretoFits = NSGA2(nIter, nChr, nPop, pc, pm,
                                   etaC, etaM, func, lb, rb)
    print(paretoFits)
    print(f"paretoFront: {paretoFits.shape}")

    # theoretical Pareto-optimal set
    x = np.linspace(-1 / np.sqrt(3), 1 / np.sqrt(3), 116).reshape(116, 1)
    X = np.concatenate((x, x, x), axis=1)
    thFits = fitness(X, function)

    plt.rcParams['font.sans-serif'] = 'KaiTi'  # font able to render CJK text
    fig = plt.figure(dpi=400)
    ax = fig.add_subplot(111)
    ax.plot(thFits[:, 0], thFits[:, 1], color='green',
            label='theoretical Pareto front')
    ax.scatter(paretoFits[:, 0], paretoFits[:, 1], color='red',
               label='obtained solution set')
    ax.legend()
    fig.savefig('test.png', dpi=400)
    print(paretoPops)
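# The theoretical Pareto set sampled above (x1 = x2 = x3 in [-1/sqrt(3), 1/sqrt(3)])
# matches the Fonseca-Fleming test problem; a sketch of such a bi-objective
# `function` is given below as an assumption, since the actual `function` and
# `fitness` used by main() come from the surrounding project.
def fonseca_fleming(X):
    """Map an (nPop, nChr) decision matrix to an (nPop, 2) objective matrix."""
    n = X.shape[1]
    d = 1.0 / np.sqrt(n)
    f1 = 1.0 - np.exp(-np.sum((X - d) ** 2, axis=1))
    f2 = 1.0 - np.exp(-np.sum((X + d) ** 2, axis=1))
    return np.column_stack((f1, f2))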
def run_MOEA(MOEA, SIR_name, version):
    result = []
    num_testcases = GLOB.NUM_TESTCASES[SIR_name]
    for i in range(GLOB.TRIALS_PER_VERSION):
        print(SIR_name, 'version', version, '_', MOEA, ': trial', i)
        if MOEA == 'NSGA2':
            res = NSGA2.run_NSGA2(SIR_name, version, num_testcases)
            result.append(res)
        elif MOEA == 'SPEA2':
            res = SPEA2.run_SPEA2(SIR_name, version, num_testcases)
            result.append(res)
        elif MOEA == 'TAEA':
            res = TAEA.run_TAEA(SIR_name, version, num_testcases)
            result.append(res)
    write_file(MOEA, SIR_name, version, result)
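# A hedged driver sketch (an assumption, not from the project): run every
# algorithm over a given list of versions of one SIR subject.  run_MOEA itself
# handles the per-trial loop and persists results through the project's
# write_file helper.
def run_all(SIR_name, versions):
    for version in versions:
        for MOEA in ('NSGA2', 'SPEA2', 'TAEA'):
            run_MOEA(MOEA, SIR_name, version)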
import numpy as np
import pandas as pd

import GA          # project-local modules, assumed importable
import NSGA2
import SIM_ANEAL

test = pd.read_excel("TestData.xlsx")
test_pkl = pd.read_pickle("bugzilla_eclipse_log(comments)_2016meancost.pkl")
print(test_pkl['Profit'].shape)
print(test_pkl['Cost'].shape)

test_SA = SIM_ANEAL.NRP_SA(profit=test_pkl['Profit'],
                           cost=test_pkl['Cost'],
                           bound=50699.701903571,
                           iteration=100000,
                           init_decision=np.random.randint(
                               0, 2, test_pkl['Profit'].shape))
test_GA = GA.NRP_GA(profit=test_pkl['Profit'],
                    cost=test_pkl['Cost'],
                    bound=50699.701903571,
                    iteration=5000)
test_NSGA2 = NSGA2.NRP_NSGA2(profit=test_pkl['Profit'],
                             cost=test_pkl['Cost'],
                             bound=50699.701903571,
                             iteration=5000)

# result_SA = test_SA.run_SA()
result_GA = test_GA.run_GA()
# result_NSGA2 = test_NSGA2.run_NSGA2()
# only the GA run is enabled above, so print its result
# (result_SA / result_NSGA2 exist only if their runs are uncommented)
print(result_GA)
# print(result_NSGA2)
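# For context, a sketch (an assumption about these NRP_* classes, not their
# actual code) of the bi-objective Next Release Problem they appear to share:
# a binary decision vector selects requirements, total profit is maximized,
# total cost is minimized, and `bound` caps the admissible cost.
def nrp_objectives(decision, profit, cost, bound):
    total_profit = float(np.sum(np.asarray(profit) * decision))
    total_cost = float(np.sum(np.asarray(cost) * decision))
    feasible = total_cost <= bound
    return total_profit, total_cost, feasible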
networkTopologyStr = networkTopology

cnf = config.CONFIG()
cnf.topologyGraphml = networkTopology
cnf.numberOfReplicatedApps = numApps

system = systemmodel.SYSTEMMODEL(cnf)
# loops are not allowed
# services and fog devices must be numbered from 0 with consecutive ids,
# leaving none blank
system.FGCSmodel3(cnf.modelSeed)

if gatype == 'weightedga':
    # g = ga.GA(system, 300, 28)
    g = wga.weightedGA(system, cnf.populationSeed, cnf.evolutionSeed, cnf)
if gatype == 'nsga2':
    g = nsga2.NSGA2(system, cnf.populationSeed, cnf.evolutionSeed, cnf)
if gatype == 'moead':
    g = moead.MOEAD(system, cnf.populationSeed, cnf.evolutionSeed, cnf)

generationSolution = list()
generationPareto = list()

# find the individual with the smallest total fitness in the current population
minV = float('inf')
minIdx = -1
for idx, v in enumerate(g.corega.populationPt.fitness):
    if v['total'] < minV:
        minIdx = idx
        minV = v['total']
if len(g.corega.populationPt.population) > 0:
    print(g.corega.populationPt.fitness[minIdx])
                      file=routes)
                vehNr += 1
        print("</routes>", file=routes)   # tail of the (truncated) route-file generator


# this is the main entry point of this script
if __name__ == "__main__":
    import time

    array = np.array([49, 5, 9, 17, 4, 34, 1, 56, 3, 87])
    t = time.time()
    function_Cross_3_3(array)
    print("time for one function evaluation: ")
    print(time.time() - t)
    print(50 * "=")

    # print("optimisation")
    # opt1, fD1 = HC.hillClimbing(array, function_Cross_3_3, stepSize=1, functionEvaluation=1)
    # print("passed HC")
    # opt2, fD2 = CGD.ConjugateGradientDescent(array, function_Cross_3_3, epsilon=10, alpha=0.1, eta=10, h=np.finfo(np.float64).eps)
    # print("passed CGD")
    # pop = np.random.rand(5, 4) * np.random.randint(-100, 100)
    # opt3, fD3 = DE.DifferentialEvolution(pop, function_Cross_3_3, maxFunctionEval=6, F=0.5, CR=0.1)
    # print("passed DE")

    pop = NSGA2.populationInitialisationNSGA2(
        function_Cross_3_3, 5,
        [5, 5, 5, 5, 5, 5, 5, 5, 5, 5],
        [70, 70, 70, 70, 70, 70, 70, 70, 70, 70])
    print("finished init")
    opt4, fD4 = NSGA2.NSGA2(pop, function_Cross_3_3, maxGeneration=2)
    # print(time.time() - t)
    # print("passed NSGA2")
    # print(50*"=")
    # print("passed test")
import pickle
import sys

sys.path.append('../../Modules')
import NSGA2
import Cross_3_3_night as c33

if __name__ == "__main__":
    # calculating
    # print("start init random")
    # pop = NSGA2.populationInitialisationNSGA2(c33.function_Cross_3_3, 30,
    #     [5, 5, 5, 5, 5, 5, 5, 5, 5, 5],
    #     [70, 70, 70, 70, 70, 70, 70, 70, 70, 70])
    print("read old pop")
    # resume from the last saved population instead of a fresh random initialisation
    with open('NSGA2_SaveList.pkl', 'rb') as pickle_file:
        backup = pickle.load(pickle_file)
    pop = backup[-1][0]
    print("finished init")
    opt4, fD4 = NSGA2.NSGA2(pop, c33.function_Cross_3_3, maxGeneration=6)

    # plotting 3D Scatter
    # from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 unused import
    # import matplotlib.pyplot as plt
    # with open('NSGA2_SaveList.pkl', 'rb') as pickle_file:
    #     backup = pickle.load(pickle_file)
    # fig = plt.figure()
    # ax = fig.add_subplot(111, projection='3d')
    # plotPoints = backup[-1][1][-30:]
    # for x, y, z in plotPoints:
    #     print(str(x) + " " + str(y) + " " + str(z))
    #     ax.scatter(x, y, z)
    # ax.set_xlabel('X Label')
    # ax.set_ylabel('Y Label')
# load parameter name and range
pf = util.read_param_file('%s.txt' % modelname)
bd = np.array(pf['bounds'])
nInput = pf['num_vars']
nOutput = 2
xlb = bd[:, 0]
xub = bd[:, 1]

# parameters for NSGA2
pop = 100
gen = 100

# run NSGA2
bestx, besty, x, y = \
    NSGA2.optimization(model, nInput, nOutput, xlb, xub, pop, gen)

# plot results
plt.plot(y[:, 0], y[:, 1], 'b.', label='evaluated points')
plt.plot(besty[:, 0], besty[:, 1], 'r.', label='NSGA2 optimal')
model_true = __import__(modelname + '_true')
y_true = model_true.pareto()
plt.plot(y_true[:, 0], y_true[:, 1], 'k-', label='True Pareto')
plt.xlabel('y1')
plt.ylabel('y2')
plt.legend()

# save figure
plt.savefig('%s/ZDT1_NSGA2.png' % respath)
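# For reference, the analytic Pareto front of ZDT1 is f2 = 1 - sqrt(f1) for
# f1 in [0, 1]; a pareto() helper like the one imported above from the
# '<modelname>_true' module might look like the sketch below (an assumption,
# not that module's actual source).
def pareto(n_points=100):
    f1 = np.linspace(0.0, 1.0, n_points)
    f2 = 1.0 - np.sqrt(f1)
    return np.column_stack((f1, f2))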