y = f(x)

## Optimisation
import sys

sys.path.append("..")
from _simulated_annealing import minimize_simulatedAnnealing

# Strict inequality constraint c1 must hold at the solution.
cons = [{'type': 'strictIneq', 'fun': c1}]
maxIter = 300

# Run the stochastic solver 100 times to sample the spread of solutions
# on the 1-D box [2.7, 7.5].
Xsa = []
for i in range(100):
    xi = minimize_simulatedAnnealing(f, [2.7], [7.5],
                                     maxIter=maxIter,
                                     constraints=cons,
                                     autoSetUpIter=100)
    Xsa.append(xi)
Xsa = np.array(Xsa)
Ysa = f(Xsa)

## Graph
figure1 = plt.figure(1, figsize=(8, 3))
plt.plot(x, y, color='k', label="y = f(x)")
# FIX: corrected typo "opitmisation" -> "optimisation" in the legend label.
# NOTE(review): the original source was truncated mid-call here; the call is
# closed with exactly the keyword arguments that were visible.
plt.plot(Xsa, Ysa, label="Solution optimisation", marker='o', ls='')
# Six inequality constraints for the heat-pump sizing problem.
cons = [{'type': 'ineq', 'fun': fun}
        for fun in (sim.contrainte1, sim.contrainte2, sim.contrainte3,
                    sim.contrainte4, sim.contrainte5, sim.contrainte6)]
maxIter = 10000

# Single long annealing run; the heat-pump simulation is run as a
# preprocessing step before each cost evaluation.
mindict = minimize_simulatedAnnealing(
    sim.cost, xmin, xmax,
    maxIter=maxIter,
    constraints=cons,
    preprocess_function=sim.simulateHeatPump,
    autoSetUpIter=100,
    config="lowTemp",
    verbose=False,
    returnDict=True,
    storeIterValues=True,
)
Xsa = mindict['x']
fhistory = mindict["fHistory"]

# Report the solution and its cost.
sim.printDictSim(Xsa)
print(Xsa)
print(sim.cost(Xsa))

plt.figure(figsize=(8, 4))
sys.path.append("..")
from _simulated_annealing import minimize_simulatedAnnealing

# Single equality constraint c0.
cons = [{'type': 'eq', 'fun': c0}]
autoSetUpIter = 250
maxIter = 10000

listXsa = []
xopt, yopt = None, None

# Restart the annealer 10 times and keep the best result seen so far.
for i in range(10):
    print("#SOLVE : ", i)
    mindict = minimize_simulatedAnnealing(
        f0, xmin, xmax,
        maxIter=maxIter - autoSetUpIter,
        autoSetUpIter=autoSetUpIter,
        constraintAbsTol=0.001,
        penalityFactor=0.1,
        returnDict=True,
        config='highTemp',
        constraints=cons,
    )
    Xsa = mindict["x"]
    Ysa = mindict["f"]
    # First run initialises the incumbent; later runs replace it only
    # when they improve on it.
    if yopt is None or Ysa < yopt:
        xopt, yopt = Xsa, Ysa
    listXsa.append(Xsa)
npop = 36
ngen = (maxIter + 1) // npop

# The GA instance is reused across loops; tol=-1.0 disables early stopping.
ga_instance = realSingleObjectiveGA(f0, xmin, xmax, constraints=cons,
                                    tol=-1.0, constraintMethod="penality")

for k in range(nloop):
    print("LOOP : ", k)
    print("")

    # --- Genetic algorithm ---
    ga_instance.minimize(npop, ngen, verbose=False)
    fitness_ga = ga_instance.getStatOptimisation()
    ga_convergence.append(fitness_ga)

    # --- Simulated annealing ---
    mindict_sa = minimize_simulatedAnnealing(
        f0, xmin, xmax, maxIter=maxIter, constraints=cons,
        returnDict=True, storeIterValues=True)
    sa_convergence.append(mindict_sa["fHistory"])

    # --- Differential evolution ---
    mindict_de = differential_evolution(
        f0, xmin, xmax, maxIter=ngen, popsize=npop, constraints=cons,
        returnDict=True, storeIterValues=True, tol=-1)
    de_convergence.append(mindict_de["fHistory"])

# Local optimisation: polish the last SA solution with scipy.
bounds = [(lo, hi) for lo, hi in zip(xmin, xmax)]
# NOTE(review): startX is computed but not used in the visible span —
# presumably consumed further down; confirm before removing.
startX = np.mean(bounds, axis=1)
res = minimize(f0, mindict_sa["x"], bounds=bounds)
fscipy = res.fun

# Normalised log-error of the GA history relative to the scipy reference.
ga_convergence = np.log10((np.array(ga_convergence).T - fscipy) / abs(fscipy))
## Optimisation
import sys

sys.path.append("..")
from _simulated_annealing import simulatedAnnealing, minimize_simulatedAnnealing
from _minimize_NelderMead import *
from _minimize_Powell import *

cons = []          # unconstrained problem
listXsa = []
maxIter = 500

# 10 independent annealing runs to gauge the spread of solutions.
for i in range(10):
    mindict = minimize_simulatedAnnealing(
        f0, xmin, xmax,
        maxIter=maxIter,
        autoSetUpIter=100,
        returnDict=True,
        storeIterValues=True,
    )
    Xsa = mindict["x"]
    fitnessArray = mindict["fHistory"]
    listXsa.append(Xsa)

listXsa = np.array(listXsa)

# for si in mindict:
#     if not si.endswith("History"):
#         print(si, " : ", mindict[si])

## SCIPY
# bounds = [(xi, xj) for xi, xj in zip(xmin, xmax)]