def run(self):
    """Run a local Nelder-Mead optimization over ``self.initial_values``.

    Reads ``maxf``/``xtol``/``ftol`` from the request payload, bounds the
    search with ``self.bounds``, and reports progress and the final result
    through ``self.callback``.
    """
    print('Start local optimization...')
    maxf = self.request_data['optimization']['parameters']['maxf']
    xtol = self.request_data['optimization']['parameters']['xtol']
    ftol = self.request_data['optimization']['parameters']['ftol']
    solver = NelderMeadSimplexSolver(len(self.initial_values))
    solver.SetInitialPoints(self.initial_values)
    # bounds is a sequence of (lower, upper) pairs
    solver.SetStrictRanges([b[0] for b in self.bounds],
                           [b[1] for b in self.bounds])
    solver.SetEvaluationLimits(evaluations=maxf)
    # BUG FIX: the original passed ftol for *both* tolerances
    # (CRT(xtol=ftol, ftol=ftol)), leaving the configured xtol unused.
    solver.SetTermination(CRT(xtol=xtol, ftol=ftol))
    # BUG FIX: enable the SIGINT handler *before* Solve(); installing it
    # after the solve has already returned has no effect.
    solver.enable_signal_handler()
    # Inverting weights (*-1) to convert the problem to minimization
    solver.Solve(
        self.evaluate_single_solution,
        ExtraArgs=([weight * -1 for weight in self.weights]),
        callback=self.callback
    )
    # Report the final solution once the solver terminates.
    self.callback(
        individual=solver.Solution(),
        final=True
    )
    return
def test06(terminate, func=lambda x: x[0], info=False, debug=False):
    """Run a tiny 1-D Nelder-Mead solve, then evaluate *terminate* on it.

    *func* is the cost function (defaults to the first coordinate);
    *debug* dumps solver verbosity before returning.
    """
    from mystic.solvers import NelderMeadSimplexSolver as NM
    nm = NM(1)
    nm.SetRandomInitialPoints()
    nm.SetEvaluationLimits(8)
    nm.Solve(func, VTR())
    if debug:
        verbosity(nm)
    return terminate(nm, info)
def optimize_linear(self, initial_values: List[float], function) -> List[float]:
    """Locally optimize one solution with mystic's Nelder-Mead solver.

    Args:
        initial_values: the starting point for the simplex solver
        function: the cost callback that dispatches the task to the
            database, awaits the result, and returns it

    Returns:
        the optimized solution as a plain list of floats
    """
    nm = NelderMeadSimplexSolver(dim=len(initial_values))
    nm.SetInitialPoints(x0=initial_values)
    # Keep candidates inside the configured box constraints.
    nm.SetStrictRanges(self.low, self.up)
    nm.SetEvaluationLimits(generations=self.maxf)
    # Stop once candidate changes fall below the configured tolerances.
    nm.SetTermination(CRT(self.xtol, self.ftol))
    nm.Solve(function)
    return list(nm.Solution())
def run(self):
    """Run a local Nelder-Mead optimization over ``self.initial_values``.

    Reads ``maxf``/``xtol``/``ftol`` from the request payload, bounds the
    search with ``self.bounds``, and reports progress and the final result
    through ``self.callback``.
    """
    self.logger.info('Start local optimization...')
    maxf = self.request_data['optimization']['parameters']['maxf']
    xtol = self.request_data['optimization']['parameters']['xtol']
    ftol = self.request_data['optimization']['parameters']['ftol']
    solver = NelderMeadSimplexSolver(len(self.initial_values))
    solver.SetInitialPoints(self.initial_values)
    # bounds is a sequence of (lower, upper) pairs
    solver.SetStrictRanges([b[0] for b in self.bounds],
                           [b[1] for b in self.bounds])
    solver.SetEvaluationLimits(evaluations=maxf)
    # BUG FIX: the original passed ftol for *both* tolerances
    # (CRT(xtol=ftol, ftol=ftol)), leaving the configured xtol unused.
    solver.SetTermination(CRT(xtol=xtol, ftol=ftol))
    # BUG FIX: enable the SIGINT handler *before* Solve(); installing it
    # after the solve has already returned has no effect.
    solver.enable_signal_handler()
    solver.Solve(
        self.evaluate_single_solution,
        callback=self.callback
    )
    # Report the final solution once the solver terminates.
    self.callback(
        individual=solver.Solution(),
        final=True
    )
    return
def runme():
    """Solve rosen with Nelder-Mead, round-trip the solver through a
    periodic pickle archive, resume it with a tighter VTR, and finally
    prove the solver survives dill serialization."""
    bounds_lo, bounds_hi = [0., 0., 0.], [10., 10., 10.]
    # set up the solver with random starts inside the box
    nm = NelderMeadSimplexSolver(3)
    nm.SetRandomInitialPoints(bounds_lo, bounds_hi)
    nm.SetEvaluationLimits(1000)
    # attach a chatty per-generation monitor
    mon = VerboseMonitor(1)
    nm.SetGenerationMonitor(mon)
    nm.SetStrictRanges(bounds_lo, bounds_hi)
    # stop on either value-target or stagnation
    nm.SetTermination(Or(VTR(), ChangeOverGeneration()))
    # checkpoint the solver state every 10 generations
    archive = 'mysolver.pkl'
    nm.SetSaveFrequency(10, archive)
    nm.Solve(rosen)
    best_x, best_y = nm.bestSolution, nm.bestEnergy
    # reload the checkpointed solver and confirm state round-trips
    restored = LoadSolver(archive)
    assert all(best_x == restored.bestSolution)
    assert best_y == restored.bestEnergy
    # tighten the target and continue from the restored state
    restored.SetTermination(VTR(0.0001))
    restored.Solve(rosen)
    os.remove(archive)
    # the solver must also survive a dill round-trip
    return dill.loads(dill.dumps(restored))
# Upper bounds for the search box; lb (lower bounds) is defined earlier
# in the file -- TODO confirm it is a 3-element list matching ub.
ub = [1000, 1000, 1000]
ndim = len(lb)
maxiter = 10
maxfun = 1e+6
def cost(x):
    # Simple quadratic test objective: f(a, b, c) = a**2 - b + c.
    ax, bx, c = x
    return (ax)**2 - bx + c
monitor = Monitor()
solver = NelderMeadSimplexSolver(ndim)
# Random start and hard bounds, both confined to [lb, ub].
solver.SetRandomInitialPoints(min=lb, max=ub)
solver.SetStrictRanges(min=lb, max=ub)
solver.SetEvaluationLimits(maxiter, maxfun)
solver.SetGenerationMonitor(monitor)
solver.Solve(cost)
solved = solver.bestSolution
monitor.info("solved: %s" % solved)
# Record the monitor length before the second solve (presumably to
# compare growth afterwards -- verify against later code).
lmon = len(monitor)
# Sanity checks: the solver's best energy/solution must match the
# monitor's final recorded entry.
assert solver.bestEnergy == monitor.y[-1]
for xs, x in zip(solved, monitor.x[-1]):
    assert xs == x
# Double the iteration budget, reuse the same monitor, and continue
# the solve from the current solver state.
solver.SetEvaluationLimits(maxiter * 2, maxfun)
solver.SetGenerationMonitor(monitor)
solver.Solve(cost)
# draw frame and exact coefficients plot_exact() # select parameter bounds constraints from numpy import inf min_bounds = [0, -1, -300, -1, 0, -1, -100, -inf, -inf] max_bounds = [200, 1, 0, 1, 200, 1, 0, inf, inf] # configure monitors stepmon = VerboseMonitor(100) evalmon = Monitor() # use Nelder-Mead to solve 8th-order Chebyshev coefficients solver = NelderMeadSimplexSolver(ndim) solver.SetInitialPoints(x0) solver.SetEvaluationLimits(generations=999) solver.SetEvaluationMonitor(evalmon) solver.SetGenerationMonitor(stepmon) solver.SetStrictRanges(min_bounds, max_bounds) solver.enable_signal_handler() solver.Solve(chebyshev8cost, termination=CRT(1e-4,1e-4), \ sigint_callback=plot_solution) solution = solver.bestSolution # get solved coefficients and Chi-Squared (from solver members) iterations = solver.generations cost = solver.bestEnergy print "Generation %d has best Chi-Squared: %f" % (iterations, cost) print "Solved Coefficients:\n %s\n" % poly1d(solver.bestSolution) # compare solution with actual 8th-order Chebyshev coefficients