Example 1
def test_NelderMeadSimplexSolver_CRT(self): # Default for this solver
    from mystic.solvers import NelderMeadSimplexSolver
    from mystic.termination import CandidateRelativeTolerance as CRT
    self.solver = NelderMeadSimplexSolver(self.ND)
    self.term = CRT()
    self._run_solver()
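The method above is a fragment from mystic's solver test suite, where self.ND and self._run_solver() are defined on the test class. A minimal standalone sketch of what it exercises, assuming the rosen model from mystic.models as the cost function:

# standalone sketch of the test above; the rosen cost function is an
# assumption, not part of the original test class
from mystic.solvers import NelderMeadSimplexSolver
from mystic.termination import CandidateRelativeTolerance as CRT
from mystic.models import rosen

solver = NelderMeadSimplexSolver(2)           # 2-D problem
solver.SetInitialPoints([2., 3.])             # initial guess
solver.Solve(rosen, CRT())                    # CRT is this solver's default termination
print(solver.Solution(), solver.bestEnergy)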
Example 2
def test_rosenbrock():
    """Test the 2-dimensional Rosenbrock function.

Testing 2-D Rosenbrock:
Expected: x=[1., 1.] and f=0

Using DifferentialEvolutionSolver:
Solution:  [ 1.00000037  1.0000007 ]
f value:  2.29478683682e-13
Iterations:  99
Function evaluations:  3996
Time elapsed:  0.582273006439  seconds

Using DifferentialEvolutionSolver2:
Solution:  [ 0.99999999  0.99999999]
f value:  3.84824937598e-15
Iterations:  100
Function evaluations:  4040
Time elapsed:  0.577210903168  seconds

Using NelderMeadSimplexSolver:
Solution:  [ 0.99999921  1.00000171]
f value:  1.08732211477e-09
Iterations:  70
Function evaluations:  130
Time elapsed:  0.0190329551697  seconds

Using PowellDirectionalSolver:
Solution:  [ 1.  1.]
f value:  0.0
Iterations:  28
Function evaluations:  859
Time elapsed:  0.113857030869  seconds
"""

    print "Testing 2-D Rosenbrock:"
    print "Expected: x=[1., 1.] and f=0"
    from mystic.models import rosen as costfunc
    ndim = 2
    lb = [-5.]*ndim
    ub = [5.]*ndim
    x0 = [2., 3.]
    maxiter = 10000
    
    # DifferentialEvolutionSolver
    print "\nUsing DifferentialEvolutionSolver:"
    npop = 40
    from mystic.solvers import DifferentialEvolutionSolver
    from mystic.termination import ChangeOverGeneration as COG
    from mystic.strategy import Rand1Bin
    esow = Monitor()
    ssow = Monitor() 
    solver = DifferentialEvolutionSolver(ndim, npop)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = COG(1e-10)
    time1 = time.time() # start wall-clock timer
    solver.Solve(costfunc, term, strategy=Rand1Bin)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 2.29478683682e-13, tol=3e-3)

    # DifferentialEvolutionSolver2
    print "\nUsing DifferentialEvolutionSolver2:"
    npop = 40
    from mystic.solvers import DifferentialEvolutionSolver2
    from mystic.termination import ChangeOverGeneration as COG
    from mystic.strategy import Rand1Bin
    esow = Monitor()
    ssow = Monitor() 
    solver = DifferentialEvolutionSolver2(ndim, npop)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = COG(1e-10)
    time1 = time.time() # start wall-clock timer
    solver.Solve(costfunc, term, strategy=Rand1Bin)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 3.84824937598e-15, tol=3e-3)

    # NelderMeadSimplexSolver
    print "\nUsing NelderMeadSimplexSolver:"
    from mystic.solvers import NelderMeadSimplexSolver
    from mystic.termination import CandidateRelativeTolerance as CRT
    esow = Monitor()
    ssow = Monitor() 
    solver = NelderMeadSimplexSolver(ndim)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = CRT()
    time1 = time.time() # start wall-clock timer
    solver.Solve(costfunc, term)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 1.08732211477e-09, tol=3e-3)

    # PowellDirectionalSolver
    print "\nUsing PowellDirectionalSolver:"
    from mystic.solvers import PowellDirectionalSolver
    from mystic.termination import NormalizedChangeOverGeneration as NCOG
    esow = Monitor()
    ssow = Monitor() 
    solver = PowellDirectionalSolver(ndim)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = NCOG(1e-10)
    time1 = time.time() # start wall-clock timer
    solver.Solve(costfunc, term)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 0.0, tol=3e-3)
Example 3
def test_DifferentialEvolutionSolver2_CRT(self):
    from mystic.solvers import DifferentialEvolutionSolver2
    from mystic.termination import CandidateRelativeTolerance as CRT
    self.solver = DifferentialEvolutionSolver2(self.ND, self.NP)
    self.term = CRT()
    self._run_solver()
Example 4
    min_bounds = [0, -1, -300, -1, 0, -1, -100, -inf, -inf]
    max_bounds = [200, 1, 0, 1, 200, 1, 0, inf, inf]

    # configure monitors
    stepmon = VerboseMonitor(100)
    evalmon = Monitor()

    # use Nelder-Mead to solve 8th-order Chebyshev coefficients
    solver = NelderMeadSimplexSolver(ndim)
    solver.SetInitialPoints(x0)
    solver.SetEvaluationLimits(generations=999)
    solver.SetEvaluationMonitor(evalmon)
    solver.SetGenerationMonitor(stepmon)
    solver.SetStrictRanges(min_bounds, max_bounds)
    solver.enable_signal_handler()
    solver.Solve(chebyshev8cost, termination=CRT(1e-4,1e-4), \
                 sigint_callback=plot_solution)
    solution = solver.bestSolution

    # get solved coefficients and Chi-Squared (from solver members)
    iterations = solver.generations
    cost = solver.bestEnergy
    print("Generation %d has best Chi-Squared: %f" % (iterations, cost))
    print("Solved Coefficients:\n %s\n" % poly1d(solver.bestSolution))

    # compare solution with actual 8th-order Chebyshev coefficients
    print("Actual Coefficients:\n %s\n" % poly1d(chebyshev8coeffs))

    # plot solution versus exact coefficients
    plot_solution(solution)
    getch()
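Here plot_solution, getch, x0, and ndim are defined elsewhere in the original script (getch is available from mystic.tools). A hypothetical minimal plot_solution, assuming matplotlib and the mystic.models.poly import path for the Chebyshev helpers, might look like:

# hypothetical minimal plot_solution; the original script defines its own
# (matplotlib and the mystic.models.poly import path are assumptions)
import numpy as np
import matplotlib.pyplot as plt
from mystic.models.poly import chebyshev8coeffs

def plot_solution(params):
    x = np.linspace(-1.2, 1.2, 100)
    plt.plot(x, np.poly1d(chebyshev8coeffs)(x), label='target Chebyshev T8')
    plt.plot(x, np.poly1d(params)(x), '--', label='solved polynomial')
    plt.legend()
    plt.show()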
Example 5
    print "desol: ", desol
    print "dstepmon 50: ", dstepmon.x[50]
    print "dstepmon 100: ", dstepmon.x[100]
    #
    # this will try to use nelder_mean from a relatively "near by" point (very sensitive)
    point = [1234., -500., 10., 0.001]  # both cg and nm does fine
    point = [1000, -100, 0, 1]  # cg will do badly on this one
    # this will try nelder-mead from an unconverged DE solution
    #point = dstepmon.x[-150]
    #
    simplex, esow = Monitor(), Monitor()
    solver = fmin(len(point))
    solver.SetInitialPoints(point)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, CRT())
    sol = solver.Solution()

    print "\nsimplex solution: ", sol
    #
    solcg = fmin_cg(cost_function, point)
    print "\nConjugate-Gradient (Polak Rubiere) : ", solcg
    #
    if leastsq:
        sollsq = leastsq(vec_cost_function, point)
        sollsq = sollsq[0]
        print "\nLeast Squares (Levenberg Marquardt) : ", sollsq
    #
    legend = [
        'Noisy data', 'Differential Evolution', 'Nelder Mead', 'Polak-Ribiere'
    ]
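cost_function and vec_cost_function are defined earlier in the original script. The distinction matters: fmin and fmin_cg minimize a scalar cost, while scipy.optimize.leastsq needs the vector of residuals whose sum of squares is that cost. A schematic of the relationship, with an illustrative two-parameter linear model standing in for the original's four-parameter cost:

# schematic only: the original script's model is different; this just shows
# how a scalar cost_function relates to the residual-vector form leastsq wants
import numpy as np

xdata = np.linspace(0., 10., 50)
ydata = 3. * xdata + 1. + np.random.normal(0., 0.1, len(xdata))  # noisy line

def vec_cost_function(params):
    a, b = params
    return ydata - (a * xdata + b)      # residual vector, for leastsq

def cost_function(params):
    return np.sum(vec_cost_function(params)**2)  # scalar cost, for fmin/fmin_cg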
Example 6
                 CrossProbability=0.3, ScalingFactor=1.0)

    solution = solver.Solution()

    print(solution)


if __name__ == '__main__':
    from timeit import Timer
    t = Timer("main()", "from __main__ import main")
    timetaken = t.timeit(number=1)
    print "CPU Time: %s" % timetaken

    import random
    from mystic.monitors import Monitor
    from mystic.solvers import NelderMeadSimplexSolver as fmin
    from mystic.termination import CandidateRelativeTolerance as CRT

    simplex = Monitor()
    esow = Monitor()
    xinit = [random.uniform(0, 5) for j in range(ND)]

    solver = fmin(len(xinit))
    solver.SetInitialPoints(xinit)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(CostFunction, CRT())
    sol = solver.Solution()
    print "fmin solution: ", sol

# end of file
Example 7
def fmin(cost,
         x0,
         args=(),
         bounds=None,
         xtol=1e-4,
         ftol=1e-4,
         maxiter=None,
         maxfun=None,
         full_output=0,
         disp=1,
         retall=0,
         callback=None,
         **kwds):
    """Minimize a function using the downhill simplex algorithm.
    
Description:

    Uses a Nelder-Mead simplex algorithm to find the minimum of
    a function of one or more variables. Mimics the scipy.optimize.fmin
    interface.

Inputs:

    cost -- the Python function or method to be minimized.
    x0 -- ndarray - the initial guess.

Additional Inputs:

    args -- extra arguments for cost.
    bounds -- list - n pairs of bounds (min,max), one pair for each parameter.
    xtol -- number - acceptable relative error in xopt for convergence.
    ftol -- number - acceptable relative error in cost(xopt) for
        convergence.
    maxiter -- number - the maximum number of iterations to perform.
    maxfun -- number - the maximum number of function evaluations.
    full_output -- number - non-zero if fval and warnflag outputs are
        desired.
    disp -- number - non-zero to print convergence messages.
    retall -- number - non-zero to return list of solutions at each
        iteration.
    callback -- an optional user-supplied function to call after each
        iteration.  It is called as callback(xk), where xk is the
        current parameter vector.
    handler -- boolean - enable/disable handling of interrupt signal
    itermon -- monitor - override the default GenerationMonitor
    evalmon -- monitor - override the default EvaluationMonitor
    constraints -- an optional user-supplied function.  It is called as
        constraints(xk), where xk is the current parameter vector.
        This function must return xk', a parameter vector that satisfies
        the encoded constraints.
    penalty -- an optional user-supplied function.  It is called as
        penalty(xk), where xk is the current parameter vector.
        This function should return y', with y' == 0 when the encoded
        constraints are satisfied, and y' > 0 otherwise.

Returns: (xopt, {fopt, iter, funcalls, warnflag}, {allvecs})

    xopt -- ndarray - minimizer of function
    fopt -- number - value of function at minimum: fopt = cost(xopt)
    iter -- number - number of iterations
    funcalls -- number - number of function calls
    warnflag -- number - Integer warning flag:
        1 : 'Maximum number of function evaluations.'
        2 : 'Maximum number of iterations.'
    allvecs -- list - a list of solutions at each iteration

    """
    handler = False
    if 'handler' in kwds:
        handler = kwds['handler']

    from mystic.monitors import Monitor
    # imported at module scope in the original source:
    from mystic.solvers import NelderMeadSimplexSolver
    from mystic.tools import unpair
    stepmon = Monitor()
    evalmon = Monitor()
    if 'itermon' in kwds:
        stepmon = kwds['itermon']
    if 'evalmon' in kwds:
        evalmon = kwds['evalmon']

    if xtol:  #if tolerance in x is provided, use CandidateRelativeTolerance
        from mystic.termination import CandidateRelativeTolerance as CRT
        termination = CRT(xtol, ftol)
    else:
        from mystic.termination import VTRChangeOverGeneration
        termination = VTRChangeOverGeneration(ftol)
    solver = NelderMeadSimplexSolver(len(x0))
    solver.SetInitialPoints(x0)
    solver.SetEvaluationLimits(maxiter, maxfun)
    solver.SetEvaluationMonitor(evalmon)
    solver.SetGenerationMonitor(stepmon)
    if 'penalty' in kwds:
        penalty = kwds['penalty']
        solver.SetPenalty(penalty)
    if 'constraints' in kwds:
        constraints = kwds['constraints']
        solver.SetConstraints(constraints)
    if bounds is not None:
        minb, maxb = unpair(bounds)
        solver.SetStrictRanges(minb, maxb)

    if handler: solver.enable_signal_handler()
    solver.Solve(cost,termination=termination,\
                 disp=disp, ExtraArgs=args, callback=callback)
    solution = solver.Solution()

    # code below here pushes output to scipy.optimize.fmin interface
    #x = list(solver.bestSolution)
    x = solver.bestSolution
    fval = solver.bestEnergy
    warnflag = 0
    fcalls = solver.evaluations
    iterations = solver.generations
    allvecs = stepmon.x

    if fcalls >= solver._maxfun:
        warnflag = 1
    elif iterations >= solver._maxiter:
        warnflag = 2

    if full_output:
        retlist = x, fval, iterations, fcalls, warnflag
        if retall:
            retlist += (allvecs, )
    else:
        retlist = x
        if retall:
            retlist = (x, allvecs)

    return retlist
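A minimal usage sketch of the fmin defined above, with the rosen model from mystic.models standing in for a real cost function:

# usage sketch: minimize 2-D Rosenbrock with the fmin defined above;
# the expected minimizer is near [1., 1.]
from mystic.models import rosen

xopt = fmin(rosen, x0=[2., 3.], disp=0)
print(xopt)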
Example 8
def test_PowellDirectionalSolver_CRT(self):
    from mystic.solvers import PowellDirectionalSolver
    from mystic.termination import CandidateRelativeTolerance as CRT
    self.solver = PowellDirectionalSolver(self.ND)
    self.term = CRT()
    self._run_solver(early_terminate=True)
Example 9
    print("Nelder-Mead Simplex")
    print("===================")
    start = time.time()
    from mystic.monitors import Monitor, VerboseMonitor
   #stepmon = VerboseMonitor(1)
    stepmon = Monitor() #VerboseMonitor(10)
    from mystic.termination import CandidateRelativeTolerance as CRT

   #from mystic._scipyoptimize import fmin
    from mystic.solvers import fmin, NelderMeadSimplexSolver
   #print(fmin(rosen,x0,retall=0,full_output=0,maxiter=121))
    solver = NelderMeadSimplexSolver(len(x0))
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(min,max)
    solver.SetEvaluationLimits(generations=146)
    solver.SetGenerationMonitor(stepmon)
    solver.enable_signal_handler()
    solver.Solve(rosen, CRT(xtol=4e-5), disp=1)
    print(solver.bestSolution)
   #print("Current function value: %s" % solver.bestEnergy)
   #print("Iterations: %s" % solver.generations)
   #print("Function evaluations: %s" % solver.evaluations)

    times.append(time.time() - start)
    algor.append('Nelder-Mead Simplex\t')

    for k,t in zip(algor,times):
        print("%s\t -- took %s" % (k, t))

# end of file
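The fragment above assumes x0, min, max, times, algor, rosen, and the time module are defined earlier in the original script. A hypothetical scaffold, with illustrative values only:

# hypothetical scaffold for the fragment above; the starting point and
# bounds are illustrative assumptions, not the original script's values
import time
from mystic.models import rosen

x0 = [0.8, 1.2, 0.7]        # initial guess for 3-D Rosenbrock
min = [-2.0] * len(x0)      # lower bounds (shadows the builtin, as in the fragment)
max = [ 2.0] * len(x0)      # upper bounds
times = []                  # per-algorithm wall-clock times
algor = []                  # algorithm labels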
Example 10
def fmin(cost,
         x0,
         args=(),
         bounds=None,
         xtol=1e-4,
         ftol=1e-4,
         maxiter=None,
         maxfun=None,
         full_output=0,
         disp=1,
         retall=0,
         callback=None,
         **kwds):
    """Minimize a function using the downhill simplex algorithm.
    
Uses a Nelder-Mead simplex algorithm to find the minimum of a function of one
or more variables. This algorithm only uses function values, not derivatives
or second derivatives. Mimics the ``scipy.optimize.fmin`` interface.

This algorithm has a long history of successful use in applications. It will
usually be slower than an algorithm that uses first or second derivative
information. In practice it can have poor performance in high-dimensional
problems and is not robust to minimizing complicated functions. Additionally,
there currently is no complete theory describing when the algorithm will
successfully converge to the minimum, or how fast it will if it does. Both the
ftol and xtol criteria must be met for convergence.

Args:
    cost (func): the function or method to be minimized: ``y = cost(x)``.
    x0 (ndarray): the initial guess parameter vector ``x``.
    args (tuple, default=()): extra arguments for cost.
    bounds (list(tuple), default=None): list of pairs of bounds (min,max),
        one for each parameter.
    xtol (float, default=1e-4): acceptable absolute error in ``xopt`` for
        convergence.
    ftol (float, default=1e-4): acceptable absolute error in ``cost(xopt)``
        for convergence.
    maxiter (int, default=None): the maximum number of iterations to perform.
    maxfun (int, default=None): the maximum number of function evaluations.
    full_output (bool, default=False): True if fval and warnflag are desired.
    disp (bool, default=True): if True, print convergence messages.
    retall (bool, default=False): if True, return list of solutions at each
        iteration.
    callback (func, default=None): function to call after each iteration. The
        interface is ``callback(xk)``, with xk the current parameter vector.
    handler (bool, default=False): if True, enable handling interrupt signals.
    itermon (monitor, default=None): override the default GenerationMonitor.
    evalmon (monitor, default=None): override the default EvaluationMonitor.
    constraints (func, default=None): a function ``xk' = constraints(xk)``,
        where xk is the current parameter vector, and xk' is a parameter
        vector that satisfies the encoded constraints.
    penalty (func, default=None): a function ``y = penalty(xk)``, where xk is
        the current parameter vector, and ``y == 0`` when the encoded
        constraints are satisfied (and ``y > 0`` otherwise).

Returns:
    ``(xopt, {fopt, iter, funcalls, warnflag}, {allvecs})``

Notes:
    - xopt (*ndarray*): the minimizer of the cost function
    - fopt (*float*): value of cost function at minimum: ``fopt = cost(xopt)``
    - iter (*int*): number of iterations
    - funcalls (*int*): number of function calls
    - warnflag (*int*): warning flag:
        - ``1 : Maximum number of function evaluations``
        - ``2 : Maximum number of iterations``
    - allvecs (*list*): a list of solutions at each iteration
    """
    handler = kwds['handler'] if 'handler' in kwds else False

    from mystic.monitors import Monitor
    # imported at module scope in the original source:
    from mystic.solvers import NelderMeadSimplexSolver
    from mystic.tools import unpair
    stepmon = kwds['itermon'] if 'itermon' in kwds else Monitor()
    evalmon = kwds['evalmon'] if 'evalmon' in kwds else Monitor()

    if xtol:  #if tolerance in x is provided, use CandidateRelativeTolerance
        from mystic.termination import CandidateRelativeTolerance as CRT
        termination = CRT(xtol, ftol)
    else:
        from mystic.termination import VTRChangeOverGeneration
        termination = VTRChangeOverGeneration(ftol)
    solver = NelderMeadSimplexSolver(len(x0))
    solver.SetInitialPoints(x0)
    solver.SetEvaluationLimits(maxiter, maxfun)
    solver.SetEvaluationMonitor(evalmon)
    solver.SetGenerationMonitor(stepmon)
    if 'penalty' in kwds:
        solver.SetPenalty(kwds['penalty'])
    if 'constraints' in kwds:
        solver.SetConstraints(kwds['constraints'])
    if bounds is not None:
        minb, maxb = unpair(bounds)
        solver.SetStrictRanges(minb, maxb)

    if handler: solver.enable_signal_handler()
    solver.Solve(cost, termination=termination, \
                 disp=disp, ExtraArgs=args, callback=callback)
    solution = solver.Solution()

    # code below here pushes output to scipy.optimize.fmin interface
    #x = list(solver.bestSolution)
    x = solver.bestSolution
    fval = solver.bestEnergy
    warnflag = 0
    fcalls = solver.evaluations
    iterations = solver.generations
    allvecs = stepmon.x

    if fcalls >= solver._maxfun:
        warnflag = 1
    elif iterations >= solver._maxiter:
        warnflag = 2

    if full_output:
        retlist = x, fval, iterations, fcalls, warnflag
        if retall:
            retlist += (allvecs, )
    else:
        retlist = x
        if retall:
            retlist = (x, allvecs)

    return retlist
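And a sketch of the extended outputs, unpacked per the Returns section above (again with rosen standing in for a real cost):

# usage sketch: full_output returns (xopt, fopt, iter, funcalls, warnflag)
from mystic.models import rosen

xopt, fopt, niter, funcalls, warnflag = fmin(rosen, [2., 3.],
                                             full_output=True, disp=0)
print(xopt, fopt, niter, funcalls, warnflag)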