Example #1
import time
from mystic.monitors import Monitor
from mystic.math import almostEqual

def test_rosenbrock():
    """Test the 2-dimensional Rosenbrock function.

Testing 2-D Rosenbrock:
Expected: x=[1., 1.] and f=0

Using DifferentialEvolutionSolver:
Solution:  [ 1.00000037  1.0000007 ]
f value:  2.29478683682e-13
Iterations:  99
Function evaluations:  3996
Time elapsed:  0.582273006439  seconds

Using DifferentialEvolutionSolver2:
Solution:  [ 0.99999999  0.99999999]
f value:  3.84824937598e-15
Iterations:  100
Function evaluations:  4040
Time elapsed:  0.577210903168  seconds

Using NelderMeadSimplexSolver:
Solution:  [ 0.99999921  1.00000171]
f value:  1.08732211477e-09
Iterations:  70
Function evaluations:  130
Time elapsed:  0.0190329551697  seconds

Using PowellDirectionalSolver:
Solution:  [ 1.  1.]
f value:  0.0
Iterations:  28
Function evaluations:  859
Time elapsed:  0.113857030869  seconds
"""

    print "Testing 2-D Rosenbrock:"
    print "Expected: x=[1., 1.] and f=0"
    from mystic.models import rosen as costfunc
    ndim = 2
    lb = [-5.]*ndim
    ub = [5.]*ndim
    x0 = [2., 3.]
    maxiter = 10000
    
    # DifferentialEvolutionSolver
    print "\nUsing DifferentialEvolutionSolver:"
    npop = 40
    from mystic.solvers import DifferentialEvolutionSolver
    from mystic.termination import ChangeOverGeneration as COG
    from mystic.strategy import Rand1Bin
    esow = Monitor()
    ssow = Monitor() 
    solver = DifferentialEvolutionSolver(ndim, npop)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = COG(1e-10)
    time1 = time.time() # Is this an ok way of timing?
    solver.Solve(costfunc, term, strategy=Rand1Bin)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 2.29478683682e-13, tol=3e-3)

    # DifferentialEvolutionSolver2
    print "\nUsing DifferentialEvolutionSolver2:"
    npop = 40
    from mystic.solvers import DifferentialEvolutionSolver2
    from mystic.termination import ChangeOverGeneration as COG
    from mystic.strategy import Rand1Bin
    esow = Monitor()
    ssow = Monitor() 
    solver = DifferentialEvolutionSolver2(ndim, npop)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = COG(1e-10)
    time1 = time.time() # Is this an ok way of timing?
    solver.Solve(costfunc, term, strategy=Rand1Bin)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 3.84824937598e-15, tol=3e-3)

    # NelderMeadSimplexSolver
    print "\nUsing NelderMeadSimplexSolver:"
    from mystic.solvers import NelderMeadSimplexSolver
    from mystic.termination import CandidateRelativeTolerance as CRT
    esow = Monitor()
    ssow = Monitor() 
    solver = NelderMeadSimplexSolver(ndim)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = CRT()
    time1 = time.time() # Is this an ok way of timing?
    solver.Solve(costfunc, term)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 1.08732211477e-09, tol=3e-3)

    # PowellDirectionalSolver
    print "\nUsing PowellDirectionalSolver:"
    from mystic.solvers import PowellDirectionalSolver
    from mystic.termination import NormalizedChangeOverGeneration as NCOG
    esow = Monitor()
    ssow = Monitor() 
    solver = PowellDirectionalSolver(ndim)
    solver.SetInitialPoints(x0)
    solver.SetStrictRanges(lb, ub)
    solver.SetEvaluationLimits(generations=maxiter)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    term = NCOG(1e-10)
    time1 = time.time() # Is this an ok way of timing?
    solver.Solve(costfunc, term)
    sol = solver.Solution()
    time_elapsed = time.time() - time1
    fx = solver.bestEnergy
    print "Solution: ", sol
    print "f value: ", fx
    print "Iterations: ", solver.generations
    print "Function evaluations: ", len(esow.x)
    print "Time elapsed: ", time_elapsed, " seconds"
    assert almostEqual(fx, 0.0, tol=3e-3)
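
A minimal driver to reproduce the comparison printed in the docstring, assuming the module-level imports at the top of this example:

if __name__ == '__main__':
    test_rosenbrock()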
Example #2
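This excerpt relies on setup defined earlier in the original script. A minimal sketch of that setup follows; the names match those used in the excerpt, but the values are assumptions, not taken from the source.

    # assumed setup for the excerpt below (illustrative values, not from the source)
    import time
    from numpy import inf
    from mystic.models import rosen
    from mystic.solvers import DifferentialEvolutionSolver
    from mystic.termination import VTR
    from mystic.monitors import Monitor, VerboseMonitor

    ND = 3                   # the excerpt optimizes a 3-parameter Rosenbrock
    NP = 30                  # population size (assumed)
    MAX_GENERATIONS = 2500   # generation cap (assumed)
    esow = Monitor()         # evaluation monitor
    ssow = Monitor()         # generation monitor
    times, algor = [], []    # bookkeeping lists appended to at the end of the excerpt
    start = time.time()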
    #ssow= VerboseMonitor(1)

    # import random
    #  xinit = [random.random() for j in range(ND)]
    xinit = [0.8, 1.2, 0.7]
    # xinit = [0.8,1.2,1.7]             #... better when using "bad" range
    min = [-0.999, -0.999, 0.999]  #XXX: behaves badly when large range
    max = [200.001, 100.001, inf]  #... for >=1 x0 out of bounds; (up xtol)
    # min = [-0.999, -0.999, -0.999]
    # max = [200.001, 100.001, inf]
    #  min = [-0.999, -0.999, 0.999]     #XXX: tight range and non-randomness
    #  max = [2.001, 1.001, 1.001]       #...: is _bad_ for DE solvers

    #print(diffev(rosen,xinit,NP,retall=0,full_output=0))
    solver = DifferentialEvolutionSolver(len(xinit), NP)
    solver.SetInitialPoints(xinit)
    solver.SetStrictRanges(min, max)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(ssow)
    solver.Solve(rosen, VTR(0.0001), \
                 CrossProbability=0.5, ScalingFactor=0.6, disp=1)
    sol = solver.bestSolution
    print(sol)
    #print("Current function value: %s" % solver.bestEnergy)
    #print("Iterations: %s" % solver.generations)
    #print("Function evaluations: %s" % solver.evaluations)

    times.append(time.time() - start)
    algor.append('Differential Evolution\t')
Example #3
def solve(constraints, guess=None, nvars=None, solver=None, \
          lower_bounds=None, upper_bounds=None, termination=None):
    """Use optimization to find a solution to a set of constraints.

Inputs:
    constraints -- a constraints solver function or a penalty function

Additional Inputs:
    guess -- list of parameter values proposed to solve the constraints.
    lower_bounds -- list of lower bounds on solution values.
    upper_bounds -- list of upper bounds on solution values.
    nvars -- number of parameter values.
    solver -- the mystic solver to use in the optimization.
    termination -- the mystic termination to use in the optimization.

NOTE: The resulting constraints will likely be more expensive to evaluate
    and less accurate than writing the constraints solver from scratch.

NOTE: The ensemble solvers ('buckshot' and 'lattice') are also available and
    use their default nested solver; for these, an integer passed as 'guess'
    sets the number of member solvers (with a minimum of 8).

NOTE: The default solver is 'diffev', with npop=min(40, ndim*5). The default
    termination is ChangeOverGeneration(), and the default guess is randomly
    selected points between the upper and lower bounds.
    """
    npts = 8
    if type(guess) is int: npts, guess = guess, None

    ndim = 1  #XXX: better, increase in while loop catching IndexError ?
    if nvars is not None: ndim = nvars
    elif guess is not None: ndim = len(guess)
    elif lower_bounds is not None: ndim = len(lower_bounds)
    elif upper_bounds is not None: ndim = len(upper_bounds)

    def cost(x):
        return 1.

    #XXX: don't allow solver string as a short-cut? #FIXME: add ensemble solvers
    ensemble = False
    if solver is None or solver == 'diffev':
        from mystic.solvers import DifferentialEvolutionSolver as TheSolver
        solver = TheSolver(ndim, min(40, ndim * 5))
    elif solver == 'diffev2':
        from mystic.solvers import DifferentialEvolutionSolver2 as TheSolver
        solver = TheSolver(ndim, min(40, ndim * 5))
    elif solver == 'fmin_powell':  #XXX: better as the default? (it's not random)
        from mystic.solvers import PowellDirectionalSolver as TheSolver
        solver = TheSolver(ndim)
    elif solver == 'fmin':
        from mystic.solvers import NelderMeadSimplexSolver as TheSolver
        solver = TheSolver(ndim)
    elif solver == 'buckshot':
        from mystic.solvers import BuckshotSolver as TheSolver
        solver = TheSolver(ndim, max(8, npts))  #XXX: needs better default?
        ensemble = True
    elif solver == 'lattice':
        from mystic.solvers import LatticeSolver as TheSolver
        solver = TheSolver(ndim, max(8, npts))  #XXX: needs better default?
        ensemble = True

    if termination is None:
        from mystic.termination import ChangeOverGeneration as COG
        termination = COG()
    if not ensemble:
        if guess is not None:
            solver.SetInitialPoints(guess)  #XXX: nice if 'diffev' had methods
        else:
            solver.SetRandomInitialPoints(lower_bounds, upper_bounds)
    if lower_bounds or upper_bounds:
        solver.SetStrictRanges(lower_bounds, upper_bounds)
    if hasattr(constraints, 'iter') and hasattr(constraints, 'error'):
        solver.SetPenalty(constraints)  #i.e. is a penalty function
    else:  # is a constraints solver
        solver.SetConstraints(constraints)
    from numpy import seterr
    settings = seterr(all='ignore')
    solver.Solve(cost, termination)
    seterr(**settings)
    soln = solver.bestSolution

    from numpy import ndarray, array
    if isinstance(soln, ndarray) and not isinstance(guess, ndarray):
        soln = soln.tolist()
    elif isinstance(guess, ndarray) and not isinstance(soln, ndarray):
        soln = array(soln)  #XXX: or always return a list ?

    return soln  #XXX: check with 'issolution' ?
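
A minimal usage sketch for this solve(): the projection function below is hypothetical (not from the source) and simply maps any candidate onto the hyperplane x0 + x1 == 2, so it is routed to SetConstraints rather than SetPenalty.

def constrain(x):
    # hypothetical constraints solver: adjust x1 so that x0 + x1 == 2
    x[1] = 2.0 - x[0]
    return x

soln = solve(constrain, guess=[1., 5.],
             lower_bounds=[-10., -10.], upper_bounds=[10., 10.])
# every candidate passes through constrain, so soln should satisfy x0 + x1 == 2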
Example #4
def solve(constraints, guess=None, nvars=None, solver=None, \
          lower_bounds=None, upper_bounds=None, termination=None):
    """Use optimization to find a solution to a set of constraints.

Inputs:
    constraints -- a constraints solver function or a penalty function

Additional Inputs:
    guess -- list of parameter values proposed to solve the constraints.
    lower_bounds -- list of lower bounds on solution values.
    upper_bounds -- list of upper bounds on solution values.
    nvars -- number of parameter values.
    solver -- the mystic solver to use in the optimization.
    termination -- the mystic termination to use in the optimization.

NOTE: The resulting constraints will likely be more expensive to evaluate
    and less accurate than writing the constraints solver from scratch.
    """
    ndim = 1  #XXX: better, increase in while loop catching IndexError ?
    if nvars is not None: ndim = nvars
    elif guess is not None: ndim = len(guess)
    elif lower_bounds is not None: ndim = len(lower_bounds)
    elif upper_bounds is not None: ndim = len(upper_bounds)

    def cost(x):
        return 1.

    #XXX: don't allow solver string as a short-cut?
    if solver is None or solver == 'diffev':
        from mystic.solvers import DifferentialEvolutionSolver as TheSolver
        solver = TheSolver(ndim, min(40, ndim * 5))
    elif solver == 'diffev2':
        from mystic.solvers import DifferentialEvolutionSolver2 as TheSolver
        solver = TheSolver(ndim, min(40, ndim * 5))
    elif solver == 'fmin_powell':  #XXX: better as the default? (it's not random)
        from mystic.solvers import PowellDirectionalSolver as TheSolver
        solver = TheSolver(ndim)
    elif solver == 'fmin':
        from mystic.solvers import NelderMeadSimplexSolver as TheSolver
        solver = TheSolver(ndim)

    if termination is None:
        from mystic.termination import ChangeOverGeneration as COG
        termination = COG()
    if guess is not None:
        solver.SetInitialPoints(guess)  #XXX: nice if 'diffev' also had methods
    else:
        solver.SetRandomInitialPoints(lower_bounds, upper_bounds)
    if lower_bounds or upper_bounds:
        solver.SetStrictRanges(lower_bounds, upper_bounds)
    if hasattr(constraints, 'iter') and hasattr(constraints, 'error'):
        solver.SetPenalty(constraints)  #i.e. is a penalty function
    else:  # is a constraints solver
        solver.SetConstraints(constraints)
    solver.Solve(cost, termination)
    soln = solver.bestSolution

    from numpy import ndarray, array
    if isinstance(soln, ndarray) and not isinstance(guess, ndarray):
        soln = soln.tolist()
    elif isinstance(guess, ndarray) and not isinstance(soln, ndarray):
        soln = array(soln)  #XXX: or always return a list ?

    return soln  #XXX: check with 'issolution' ?
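
For the penalty branch of this older variant, a sketch that assumes mystic's penalty decorators (here quadratic_equality) attach the 'iter' and 'error' attributes that solve() checks for; the condition below is illustrative, not from the source.

from mystic.penalty import quadratic_equality

def condition(x):
    # equality condition: zero when x0 + x1 == 2
    return x[0] + x[1] - 2.0

@quadratic_equality(condition)
def penalty(x):
    return 0.0

soln = solve(penalty, guess=[1., 5.])
# solve() detects the 'iter'/'error' attributes and calls SetPenalty, so the
# optimizer drives the penalty toward zero and soln approximately satisfies x0 + x1 == 2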