Example #1
def test_penalize():

  from mystic.math.measures import mean, spread
  def mean_constraint(x, target):
    return mean(x) - target

  def range_constraint(x, target):
    return spread(x) - target

  @quadratic_equality(condition=range_constraint, kwds={'target':5.0})
  @quadratic_equality(condition=mean_constraint, kwds={'target':5.0})
  def penalty(x):
    return 0.0

  def cost(x):
    return abs(sum(x) - 5.0)

  from mystic.solvers import fmin
  from numpy import array
  x = array([1,2,3,4,5])
  y = fmin(cost, x, penalty=penalty, disp=False)

  assert round(mean(y)) == 5.0
  assert round(spread(y)) == 5.0
  assert round(cost(y)) == 4*(5.0)
Example #2
def test_penalize():

    from mystic.math.measures import mean, spread

    def mean_constraint(x, target):
        return mean(x) - target

    def range_constraint(x, target):
        return spread(x) - target

    @quadratic_equality(condition=range_constraint, kwds={'target': 5.0})
    @quadratic_equality(condition=mean_constraint, kwds={'target': 5.0})
    def penalty(x):
        return 0.0

    def cost(x):
        return abs(sum(x) - 5.0)

    from mystic.solvers import fmin
    from numpy import array
    x = array([1, 2, 3, 4, 5])
    y = fmin(cost, x, penalty=penalty, disp=False)

    assert round(mean(y)) == 5.0
    assert round(spread(y)) == 5.0
    assert round(cost(y)) == 4 * (5.0)
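
Both versions of test_penalize above assume quadratic_equality is already in scope; in mystic it is provided by mystic.penalty. A minimal, self-contained sketch of the same stacked-penalty pattern follows (the target values are purely illustrative):

from mystic.penalty import quadratic_equality
from mystic.math.measures import mean, spread
from mystic.solvers import fmin

def mean_constraint(x, target):
    return mean(x) - target      # zero when mean(x) equals the target

def range_constraint(x, target):
    return spread(x) - target    # zero when max(x) - min(x) equals the target

# stack two quadratic equality penalties onto a zero baseline
@quadratic_equality(condition=range_constraint, kwds={'target': 5.0})
@quadratic_equality(condition=mean_constraint, kwds={'target': 5.0})
def penalty(x):
    return 0.0

def cost(x):
    return abs(sum(x) - 5.0)

if __name__ == '__main__':
    result = fmin(cost, [1, 2, 3, 4, 5], penalty=penalty, disp=False)
    print(mean(result), spread(result))   # both should be close to 5.0
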
Example #3
def run_once():
    simplex = Monitor()
    solver = fmin(2)
    solver.SetRandomInitialPoints([0,0],[2,2])
    solver.SetGenerationMonitor(simplex)
    solver.Solve(Corana2, termination=CRT())
    sol = solver.Solution()
    
    for x in simplex.x:
        sam.putarray('x',x)
        sam.eval("plot(x([1,2,3,1],1),x([1,2,3,1],2),'w-')")
Example #4
def run_once():
    simplex = Monitor()
    solver = fmin(2)
    solver.SetRandomInitialPoints([0, 0], [7, 7])
    solver.SetGenerationMonitor(simplex)
    solver.Solve(CostFunction, termination=CRT())
    sol = solver.Solution()

    for x in simplex.x:
        sam.putarray('x', x)
        sam.eval("plot(x([1,2,3,1],1),x([1,2,3,1],2),'k-')")
Example #5
def run_once(x0,x1):
    simplex = Monitor()
    xinit = [x0, x1]

    solver = fmin(len(xinit))
    solver.SetInitialPoints(xinit)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(rosen, termination=CRT())
    sol = solver.Solution()
    
    for x in simplex.x:
        sam.putarray('x',x)
        sam.eval("plot(x([1,2,3,1],1),x([1,2,3,1],2),'w-')")
Example #6
def run_once(x0, x1):
    simplex = Monitor()
    xinit = [x0, x1]

    solver = fmin(len(xinit))
    solver.SetInitialPoints(xinit)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(rosen, termination=CRT())
    sol = solver.Solution()

    for x in simplex.x:
        sam.putarray('x', x)
        sam.eval("plot(x([1,2,3,1],1),x([1,2,3,1],2),'w-')")
Example #7
def mystic_optimize(point):
    from mystic.monitors import Monitor, VerboseMonitor
    from mystic.tools import getch, random_seed
    random_seed(123)
    from mystic.solvers import NelderMeadSimplexSolver as fmin
    from mystic.termination import CandidateRelativeTolerance as CRT
    simplex, esow = VerboseMonitor(50), Monitor()
    solver = fmin(len(point))
    solver.SetInitialPoints(point)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, CRT())
    solution = solver.Solution()
    return solution
Example #8
def mystic_optimize(point):
    from mystic.monitors import Monitor, VerboseMonitor
    from mystic.solvers import NelderMeadSimplexSolver as fmin
    from mystic.termination import CandidateRelativeTolerance as CRT
    simplex, esow = VerboseMonitor(50), Monitor()
    solver = fmin(len(point))
    solver.SetInitialPoints(point)
    min = [-100,-100,-100]; max = [100,100,100]
    solver.SetStrictRanges(min,max)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, CRT(1e-7,1e-7))
    solution = solver.Solution()
    return solution
Example #9
def mystic_optimize(point):
    from mystic.monitors import Monitor, VerboseMonitor
    from mystic.tools import getch, random_seed
    random_seed(123)
    from mystic.solvers import NelderMeadSimplexSolver as fmin
    from mystic.termination import CandidateRelativeTolerance as CRT
    simplex, esow = VerboseMonitor(50), Monitor()
    solver = fmin(len(point))
    solver.SetInitialPoints(point)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, CRT())
    solution = solver.Solution()
    return solution
Example #10
def mystic_optimize(point):
    from mystic.monitors import Monitor, VerboseMonitor
    from mystic.solvers import NelderMeadSimplexSolver as fmin
    from mystic.termination import CandidateRelativeTolerance as CRT
    simplex, esow = VerboseMonitor(50), Monitor()
    solver = fmin(len(point))
    solver.SetInitialPoints(point)
    min = [-100,-100,-100]; max = [100,100,100]
    solver.SetStrictRanges(min,max)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, CRT(1e-7,1e-7))
    solution = solver.Solution()
    return solution
Example #11
def test_with_penalty():

  from mystic.math.measures import mean, spread
  @with_penalty(quadratic_equality, kwds={'target':5.0})
  def penalty(x, target):
    return mean(x) - target

  def cost(x):
    return abs(sum(x) - 5.0)

  from mystic.solvers import fmin
  from numpy import array
  x = array([1,2,3,4,5])
  y = fmin(cost, x, penalty=penalty, disp=False)

  assert round(mean(y)) == 5.0
  assert round(cost(y)) == 4*(5.0)
Example #12
def test_with_penalty():

  from mystic.math.measures import mean, spread
  @with_penalty(quadratic_equality, kwds={'target':5.0})
  def penalty(x, target):
    return mean(x) - target

  def cost(x):
    return abs(sum(x) - 5.0)

  from mystic.solvers import fmin
  from numpy import array
  x = array([1,2,3,4,5])
  y = fmin(cost, x, penalty=penalty, disp=False)

  assert round(mean(y)) == 5.0
  assert round(cost(y)) == 4*(5.0)
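
Examples #11 and #12 assume with_penalty and quadratic_equality are already imported; both are provided by mystic.penalty. A self-contained sketch of the same pattern:

from mystic.penalty import with_penalty, quadratic_equality
from mystic.math.measures import mean
from mystic.solvers import fmin

# with_penalty turns a condition (zero when satisfied) into a penalty function
@with_penalty(quadratic_equality, kwds={'target': 5.0})
def penalty(x, target):
    return mean(x) - target

def cost(x):
    return abs(sum(x) - 5.0)

if __name__ == '__main__':
    result = fmin(cost, [1, 2, 3, 4, 5], penalty=penalty, disp=False)
    print(mean(result))   # should be close to 5.0
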
Example #13
def run_once_xv():
    simplex = Monitor()
    y1 = y0*random.uniform(0.5,1.5)
    z1 = z0*random.uniform(0.5,1.5)
    xinit = [random.uniform(x0-40,x0+40), y1, z1, random.uniform(v0-0.1,v0+0.1)]

    solver = fmin(len(xinit))
    solver.SetInitialPoints(xinit)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, termination=CRT())
    sol = solver.Solution()
    print(sol)

    for x in simplex.x:
        sam.putarray('x',x)
        sam.eval("plot(x([1,2,3,1],1),x([1,2,3,1],2),'w-','LineWidth',2)")
    return sol
Example #14
def test_as_penalty():

  from mystic.math.measures import mean, spread
  @with_spread(5.0)
  @with_mean(5.0)
  def constraint(x):
    return x

  penalty = as_penalty(constraint)

  from numpy import array
  x = array([1,2,3,4,5])
  
  def cost(x):
    return abs(sum(x) - 5.0)

  from mystic.solvers import fmin
  y = fmin(cost, x, penalty=penalty, disp=False)

  assert round(mean(y)) == 5.0
  assert round(spread(y)) == 5.0
  assert round(cost(y)) == 4*(5.0)
Example #15
def test_as_penalty():

  from mystic.math.measures import mean, spread
  @with_spread(5.0)
  @with_mean(5.0)
  def constraint(x):
    return x

  penalty = as_penalty(constraint)

  from numpy import array
  x = array([1,2,3,4,5])
  
  def cost(x):
    return abs(sum(x) - 5.0)

  from mystic.solvers import fmin
  y = fmin(cost, x, penalty=penalty, disp=False)

  assert round(mean(y)) == 5.0
  assert round(spread(y)) == 5.0
  assert round(cost(y)) == 4*(5.0)
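
Examples #14 and #15 convert a constraints function into a penalty with as_penalty. The decorated constraint can also be handed to the solver directly; the sketch below assumes with_mean and with_spread from mystic.constraints and the constraints keyword of mystic.solvers.fmin:

from mystic.constraints import with_mean, with_spread
from mystic.math.measures import mean, spread
from mystic.solvers import fmin

# a constraints function: maps x onto the set where mean(x) = 5 and spread(x) = 5
@with_spread(5.0)
@with_mean(5.0)
def constraint(x):
    return x

def cost(x):
    return abs(sum(x) - 5.0)

if __name__ == '__main__':
    result = fmin(cost, [1, 2, 3, 4, 5], constraints=constraint, disp=False)
    print(mean(result), spread(result))   # both should be close to 5.0
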
Example #16

if __name__ == '__main__':
    import pylab
    from mystic.solvers import fmin
    #from mystic._scipyoptimize import fmin
    draw_contour()
    solution = main()
    print("solution: %s" % solution)
    pylab.plot([solution[0]], [solution[1]], 'wo', markersize=10)
    print("Differential Evolution: Min: %s, sol = %s" %
          (fOsc3D(solution), solution))

    print("\nTrying scipy.optimize.fmin (Nelder-Mead Simplex)...")

    m = fmin(fOsc3D, [0.1, 0.1])
    pylab.plot([m[0]], [m[1]], 'ro', markersize=5)
    print("solution w/ initial conditions (0.1,0.1): %s\n" % m)

    m = fmin(fOsc3D, [1, 1])
    pylab.plot([m[0]], [m[1]], 'ro', markersize=5)
    print("solution w/ initial conditions (1,1): %s\n" % m)

    m = fmin(fOsc3D, [-1, 1])
    print("solution w/ initial conditions (-1,1): %s\n" % m)
    pylab.plot([m[0]], [m[1]], 'ro', markersize=5)

    #   m = fmin(fOsc3D, [0, 2])
    #   print("solution w/ initial conditions (0,2): %s\n" % m)
    #   pylab.plot([m[0]],[m[1]],'ro',markersize=5)
Example #17

if __name__ == '__main__':
    import pylab
    from mystic.solvers import fmin
   #from mystic._scipyoptimize import fmin
    draw_contour()
    solution = main()
    print "solution: ", solution
    pylab.plot([solution[0]],[solution[1]],'wo',markersize=10)
    print "Differential Evolution: Min: %s, sol = %s" % (fOsc3D(solution), solution)

    print "\nTrying scipy.optimize.fmin (Nelder-Mead Simplex)..."

    m = fmin(fOsc3D, [0.1, 0.1])
    pylab.plot([m[0]],[m[1]],'ro',markersize=5)
    print "solution w/ initial conditions (0.1,0.1): %s\n" % m

    m = fmin(fOsc3D, [1, 1])
    pylab.plot([m[0]],[m[1]],'ro',markersize=5)
    print "solution w/ initial conditions (1,1): %s\n" % m

    m = fmin(fOsc3D, [-1, 1])
    print "solution w/ initial conditions (-1,1): %s\n" % m
    pylab.plot([m[0]],[m[1]],'ro',markersize=5)

#   m = fmin(fOsc3D, [0, 2])
#   print "solution w/ initial conditions (0,2): %s\n" % m
#   pylab.plot([m[0]],[m[1]],'ro',markersize=5)
Example #18
MAX_GENERATIONS = 2500


def main():
    solver = DifferentialEvolutionSolver(ND, NP)

    solver.SetRandomInitialPoints(min=[-5.12] * ND, max=[5.12] * ND)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)

    solver.Solve(DeJong3, termination=VTR(0.00001), \
                 CrossProbability=0.3, ScalingFactor=1.0)

    solution = solver.Solution()

    print(solution)


if __name__ == '__main__':
    from timeit import Timer

    # optimize with DESolver
    t = Timer("main()", "from __main__ import main")
    timetaken = t.timeit(number=1)
    print("CPU Time: %s\n" % timetaken)

    # optimize with fmin
    from mystic.solvers import fmin
    print(fmin(DeJong3, [0 for i in range(ND)]))

# end of file
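
Example #18 (and the similar DE examples below) omits the setup of ND, NP, and the cost function, which live earlier in the scraped file. A self-contained sketch of the same DE-versus-fmin comparison on the Rosenbrock function, with illustrative ND/NP values:

from mystic.solvers import DifferentialEvolutionSolver, fmin
from mystic.termination import VTR
from mystic.models import rosen

ND = 3              # number of parameters (illustrative)
NP = 30             # population size (illustrative)
MAX_GENERATIONS = 2500

def de_main():
    solver = DifferentialEvolutionSolver(ND, NP)
    solver.SetRandomInitialPoints(min=[-5.12] * ND, max=[5.12] * ND)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)
    solver.Solve(rosen, termination=VTR(0.00001),
                 CrossProbability=0.3, ScalingFactor=1.0)
    return solver.Solution()

if __name__ == '__main__':
    print("DE solution: %s" % de_main())
    print("fmin solution: %s" % fmin(rosen, [0.0] * ND))
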
Example #19
MAX_GENERATIONS = 2500

def main():
    solver = DifferentialEvolutionSolver(ND, NP)

    solver.SetRandomInitialPoints(min = [-1.28]*ND, max = [1.28]*ND)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)

    solver.Solve(DeJong4, termination=VTR(15), strategy=Rand1Exp, \
                 CrossProbability=0.3, ScalingFactor=1.0)

    solution = solver.Solution()
  
    print(solution)



if __name__ == '__main__':
    from timeit import Timer

    # optimize with DESolver
    t = Timer("main()", "from __main__ import main")
    timetaken =  t.timeit(number=1)
    print("CPU Time: %s\n" % timetaken)

    # optimize with fmin
    from mystic.solvers import fmin
    print(fmin(DeJong4, [0 for i in range(ND)]))

# end of file
Example #20
    solution = solver.Solution()

    print(solution)


if __name__ == '__main__':
    from timeit import Timer
    t = Timer("main()", "from __main__ import main")
    timetaken = t.timeit(number=1)
    print("CPU Time: %s" % timetaken)

    from mystic.monitors import Monitor
    from mystic.solvers import NelderMeadSimplexSolver as fmin
    from mystic.termination import CandidateRelativeTolerance as CRT

    import random
    simplex = Monitor()
    esow = Monitor()
    xinit = [random.uniform(0, 5) for j in range(ND)]

    solver = fmin(len(xinit))
    solver.SetInitialPoints(xinit)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(CostFunction, CRT())
    sol = solver.Solution()
    print("fmin solution: %s" % sol)

# end of file
Example #21
    solution = solver.Solution()
  
    print(solution)



if __name__ == '__main__':
    from timeit import Timer
    t = Timer("main()", "from __main__ import main")
    timetaken =  t.timeit(number=1)
    print("CPU Time: %s" % timetaken)

    from mystic.monitors import Monitor
    from mystic.solvers import NelderMeadSimplexSolver as fmin
    from mystic.termination import CandidateRelativeTolerance as CRT

    import random
    simplex = Monitor()
    esow = Monitor()
    xinit = [random.uniform(0,5) for j in range(ND)]

    solver = fmin(len(xinit))
    solver.SetInitialPoints(xinit)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(CostFunction, CRT())
    sol = solver.Solution()
    print("fmin solution: %s" % sol)

# end of file
Example #22
        from mystic._scipyoptimize import fmin_cg
        leastsq = None
    #
    desol, dstepmon = de_solve()
    print("desol: %s" % desol)
    print("dstepmon 50: %s" % dstepmon.x[50])
    print("dstepmon 100: %s" % dstepmon.x[100])
    #
    # this will try nelder-mead from a relatively "nearby" point (very sensitive)
    point = [1234., -500., 10., 0.001] # both cg and nm do fine
    point = [1000,-100,0,1] # cg will do badly on this one
    # this will try nelder-mead from an unconverged DE solution 
    #point = dstepmon.x[-150]
    #
    simplex, esow = Monitor(), Monitor()
    solver = fmin(len(point))
    solver.SetInitialPoints(point)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, CRT())
    sol = solver.Solution()

    print("\nsimplex solution: %s" % sol)
    #
    solcg = fmin_cg(cost_function, point)
    print("\nConjugate-Gradient (Polak Rubiere) : %s" % solcg)
    #
    if leastsq:
        sollsq = leastsq(vec_cost_function, point)
        sollsq = sollsq[0]
        print("\nLeast Squares (Levenberg Marquardt) : %s" % sollsq)
Example #23
    legend = ['random points','generating circle : %f' % R0]
    pylab.axis('equal')

    # solve with mystic's differential evolution solver
    solution = solver.Solution()
    sx, sy, sr = solution
    print("DEsol : (%f, %f) @ R = %f" % (sx, sy, sr))

    # plot DEsolver solution
    c = circle(sx, sy, sr)
    pylab.plot(c[:,0],c[:,1],'b-',linewidth=2)
    legend.append('DE optimal : %f' % sr)

    # solve with scipy.fmin
    from mystic.solvers import fmin
    sol = fmin(cost, guess)
    print("scipy.fmin sol: %s" % sol)
    ax, ay, ar = sol

    # plot scipy.fmin solution
    c = circle(ax, ay, ar)
    pylab.plot(c[:,0],c[:,1],'g-',linewidth=2)
    legend.append('Nelder-Mead : %f' % ar)

    # solve with scipy.brute
   #from mystic._scipyoptimize import brute
   #ranges = tuple(zip(minrange,maxrange))
   #sol = brute(cost, ranges, Ns=NP)
   #print("scipy.brute sol: %s" % sol)
   #bx, by, br = sol
Example #24
        # test iters <= maxiter
        assert my_x[2] <= maxiter
    return

if __name__ == '__main__':
    x0 = [0, 0, 0]

    # check solutions versus results based on the random_seed
    # print "comparing against known results"
    sol = solvers.diffev(rosen, x0, npop=40, disp=0, full_output=True)
    assert almostEqual(sol[1], 0.0020640145337293249, tol=3e-3)
    sol = solvers.diffev2(rosen, x0, npop=40, disp=0, full_output=True)
    assert almostEqual(sol[1], 0.0017516784703663288, tol=3e-3)
    sol = solvers.fmin_powell(rosen, x0, disp=0, full_output=True)
    assert almostEqual(sol[1], 8.3173488898295291e-23)
    sol = solvers.fmin(rosen, x0, disp=0, full_output=True)
    assert almostEqual(sol[1], 1.1605792769954724e-09)

    solver2 = 'diffev2'
    for solver in ['diffev']:
        #   print "comparing %s and %s from mystic" % (solver, solver2)
        test_solvers(solver, solver2, x0, npop=40)
        test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=0)
        test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=1)
        test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=2)
        test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=9)
        test_solvers(solver, solver2, x0, npop=40, maxiter=0)
        test_solvers(solver, solver2, x0, npop=40, maxiter=1)
        test_solvers(solver, solver2, x0, npop=40, maxiter=2)
        test_solvers(solver, solver2, x0, npop=40, maxiter=9)
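
The assertions above index into the tuple returned when full_output=True: sol[1] is the best function value (and the test also reads an iteration count further along the tuple). A short sketch of that calling pattern, using only the first two entries:

from mystic import solvers
from mystic.models import rosen

if __name__ == '__main__':
    x0 = [0, 0, 0]
    # with full_output=True the one-liner solvers return a tuple whose first
    # two entries are the best parameters and the best function value
    sol = solvers.fmin(rosen, x0, disp=0, full_output=True)
    xbest, fbest = sol[0], sol[1]
    print("best parameters: %s" % xbest)
    print("best cost: %s" % fbest)
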
Example #25
        from mystic._scipyoptimize import fmin_cg
        leastsq = None
    #
    desol, dstepmon = de_solve()
    print "desol: ", desol
    print "dstepmon 50: ", dstepmon.x[50]
    print "dstepmon 100: ", dstepmon.x[100]
    #
    # this will try nelder-mead from a relatively "nearby" point (very sensitive)
    point = [1234., -500., 10., 0.001] # both cg and nm do fine
    point = [1000,-100,0,1] # cg will do badly on this one
    # this will try nelder-mead from an unconverged DE solution 
    #point = dstepmon.x[-150]
    #
    simplex, esow = Monitor(), Monitor()
    solver = fmin(len(point))
    solver.SetInitialPoints(point)
    solver.SetEvaluationMonitor(esow)
    solver.SetGenerationMonitor(simplex)
    solver.Solve(cost_function, CRT())
    sol = solver.Solution()

    print "\nsimplex solution: ", sol
    #
    solcg = fmin_cg(cost_function, point)
    print "\nConjugate-Gradient (Polak Rubiere) : ", solcg
    #
    if leastsq:
        sollsq = leastsq(vec_cost_function, point)
        sollsq = sollsq[0]
        print "\nLeast Squares (Levenberg Marquardt) : ", sollsq
Example #26
    solver.Solve(corana, termination=VTR(0.00000001), strategy=Rand1Exp,\
                 CrossProbability=0.5, ScalingFactor=0.9)

    solution = solver.Solution()

    print(solution)


if __name__ == '__main__':
    from timeit import Timer
    t = Timer("main()", "from __main__ import main")
    timetaken = t.timeit(number=1)
    print("CPU Time: %s" % timetaken)

    try:
        from mystic.solvers import fmin
        #from mystic._scipyoptimize import fmin
        import random
        print("\nScipy: ")
        sol = fmin(corana, [random.random() for j in range(4)],
                   full_output=0,
                   retall=1)
        print("solution: %s" % sol[-1][0])
        print("\nCorana 1 with Scipy")
        sol = fmin(corana1, [random.random()], full_output=1, retall=1)
        print("solution: %s" % sol[-1][0])
    except:
        pass

# end of file
Example #27
    # test iters <= maxiter
    assert my_x[2] <= maxiter
  return 

if __name__ == '__main__':
  x0 = [0,0,0]

  # check solutions versus results based on the random_seed
# print "comparing against known results"
  sol = solvers.diffev(rosen, x0, npop=40, disp=0, full_output=True)
  assert almostEqual(sol[1], 0.0020640145337293249, tol=3e-3)
  sol = solvers.diffev2(rosen, x0, npop=40, disp=0, full_output=True)
  assert almostEqual(sol[1], 0.0017516784703663288, tol=3e-3)
  sol = solvers.fmin_powell(rosen, x0, disp=0, full_output=True)
  assert almostEqual(sol[1], 8.3173488898295291e-23)
  sol = solvers.fmin(rosen, x0, disp=0, full_output=True)
  assert almostEqual(sol[1], 1.1605792769954724e-09)

  solver2 = 'diffev2'
  for solver in ['diffev']:
#   print "comparing %s and %s from mystic" % (solver, solver2)
    test_solvers(solver, solver2, x0, npop=40)
    test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=0)
    test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=1)
    test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=2)
    test_solvers(solver, solver2, x0, npop=40, maxiter=None, maxfun=9)
    test_solvers(solver, solver2, x0, npop=40, maxiter=0)
    test_solvers(solver, solver2, x0, npop=40, maxiter=1)
    test_solvers(solver, solver2, x0, npop=40, maxiter=2)
    test_solvers(solver, solver2, x0, npop=40, maxiter=9)
Example #28
    solver.Solve(corana, termination=VTR(0.00000001), strategy=Rand1Exp,\
                 CrossProbability=0.5, ScalingFactor=0.9)

    solution = solver.Solution()
  
    print(solution)



if __name__ == '__main__':
    from timeit import Timer
    t = Timer("main()", "from __main__ import main")
    timetaken =  t.timeit(number=1)
    print("CPU Time: %s" % timetaken)

    try:
        from mystic.solvers import fmin
       #from mystic._scipyoptimize import fmin
        import random
        print( "\nScipy: ")
        sol = fmin(corana, [random.random() for j in range(4)], full_output=0, retall=1)
        print("solution: %s" % sol[-1][0])
        print("\nCorana 1 with Scipy")
        sol = fmin(corana1, [random.random()], full_output=1, retall=1)
        print("solution: %s" % sol[-1][0])
    except:
        pass

# end of file
Example #29
# Nelder-Mead solver
from mystic.solvers import fmin

# Rosenbrock function
from mystic.models import rosen

# tools
import matplotlib.pyplot as plt

if __name__ == '__main__':

    print("Nelder-Mead Simplex")
    print("===================")

    # initial guess
    x0 = [0.8, 1.2, 0.7]

    # use Nelder-Mead to minimize the Rosenbrock function
    solution = fmin(rosen, x0)
    print(solution)

    # plot the Rosenbrock function (one plot per axis)
    x = [0.01 * i for i in range(200)]
    plt.plot(x, [rosen([i, 1., 1.]) for i in x])
    plt.plot(x, [rosen([1., i, 1.]) for i in x])
    plt.plot(x, [rosen([1., 1., i]) for i in x])

    # plot the solved minimum (for x)
    plt.plot([solution[0]], [rosen(solution)], 'bo')

    # draw the plot
    plt.title("minimium of Rosenbrock's function")
    plt.xlabel("x, y, z")
    plt.ylabel("f(i) = Rosenbrock's function")
Example #30
MAX_GENERATIONS = 2500

def main():
    solver = DifferentialEvolutionSolver(ND, NP)

    solver.SetRandomInitialPoints(min = [-5.12]*ND, max = [5.12]*ND)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)

    solver.Solve(DeJong3, termination=VTR(0.00001), \
                 CrossProbability=0.3, ScalingFactor=1.0)

    solution = solver.Solution()
  
    print(solution)



if __name__ == '__main__':
    from timeit import Timer

    # optimize with DESolver
    t = Timer("main()", "from __main__ import main")
    timetaken =  t.timeit(number=1)
    print "CPU Time: %s\n" % timetaken

    # optimize with fmin
    from mystic.solvers import fmin
    print(fmin(DeJong3, [0 for i in range(ND)]))

# end of file
Example #31
    strategy = Best1Bin
    stepmon = VerboseMonitor(1)
    solver.SetGenerationMonitor(stepmon)
    #solver.SetReducer(sum, arraylike=True) # reduce wavy's multi-valued return
    solver.Solve(wavy, ChangeOverGeneration(generations=50), \
                 strategy=strategy, CrossProbability=1.0, ScalingFactor=0.9, \
                 sigint_callback = plot_solution)

    solution = solver.Solution()

    return solution, solver


if __name__ == '__main__':
    #solution = main()
    scipysol = fmin(wavy, [0.1])
    desol, solver = main()
    #plot_solution(scipysol)
    #plot_solution(desol)
    print "fmin: ", scipysol, wavy(scipysol)
    print "dife: ", desol, wavy(desol)
    try:
        import pylab
        x = arange(-40, 40, 0.01)
        pylab.plot(x, wavy(x))
        pylab.plot(scipysol, wavy(scipysol), 'r+', markersize=8)
        pylab.plot(desol, wavy(desol), 'bo', markersize=8)
        pylab.legend(('|x + 3 sin(x+pi)|', 'fmin', 'dife'))
        if hasattr(solver, 'genealogy'):
            xx = solver.genealogy
            pylab.plot(xx[4], wavy(xx[4]), 'g-', markersize=3)
Example #32
def main():
    solver = DifferentialEvolutionSolver(ND, NP)
    solver.SetRandomInitialPoints(min = [-400.0]*ND, max = [400.0]*ND)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)

    solver.Solve(Griewangk_cost, termination=VTR(0.00001), strategy=Rand1Exp,\
                 CrossProbability=0.3, ScalingFactor=1.0)

    solution = solver.Solution()
  
    print(solution)



if __name__ == '__main__':
    from mystic.solvers import fmin
    from timeit import Timer
    t = Timer("main()", "from __main__ import main")
    timetaken =  t.timeit(number=1)
    print "CPU Time: %s" % timetaken

    import random
    print "Scipy fmin"
    for i in [400,200,100,40,20,10,4,2,1]:
        print "\ninitializing with range (-%d, %d)" % (i,i)
        sol = fmin(Griewangk_cost, [random.uniform(-i,i) for j in range(10)])
        print "sol: ", sol
        print "cost: ", Griewangk_cost(sol)

# end of file
Example #33
    stepmon = VerboseMonitor(1)
    solver.SetGenerationMonitor(stepmon)
   #solver.SetReducer(sum, arraylike=True) # reduce wavy's multi-valued return
    solver.Solve(wavy, ChangeOverGeneration(generations=50), \
                 strategy=strategy, CrossProbability=1.0, ScalingFactor=0.9, \
                 sigint_callback = plot_solution)

    solution = solver.Solution()

    return solution, solver
  


if __name__ == '__main__':
    #solution = main()
    scipysol = fmin(wavy, [0.1])
    desol, solver = main()
    #plot_solution(scipysol)
    #plot_solution(desol)
    print("fmin: %s %s" % (scipysol, wavy(scipysol)))
    print("dife: %s %s" % (desol, wavy(desol)))
    try:
        import pylab
        x = arange(-40,40,0.01)
        pylab.plot(x,wavy(x))
        pylab.plot(scipysol, wavy(scipysol), 'r+',markersize=8)
        pylab.plot(desol, wavy(desol), 'bo',markersize=8)
        pylab.legend(('|x + 3 sin(x+pi)|','fmin','dife'))
        if hasattr(solver, 'genealogy'):
            xx = solver.genealogy
            pylab.plot(xx[4], wavy(xx[4]), 'g-',markersize=3)
Example #34
    xval.append(params[0])
    yval.append(params[1])
    zval.append(params[2])
    pylab.plot(step,xval,'b-')
    pylab.plot(step,yval,'g-')
    pylab.plot(step,zval,'r-')
    pylab.legend(["x", "y", "z"])
    pylab.draw()
    iter += 1
    return


if __name__ == '__main__':

    # initial guess
    x0 = [0.8,1.2,0.7]

    # suggest that the user interacts with the solver
    print("NOTE: while solver is running, press 'Ctrl-C' in console window")
    getch()
    plot_frame()

    # use Nelder-Mead to minimize the Rosenbrock function
    solution = fmin(rosen,x0,disp=1,callback=plot_params,handler=True)
    print(solution)

    # don't exit until user is ready
    getch()

# end of file
Example #35
# Nelder-Mead solver
from mystic.solvers import fmin

# Rosenbrock function
from mystic.models import rosen

# tools
import pylab

if __name__ == '__main__':

    # initial guess
    x0 = [0.8, 1.2, 0.7]

    # use Nelder-Mead to minimize the Rosenbrock function
    solution = fmin(rosen, x0, disp=0, retall=1)
    allvecs = solution[-1]

    # plot the parameter trajectories
    pylab.plot([i[0] for i in allvecs])
    pylab.plot([i[1] for i in allvecs])
    pylab.plot([i[2] for i in allvecs])

    # draw the plot
    pylab.title("Rosenbrock parameter convergence")
    pylab.xlabel("Nelder-Mead solver iterations")
    pylab.ylabel("parameter value")
    pylab.legend(["x", "y", "z"])
    pylab.show()

# end of file
Example #36
    xval.append(params[0])
    yval.append(params[1])
    zval.append(params[2])
    pylab.plot(step, xval, 'b-')
    pylab.plot(step, yval, 'g-')
    pylab.plot(step, zval, 'r-')
    pylab.legend(["x", "y", "z"])
    pylab.draw()
    iter += 1
    return


if __name__ == '__main__':

    # initial guess
    x0 = [0.8, 1.2, 0.7]

    # suggest that the user interacts with the solver
    print("NOTE: while solver is running, press 'Ctrl-C' in console window")
    getch()
    plot_frame()

    # use Nelder-Mead to minimize the Rosenbrock function
    solution = fmin(rosen, x0, disp=1, callback=plot_params, handler=True)
    print(solution)

    # don't exit until user is ready
    getch()

# end of file
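
Examples #34 and #36 animate each iteration through a matplotlib callback, but depend on helpers (plot_frame, plot_params, getch) defined elsewhere in the scraped files. A minimal self-contained sketch of the callback hook itself, simply printing the parameters that fmin passes at each iteration:

from mystic.solvers import fmin
from mystic.models import rosen

def report(params):
    # called by fmin once per iteration with the current best parameter vector
    print("current parameters: %s" % list(params))

if __name__ == '__main__':
    x0 = [0.8, 1.2, 0.7]
    solution = fmin(rosen, x0, disp=1, callback=report)
    print(solution)
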
Example #37
def main():
    solver = DifferentialEvolutionSolver(ND, NP)
    solver.SetRandomInitialPoints(min = [-400.0]*ND, max = [400.0]*ND)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)

    solver.Solve(Griewangk_cost, termination=VTR(0.00001), strategy=Rand1Exp,\
                 CrossProbability=0.3, ScalingFactor=1.0)

    solution = solver.Solution()
  
    print(solution)



if __name__ == '__main__':
    from mystic.solvers import fmin
    from timeit import Timer
    t = Timer("main()", "from __main__ import main")
    timetaken =  t.timeit(number=1)
    print "CPU Time: %s" % timetaken

    import random
    print "Scipy fmin"
    for i in [400,200,100,40,20,10,4,2,1]:
        print "\ninitializing with range (-%d, %d)" % (i,i)
        sol = fmin(Griewangk_cost, [random.uniform(-i,i) for j in range(10)])
        print "sol: ", sol
        print "cost: ", Griewangk_cost(sol)

# end of file
Example #38
Testing the Corana parabola in 1D. Requires sam.
"""

import sam, numpy, mystic
#from test_corana import *
from mystic.solvers import fmin
from mystic.tools import getch

from mystic.models.corana import corana1d as Corana1

x = numpy.arange(-2., 2., 0.01)
y = [Corana1([c]) for c in x]

sam.put('x', x)
sam.put('y', y)
sam.eval("plot(x,y,'LineWidth',1); hold on")


for xinit in numpy.arange(0.1,2,0.1):
    sol = fmin(Corana1, [xinit], full_output=1, retall=1)
    xx = mystic.flatten_array(sol[-1])
    yy = [Corana1([c]) for c in xx]
    sam.put('xx', xx)
    sam.put('yy', yy)
    sam.eval("plot(xx,yy,'r-',xx,yy,'ko','LineWidth',2)")

sam.eval("axis([0 2 0 4])")
getch('press any key to exit')

# end of file
Example #39
from mystic.solvers import fmin

# Rosenbrock function
from mystic.models import rosen

# tools
import pylab


if __name__ == '__main__':

    # initial guess
    x0 = [0.8,1.2,0.7]

    # use Nelder-Mead to minimize the Rosenbrock function
    solution = fmin(rosen,x0,disp=0,retall=1)
    allvecs = solution[-1]

    # plot the parameter trajectories
    pylab.plot([i[0] for i in allvecs])
    pylab.plot([i[1] for i in allvecs])
    pylab.plot([i[2] for i in allvecs])

    # draw the plot
    pylab.title("Rosenbrock parameter convergence")
    pylab.xlabel("Nelder-Mead solver iterations")
    pylab.ylabel("parameter value")
    pylab.legend(["x", "y", "z"])
    pylab.show()
 
# end of file
Example #40
# Nelder-Mead solver
from mystic.solvers import fmin

# Rosenbrock function
from mystic.models import rosen

# tools
import pylab


if __name__ == '__main__':

    print "Nelder-Mead Simplex"
    print "==================="

    # initial guess
    x0 = [0.8,1.2,0.7]

    # use Nelder-Mead to minimize the Rosenbrock function
    solution = fmin(rosen,x0)
    print(solution)
 
    # plot the Rosenbrock function (one plot per axis)
    x = [0.01*i for i in range(200)]
    pylab.plot(x,[rosen([i,1.,1.]) for i in x])
    pylab.plot(x,[rosen([1.,i,1.]) for i in x])
    pylab.plot(x,[rosen([1.,1.,i]) for i in x])

    # plot the solved minimum (for x)
    pylab.plot([solution[0]],[rosen(solution)],'bo')

    # draw the plot
    pylab.title("minimium of Rosenbrock's function")
    pylab.xlabel("x, y, z")
    pylab.ylabel("f(i) = Rosenbrock's function")
Example #41
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2016 California Institute of Technology.
# Copyright (c) 2016-2021 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/mystic/blob/master/LICENSE
"""
Testing the polynomial fitting problem of [1] using scipy's Nelder-Mead algorithm.

Reference:

[1] Storn, R. and Price, K. Differential Evolution - A Simple and Efficient
Heuristic for Global Optimization over Continuous Spaces. Journal of Global
Optimization 11: 341-359, 1997.
"""

from test_ffit import Chebyshev8, ChebyshevCost, plot_solution, print_solution

if __name__ == '__main__':
    import random
    from mystic.solvers import fmin
   #from mystic._scipyoptimize import fmin
    from mystic.tools import random_seed
    random_seed(123)
    x = [random.uniform(-100,100) + Chebyshev8[i] for i in range(9)]
    solution = fmin(ChebyshevCost, x)
    print_solution(solution)
    plot_solution(solution)

# end of file
Example #42
# License: 3-clause BSD.  The full license text is available at:
#  - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
Testing the Corana parabola in 1D. Requires matplotlib.
"""

import pylab, numpy, mystic
#from test_corana import *
from mystic.solvers import fmin
from mystic.tools import getch

from mystic.models.storn import Corana
Corana1 = Corana(1)

x = numpy.arange(-2., 2., 0.01)
y = [Corana1([c]) for c in x]

pylab.plot(x,y,linewidth=1)


for xinit in numpy.arange(0.1,2,0.1):
    sol = fmin(Corana1, [xinit], full_output=1, retall=1)
    xx = mystic.flatten_array(sol[-1])
    yy = [Corana1([c]) for c in xx]
    pylab.plot(xx,yy,'r-',xx,yy,'ko',linewidth=2)

pylab.title("Solution trajectories for fmin at different initial conditions")
pylab.show()

# end of file
Example #43
    legend = ['random points','generating circle : %f' % R0]
    pylab.axis('equal')

    # solve with mystic's differential evolution solver
    solution = solver.Solution()
    sx, sy, sr = solution
    print("DEsol : (%f, %f) @ R = %f" % (sx, sy, sr))

    # plot DEsolver solution
    c = circle(sx, sy, sr)
    pylab.plot(c[:,0],c[:,1],'b-',linewidth=2)
    legend.append('DE optimal : %f' % sr)

    # solve with scipy.fmin
    from mystic.solvers import fmin
    sol = fmin(cost, guess)
    print("scipy.fmin sol: %s" % sol)
    ax, ay, ar = sol

    # plot scipy.fmin solution
    c = circle(ax, ay, ar)
    pylab.plot(c[:,0],c[:,1],'g-',linewidth=2)
    legend.append('Nelder-Mead : %f' % ar)

    # solve with scipy.brute
   #from mystic._scipyoptimize import brute
   #ranges = tuple(zip(minrange,maxrange))
   #sol = brute(cost, ranges, Ns=NP)
   #print("scipy.brute sol: %s" % sol)
   #bx, by, br = sol
Example #44
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2014 California Institute of Technology.
# License: 3-clause BSD.  The full license text is available at:
#  - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
Testing the polynomial fitting problem of [1] using scipy's Nelder-Mead algorithm.

Reference:

[1] Storn, R. and Price, K. Differential Evolution - A Simple and Efficient
Heuristic for Global Optimization over Continuous Spaces. Journal of Global
Optimization 11: 341-359, 1997.
"""

from test_ffit import Chebyshev8, ChebyshevCost, plot_solution, print_solution

if __name__ == '__main__':
    from mystic.solvers import fmin
   #from mystic._scipyoptimize import fmin
    import random
    random.seed(123)
    x = [random.uniform(-100,100) + Chebyshev8[i] for i in range(9)]
    solution = fmin(ChebyshevCost, x)
    print_solution(solution)
    plot_solution(solution)

# end of file
Example #45
MAX_GENERATIONS = 2500


def main():
    solver = DifferentialEvolutionSolver(ND, NP)

    solver.SetRandomInitialPoints(min=[-1.28] * ND, max=[1.28] * ND)
    solver.SetEvaluationLimits(generations=MAX_GENERATIONS)

    solver.Solve(DeJong4, termination=VTR(15), strategy=Rand1Exp, \
                 CrossProbability=0.3, ScalingFactor=1.0)

    solution = solver.Solution()

    print(solution)


if __name__ == '__main__':
    from timeit import Timer

    # optimize with DESolver
    t = Timer("main()", "from __main__ import main")
    timetaken = t.timeit(number=1)
    print "CPU Time: %s\n" % timetaken

    # optimize with fmin
    from mystic.solvers import fmin
    print(fmin(DeJong4, [0 for i in range(ND)]))

# end of file