Example #1
# (imports assumed by this snippet; mystic's solver and model are one
#  possibility -- scipy.optimize's fmin and rosen work the same way)
import matplotlib.pyplot as plt
from mystic.solvers import fmin
from mystic.models import rosen

if __name__ == '__main__':

    print("Nelder-Mead Simplex")
    print("===================")

    # initial guess
    x0 = [0.8, 1.2, 0.7]

    # use Nelder-Mead to minimize the Rosenbrock function
    # (its minimum is at (1, 1, 1), where f = 0)
    solution = fmin(rosen, x0)
    print(solution)

    # plot the Rosenbrock function (one plot per axis)
    x = [0.01 * i for i in range(200)]
    plt.plot(x, [rosen([i, 1., 1.]) for i in x])
    plt.plot(x, [rosen([1., i, 1.]) for i in x])
    plt.plot(x, [rosen([1., 1., i]) for i in x])

    # plot the solved minimum (for x)
    plt.plot([solution[0]], [rosen(solution)], 'bo')

    # draw the plot
    plt.title("minimum of Rosenbrock's function")
    plt.xlabel("x, y, z")
    plt.ylabel("f(i) = Rosenbrock's function")
    plt.legend(["f(x,1,1)", "f(1,y,1)", "f(1,1,z)"])
    plt.show()

# end of file
Example #2
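This snippet assumes x (an array of sample points, one point per row), y (the sampled function values), and cost (the objective that produced them) are defined earlier in the test. A minimal sketch of such a setup, with hypothetical names and sizes:

import numpy as np
from mystic.models import rosen0der as cost  # hypothetical choice of objective

x = np.random.rand(25, 3) * 2  # assumed: 25 sample points in 3-D, in [0, 2)
y = cost(x.T)                  # the snippet below calls cost with one row per axis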
import mystic.math.interpolate as ip

# _to_function wraps an objective cost(x) as a function fx(*coords)
fx = ip._to_function(cost)
assert (fx(*x.T) - cost(x.T)).sum() < 0.0001

# build a linear interpolator of the sampled data, then wrap it back
# into an objective cf(x) with _to_objective
f = ip.interpf(x, y, method='linear', arrays=True)
cf = ip._to_objective(f)
assert (f(*x.T) - cf(x.T)).sum() < 0.0001

# both calling conventions agree exactly at a single sample point
assert f(*x[0].T) == cf(x[0].T)
assert fx(*x[0].T) == cost(x[0].T)

# gradient estimates have the same shape for either wrapper
assert ip.gradient(x, f, method='linear').shape \
    == ip.gradient(x, fx, method='linear').shape


# repeat, with the Rosenbrock function as the objective
from mystic.models import rosen0der as rosen
y = rosen(x.T)

f = ip.interpf(x, y, method='linear')
fx = ip._to_function(rosen)
cf = ip._to_objective(f)

assert f(*x[0].T) == cf(x[0].T)
assert fx(*x[0].T) == rosen(x[0].T)
#print(fx(*x[0].T) - rosen(x[0].T))

assert ip.gradient(x, f, method='linear').shape \
    == ip.gradient(x, fx, method='linear').shape
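
Since _to_objective returns a callable with the standard cost(x) signature, the interpolated surrogate can be handed to a solver directly. A minimal sketch, assuming the setup above and mystic's fmin (the surrogate is piecewise-linear, so expect only a rough fit to the true minimum at (1, 1, 1)):

from mystic.solvers import fmin

# minimize the surrogate of the Rosenbrock function built above
xmin = fmin(cf, [0.8, 1.2, 0.7])
print(xmin)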

Example #3
# the same script in its older form, using the pylab interface
# (imports again assumed: mystic's fmin and rosen, plus pylab)
import pylab
from mystic.solvers import fmin
from mystic.models import rosen

if __name__ == '__main__':

    print("Nelder-Mead Simplex")
    print("===================")

    # initial guess
    x0 = [0.8, 1.2, 0.7]

    # use Nelder-Mead to minimize the Rosenbrock function
    solution = fmin(rosen, x0)
    print(solution)

    # plot the Rosenbrock function (one plot per axis)
    x = [0.01 * i for i in range(200)]
    pylab.plot(x, [rosen([i, 1., 1.]) for i in x])
    pylab.plot(x, [rosen([1., i, 1.]) for i in x])
    pylab.plot(x, [rosen([1., 1., i]) for i in x])

    # plot the solved minimum (for x)
    pylab.plot([solution[0]], [rosen(solution)], 'bo')

    # draw the plot
    pylab.title("minimum of Rosenbrock's function")
    pylab.xlabel("x, y, z")
    pylab.ylabel("f(i) = Rosenbrock's function")
    pylab.legend(["f(x,1,1)", "f(1,y,1)", "f(1,1,z)"])
    pylab.show()

# end of file