コード例 #1
0
ファイル: Week10.py プロジェクト: GGoYoungHee/Optimization
# Rosenbrock
def f(x, a=1, b=100):
    """Rosenbrock function: (a - x[0])**2 + b*(x[1] - x[0]**2)**2.

    Only requires `x` to be indexable, so it works both on numeric
    arrays and on a sympy IndexedBase (the script below differentiates
    it symbolically). Global minimum 0 at x = (a, a**2).
    """
    x0, x1 = x[0], x[1]
    return (a - x0) ** 2 + b * (x1 - x0 ** 2) ** 2


# Build the gradient of the Rosenbrock f symbolically: f(x) is evaluated
# on sympy IndexedBase entries x[0], x[1], then differentiated per component.
x = sp.IndexedBase('x')
gradients = np.array([sp.diff(f(x), x[i]) for i in range(2)])
# Compile the symbolic gradient expressions into a numpy-backed callable.
grads = sp.lambdify(x, gradients, 'numpy')

# Common starting point for all optimizer runs below.
x_ = np.array([-2., 2.])

# Optimizer routines come from the project-local `gd` module; presumably the
# signature is gd.<method>(f, grad_fn, x0, ...) — TODO confirm against gd.
gd.GradientDescent(f, grads, x_, alpha=1E-1, verbose=True)

gd.ConjugateGradient(f, grads, x_)

# Momentum-family methods use a much smaller step size here.
gd.momentum(f, grads, x_, alpha=7E-4, verbose=True)

gd.nesterov(f, grads, x_, alpha=7E-4, verbose=True)
""" Huge-variate : e.g. Deep Learning """
# Adagrad tolerates a large base step because it scales per-coordinate.
gd.adagrad(f, grads, x_, alpha=3.05E-0, verbose=True)
""" RMSProp: Geoffrey Hinton """


def rmsprop(f,
            grads,
            x,
            alpha,
            decay=0.9,
            epsilon=1E-7,
コード例 #2
0
# Rosenbrock
def f(x, a=1, b=5):
    """Rosenbrock function with a gentler valley (b defaults to 5).

    Indexable-only contract on `x` keeps it usable with both numpy
    arrays and sympy IndexedBase symbols. Minimum 0 at x = (a, a**2).
    """
    linear_term = a - x[0]
    valley_term = x[1] - x[0] ** 2
    return linear_term ** 2 + b * valley_term ** 2


# Symbolic gradient of the (b=5) Rosenbrock f, compiled to a numpy callable.
x = sp.IndexedBase('x')
gradients = np.array([sp.diff(f(x), x[i]) for i in range(2)])
grads = sp.lambdify(x, gradients, 'numpy')

# NOTE(review): x_ is an integer array here (unlike the float start elsewhere
# in this file); if the gd routines update it in place, values would truncate
# — TODO confirm gd copies its starting point.
x_ = np.array([-2, 2])
alpha = 1E-2  # shared learning rate for both runs below

gd.GradientDescent(f, grads, x_, alpha)

gd.ConjugateGradient(f, grads, x_)

############################################
""" 문제해결형 과제 (2) """
############################################
""" Branin Function """


def draw_branin(levels):

    a = 1
    b = 5.1 / (4 * np.pi**2)
    c = 5 / np.pi
    r = 6
    s = 10
    t = 1 / (8 * np.pi)
コード例 #3
0
    return y


# Symbolic gradient of f (the Branin function defined above), compiled
# to a numpy callable via sympy lambdify.
x = sp.IndexedBase('x')
gradients = np.array([sp.diff(f(x), x[i]) for i in range(2)])
grads = sp.lambdify(x, gradients, 'numpy')

#### Branin function min value 4 ####

# Gradient Descent | ConjugateGradient
# Sweep four starting points; Branin has multiple global minima, so the
# converged point presumably depends on the start — TODO confirm output.

alpha = 1E-2  # learning rate

x_ = np.array([0., 15.])
gd.GradientDescent(f, grads, x_, alpha)
gd.ConjugateGradient(f, grads, x_, verbose=False)

x_ = np.array([5., 5.])
gd.GradientDescent(f, grads, x_, alpha)
gd.ConjugateGradient(f, grads, x_, verbose=False)

x_ = np.array([13., 5.])
gd.GradientDescent(f, grads, x_, alpha)
gd.ConjugateGradient(f, grads, x_, verbose=False)

x_ = np.array([20., 17.])
gd.GradientDescent(f, grads, x_, alpha)
gd.ConjugateGradient(f, grads, x_, verbose=False)  # error

# Momentum