Example no. 1
    def test_rosenbrock(self):
        def rosenbrock(x0, x1, a=1, b=100):
            y = b * (x1 - x0**2)**2 + (a - x0)**2
            return y

        x0 = Variable(np.array(0.0))
        x1 = Variable(np.array(2.0))

        lr = 0.001
        iters = 30000
        for i in range(iters):
            y = rosenbrock(x0, x1)

            x0.cleargrad()
            x1.cleargrad()
            y.backward()

            if isinstance(x0.grad, Variable):  # grad is a Variable in later DeZero steps, a plain ndarray in earlier ones
                x0.data -= lr * x0.grad.data
                x1.data -= lr * x1.grad.data
            else:
                x0.data -= lr * x0.grad
                x1.data -= lr * x1.grad

        flg1 = np.allclose(x0.data, 1.0)
        flg2 = np.allclose(x1.data, 1.0)
        self.assertTrue(flg1 and flg2)
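As a sanity check, the same optimization can be reproduced without autodiff. A minimal NumPy-only sketch, assuming the hand-derived gradients of the Rosenbrock function:

import numpy as np

# For y = b*(x1 - x0**2)**2 + (a - x0)**2 the gradients are:
#   dy/dx0 = -4*b*x0*(x1 - x0**2) - 2*(a - x0)
#   dy/dx1 =  2*b*(x1 - x0**2)
a, b = 1.0, 100.0
x0, x1 = 0.0, 2.0
lr, iters = 0.001, 30000
for i in range(iters):
    gx0 = -4 * b * x0 * (x1 - x0**2) - 2 * (a - x0)
    gx1 = 2 * b * (x1 - x0**2)
    x0 -= lr * gx0
    x1 -= lr * gx1
assert np.allclose(x0, 1.0) and np.allclose(x1, 1.0)  # same criterion as the test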
Example no. 2
    def test_linear_regression(self):
        np.random.seed(0)
        x = np.random.rand(100, 1)
        y = 5 + 2 * x + np.random.rand(100, 1)
        x, y = Variable(x), Variable(y)

        W = Variable(np.zeros((1, 1)))
        b = Variable(np.zeros(1))

        def predict(x):
            y = F.matmul(x, W) + b
            return y

        def mean_squared_error(x0, x1):  # local reference implementation (unused here; F.mean_squared_error is called below)
            diff = x0 - x1
            return F.sum(diff**2) / len(diff)

        lr = 0.1
        iters = 100

        for i in range(iters):
            y_pred = predict(x)
            loss = F.mean_squared_error(y, y_pred)

            W.cleargrad()
            b.cleargrad()
            loss.backward()

            W.data -= lr * W.grad.data
            b.data -= lr * b.grad.data

        print(W, b, loss)
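Since the noise term np.random.rand(100, 1) is uniform on [0, 1) with mean 0.5, the fitted parameters should approach W ≈ 2 and b ≈ 5.5.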
Example no. 3
    def test_cos_backward_twice(self):
        x = Variable(np.array(1.0))
        y = F.cos(x)
        y.backward(create_graph=True)
        gx = x.grad
        x.cleargrad()
        gx.backward(create_graph=False)
        result = x.grad.data
        assert np.allclose(result, -0.54030231)
Example no. 4
    def test_tanh_backward_twice(self):
        x = Variable(np.array(1.0))
        y = F.tanh(x)
        y.backward(create_graph=True)
        gx = x.grad
        x.cleargrad()
        gx.backward(create_graph=False)
        result = x.grad.data
        assert np.allclose(result, -0.63970001)
Example no. 5
    def test_sin_backward_twice(self):
        x = Variable(np.array(1.0))
        y = F.sin(x)
        y.backward(create_graph=True)
        gx = x.grad
        x.cleargrad()
        gx.backward(create_graph=False)
        result = x.grad.data
        assert np.allclose(result, -0.84147098)
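The expected values in these three tests are just the closed-form second derivatives evaluated at x = 1; a quick NumPy check:

import numpy as np

x = 1.0
t = np.tanh(x)
assert np.allclose(-np.cos(x), -0.54030231)           # (cos x)'' = -cos x
assert np.allclose(-2 * t * (1 - t**2), -0.63970001)  # (tanh x)'' = -2*tanh(x)*(1 - tanh(x)**2)
assert np.allclose(-np.sin(x), -0.84147098)           # (sin x)'' = -sin x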
Example no. 6
    def test_neural_regression(self):
        np.random.seed(0)
        x = np.random.rand(100, 1)
        y = np.sin(2 * np.pi * x) + np.random.rand(100, 1)

        I, H, O = 1, 10, 1
        W1 = Variable(0.01 * np.random.randn(I, H))
        b1 = Variable(np.zeros(H))
        W2 = Variable(0.01 * np.random.randn(H, O))
        b2 = Variable(np.zeros(O))

        def predict(x):
            y = F.linear(x, W1, b1)
            y = F.sigmoid(y)
            y = F.linear(y, W2, b2)
            return y

        lr = 0.2
        iters = 10000

        for i in range(iters):
            y_pred = predict(x)
            loss = F.mean_squared_error(y, y_pred)

            W1.cleargrad()
            b1.cleargrad()
            W2.cleargrad()
            b2.cleargrad()
            loss.backward()

            W1.data -= lr * W1.grad.data
            b1.data -= lr * b1.grad.data
            W2.data -= lr * W2.grad.data
            b2.data -= lr * b2.grad.data
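Here F.linear(x, W, b) computes the affine transform matmul(x, W) + b in one call; Example no. 17 spells out the same two-layer network with explicit F.matmul calls.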
Example no. 7
    def test_double_backprop(self):
        def y_(x):
            y = x**2
            return y

        def z_(gx, y):
            z = gx**3 + y
            return z

        x = Variable(np.array(2.0))
        y = y_(x)
        y.backward(create_graph=True)

        gx = x.grad
        x.cleargrad()
        z = z_(gx, y)
        z.backward()
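The gradient this leaves in x.grad can be verified by hand: with y = x**2 we get gx = 2*x, so z = gx**3 + y = 8*x**3 + x**2 and dz/dx = 24*x**2 + 2*x:

x = 2.0
assert 24 * x**2 + 2 * x == 100.0  # expected value of x.grad after z.backward()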
Example no. 8
    def test_sin(self):
        x = Variable(np.linspace(-7, 7, 200))
        y = F.sin(x)
        y.backward(create_graph=True)

        logs = [y.data]

        for i in range(3):
            logs.append(x.grad.data)
            gx = x.grad
            x.cleargrad()
            gx.backward(create_graph=True)

        labels = ["y=sin(x)", "y'", "y''", "y'''"]
        for i, v in enumerate(logs):
            plt.plot(x.data, v, label=labels[i])
        plt.legend(loc='lower right')
        plt.savefig('test.png')
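For reference, the four plotted curves are y = sin x and its first three derivatives cos x, -sin x and -cos x.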
Example no. 9
    def test_tanh(self):
        x = Variable(np.array(1.0))
        y = F.tanh(x)
        x.name = 'x'
        y.name = 'y'
        y.backward(create_graph=True)

        iters = 0

        for i in range(iters):
            gx = x.grad
            x.cleargrad()
            gx.backward(create_graph=True)

        gx = x.grad
        gx.name = 'gx' + str(iters + 1)
        txt = get_dot_graph(gx)
        with open('test.dot', 'w') as f:
            f.write(txt)
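If Graphviz is installed, the saved test.dot can be rendered with, for example, dot -Tpng test.dot -o test.png.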
Example no. 10
    def test_newton(self):
        def f(x):
            y = x**4 - 2 * x**2
            return y

        x = Variable(np.array(2.0))
        iters = 10

        for i in range(iters):
            y = f(x)
            x.cleargrad()
            y.backward(create_graph=True)

            gx = x.grad
            x.cleargrad()
            gx.backward()
            gx2 = x.grad

            x.data -= gx.data / gx2.data
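Each iteration applies Newton's update x <- x - f'(x)/f''(x), with f''(x) obtained by a second backward pass; for f(x) = x**4 - 2*x**2 this means f'(x) = 4*x**3 - 4*x and f''(x) = 12*x**2 - 4, the closed form hard-coded in the next example.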
Example no. 11
    def test_newton_gd(self):
        def f(x):
            y = x**4 - 2 * x**2
            return y

        def gx2(x):
            return 12 * x**2 - 4

        x = Variable(np.array(2.0))
        iters = 10

        for i in range(iters):
            y = f(x)
            x.cleargrad()
            y.backward()

            if isinstance(x.grad, Variable):
                x.data -= x.grad.data / gx2(x.data)
            else:
                x.data -= x.grad / gx2(x.data)
Example no. 12
# coding: utf-8
if '__file__' in globals():
    import os, sys
    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import numpy as np
from dezero import Variable


def f(x):
    y = x ** 4 - 2 * x ** 2
    return y


x = Variable(np.array(2.0))
iters = 10

for i in range(iters):
    print(i, x)

    y = f(x)
    x.cleargrad()
    y.backward(create_graph=True)

    gx = x.grad
    x.cleargrad()
    gx.backward()
    gx2 = x.grad

    x.data -= gx.data / gx2.data
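Starting from x = 2.0, this Newton iteration converges to the minimum at x = 1 within about six steps (2.0 → 1.4545… → 1.1511… → … → 1.0).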
Example no. 13
if '__file__' in globals():
    import os, sys
    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import numpy as np
from dezero import Variable


def rosenbrock(x0, x1):
    y = 100 * (x1 - x0 ** 2) ** 2 + (1 - x0) ** 2
    return y


x0 = Variable(np.array(0.0))
x1 = Variable(np.array(2.0))
lr = 0.001
iters = 10000

for i in range(iters):
    print(x0, x1)

    y = rosenbrock(x0, x1)

    x0.cleargrad()
    x1.cleargrad()
    y.backward()

    x0.data -= lr * x0.grad
    x1.data -= lr * x1.grad
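Plain gradient descent approaches the minimum at (1, 1) only slowly on this function; Example no. 1 runs 30000 iterations before np.allclose against (1.0, 1.0) holds.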
Example no. 14
# x and y below are assumed to be the NumPy arrays generated as in Example no. 2
x, y = Variable(x), Variable(y)

W = Variable(np.zeros((1, 1)))
b = Variable(np.zeros(1))


def predict(x):
    y = F.matmul(x, W) + b
    return y


def mean_squared_error(x0, x1):
    diff = x0 - x1
    return F.sum(diff**2) / len(diff)


lr = 0.1
iters = 100

for i in range(iters):
    y_pred = predict(x)
    loss = mean_squared_error(y, y_pred)

    W.cleargrad()
    b.cleargrad()
    loss.backward()

    W.data -= lr * W.grad.data
    b.data -= lr * b.grad.data
    print(W, b, loss)
Example no. 15
# W1, b1, W2, b2 and the data x, y are assumed to be defined as in Example no. 6
def predict(x):
    y = F.linear(x, W1, b1)
    y = F.sigmoid(y)
    y = F.linear(y, W2, b2)
    return y


lr = 0.2
iters = 10000

for i in range(iters):
    y_pred = predict(x)
    loss = F.mean_squared_error(y, y_pred)

    W1.cleargrad()
    b1.cleargrad()
    W2.cleargrad()
    b2.cleargrad()
    loss.backward()

    W1.data -= lr * W1.grad.data
    b1.data -= lr * b1.grad.data
    W2.data -= lr * W2.grad.data
    b2.data -= lr * b2.grad.data
    if i % 1000 == 0:
        print(loss)

# Plot
plt.scatter(x, y, s=10)
plt.xlabel('x')
Example no. 16
if '__file__' in globals():
    import os
    import sys
    sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import numpy as np
from dezero.utils import plot_dot_graph
from dezero import Variable
import dezero.functions as F

x = Variable(np.array(1.0))
y = F.tanh(x)
x.name = 'x'
y.name = 'y'
y.backward(create_graph=True)

iters = 3

for i in range(iters):
    gx = x.grad  # take a (shallow-copy) reference to x.grad
    x.cleargrad()  # break the link between x and gx
    gx.backward(create_graph=True)

gx = x.grad
gx.name = 'gx' + str(iters + 1)
plot_dot_graph(gx, verbose=False, to_file='tanh.png')
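Because each backward pass with create_graph=True records the graph of the next derivative, the saved tanh.png visualizes the computation of the fourth derivative gx4, and the graph grows rapidly with every extra iteration.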
Example no. 17
# w1, b1, w2, b2 and the data x, y are assumed to be initialized as in Example no. 6
def predict(x):
    y = F.matmul(x, w1) + b1
    y = F.sigmoid(y)
    y = F.matmul(y, w2) + b2
    return y


lr = 0.2
iters = 10000

for i in range(iters):
    y_pred = predict(x)
    loss = F.mean_squared_error(y, y_pred)

    w1.cleargrad()
    b1.cleargrad()
    w2.cleargrad()
    b2.cleargrad()
    loss.backward()

    w1.data -= lr * w1.grad.data
    b1.data -= lr * b1.grad.data
    w2.data -= lr * w2.grad.data
    b2.data -= lr * b2.grad.data
    print(loss)

# Plot the graph
plt.scatter(x.data, y.data, s=10)
plt.xlabel('x')
plt.ylabel('y')