import argparse

import numpy as np

from DeZero import Variable
import DeZero.functions as F


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--lr", type=float, default=0.1)
    parser.add_argument("--iters", type=int, default=100)
    args = parser.parse_args()

    # toy dataset
    np.random.seed(0)
    x = np.random.rand(100, 1)
    y = 5 + 2 * x + np.random.rand(100, 1)
    x, y = Variable(x), Variable(y)

    W = Variable(np.zeros((1, 1)))
    b = Variable(np.zeros(1))

    def predict(x):
        y = F.matmul(x, W) + b
        return y

    def mean_squared_error(x0, x1):
        diff = x0 - x1
        return F.sum(diff**2) / len(diff)

    for i in range(args.iters):
        y_pred = predict(x)
        loss = mean_squared_error(y, y_pred)

        W.cleargrad()
        b.cleargrad()
        loss.backward()

        W.data -= args.lr * W.grad.data
        b.data -= args.lr * b.grad.data
        print(W, b, loss)
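As a sanity check on the gradient-descent fit above, the same toy dataset can be solved in closed form with NumPy alone (no DeZero involved); since the noise term np.random.rand(100, 1) has mean 0.5, the learned parameters should approach W ≈ 2 and b ≈ 5.5. A minimal sketch:

import numpy as np

np.random.seed(0)
x = np.random.rand(100, 1)
y = 5 + 2 * x + np.random.rand(100, 1)

# least-squares solution of y ≈ [1, x] @ [b, W]
A = np.hstack([np.ones_like(x), x])
coef, *_ = np.linalg.lstsq(A, y, rcond=None)
print(coef.ravel())  # roughly [5.5, 2.0]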
    def test_matmul(self):
        x = Variable(np.random.randn(2, 3))
        W = Variable(np.random.randn(3, 4))
        y = matmul(x, W)
        y.backward()
        self.assertEqual(x.grad.shape, (2, 3))
        self.assertEqual(W.grad.shape, (3, 4))
    def test_repetitive_backward(self):
        x = Variable(np.array(3.0))
        y = add(x, x)
        y.backward()
        self.assertEqual(x.grad.data, 2.0)
        x.cleargrad()
        y = add(add(x, x), x)
        y.backward()
        self.assertEqual(x.grad.data, 3.0)
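The intermediate cleargrad() call matters because DeZero accumulates gradients across backward() passes; without it, the second assertion would see 2.0 + 3.0 = 5.0 instead of 3.0.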
    def test_second_order_differentiation(self):
        def f(x):
            y = x**4 - 2 * x**2
            return y

        x = Variable(np.array(2.0))
        y = f(x)
        y.backward(create_graph=True)
        self.assertEqual(x.grad.data, 24.0)

        gx = x.grad
        # reset so the new gradient is not accumulated onto the old one
        x.cleargrad()
        gx.backward()
        self.assertEqual(x.grad.data, 44.0)
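For reference: f'(x) = 4x**3 - 4x, so f'(2) = 24; differentiating the retained graph of gx once more gives f''(x) = 12x**2 - 4, so f''(2) = 44.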
    def test_complicated_backward(self):
        x = Variable(np.array(2.0))
        a = square(x)
        y = add(square(a), square(a))
        y.backward()
        self.assertEqual(y.data, 32.0)
        self.assertEqual(x.grad.data, 64.0)
Example #6
def accuracy(y, t):
    y, t = as_variable(y), as_variable(t)

    pred = y.data.argmax(axis=1).reshape(t.shape)
    result = (pred == t.data)
    acc = result.mean()
    return Variable(as_array(acc))
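A minimal usage sketch for the accuracy function above, assuming as_variable and as_array are DeZero's usual wrappers around raw arrays:

import numpy as np

y = np.array([[0.2, 0.8, 0.0],   # predicted class 1
              [0.9, 0.1, 0.0],   # predicted class 0
              [0.1, 0.1, 0.8]])  # predicted class 2
t = np.array([1, 0, 0])          # last prediction is wrong
print(accuracy(y, t))            # mean of [True, True, False] -> about 0.667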
    def test_gradient_check(self):
        x = Variable(np.random.rand(1))
        y = square(x)
        y.backward()
        num_grad = numerical_diff(x, square)
        flg = np.allclose(x.grad.data, num_grad)
        self.assertTrue(flg)
    def test_optim_rosenbrock(self):
        x0 = Variable(np.array(0.0))
        x1 = Variable(np.array(2.0))
        lr = 0.001
        iters = 50000

        for i in range(iters):
            y = rosenbrock(x0, x1)

            x0.cleargrad()
            x1.cleargrad()
            y.backward()

            x0.data = x0.data - lr * x0.grad.data
            x1.data = x1.data - lr * x1.grad.data

        # after 50,000 iterations the error falls below 1e-8
        self.assertEqual((round(float(x0.data), 8), round(float(x1.data), 8)), (1.0, 1.0))
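The test assumes the standard two-variable Rosenbrock function, whose global minimum lies at (1, 1); a minimal definition in DeZero terms would be:

def rosenbrock(x0, x1):
    # banana-shaped valley; value 0 at the minimum (x0, x1) = (1, 1)
    return 100 * (x1 - x0 ** 2) ** 2 + (x0 - 1) ** 2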
Example #9
import numpy as np
import matplotlib.pyplot as plt

from DeZero import Variable
from DeZero.functions import sin


def main():
    x = Variable(np.linspace(-7, 7, 100))
    y = sin(x)
    y.backward(create_graph=True)

    logs = [y.data.flatten()]

    # differentiate up to the third derivative
    for i in range(3):
        logs.append(x.grad.data.flatten())
        gx = x.grad
        x.cleargrad()
        gx.backward(create_graph=True)

    labels = ["y=sin(x)", "y'", "y''", "y'''"]
    for i, log in enumerate(logs):
        plt.plot(x.data, log, label=labels[i])
    plt.legend()
    plt.show()
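The four plotted curves follow the usual derivative cycle of sine: sin x, cos x, -sin x, -cos x, so each curve is the previous one shifted left by pi/2.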
Example #10
import argparse

import numpy as np

from DeZero import Variable
import DeZero.functions as F
from DeZero.utils import plot_dot_graph


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--iters", type=int, default=0)
    parser.add_argument("--verbose", type=bool, default=False)
    parser.add_argument("--out", type=str, default="tanh.png")
    args = parser.parse_args()

    x = Variable(np.array(1.0))
    y = F.tanh(x)
    x.name = 'x'
    y.name = 'y'
    y.backward(create_graph=True)

    iters = args.iters

    for i in range(iters):
        gx = x.grad
        x.cleargrad()
        gx.backward(create_graph=True)

    gx = x.grad
    gx.name = 'gx' + str(iters + 1)
    plot_dot_graph(gx, verbose=args.verbose, to_file=args.out)
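Note that the computational graph of gx grows rapidly with each extra backward pass over tanh, so large --iters values can make the rendered image very large.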
Example #11
    def test_newton_method(self):
        def f(x):
            y = x**4 - 2 * x**2
            return y

        x = Variable(np.array(2.0))
        iters = 10

        for i in range(iters):
            y = f(x)
            x.cleargrad()
            y.backward(create_graph=True)

            gx = x.grad
            x.cleargrad()
            gx.backward()
            gx2 = x.grad

            x.data = x.data - gx.data / gx2.data

        self.assertEqual(x.data, 1.0)
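Each step applies Newton's update x ← x - f'(x)/f''(x); from the start value 2.0 the first step gives 2 - 24/44 ≈ 1.4545, and quadratic convergence reaches 1.0 to double precision well within the 10 iterations.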
Example #12
import numpy as np
from DeZero import Variable
from DeZero.utils import plot_dot_graph


def goldstein(x, y):
    z = (1 + (x + y + 1)**2 * (19 - 14*x + 3*x**2 - 14*y + 6*x*y + 3*y**2)) * \
        (30 + (2*x - 3*y)**2 * (18 - 32*x + 12*x**2 + 48*y - 36*x*y + 27*y**2))
    return z


x = Variable(np.array(1.0))
y = Variable(np.array(1.0))
z = goldstein(x, y)
z.backward()

x.name = 'x'
y.name = 'y'
z.name = 'z'
plot_dot_graph(z, verbose=False, to_file='goldstein.png')
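This script only renders the computational graph of the Goldstein-Price function; no optimization is performed (for reference, its global minimum is 3 at (0, -1)).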

Example #13
    def test_sum(self):
        x = Variable(np.array([1, 2, 3, 4, 5, 6]))
        y = sum(x)
        y.backward()
        expected = np.array([1, 1, 1, 1, 1, 1])
        # compare element-wise; all(a) == all(b) would only compare truthiness
        self.assertTrue(np.array_equal(x.grad.data, expected))
Example #14
    def test_backward(self):
        x = Variable(np.array(3.0))
        y = square(x)
        y.backward()
        expected = np.array(6.0)
        self.assertEqual(x.grad.data, expected)
Example #15
    def test_forward(self):
        x = Variable(np.array(2.0))
        y = square(x)
        expected = np.array(4.0)
        self.assertEqual(y.data, expected)
Example #16
    def test_rosenbrock(self):
        x0 = Variable(np.array(0.0))
        x1 = Variable(np.array(2.0))
        y = rosenbrock(x0, x1)
        y.backward()
        self.assertEqual(y.data, 401.0)
Example #17
    def test_sphere(self):
        x = Variable(np.array(1.0))
        y = Variable(np.array(1.0))
        z = sphere(x, y)
        z.backward()
        self.assertEqual((x.grad.data, y.grad.data), (2.0, 2.0))
Example #18
def numerical_diff(x, f, eps=1e-4):
    # central difference approximation: (f(x + eps) - f(x - eps)) / (2 * eps)
    x0 = Variable(x.data - eps)
    x1 = Variable(x.data + eps)
    y0 = f(x0)
    y1 = f(x1)
    return (y1.data - y0.data) / (2 * eps)
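A quick consistency check of numerical_diff against an analytic derivative, reusing the square function from the surrounding tests (a sketch; square(x) is assumed to return a Variable holding x**2):

x = Variable(np.array(3.0))
print(numerical_diff(x, square))  # approximately 6.0, since (x**2)' = 2x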
Example #19
    def test_forward(self):
        a = Variable(np.array(3.0))
        b = Variable(np.array(2.0))
        y = a * b
        self.assertEqual(y.data, 6.0)
Example #20
import numpy as np
import matplotlib.pyplot as plt

from DeZero import Variable
import DeZero.functions as F


def main():
    # dataset
    np.random.seed(0)
    x = np.random.rand(100, 1)
    y = np.sin(2 * np.pi * x) + np.random.rand(100, 1)

    # initialization of weights
    I, H, O = 1, 10, 1
    W1 = Variable(0.01 * np.random.randn(I, H))
    b1 = Variable(np.zeros(H))
    W2 = Variable(0.01 * np.random.randn(H, O))
    b2 = Variable(np.zeros(O))

    # prediction of neural net
    def predict(x):
        y = F.linear(x, W1, b1)
        y = F.sigmoid_simple(y)
        # y = F.sigmoid(y)
        y = F.linear(y, W2, b2)
        return y

    lr = 0.2
    iters = 10000

    # learning
    for i in range(iters):
        y_pred = predict(x)
        loss = F.mean_squared_error(y, y_pred)

        W1.cleargrad()
        b1.cleargrad()
        W2.cleargrad()
        b2.cleargrad()
        loss.backward()

        W1.data -= lr * W1.grad.data
        b1.data -= lr * b1.grad.data
        W2.data -= lr * W2.grad.data
        b2.data -= lr * b2.grad.data
        if i % 1000 == 0:
            print(loss)

    t = np.linspace(0.0, 1.0, 100)
    plt.plot(x.T[0], y.T[0], 'bo', label="Target dots")
    plt.plot(t,
             predict(t.reshape(100, 1)).T.data[0],
             'r',
             label="Predicted curve")
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend()
    plt.show()
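The sigmoid_simple used in predict is, in DeZero's convention, composed from existing functions rather than implemented as a dedicated Function class; a sketch of that composition, assuming F.exp and as_variable as in DeZero:

def sigmoid_simple(x):
    x = as_variable(x)
    y = 1 / (1 + F.exp(-x))  # relies on Variable's overloaded operators
    return y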
Example #21
    def test_backward(self):
        x = Variable(np.array(3.0))
        y = add(x, x)
        y.backward()
        self.assertEqual(y.data, 6.0)
        self.assertEqual(x.grad.data, 2.0)