def main():
    """Fit y ≈ 5 + 2x with plain gradient descent on a 1-D linear model."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--lr", type=float, default=0.1)
    parser.add_argument("--iters", type=int, default=100)
    args = parser.parse_args()

    # Toy dataset: y = 5 + 2x plus uniform noise.
    np.random.seed(0)
    x_data = np.random.rand(100, 1)
    y_data = 5 + 2 * x_data + np.random.rand(100, 1)
    x, y = Variable(x_data), Variable(y_data)

    W = Variable(np.zeros((1, 1)))
    b = Variable(np.zeros(1))

    def predict(inputs):
        # Affine map: inputs @ W + b.
        return F.matmul(inputs, W) + b

    def mean_squared_error(x0, x1):
        # MSE = sum((x0 - x1)^2) / N
        diff = x0 - x1
        return F.sum(diff**2) / len(diff)

    for _ in range(args.iters):
        y_pred = predict(x)
        loss = mean_squared_error(y, y_pred)

        # Reset gradients so they do not accumulate across iterations.
        W.cleargrad()
        b.cleargrad()
        loss.backward()

        # Vanilla SGD step.
        W.data -= args.lr * W.grad.data
        b.data -= args.lr * b.grad.data
        print(W, b, loss)
 def test_repetitive_backward(self):
     # First graph: y = x + x, so dy/dx = 2.
     x = Variable(np.array(3.0))
     out = add(x, x)
     out.backward()
     self.assertEqual(x.grad.data, 2.0)
     # Reset the gradient before reusing x in a second graph;
     # otherwise the new gradient would be added to the old one.
     x.cleargrad()
     out = add(add(x, x), x)
     out.backward()
     self.assertEqual(x.grad.data, 3.0)
    def test_second_order_differentiation(self):
        # f(x) = x^4 - 2x^2 ; f'(x) = 4x^3 - 4x ; f''(x) = 12x^2 - 4.
        def f(v):
            return v**4 - 2 * v**2

        x = Variable(np.array(2.0))
        y = f(x)
        # Keep the backward graph so the gradient itself is differentiable.
        y.backward(create_graph=True)
        self.assertEqual(x.grad.data, 24.0)  # f'(2) = 24

        gx = x.grad
        # Reset so the second-order gradient is not added onto the first.
        x.cleargrad()
        gx.backward()
        self.assertEqual(x.grad.data, 44.0)  # f''(2) = 44
    def test_optim_rosenbrock(self):
        # Minimize the Rosenbrock function by gradient descent;
        # its global minimum is at (x0, x1) = (1, 1).
        x0 = Variable(np.array(0.0))
        x1 = Variable(np.array(2.0))
        lr = 0.001
        iters = 50000

        for _ in range(iters):
            loss = rosenbrock(x0, x1)

            # Clear stale gradients before each backward pass.
            x0.cleargrad()
            x1.cleargrad()
            loss.backward()

            x0.data = x0.data - lr * x0.grad.data
            x1.data = x1.data - lr * x1.grad.data

        # After 50000 iterations the error drops below 1e-8.
        self.assertEqual((round(x0.data, 8), round(x1.data, 8)), (1.0, 1.0))
# Example #5 (votes: 0)
def main():
    """Plot sin(x) together with its first three derivatives."""
    x = Variable(np.linspace(-7, 7, 100))
    y = sin(x)
    y.backward(create_graph=True)

    logs = [y.data.flatten()]

    # Differentiate three times, recording each gradient before
    # clearing it and backpropagating through it again.
    for _ in range(3):
        logs.append(x.grad.data.flatten())
        gx = x.grad
        x.cleargrad()
        gx.backward(create_graph=True)

    labels = ["y=sin(x)", "y'", "y''", "y'''"]
    for label, log in zip(labels, logs):
        plt.plot(x.data, log, label=label)
    plt.legend()
    plt.show()
# Example #6 (votes: 0)
def main():
    """Train a tiny two-layer MLP to fit noisy sin(2*pi*x) data, then plot."""
    # Dataset: 100 points of sin(2*pi*x) with uniform noise.
    np.random.seed(0)
    x = np.random.rand(100, 1)
    y = np.sin(2 * np.pi * x) + np.random.rand(100, 1)

    # Weight initialization for a 1 -> 10 -> 1 network.
    I, H, O = 1, 10, 1
    W1 = Variable(0.01 * np.random.randn(I, H))
    b1 = Variable(np.zeros(H))
    W2 = Variable(0.01 * np.random.randn(H, O))
    b2 = Variable(np.zeros(O))
    params = (W1, b1, W2, b2)

    def predict(x):
        # linear -> sigmoid -> linear
        h = F.linear(x, W1, b1)
        h = F.sigmoid_simple(h)
        # h = F.sigmoid(h)
        return F.linear(h, W2, b2)

    lr = 0.2
    iters = 10000

    # Full-batch gradient descent on the mean squared error.
    for i in range(iters):
        y_pred = predict(x)
        loss = F.mean_squared_error(y, y_pred)

        for p in params:
            p.cleargrad()
        loss.backward()

        for p in params:
            p.data -= lr * p.grad.data
        if i % 1000 == 0:
            print(loss)

    # Plot the noisy targets and the learned curve on [0, 1].
    t = np.linspace(0.0, 1.0, 100)
    plt.plot(x.T[0], y.T[0], 'bo', label="Target dots", linewidth=None)
    plt.plot(t,
             predict(t.reshape(100, 1)).T.data[0],
             'r',
             label="Predicted curve")
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend()
    plt.show()
    def test_newton_method(self):
        # Newton's method on f(x) = x^4 - 2x^2 starting from x = 2;
        # it converges to the stationary point x = 1.
        def f(v):
            return v**4 - 2 * v**2

        x = Variable(np.array(2.0))
        iters = 10

        for _ in range(iters):
            y = f(x)
            x.cleargrad()
            y.backward(create_graph=True)

            gx = x.grad  # first derivative
            x.cleargrad()
            gx.backward()
            gx2 = x.grad  # second derivative

            # Newton update: x <- x - f'(x) / f''(x)
            x.data = x.data - gx.data / gx2.data

        self.assertEqual(x.data, 1.0)
# Example #8 (votes: 0)
def main():
    """Render the computation graph of a higher-order derivative of tanh(x).

    Builds y = tanh(x), differentiates it (--iters + 1) times while keeping
    the backward graph, and writes a Graphviz plot of the final gradient
    node to --out.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--iters", type=int, default=0)
    # BUG FIX: `type=bool` parsed ANY non-empty string (even "False") as
    # True, because bool("False") is True. A store_true flag gives the
    # intended on/off semantics (absent -> False, `--verbose` -> True).
    parser.add_argument("--verbose", action="store_true", default=False)
    parser.add_argument("--out", type=str, default="tanh.png")
    args = parser.parse_args()

    x = Variable(np.array(1.0))
    y = F.tanh(x)
    x.name = 'x'
    y.name = 'y'
    # Keep the graph so the gradient can be differentiated again.
    y.backward(create_graph=True)

    iters = args.iters

    # Each pass differentiates the current gradient one more time.
    for i in range(iters):
        gx = x.grad
        x.cleargrad()
        gx.backward(create_graph=True)

    # After the loop, gx holds the (iters + 1)-th derivative of tanh at x.
    gx = x.grad
    gx.name = 'gx' + str(iters + 1)
    plot_dot_graph(gx, verbose=args.verbose, to_file=args.out)