def test_higher_derivative_sin():
    x = Variable(np.array(1.0))
    y = sin(x)
    y.backward(create_graph=True)

    # Differentiate three more times, walking the cycle cos(x) -> -sin(x) -> -cos(x) -> sin(x)
    for i in range(3):
        gx = x.grad
        x.clear_grad()
        gx.backward(create_graph=True)
        LOGGER.debug('{} {}'.format(i, x.grad))

    assert x.grad.data == pytest.approx(0.8414709)
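For reference, each pass through the loop moves one step along the derivative cycle of sin: cos(x), -sin(x), -cos(x), sin(x). A minimal NumPy check of the same values at x = 1, independent of DeZero:

import numpy as np

x = 1.0
# Successive derivatives of sin evaluated at x = 1
print(np.cos(x))   #  0.5403...  (1st derivative, computed before the loop)
print(-np.sin(x))  # -0.8414...  (2nd derivative, i = 0)
print(-np.cos(x))  # -0.5403...  (3rd derivative, i = 1)
print(np.sin(x))   #  0.8414...  (4th derivative, i = 2, matching the assertion)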
Example #2
def test_gradient_step14():
    x = Variable(np.array(3.0))
    y = x + x
    y.backward()

    assert y.data == 6
    assert x.grad.data == 2
    x.clear_grad()  # reset the gradient from the first computation before reusing x

    y = (x + x) + x
    y.backward()

    assert x.grad.data == 3
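The clear_grad() call between the two computations matters because gradients accumulate on a Variable across backward() calls; without it the second result would be added on top of the first. A short sketch of that behaviour, assuming the same accumulation semantics the test's reset implies:

import numpy as np
from dezero import Variable

x = Variable(np.array(3.0))
(x + x).backward()
print(x.grad)        # gradient is 2

# No clear_grad() here, so the next backward() adds to the stored gradient.
((x + x) + x).backward()
print(x.grad)        # 2 + 3 = 5, not 3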
Example #3
def test_newtons_method():
    f = lambda t: t**4 - 2 * t**2

    x = Variable(np.array(2.0))
    iters = 10

    for i in range(iters):
        LOGGER.debug('{} {}'.format(i, x))

        y = f(x)
        x.clear_grad()
        y.backward(create_graph=True)

        gx = x.grad
        x.clear_grad()
        gx.backward()
        gx2 = x.grad

        # Newton's method update: x <- x - f'(x) / f''(x)
        x.data -= gx.data / gx2.data

    assert x.data == 1
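The loop runs Newton's method with both derivatives obtained from backward(). The same iteration can be reproduced with the closed-form derivatives f'(t) = 4*t**3 - 4*t and f''(t) = 12*t**2 - 4 as a plain-Python sanity check:

# Newton's method on f(t) = t**4 - 2*t**2 with hand-derived derivatives.
def f1(t):
    return 4 * t**3 - 4 * t    # f'(t)

def f2(t):
    return 12 * t**2 - 4       # f''(t)

x = 2.0
for i in range(10):
    x -= f1(x) / f2(x)
print(x)  # converges to 1.0, the same fixed point the test asserts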
Example #4
import numpy as np
from dezero import Variable
import dezero.functions as F

# x, y: training data as (N, 1) NumPy arrays (their generation is not shown).
x, y = Variable(x), Variable(y)

W = Variable(np.zeros((1, 1)))
b = Variable(np.zeros(1))


def predict(x):
    y = F.matmul(x, W) + b
    return y


def mean_squared_error(x0, x1):
    diff = x0 - x1
    return F.sum(diff**2) / len(diff)


lr = 0.1
iters = 10000

for i in range(iters):
    y_pred = predict(x)
    loss = mean_squared_error(y, y_pred)

    W.clear_grad()
    b.clear_grad()
    loss.backward()

    W.data -= lr * W.grad.data
    b.data -= lr * b.grad.data
    if i % 1000 == 0:
        print(W, b, loss)
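Because the model is plain linear regression, the W and b found by this loop can be sanity-checked against the closed-form least-squares solution. A minimal sketch, assuming x and y here are the raw (N, 1) NumPy arrays from before they were wrapped in Variable:

import numpy as np

X = np.hstack([x, np.ones_like(x)])            # design matrix with a bias column
coef, *_ = np.linalg.lstsq(X, y, rcond=None)   # least-squares solution, shape (2, 1)
w_opt, b_opt = coef.ravel()
print(w_opt, b_opt)                            # should roughly match W.data and b.data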
Example #5
import numpy as np
import matplotlib.pyplot as plt
from dezero import Variable
import dezero.functions as F

x = Variable(np.linspace(-7, 7, 200))
y = F.sin(x)
y.backward(create_graph=True)

logs = [y.data]

for i in range(3):
    logs.append(x.grad.data)
    gx = x.grad
    x.clear_grad()
    gx.backward(create_graph=True)

# draw graph
labels = ["y=sin(x)", "y'", "y''", "y'''"]
for i, v in enumerate(logs):
    plt.plot(x.data, v, label=labels[i])
plt.legend(loc='lower right')
plt.show()
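As a visual check, the analytic derivatives cos(x), -sin(x) and -cos(x) can be drawn on the same axes; they should lie exactly on top of the curves produced by the repeated backward() calls. A minimal sketch:

import numpy as np
import matplotlib.pyplot as plt

xs = np.linspace(-7, 7, 200)
for curve in (np.cos(xs), -np.sin(xs), -np.cos(xs)):
    plt.plot(xs, curve, linestyle='--', color='gray')
plt.show()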
Example #6
import numpy as np
from dezero import Variable


def rosenbrock(x0, x1):
    y = 100 * (x1 - x0**2)**2 + (x0 - 1)**2
    return y


x0 = Variable(np.array(0.0))
x1 = Variable(np.array(2.0))

lr = 0.001
iters = 1000

for i in range(iters):
    print(x0, x1)

    y = rosenbrock(x0, x1)

    x0.clear_grad()
    x1.clear_grad()
    y.backward()

    x0.data -= lr * x0.grad.data
    x1.data -= lr * x1.grad.data
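The gradients that backward() computes for the Rosenbrock function have a simple closed form: dy/dx0 = -400*x0*(x1 - x0**2) + 2*(x0 - 1) and dy/dx1 = 200*(x1 - x0**2). A small check at the starting point (0.0, 2.0):

def rosenbrock_grad(x0, x1):
    # Hand-derived partial derivatives of 100*(x1 - x0**2)**2 + (x0 - 1)**2
    gx0 = -400 * x0 * (x1 - x0**2) + 2 * (x0 - 1)
    gx1 = 200 * (x1 - x0**2)
    return gx0, gx1

print(rosenbrock_grad(0.0, 2.0))  # (-2.0, 400.0): the gradients on the first iteration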
Example #7
import matplotlib.pyplot as plt
import dezero.functions as F

# x, y: training-data Variables; W1, b1, W2, b2: parameter Variables for a
# two-layer network (their creation is not shown in this snippet).


def predict(x):
    y = F.linear(x, W1, b1)
    y = F.sigmoid(y)
    y = F.linear(y, W2, b2)
    return y


lr = 0.2
iters = 10000

for i in range(iters):
    y_pred = predict(x)
    loss = F.mean_squared_error(y, y_pred)

    W1.clear_grad()
    b1.clear_grad()
    W2.clear_grad()
    b2.clear_grad()
    loss.backward()

    W1.data -= lr * W1.grad.data
    b1.data -= lr * b1.grad.data
    W2.data -= lr * W2.grad.data
    b2.data -= lr * b2.grad.data
    if i % 1000 == 0:
        print(loss)

# Plot
plt.scatter(x.data, y.data, s=10)
plt.xlabel('x')