Example #1
def test_fc2():
    X = np.random.random((4, 2, 1, 1))
    Y1 = np.random.random((4, 10))
    Y2 = np.random.random((4, 10))

    [x, y1, y2] = L.Data([X, Y1, Y2])
    fc1 = L.FC(x, dim_out=10)
    fc2 = L.FC(x, dim_out=10)
    loss1 = L.MSE(fc1, label=Y1)
    loss2 = L.MSE(fc2, label=Y2)

    net = mobula.Net()
    loss = L.L1Loss(loss1 + loss2)
    net.set_loss(loss)
    L1Loss = mobula.get_layer("L1Loss")
    Add = mobula.get_layer(L1Loss.model.name)

    net.lr = 0.5
    for i in range(30):
        net.forward()
        net.backward()
        print("Iter: %d, Cost: %f" % (i, loss.Y))
    # check forward
    t1 = np.dot(X.reshape((4, 2)), fc1.W.T) + fc1.b.T
    t2 = np.dot(X.reshape((4, 2)), fc2.W.T) + fc2.b.T
    # run forward once more, since the last backward pass updated the weights
    net.forward()
    assert np.allclose(fc1.Y, t1)
    assert np.allclose(fc2.Y, t2)
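
Note: the manual check above recomputes the fully-connected forward by hand: each (2, 1, 1) input is flattened to a length-2 row and Y = X·W.T + b.T is evaluated with plain NumPy. A standalone sketch of the same computation, assuming W has shape (dim_out, dim_in) and b has shape (dim_out, 1), which is what the transposes in the check imply:

import numpy as np

X = np.random.random((4, 2, 1, 1))
W = np.random.random((10, 2))   # (dim_out, dim_in), as fc1.W.T suggests
b = np.random.random((10, 1))   # one bias per output unit, as fc1.b.T suggests
Y = np.dot(X.reshape((4, 2)), W.T) + b.T
assert Y.shape == (4, 10)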
Example #2
def test_fc():
    X = np.zeros((4, 2, 1, 1))
    X[0, :, 0, 0] = [0., 0.]
    X[1, :, 0, 0] = [0., 1.]
    X[2, :, 0, 0] = [1., 0.]
    X[3, :, 0, 0] = [1., 1.]

    Y = np.array([8., 10., 12., 14.]).reshape((-1, 1))

    data, label = L.Data([X, Y], "Data")()
    fc1 = L.FC(data, "fc1", dim_out=1)
    loss = L.MSE(fc1, "MSE", label=label)

    fc1.reshape()

    fc1.W = np.array([1.0, 3.0]).reshape(fc1.W.shape)
    fc1.b = np.array([0.0]).reshape(fc1.b.shape)

    net = mobula.Net()
    net.set_loss(loss)

    net.lr = 0.5
    for i in range(30):
        net.forward()
        net.backward()
        print("Iter: %d, Cost: %f" % (i, loss.Y))

    # run forward once more, since the last backward pass updated the weights
    net.forward()
    target = np.dot(X.reshape((4, 2)), fc1.W.T) + fc1.b
    print(target, fc1.Y)
    assert np.allclose(fc1.Y, target)
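
Note: the targets above are an exactly linear function of the inputs, so the weights the FC layer is being trained toward can be computed in closed form. A quick NumPy-only check of that solution (independent of mobula):

import numpy as np

X = np.array([[0., 0.], [0., 1.], [1., 0.], [1., 1.]])
Y = np.array([8., 10., 12., 14.]).reshape((-1, 1))

# Append a bias column and solve the least-squares problem [X, 1] @ [W; b] = Y.
X_aug = np.hstack([X, np.ones((4, 1))])
sol, *_ = np.linalg.lstsq(X_aug, Y, rcond=None)
print(sol.ravel())   # -> [4. 2. 8.], i.e. W = [4, 2], b = 8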
Example #3
def test_mse():
    N, C, H, W = 2, 3, 4, 5
    a = np.random.random((N, C, H, W)) - 0.5
    b = np.random.random((N, C, H, W)) - 0.5

    l = L.MSE(a, label=b)
    y = l.eval()
    d = a - b
    assert np.allclose(np.mean(np.square(d)), l.Y)
    l.dY = np.random.random(l.Y.shape)
    l.backward()
    assert np.allclose(l.dX, 2 * d * l.dY)
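
Note: the two assertions spell out the formulas the MSE layer is expected to implement: forward Y = mean((X - label)^2) and, as asserted here, backward dX = 2 * (X - label) * dY with no 1/N factor. A minimal NumPy reference for exactly those two quantities:

import numpy as np

def mse_reference(x, label, dy):
    d = x - label
    y = np.mean(np.square(d))   # forward: mean of squared differences
    dx = 2.0 * d * dy           # backward as asserted above (no 1/N factor)
    return y, dx

x = np.random.random((2, 3, 4, 5)) - 0.5
label = np.random.random((2, 3, 4, 5)) - 0.5
y, dx = mse_reference(x, label, dy=1.0)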
Example #4
def test_net():
    X = np.random.random((4, 2, 1, 1))
    Y1 = np.random.random((4, 5))
    Y2 = np.random.random((4, 5))

    [x, y1, y2] = L.Data([X, Y1, Y2])
    fc0 = L.FC(x, dim_out=10)
    fc1 = L.FC(fc0, dim_out=5)
    fc2 = L.FC(fc0, dim_out=5)

    loss1 = L.MSE(fc1, label=y1)
    loss2 = L.MSE(fc2, label=y2)

    net = mobula.Net()
    net.set_loss(loss1 + loss2)

    net.lr = 0.01
    for i in range(10):
        net.forward()
        net.backward()
        net.time()
        print("Iter: %d, Cost: %f" % (i, loss1.Y + loss2.Y))

    assert np.allclose(fc0.dY, fc1.dX + fc2.dX)
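
Note: the final assertion checks gradient accumulation at a shared output: fc0 feeds both fc1 and fc2, so the gradient arriving at fc0's output is the sum of the gradients coming back from the two branches. A minimal NumPy sketch of the same rule for a loss L = L1(h) + L2(h):

import numpy as np

h = np.random.random((4, 10))
W1 = np.random.random((5, 10))
W2 = np.random.random((5, 10))

def total_loss(h):
    # two branches read the same activation h; their losses are summed
    return (h @ W1.T).sum() + (h @ W2.T).sum()

# analytic gradient of each branch w.r.t. h, then their sum
dL1_dh = np.ones((4, 5)) @ W1
dL2_dh = np.ones((4, 5)) @ W2
dL_dh = dL1_dh + dL2_dh

# numerical check of one entry
eps = 1e-6
h_pert = h.copy()
h_pert[0, 0] += eps
assert np.allclose((total_loss(h_pert) - total_loss(h)) / eps, dL_dh[0, 0], atol=1e-4)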
Example #5
def test_saver():
    filename = "tmp.net"

    X = np.random.random((4, 2, 1, 1))
    Y = np.random.random((4, 10))
    x, y = L.Data([X, Y])
    fc = L.FC(x, dim_out=10)
    with M.name_scope("mobula"):
        prelu = L.PReLU(fc)
    loss = L.MSE(prelu, label=y)

    net = M.Net()
    net.set_loss(loss)

    init_params(fc)
    init_params(prelu)
    # save only the "mobula" name scope
    M.save_scope(filename, "mobula")

    params_f = clear_params(fc)
    params_p = clear_params(prelu)
    for p in fc.params + prelu.params:
        assert np.isnan(p).all()
    M.load_scope(filename)
    for p in fc.params:
        assert np.isnan(p).all()
    for i, p in enumerate(prelu.params):
        assert np.allclose(p, params_p[i])

    init_params(fc)
    init_params(prelu)
    # save all scopes
    M.save_scope(filename)

    params_f = clear_params(fc)
    params_p = clear_params(prelu)

    for p in fc.params + prelu.params:
        assert np.isnan(p).all()
    M.load_scope(filename)
    for i, p in enumerate(fc.params):
        assert np.allclose(p, params_f[i])
    for i, p in enumerate(prelu.params):
        assert np.allclose(p, params_p[i])
    os.remove(filename)
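
Note: init_params and clear_params are helpers from the surrounding test module and are not shown here. Judging only from how the assertions use them, a plausible sketch (an assumption, not mobula's actual helpers) would be:

import numpy as np

def init_params(layer):
    # fill every parameter blob of the layer with random values
    for p in layer.params:
        p[...] = np.random.random(p.shape)

def clear_params(layer):
    # keep a copy of the current values, then wipe the blobs to NaN so that a
    # later load_scope can be checked against the saved copies
    saved = [p.copy() for p in layer.params]
    for p in layer.params:
        p[...] = np.nan
    return saved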
Example #6
def test_net_saver():
    filename = "tmp.net"

    X = np.random.random((4, 2, 1, 1))
    Y = np.random.random((4, 10))
    x, y = L.Data([X, Y])
    x = L.FC(x, dim_out=10)
    with M.name_scope("mobula"):
        x = L.PReLU(x)
    loss = L.MSE(x, label=y)

    net = M.Net()
    net.set_loss(loss)
    net.lr = 0.01

    for i in range(10):
        net.forward()
        net.backward()

    net.save(filename)
    # collect all layers so their parameters can be wiped and later restored by net.load
    lst = M.get_layers("/")
    assert len(lst) == 4  # Data, FC, PReLU, MSE

    k = 0
    rec = []
    for l in lst:
        for i in range(len(l.params)):
            rec.append(l.params[i])
            l.params[i][...] = None
            k += 1
    assert k == 3  # FC.W, FC.b, PReLU.a

    for l in lst:
        for i in range(len(l.params)):
            assert np.isnan(l.params[i]).all()

    net.load(filename)
    h = 0
    for l in lst:
        for i in range(len(l.params)):
            assert np.allclose(rec[h], l.params[i])
            h += 1
    os.remove(filename)
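
Note: the wiping loop above relies on a NumPy detail: assigning None into a float array through the Ellipsis index fills it with NaN, which is what the np.isnan checks then verify:

import numpy as np

a = np.ones(3)
a[...] = None   # None is cast to NaN in a float array
assert np.isnan(a).all()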
Example #7
im = imresize(im, target_size)

# Convert to grayscale using the standard luma weights
im = im[:, :, 0] * 0.299 + im[:, :, 1] * 0.587 + im[:, :, 2] * 0.114
h, w = im.shape

t = 1
Y = im.reshape((1, h, w, t)).transpose((0, 3, 1, 2))
X = np.random.random((1, t, h, w)) - 0.5

data, label = L.Data([X, Y])
conv = L.Conv(data, dim_out=42, kernel=3, pad=1)
relu = L.ReLU(conv)
convt = L.ConvT(relu, dim_out=t, kernel=3, pad=1)
relu2 = L.ReLU(convt)
loss = L.MSE(relu2, label=label)

# Net Instance
net = mobula.Net()
# Set Loss Layer
net.set_loss(loss)
# Set Solver
net.set_solver(S.Momentum())

# Learning Rate
net.lr = 2e-6

start_iter = 0
max_iter = 10000
plt.ion()
for i in range(start_iter, max_iter + 1):