Example No. 1
def make_test_graph():
    # Constant tensors: a batch of 2 inputs (2x1 each), a 3x2 weight matrix, a 3x1 bias and 2 label vectors
    x_tsr = np.arange(4).reshape([2, 2, 1]).astype(np.float64)
    w_tsr = np.arange(6).reshape([3, 2]).astype(np.float64)
    b_tsr = np.arange(3).reshape([3, 1]).astype(np.float64)
    ylabel_tsr = np.arange(6).reshape([2, 3, 1]).astype(np.float64)

    x = dg.identity(x_tsr, name='x')
    w = dg.identity(w_tsr, name='w')
    b = dg.identity(b_tsr, name='b')
    y = dg.mat_add(dg.mat_mul(w, x, 'mat_mul'), b, name='y')
    y_label = dg.identity(ylabel_tsr, name='ylabel')
    w_l2 = dg.l2(w, name='w_l2')

    loss = dg.mse(y_label, y, name='MSEloss')
    loss_reg = dg.mat_add(w_l2, loss, name='loss_reg')

    print('Constructed the following graph:')

    for n in dg.forward_iter(loss_reg):
        print(n.name)

    return [x, w, b, y_label, loss, loss_reg]
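A minimal plain-NumPy sketch of what this graph presumably evaluates, assuming mat_mul/mat_add broadcast like np.matmul/np.add, mse is the usual mean squared error and l2 is a plain sum of squared weights (the exact scaling inside dg may differ):

import numpy as np

x_tsr = np.arange(4).reshape([2, 2, 1]).astype(np.float64)   # batch of 2 inputs, each (2, 1)
w_tsr = np.arange(6).reshape([3, 2]).astype(np.float64)
b_tsr = np.arange(3).reshape([3, 1]).astype(np.float64)
ylabel_tsr = np.arange(6).reshape([2, 3, 1]).astype(np.float64)

y = np.matmul(w_tsr, x_tsr) + b_tsr        # (2, 3, 1), broadcast over the batch dimension
mse = np.mean((ylabel_tsr - y) ** 2)       # MSE term
w_l2 = np.sum(w_tsr ** 2)                  # L2 term on the weights
print('loss_reg (NumPy sketch):', mse + w_l2)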
Example No. 2
train_iter = iter(trainloader)

# Pull one batch and convert it to the NumPy shapes the dg graph expects
x_data, y_data = next(train_iter)

x_data = x_data.reshape([15, 28, 28, 1]).numpy()
y_data = label2onehot(y_data).reshape([15, 10, 1])

print('type(x_data):', type(x_data))
print('x_data.shape:', x_data.shape)
print('type(y_data):', type(y_data))
print('y_data.shape:', y_data.shape)

print()

img_in = dg.identity(x_data)
y_label = dg.identity(y_data)
conv1 = dg.cnn.conv(img_in, 3, 6)
relu1 = dg.relu(conv1)
pool1 = dg.cnn.max_pool(relu1, 3, stride=1)
print('pool1.shape:', pool1.shape())
fl = dg.reshape(pool1, (15, 3456, 1))
print('fl.shape:', fl.shape())
w = dg.identity(np.random.randn(10, 3456))
print('w shape:', w.shape())
fc = dg.mat_mul(w, fl)
print('fc.shape:', fc.shape())
b = dg.identity(np.random.randn(10, 1))
out = dg.mat_add(fc, b)
print('out.shape:', out.shape())
loss = dg.softmax_cross(out, y_label)
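The flatten size of 3456 used above follows from the usual shape arithmetic, assuming the convolution is 'valid' (no padding, stride 1): 28x28 becomes 26x26 after the 3x3 conv, 24x24 after the 3-wide max pool with stride 1, and the 6 channels give 6 * 24 * 24 = 3456 features per image. A quick check:

# Shape bookkeeping behind the reshape to (15, 3456, 1) above (assumes 'valid' conv, stride 1)
conv_out = 28 - 3 + 1            # 26 after the 3x3 convolution
pool_out = conv_out - 3 + 1      # 24 after max pooling with window 3, stride 1
print(6 * pool_out * pool_out)   # 3456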
Example No. 3
def construct_conv():
    # Batch of 5 images of shape 7x7x3; 3x3 kernel, 6 output channels, asymmetric stride and padding
    img = dg.identity(np.random.randn(5, 7, 7, 3))
    conv = dg.cnn.conv(img, 3, 6, stride=(2, 1), padding=(1, 2))
    print('Info about the conv layer:')
    print(conv.op.to_str())
    return conv
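Under the standard convolution arithmetic out = (in + 2 * pad - k) / stride + 1, and assuming dg.cnn.conv applies stride and padding as (height, width) pairs, the layer above would map the (5, 7, 7, 3) input to a (5, 4, 9, 6) output; a small sketch of that calculation:

# Expected output shape of construct_conv() under the usual conv formula (an assumption about dg.cnn.conv)
def conv_out_size(in_size, kernel, stride, pad):
    return (in_size + 2 * pad - kernel) // stride + 1

out_h = conv_out_size(7, 3, stride=2, pad=1)   # 4
out_w = conv_out_size(7, 3, stride=1, pad=2)   # 9
print('expected conv output shape:', (5, out_h, out_w, 6))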
Example No. 4
                                     download=True,
                                     transform=transform)

# In[7]:

testloader = torch.utils.data.DataLoader(testset,
                                         batch_size=batch_size,
                                         shuffle=False,
                                         num_workers=2)

# In[8]:

w1_stdev = 0.1
w2_stdev = 0.01

x = dg.identity(np.random.randn(batch_size, 784, 1), name='x')
w_1 = dg.identity(np.random.randn(500, 784) * w1_stdev, name='w_1')
b_1 = dg.identity(np.random.randn(500, 1), name='b_1')
w_2 = dg.identity(np.random.randn(10, 500) * w2_stdev, name='w_2')
b_2 = dg.identity(np.random.randn(10, 1), name='b_2')
y_label = dg.identity(np.random.randn(batch_size, 10, 1), name='ylabel')

# Two fully connected layers: y_1 = w_1 @ x + b_1, y_2 = w_2 @ y_1 + b_2
y_1 = dg.mat_add(dg.mat_mul(w_1, x, 'mat_mul_1'), b_1, name='y_1')
y_2 = dg.mat_add(dg.mat_mul(w_2, y_1, 'mat_mul_2'), b_2, name='y_2')

loss = dg.mse(y_label, y_2, name='MSEloss')

# In[26]:

sgd_optim = dg.optim.SGD(loss, [w_1, w_2, b_1, b_2], 0.0001)
train_iter = iter(trainloader)
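A hedged sketch of how a batch from train_iter could be brought into the shapes the graph above expects (flattened (batch_size, 784, 1) images and one-hot (batch_size, 10, 1) labels); how these arrays are then fed into x and y_label, and how sgd_optim is stepped, depends on dg's API and is not shown here:

# Reshape one batch to match the graph inputs (a sketch; assumes an MNIST-like 28x28, 10-class dataset)
x_batch, y_batch = next(train_iter)
x_np = x_batch.reshape([batch_size, 784, 1]).numpy()

y_onehot = np.zeros([batch_size, 10, 1])
y_onehot[np.arange(batch_size), y_batch.numpy(), 0] = 1.0   # one-hot labels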
Example No. 5
import dg
import numpy as np

if __name__ == '__main__':
    y_tsr = np.array([[10, 4], [13, 7]])
    ylabel_tsr = np.array([[1, 0], [0, 1]])
    y = dg.identity(y_tsr)
    ylabel = dg.identity(ylabel_tsr)

    soft = dg.SoftmaxCrossEntropyOp(y, ylabel)

    print('soft.data():')
    print(soft.data())

    print('Calculate step by step')
    print('cross 1:')
    c1 = -np.log(np.exp(10) / (np.exp(10) + np.exp(4)))
    print(c1)
    print('cross 2:')
    c2 = -np.log(np.exp(7) / (np.exp(7) + np.exp(13)))
    print(c2)

    print('avg:')
    print((c1 + c2) / 2)
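
    # A vectorized NumPy check of the step-by-step numbers above (a sketch; it assumes
    # SoftmaxCrossEntropyOp averages the per-row cross-entropies, as the manual calculation does).
    logits = y_tsr.astype(np.float64)
    probs = np.exp(logits) / np.exp(logits).sum(axis=1, keepdims=True)  # row-wise softmax
    cross = -np.sum(ylabel_tsr * np.log(probs), axis=1)                 # per-row cross-entropy
    print('avg cross-entropy (vectorized check):')
    print(cross.mean())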