```python
# Fully-connected output layer: 1024 hidden units -> 10 classes.
w4 = net.variable(guass(0., std, (1024, 10)))
b4 = net.variable(np.ones((10,)) * .1)

# First conv block: conv -> batch norm -> bias -> ReLU -> 2x2 max-pool.
conv1 = net.conv2d(image, k1, pad=(2, 2), stride=(1, 1))
conv1 = net.batch_norm(conv1, net.variable(guass(0., std, (32,))), is_training)
conv1 = net.plus_b(conv1, b1)
conv1 = net.relu(conv1)
pool1 = net.maxpool2(conv1)

# Second conv block.
conv2 = net.conv2d(pool1, k2, (2, 2), (1, 1))
conv2 = net.plus_b(conv2, b2)
conv2 = net.relu(conv2)
pool2 = net.maxpool2(conv2)

# Flatten and apply two fully-connected layers.
flat = net.reshape(pool2, (7 * 7 * 64,))
fc1 = net.plus_b(net.matmul(flat, w3), b3)
fc1 = net.relu(fc1)
fc2 = net.plus_b(net.matmul(fc1, w4), b4)

# Softmax cross-entropy loss, minimized with Adam.
loss = net.softmax_crossent(fc2, label)
net.optimize(loss, 'adam', 1e-3)

# Train for 5 epochs over MNIST in batches of 128.
mnist_data = read_mnist()
batch = 128
for count in range(5):
    batch_num = int(mnist_data.train.num_examples / batch)
    for i in range(batch_num):
        feed, target = mnist_data.train.next_batch(batch)
        feed = feed.reshape(batch, 28, 28, 1).astype(np.float64)
```
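The flatten size `7 * 7 * 64` follows from the two 2x2 max-pools, each halving the 28x28 spatial resolution (28 -> 14 -> 7), with 64 channels coming out of `conv2`. A quick NumPy check of that arithmetic (the shapes are the only thing taken from the code above):

```python
import numpy as np

# Sanity-check the flatten size: two 2x2 max-pools halve 28 twice.
side = 28
for _ in range(2):    # one halving per maxpool2 call
    side //= 2        # 28 -> 14 -> 7
channels = 64         # conv2's output channels, per the reshape above
assert side * side * channels == 7 * 7 * 64 == 3136
```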
The second example builds a Neural Turing Machine for the copy task:

```python
# Copy-task setup: 10-bit tokens, an NTM with 128 memory slots of width 30,
# a 300-unit controller, and sequences of up to 20 tokens.
batch = 8
out_dim = inp_dim = 10
hid_dim = 300
mem_size = 128
vec_size = 30
max_seq_len = 20

# Reserve the first two bit positions for the start and stop markers.
start_symbol = np.zeros([batch, 1, inp_dim])
stop_symbol = np.zeros([batch, 1, inp_dim])
start_symbol[:, 0, 0] = np.ones([batch])
stop_symbol[:, 0, 1] = np.ones([batch])

net = Net()
x = net.portal((inp_dim,))
y = net.portal((inp_dim,))
y = net.reshape(y, [-1, inp_dim], over_batch=True)

# Run the NTM, then slice out the recall phase of its output;
# the slice bounds `start` and `end` are fed in at run time.
ntm_out = net.turing(x, out_dim, mem_size, vec_size, hid_dim, shift=1)
start, end = net.portal(), net.portal()
copy = net.dynamic_slice(ntm_out, start=start, end=end, axis=0)
logits = net.reshape(copy, [-1, inp_dim], over_batch=True)

# Per-bit logistic loss between the sliced output and the target sequence.
loss = net.logistic(logits, y)
net.optimize(loss, 'adam', 1e-2)

def generate_random_input(batch, seq_length, inp_dim):
    # Random binary tokens with the two marker bits zeroed out;
    # also return an all-zero array of the same shape.
    x = np.random.rand(batch, seq_length, inp_dim).round()
    x[:, :, :2] = np.zeros(x[:, :, :2].shape)
    return x, np.zeros(x.shape)
```
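To make the data flow concrete, here is one way a single copy-task batch could be assembled from the pieces above. This is a minimal sketch, not code from the library: it assumes the zero array returned by `generate_random_input` serves as the blank input during the recall phase, and the values computed for the `start`/`end` portals are hypothetical.

```python
import numpy as np

seq_len = np.random.randint(1, max_seq_len + 1)
seq, blanks = generate_random_input(batch, seq_len, inp_dim)

# Input tape: start marker, the sequence, stop marker, then blank steps
# during which the NTM must write the sequence back out.
feed = np.concatenate([start_symbol, seq, stop_symbol, blanks], axis=1)
target = seq  # the network should reproduce the sequence

# Hypothetical bounds for the recall phase: time steps
# [seq_len + 2, 2 * seq_len + 2) of the NTM's output.
copy_start = seq_len + 2
copy_end = 2 * seq_len + 2
```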