# Train for 5 epochs of mini-batches.
batch = 128
for count in range(5):
    batch_num = int(mnist_data.train.num_examples / batch)
    for i in range(batch_num):
        feed, target = mnist_data.train.next_batch(batch)
        # Reshape flat MNIST vectors into 28x28x1 images.
        feed = feed.reshape(batch, 28, 28, 1).astype(np.float64)
        target = target.astype(np.float64)
        pred, cost = net.train([fc2], {
            image: feed, label: target, is_training: True})
        predict = pred.argmax(1)
        truth = target.argmax(1)
        accuracy = np.equal(predict, truth).mean()
        print('Step {} Loss {} Accuracy {}'.format(
            i + 1 + count * batch_num, cost, accuracy))

# Evaluate on the held-out test set.
predict = net.forward([fc2], {
    image: mnist_data.test.images.reshape((-1, 28, 28, 1)),
    is_training: False})[0]
true_labels = mnist_data.test.labels.argmax(1)
pred_labels = predict.argmax(1)
accuracy = np.equal(true_labels, pred_labels).mean()
print('Accuracy on test set:', accuracy)
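Forwarding the whole test set in a single call can be memory-heavy. A minimal sketch of chunked evaluation that reuses the same net.forward call as above; the batched_accuracy name and the chunk size of 256 are my own, not part of the framework:

import numpy as np

def batched_accuracy(images, labels, chunk=256):
    # Evaluate the test set in fixed-size chunks and count correct
    # top-1 predictions, reusing the portals defined above.
    hits = 0
    for start in range(0, len(images), chunk):
        feed = images[start:start + chunk].reshape((-1, 28, 28, 1))
        pred = net.forward([fc2], {image: feed, is_training: False})[0]
        hits += np.equal(pred.argmax(1),
                         labels[start:start + chunk].argmax(1)).sum()
    return hits / float(len(images))

print('Chunked test accuracy:',
      batched_accuracy(mnist_data.test.images, mnist_data.test.labels))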
import time

image = net.portal((224, 224, 3))
question = net.portal((40, 300))
real_len = net.portal((1,))

vgg16_feat = build_vgg16(net, image)
lstm3_feat = build_lstm3(net, question, real_len)
infer_feat = net.concat([lstm3_feat, vgg16_feat])
answer = build_infer(net, infer_feat)

image_feed = read_image('test.jpg')
queries = [u"What is the animal in the picture?"]

query_feed = list()
start_time = time.time()
for query in queries:
    query_feed.append(glove_embed(query))
image_feed = [image_feed] * len(queries)
image_feed = np.array(image_feed)
query_feed = np.array(query_feed)

predicts, = net.forward([answer], {
    image: image_feed, question: query_feed,
    real_len: [30] * len(queries)})

for i, predict in enumerate(predicts):
    print('Q: {:<40}. A: {}'.format(queries[i], to_word(predict)))
print('Answered in {}s'.format(time.time() - start_time))
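The listing relies on glove_embed and to_word, which are not defined here. A rough sketch of what they might look like, assuming the question portal expects 40 tokens of 300-d GloVe vectors and the answer output scores a fixed answer vocabulary; glove_dict and answer_vocab are hypothetical placeholders for tables loaded elsewhere:

import numpy as np

MAX_LEN, EMBED_DIM = 40, 300
glove_dict = {}    # token -> 300-d GloVe vector, loaded from a GloVe file
answer_vocab = []  # index -> answer word, built from the training answers

def glove_embed(query):
    # Embed each token and zero-pad to the (40, 300) portal shape.
    embedded = np.zeros((MAX_LEN, EMBED_DIM))
    tokens = query.lower().rstrip('?').split()[:MAX_LEN]
    for i, token in enumerate(tokens):
        embedded[i] = glove_dict.get(token, np.zeros(EMBED_DIM))
    return embedded

def to_word(predict):
    # Return the highest-scoring entry of the answer vocabulary.
    return answer_vocab[int(np.argmax(predict))]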
vanilla_loss = net.softmax_crossent(predict, y)
regularized_loss = net.weighted_loss(
    (vanilla_loss, 1.0), (regularizer1, .1), (regularizer2, .1))
net.optimize(regularized_loss, 'rmsprop', 1e-3)

# Helper: index of the first zero (padding) token, i.e. the true sentence length.
def real_len(x_batch):
    return [np.argmin(s + [0]) for s in x_batch]

# Training
batch = 64
epoch = 15
step = 0
for sentences, label in dat.yield_batch(batch, epoch):
    pred, loss = net.train([predict], {
        x: sentences, y: label, keep: .8,
        lens: real_len(sentences), center: 0.})
    acc = accuracy(pred, label)
    print('Step {}, Loss {}, Accuracy {}%'.format(step + 1, loss, acc * 100))
    step += 1

# Evaluation
x_test, y_test = dat.yield_test()
pred = net.forward([predict], {
    x: x_test, keep: 1., lens: real_len(x_test)})[0]
acc = accuracy(pred, y_test)
print('Accuracy on test set: {}'.format(acc))
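The accuracy helper used above is not defined in the listing. A minimal sketch, assuming pred holds per-class scores and label one-hot targets (that shape assumption is mine):

import numpy as np

def accuracy(pred, label):
    # Fraction of examples whose highest-scoring class matches the target.
    return np.equal(np.asarray(pred).argmax(1),
                    np.asarray(label).argmax(1)).mean()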
# Two fully connected layers with dropout in between.
fc1 = net.matmul(image, w1)
bias = net.plus_b(fc1, b1)
relu = net.relu(bias)
dropped = net.dropout(relu, keep_prob)
fc2 = net.matmul(dropped, w2)
bias = net.plus_b(fc2, b2)
loss = net.softmax_crossent(bias, target)
net.optimize(loss, 'sgd', lr)

mnist_data = read_mnist()
s = time.time()
for count in range(30):
    batch_num = int(mnist_data.train.num_examples / batch)
    for i in range(batch_num):
        img, lab = mnist_data.train.next_batch(batch)
        loss = net.train([], {
            image: img, target: lab, keep_prob: .5})[0]
    print('Epoch {} loss {}'.format(count, loss))
print('Total time elapsed: {}'.format(time.time() - s))

bias_out = net.forward([bias], {image: mnist_data.test.images})[0]
true_labels = mnist_data.test.labels.argmax(1)
pred_labels = bias_out.argmax(1)
accuracy = np.equal(true_labels, pred_labels).mean()
print('Accuracy on test set', accuracy)
dropped = net.dropout(relu, keep_prob)
fc2 = net.matmul(dropped, w2)
bias = net.plus_b(fc2, b2)
loss = net.softmax_crossent(bias, target)
net.optimize(loss, 'sgd', lr)

mnist_data = read_mnist()
s = time.time()
for count in range(30):
    batch_num = int(mnist_data.train.num_examples / batch)
    for i in range(batch_num):
        img, lab = mnist_data.train.next_batch(batch)
        loss = net.train([], {
            image: img, target: lab, keep_prob: .5})[0]
    print('Epoch {} loss {}'.format(count, loss))
print('Total time elapsed: {}'.format(time.time() - s))

bias_out = net.forward([bias], {
    image: mnist_data.test.images,
    keep_prob: 1.0})[0]
true_labels = mnist_data.test.labels.argmax(1)
pred_labels = bias_out.argmax(1)
accuracy = np.equal(true_labels, pred_labels).mean()
print('Accuracy on test set', accuracy)
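Note that, unlike the previous listing, the test-time net.forward call here feeds keep_prob: 1.0, which presumably turns the dropout layer into a pass-through during evaluation; only the training steps drop activations with keep_prob: .5.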