def test():
    sess = fl.Session()
    sess.fan_in = 3
    sess.fan_out = 1

    # train_x and train_y are expected to be defined at module level.
    x = fl.Placeholder(sess, train_x, 'x')
    y = fl.Placeholder(sess, train_y, 'y')

    # We can choose to apply a quadratic kernel to the input or not.
    x2 = fl.concat(x, fl.square(x), 1)
    # x2 = x

    S0, W0, b0 = fl.fully_conntected(x2, 30, activation=fl.sigmoid,
                                     initializer=fl.xavier_initializer())
    S1, W1, b1 = fl.fully_conntected(S0, 1, activation=fl.sigmoid,
                                     initializer=fl.xavier_initializer())
    y_ = S1

    E = fl.l2loss(y, y_)
    optimizer = fl.AdamOptimizer(sess, [E], lr=0.01)

    anim = fl.make_animation2d(x, y, y_, E, optimizer, (0, 7), (0, 7),
                               interval=1, blit=True)
    plt.show()

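# The demo above reads train_x and train_y from module scope. A minimal sketch
# of data this setup could fit, assuming a binary label over two features in
# (0, 7) separated by a circular boundary (the case where the optional
# quadratic kernel features help); the actual dataset used by the repo may
# differ.
if 'train_x' not in globals():
    _pts = np.random.uniform(0, 7, (200, 2))
    train_x = _pts
    train_y = (np.sum((_pts - 3.5) ** 2, axis=1, keepdims=True) < 4.0).astype(float)
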
def test():
    num_images, images, num_rows, num_cols = read_x('data/mnist/train_x')
    _, labels = read_y('data/mnist/train_y')

    vec_size = 28 * 28
    batch_size = 6000
    class_num = 10
    hidden_sizes = (256, 128)
    lr = 0.001
    epoch = 15

    num_test, test_images, _, _ = read_x('data/mnist/test_x')
    _, test_labels = read_y('data/mnist/test_y')
    print(num_images, num_test, 'Images Read')

    # Flatten the 28x28 images into 784-dimensional vectors.
    images = np.reshape(images, (num_images, vec_size))
    test_images = np.reshape(test_images, (num_test, vec_size))

    sess = fl.Session()
    sess.fan_in = vec_size
    sess.fan_out = class_num

    input_x = fl.Placeholder(sess, (None, vec_size), 'x')
    output_y = fl.Placeholder(sess, (None, class_num), 'y')

    # Hidden ReLU layers followed by a class_num-unit output layer.
    H = input_x
    for hs in hidden_sizes:
        H, _, _ = fl.fully_conntected(H, hs, activation=fl.relu,
                                      initializer=fl.xavier_initializer())
    y_, _, _ = fl.fully_conntected(H, class_num, activation=fl.relu,
                                   initializer=fl.xavier_initializer())

    E = fl.softmax_cross_entropy_loss(output_y, y_, 1)
    # E = fl.l2loss(output_y, y_)
    optimizer = fl.AdamOptimizer(sess, [E], lr=lr)

    for _ in range(epoch):
        for batch_x, batch_y in mini_batch(images, labels, batch_size):
            input_x.set_result(batch_x)
            output_y.set_result(batch_y)
            optimizer.minimize()
        print('E:', E.get_result() / batch_size)

    # Evaluate on the test set.
    input_x.set_result(test_images)
    output_y.set_result(test_labels)
    print('E:', E.get_result() / num_test)
    print('acc:',
          np.sum(np.argmax(y_.get_result(), axis=1)
                 == np.argmax(test_labels, 1)) / num_test)

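# The training loop above relies on a mini_batch(images, labels, batch_size)
# helper that is not shown here. A minimal sketch of the interface it is
# assumed to have: a generator yielding shuffled (batch_x, batch_y) slices of
# the training set. The actual helper used by the repo may differ.
def mini_batch_sketch(xs, ys, batch_size):
    indices = np.random.permutation(len(xs))
    for start in range(0, len(xs), batch_size):
        picked = indices[start:start + batch_size]
        yield xs[picked], ys[picked]
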
def test():
    sess = fl.Session()

    # Vx + b = y
    train_x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    train_y = np.array([[0], [1], [1], [0]])
    x = fl.Placeholder(sess, train_x, 'x')
    y = fl.Placeholder(sess, train_y, 'y')

    def initializer(*shape):
        return fl.xavier(shape, 2, 2)

    V0 = fl.Variable(sess, initializer(2, 2))
    b0 = fl.Variable(sess, np.zeros(2))
    S0 = fl.sigmoid(fl.matmul(x, V0) + b0)

    V1 = fl.Variable(sess, initializer(2, 1))
    b1 = fl.Variable(sess, np.zeros(1))
    S1 = fl.sigmoid(fl.matmul(S0, V1) + b1)
    y_ = S1

    E = fl.sum(fl.square(y_ - y), axis=0)
    optimizer = fl.AdamOptimizer(sess, [E], lr=0.1)

    if True:  # Pre-calculate before animation
        print('start error:', E.get_result())
        epoch = 1000
        with pb.ProgressBar(max_value=epoch) as bar:
            for i in range(epoch):
                optimizer.minimize()
                bar.update(i)
        print('last error:', E.get_result())

    anim = fl.make_animation2d(x, y, y_, E, optimizer, (-1, 2), (-1, 2),
                               epoch_per_frame=50, frames=50, interval=80,
                               blit=True)
    if True:
        plt.show()
    else:
        anim.save('static/xor.gif', writer='imagemagick')

def test():
    sess = fl.Session()
    sess.fan_in = 1
    sess.fan_out = 1

    # train_x, train_y, and answer (the target function drawn by the
    # animation) are expected to be defined at module level.
    x = fl.Placeholder(sess, train_x, 'x')
    y = fl.Placeholder(sess, train_y, 'y')

    # Seven hidden tanh layers of 100 units, then a linear output layer.
    S = x
    for _ in range(7):
        S, _, _ = fl.fully_conntected(S, 100, activation=fl.tanh,
                                      initializer=fl.xavier_initializer())
    S, _, _ = fl.fully_conntected(S, 1, activation=None,
                                  initializer=fl.xavier_initializer())
    y_ = S

    # E = fl.avg(fl.avg(fl.square(y - y_), 0), 0)
    E = fl.l2loss(y, y_)
    optimizer = fl.AdamOptimizer(sess, [E], lr=0.001)

    if False:  # Pre-training before animation
        epoch = 1000  # number of pre-training steps (arbitrary)
        for _ in pb.progressbar(range(epoch)):
            optimizer.minimize()

    anim = fl.make_animation1d(x, y, y_, E, optimizer, (-4, 4), (-2, 2),
                               answer, interval=1, blit=True)
    plt.show()

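# The 1-D regression demo above reads train_x, train_y, and answer from module
# scope. A minimal sketch of how they could be defined, assuming the target is
# a sine curve sampled on the plotted range (-4, 4); the actual data used by
# the repo may differ.
if 'answer' not in globals():
    def answer(v):
        return np.sin(v)

    train_x = np.random.uniform(-4, 4, (200, 1))
    train_y = answer(train_x) + np.random.normal(0, 0.1, train_x.shape)
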
def ones(sess, shape, name='ones'):
    return fl.Placeholder(sess, np.ones(shape), name)


def zeros(sess, shape, name='zero'):
    return fl.Placeholder(sess, np.zeros(shape), name)


def empty_like(sess, a, name='empty'):
    # Placeholder with the same shape as `a`, left uninitialized.
    return fl.Placeholder(sess, np.empty(a.shape), name)


def ones_like(sess, a, name='ones'):
    # Placeholder with the same shape as `a`, filled with ones.
    return fl.Placeholder(sess, np.ones(a.shape), name)


def zeros_like(sess, a, name='zero'):
    # Placeholder with the same shape as `a`, filled with zeros.
    return fl.Placeholder(sess, np.zeros(a.shape), name)


def empty(sess, shape, name='empty'):
    return fl.Placeholder(sess, np.empty(shape), name)

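# A small usage sketch for the placeholder helpers above; the shapes and names
# here are arbitrary, and fl.Session / fl.Placeholder are assumed to behave as
# in the tests elsewhere in this repo.
def helpers_usage_sketch():
    sess = fl.Session()
    mask = ones(sess, (4, 3), 'mask')       # placeholder filled with ones
    bias = zeros(sess, (1, 3), 'bias')      # placeholder filled with zeros
    scratch = empty_like(sess, np.ones((4, 3)), 'scratch')  # same shape as the given array
    return mask, bias, scratch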