def test_forward(self):
    """Forward pass of ``__getitem__`` must match numpy indexing, and the
    static shape must stay ``None`` wherever it cannot be pinned down."""
    val = np.random.random((1, 2, 3))
    x = ad.variable(val)
    # Each entry is an index expression applied identically to the ad
    # variable and to the raw numpy array.
    index_cases = [
        (0, 1, 0),
        0,
        (0, slice(1, None)),
        (0, slice(1, None), slice(None, 2)),
        (0, slice(None), slice(None, 2, None)),
        (0, slice(None, -2, -1), slice(None, None, -2)),
    ]
    for index in index_cases:
        y = x[index]
        actual = y.forward()
        expect = val[index]
        self.assertEqual(expect.shape, y.shape)
        self.assertTrue(np.allclose(expect, actual), (expect, actual))
    # Static-shape inference on a placeholder with an unknown first
    # dimension: slicing the known second dimension gives 2 columns, but
    # the first dimension stays None — even for x[4:6], because the fed
    # batch may hold fewer than 6 rows.
    for first in (slice(None), slice(1, None), slice(4, 6)):
        x = ad.placeholder(shape=(None, 12))
        y = x[first, 3:5]
        self.assertEqual((None, 2), y.shape)
def test_forward(self):
    """Running a placeholder through a session returns exactly the value
    fed for it — checked twice with independent random values."""
    x = ad.placeholder(shape=(2, 3))
    for _ in range(2):
        value = np.random.random((2, 3))
        result = ad.Session().run(x, feed_dict={x: value})
        self.assertTrue(np.allclose(value, result), (value, result))
def gen_linear_model(config: dict, verbose=False):
    """Generate a small two-layer feed-forward classifier.

    NOTE(review): despite the function name, this model is NOT linear —
    it applies a leaky-ReLU hidden layer followed by a 2-unit softmax
    output layer.

    :param config: Configuration; reads 'input_len' and 'hidden_dim'.
    :param verbose: Print the loss node if it is True.
    :return: Model output, loss, placeholders and variables.
    """
    x = ad.placeholder(shape=(None, config['input_len']), name='X')
    y = ad.placeholder(shape=(None, ), name='Y')
    # Hidden layer: input_len -> hidden_dim, leaky-ReLU activation.
    w1 = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=(config['input_len'], config['hidden_dim']),
        name='W1',
    )
    b1 = ad.variable(
        initializer=ad.inits.zeros,
        shape=config['hidden_dim'],
        name='b1',
    )
    v = ad.acts.leaky_relu(ad.dot(x, w1) + b1)
    # Output layer: hidden_dim -> 2, softmax activation.
    w2 = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=(config['hidden_dim'], 2),
        name='W2',
    )
    b2 = ad.variable(
        initializer=ad.inits.zeros,
        shape=2,
        name='b2',
    )
    y_pred = ad.acts.softmax(ad.dot(v, w2) + b2)
    # Mean squared error between targets and softmax outputs.
    loss = ad.square(y - y_pred).mean()
    if verbose:
        print('Loss:', loss)
    return y_pred, loss, [x, y], [w1, b1, w2, b2]
def gen_linear_model(config: dict, verbose=False):
    """Build a linear regression model ``y = x . w + b``.

    :param config: Configuration; reads 'input_len'.
    :param verbose: Print the loss node if it is True.
    :return: Model output, loss, placeholders and variables.
    """
    input_len = config['input_len']
    x = ad.placeholder(shape=(None, input_len), name='X')
    y = ad.placeholder(shape=(None, ), name='Y')
    # A single weight vector plus a scalar bias.
    w = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=input_len,
        name='W',
    )
    b = ad.variable(0.0, name='b')
    y_pred = ad.dot(x, w) + b
    # Mean squared error against the targets.
    loss = ad.square(y - y_pred).mean()
    if verbose:
        print('Loss:', loss)
    return y_pred, loss, [x, y], [w, b]
def test_forward(self):
    """``ad.pad`` must agree with ``np.pad(..., mode='constant')`` for
    every supported pad-width specification."""
    # Scalar, pair, per-axis single, and per-axis (before, after) widths.
    width_specs = (2, (1, 2), ((1,), (2,)), ((1, 2), (3, 4)))
    for width in width_specs:
        val = np.random.random((3, 5))
        x = ad.variable(val)
        y = ad.pad(x, width)
        actual = y.forward()
        expect = np.pad(val, width, mode='constant')
        self.assertEqual(expect.shape, y.shape)
        self.assertTrue(np.allclose(expect, actual), (expect, actual))
    # Padding a placeholder: the unknown first dimension stays None in
    # the static shape, while the known one grows by both pad widths.
    val = np.random.random((3, 5))
    x = ad.placeholder(shape=(None, 5))
    y = ad.pad(x, 1)
    actual = y.forward({x: val})
    expect = np.pad(val, 1, mode='constant')
    self.assertEqual((None, 7), y.shape)
    self.assertTrue(np.allclose(expect, actual), (expect, actual))
def test_backward(self):
    """Calling ``backward`` on a bare placeholder must not raise."""
    placeholder = ad.placeholder(shape=(2, 3))
    placeholder.backward()
def __init__(self, shape, **kwargs):
    """Input layer: wraps a placeholder of the given static shape.

    :param shape: Static shape of the expected input tensor.
    :param kwargs: Forwarded to the base layer constructor.
    """
    super(Input, self).__init__(**kwargs)
    self.shape = shape
    self.placeholder = ad.placeholder(self.shape)
    # An input layer has no inbound layer, so both the forward call and
    # the shape inference receive None.
    self._outputs = self.call(None)
    self._output_shapes = self.compute_output_shape(None)
def test_forward_variable_shape(self):
    """``ad.random`` driven by ``ad.shape(x)`` must follow the runtime
    shape of the value fed for the placeholder."""
    x = ad.placeholder(shape=(None, 3))
    sampled = ad.random(ad.shape(x)).forward({x: np.random.random((5, 3))})
    self.assertEqual((5, 3), sampled.shape)