Example #1
 def _gen_random_and_result(x_shape, y_shape):
     x_val = np.random.random(x_shape)
     y_val = np.random.random(y_shape)
     x = ad.variable(x_val, name='X%s' % str(x_shape))
     y = ad.variable(y_val, name='Y%s' % str(y_shape))
     z = ad.maximum(x, y)
     expect = np.maximum(x_val, y_val)
     return z, [x, y], expect
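A minimal sketch of how such a helper is consumed, following the pattern of Examples #21 and #27 below. The test name here is hypothetical, and the `np`/`ad` imports plus the `numeric_gradient_check` helper are assumed to come from the enclosing test module:

 def test_forward_and_backward(self):
     # Broadcastable shapes, as in the other examples that use this helper.
     z, variables, expect = self._gen_random_and_result((3, 4), (3, 1))
     actual = z.forward()
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
     self.numeric_gradient_check(z, {}, variables)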
Example #2
 def _gen_random_and_result(x_shape, y_shape):
     x_val = np.random.randint(0, 10, x_shape)
     y_val = np.random.randint(0, 10, y_shape)
     x = ad.variable(x_val, name='X%s' % str(x_shape))
     y = ad.variable(y_val, name='Y%s' % str(y_shape))
     z = ad.equal(x, y)
     expect = (x_val == y_val).astype(dtype=np.float64)
     return z, [x, y], expect
Example #3
 def test_backward(self):
     x = ad.variable([[1, 1], [1, 0]])
     # Double a counter from 1 while it stays below 64 (six passes),
     # multiplying the accumulator by x each pass, so y == x^6.
     y = ad.while_loop(
         cond=lambda inputs: ad.less(inputs[0], ad.constant(64)),
         body=lambda inputs: [inputs[0] * 2,
                              ad.dot(inputs[1], x)],
         loop_vars=[ad.variable(1),
                    ad.variable([[1, 0], [0, 1]])],
         output_index=1,
     )
     self.numeric_gradient_check(y, {}, [x])
Example #4
 def _gen_random_and_result(x_shape, y_shape, call_type=True):
     x_val = np.random.random(x_shape)
     y_val = np.random.random(y_shape)
     x = ad.variable(x_val, name='X%s' % str(x_shape))
     y = ad.variable(y_val, name='Y%s' % str(y_shape))
     # call_type toggles between the operator overload and the explicit op.
     if call_type:
         z = x < y
     else:
         z = ad.less(x, y)
     expect = (x_val < y_val).astype(dtype=np.float64)
     return z, [x, y], expect
Example #5
 def test_cross_entropy_vector(self):
     for _ in range(100):
         x_val = np.random.random((np.random.randint(1, 11)))
         y_val = np.zeros_like(x_val)
         y_val[np.random.randint(0, y_val.shape[0])] = 1.0
         x = ad.variable(x_val, name='X')
         y_pred = ad.acts.softmax(x)
         y_true = ad.variable(y_val, name='Y')
         loss = ad.losses.cross_entropy(y_true, y_pred)
         self.assertEqual((), loss.shape)
         self.numeric_gradient_check(loss, {}, [x])
Example #6
 def test_sigmoid(self):
     x = ad.variable([1.0, -1.2, 0.0], name='X')
     y = ad.acts.sigmoid(x)
     actual = y.forward()
     expect = np.array([0.73105858, 0.23147522, 0.5])
     self.assertTrue(np.allclose(actual, expect), (actual, expect))
     self.numeric_gradient_check(y, {}, [x])
     for _ in range(100):
         x = ad.variable(np.random.random((np.random.randint(1, 11), np.random.randint(1, 11))) - 0.5, name='X')
         y = ad.acts.sigmoid(x)
         self.numeric_gradient_check(y, {}, [x])
Example #7
 def _gen_random_and_result(x_shape, y_shape, call_type=True):
     x_val = np.random.random(x_shape)
     y_val = np.random.random(y_shape)
     x = ad.variable(x_val, name='X%s' % str(x_shape))
     y = ad.variable(y_val, name='Y%s' % str(y_shape))
     if call_type:
         z = x * y
     else:
         z = ad.multiply(x, y)
     expect = x_val * y_val
     return z, [x, y], expect
Example #8
 def test_forward(self):
     val = np.arange(6)
     wr = ad.variable(val).reshape(shape=(1, 2, 3))
     actual = wr.forward()
     expect = np.array([[[0, 1, 2], [3, 4, 5]]])
     self.assertEqual((1, 2, 3), wr.shape)
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
     wr = ad.variable(val).reshape(shape=(-1, ))
     actual = wr.forward()
     expect = np.array([0, 1, 2, 3, 4, 5])
     self.assertEqual((6, ), wr.shape)
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
Example #9
 def _gen_random_and_result(cond_shape, x_shape, y_shape):
     cond_val = np.random.randint(0, 2, cond_shape) == np.random.randint(
         0, 2, cond_shape)
     x_val = np.random.random(x_shape)
     y_val = np.random.random(y_shape)
     cond = ad.variable(cond_val.astype(np.float64),
                        name='C%s' % str(cond_shape))
     x = ad.variable(x_val, name='X%s' % str(x_shape))
     y = ad.variable(y_val, name='Y%s' % str(y_shape))
     z = ad.where(cond, x, y)
     expect = np.where(cond_val, x_val, y_val)
     return z, [x, y], expect
Example #10
 def test_mean_square_error(self):
     for _ in range(100):
         n, m = np.random.randint(1, 11), np.random.randint(1, 11)
         x_val = np.random.random((n, m))
         y_val = np.zeros((n, m))
         classes = np.random.randint(0, m, (n, ))
         y_val[np.arange(n), classes] = 1.0
         x = ad.variable(x_val, name='X')
         y_pred = ad.acts.softmax(x)
         y_true = ad.variable(y_val, name='Y')
         loss = ad.losses.mean_square_error(y_true, y_pred)
         self.assertEqual((n, ), loss.shape)
         self.numeric_gradient_check(loss, {}, [x])
Example #11
    def test_backward(self):
        x = ad.variable([1, 2, 3, 4, 5, 6])
        y = ad.map_fn(lambda x: x * x, x)
        self.numeric_gradient_check(y, {}, [x])

        x = ad.variable([1, 2, 3])
        y = ad.variable([-1, 1, -1])
        z = ad.map_fn(lambda x: x[0] * x[1], (x, y))
        self.numeric_gradient_check(z, {}, [x, y])

        x = ad.variable([1, 2, 3])
        y = ad.map_fn(lambda x: (x, -x), x)
        z = y[0] * y[1]
        self.numeric_gradient_check(z, {}, [x])
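For reference, assuming map_fn applies its function along the first axis and zips tuple inputs (semantics inferred from the snippet, not confirmed), the three cases above would evaluate to [1, 4, 9, 16, 25, 36], [-1, 2, -3], and finally x * (-x) == [-1, -4, -9].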
Example #12
 def test_forward_multi(self):
     val = np.ones((2, 1, 1, 3, 1))
     we = ad.variable(val).squeeze(axis=(-1, -3, -4))
     actual = we.forward()
     expect = np.ones((2, 3))
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
     we = ad.variable(val).squeeze(axis=(-4, -1, -3))
     actual = we.forward()
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
     we = ad.variable(val).squeeze(axis=(1, -1, 2))
     actual = we.forward()
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
     we = ad.variable(val).squeeze(axis=(1, 2, 4))
     actual = we.forward()
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
Example #13
 def test_leaky_relu(self):
     alpha = 1e-2
     x = ad.variable([1.0, -1.2, 0.0], name='X')
     y = ad.acts.leaky_relu(x, alpha=alpha)
     actual = y.forward()
     expect = np.array([1.0, -0.012, 0.0])
     self.assertTrue(np.allclose(actual, expect), (actual, expect))
     self.numeric_gradient_check(y, {}, [x])
     for _ in range(100):
         alpha = np.random.random()
         x = ad.variable(np.random.random(
             (np.random.randint(1, 11), np.random.randint(1, 11))) - 0.5,
                         name='X')
         y = ad.acts.leaky_relu(x, alpha=alpha)
         self.numeric_gradient_check(y, {}, [x])
Example #14
 def test_forward_default(self):
     val = np.array([[1, 2, 3], [4, 5, 6]])
     wt = ad.variable(val).transpose()
     actual = wt.forward()
     expect = np.array([[1, 4], [2, 5], [3, 6]])
     self.assertEqual((3, 2), wt.shape)
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
Example #15
 def test_forward(self):
     val = np.arange(6).reshape((1, 2, 3))
     wf = ad.variable(val).flatten()
     actual = wf.forward()
     expect = np.array([0, 1, 2, 3, 4, 5])
     self.assertEqual((6, ), wf.shape)
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
Example #16
 def test_update_scalar(self):
     w = ad.variable(1.2)
     w.update(2.4)
     w.update_add(-3.6)
     # update replaces the stored value; update_add increments it: 2.4 - 3.6 == -1.2.
     self.assertAlmostEqual(-1.2, w.forward())
     with self.assertRaises(ValueError):
         w.update(np.array([1.0]))
Example #17
 def test_forward_axes(self):
     val = np.arange(6).reshape((1, 2, 3))
     wt = ad.transpose(ad.variable(val), axes=(1, 0, 2))
     actual = wt.forward()
     expect = np.array([[[0, 1, 2]], [[3, 4, 5]]])
     self.assertEqual((2, 1, 3), wt.shape)
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
Example #18
 def test_forward(self):
     x_val = np.random.random((3, 4))
     x = ad.variable(x_val)
     y = ad.setitem(x, (1, 2), ad.constant(5.0))
     actual = y.forward()[1, 2]
     expect = 5.0
     self.assertEqual(x.shape, y.shape)
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
Example #19
 def test_softmax_vector(self):
     for _ in range(100):
         x = ad.variable(np.random.random((np.random.randint(1, 11))),
                         name='X')
         y = ad.acts.softmax(x)
         s = y.sum(axis=-1).forward()
         self.assertTrue(np.allclose(np.ones_like(s), s), (s, ))
         self.numeric_gradient_check(y, {}, [x])
Example #20
 def test_backward(self):
     val = np.random.random((3, 5))
     w = ad.variable(val, name='W')
     y = w.transpose().sum()
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().sum(axis=-1)
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().sum(axis=0)
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().sum(axis=(0, -1))
     self.numeric_gradient_check(y, {}, [w])
     val = np.random.random((3, 4, 5))
     w = ad.variable(val, name='W')
     y = w.transpose().sum()
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().sum(axis=(0, 2)).sum(axis=0)
     self.numeric_gradient_check(y, {}, [w])
Example #21
 def test_backward(self):
     z, variables, _ = self._gen_random_and_result((3, 4), (3, 1), False)
     self.numeric_gradient_check(z, {}, variables, atol=1e-5)
     x = ad.variable(np.random.random((2, 3)), name='X')
     z = 1.0 // x
     self.numeric_gradient_check(z, {}, [x], atol=1e-5)
     z = x // 1.0
     self.numeric_gradient_check(z, {}, [x], atol=1e-5)
Example #22
 def test_backward(self):
     z, variables, _ = self._gen_random_and_result((4, ), (3, 4))
     self.numeric_gradient_check(z, {}, variables)
     x = ad.variable(np.random.random((2, 3)), name='X')
     z = ad.power(ad.constant(2.0), x)
     self.numeric_gradient_check(z, {}, [x])
     z = ad.power(x, ad.constant(3.0))
     self.numeric_gradient_check(z, {}, [x])
Example #23
    def test_zeros(self):
        weights = ad.inits.zeros(shape=(3, 5))
        self.assertEqual((3, 5), weights.shape)
        self.assertEqual(0.0, np.max(weights))
        self.assertEqual(0.0, np.min(weights))

        weights = ad.variable(ad.inits.zeros, shape=3)
        self.assertEqual((3, ), weights.shape)
Example #24
 def test_broadcast_failed(self):
     with self.assertRaises(ValueError):
         self._gen_random_and_result((1, 3, 4), (1, 4, 1))
     x = ad.variable(np.random.random((2, 3)), name='X')
     z = 3.0 + x
     self.numeric_gradient_check(z, {}, [x])
     z = x + 3.0
     self.numeric_gradient_check(z, {}, [x])
Example #25
 def test_forward(self):
     x_val = np.random.random((3, 4))
     x = ad.variable(x_val)
     y = ad.log(x)
     actual = y.forward()
     expect = np.log(x_val)
     self.assertEqual(expect.shape, y.shape)
     self.assertTrue(np.allclose(expect, actual), (expect, actual))
Example #26
 def test_backward_keepdims(self):
     val = np.random.random((3, 5))
     w = ad.variable(val, name='W')
     y = ad.prod(w.transpose(), keepdims=True)
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().prod(axis=-1, keepdims=True)
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().prod(axis=0, keepdims=True)
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().prod(axis=(0, -1), keepdims=True)
     self.numeric_gradient_check(y, {}, [w])
     val = np.random.random((3, 4, 5))
     w = ad.variable(val, name='W')
     y = w.transpose().prod(keepdims=True)
     self.numeric_gradient_check(y, {}, [w])
     y = w.transpose().prod(axis=(0, 2), keepdims=True).prod(axis=1, keepdims=True)
     self.numeric_gradient_check(y, {}, [w])
Example #27
 def test_backward(self):
     z, variables, _ = self._gen_random_and_result((4, ), (3, 4), False)
     self.numeric_gradient_check(z, {}, variables)
     x = ad.variable(np.random.random((2, 3)), name='X')
     z = 2.0 * x
     self.numeric_gradient_check(z, {}, [x])
     z = x * 2.0
     self.numeric_gradient_check(z, {}, [x])
Example #28
def gen_linear_model(config: dict, verbose=False):
    """Generate a linear model.

    :param config: Configuration.
    :param verbose: Print loss and gradients if it is True.
    :return: Model, loss, placeholders and variables.
    """
    x = ad.placeholder(shape=(None, config['input_len']), name='X')
    y = ad.placeholder(shape=(None, ), name='Y')

    w1 = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=(config['input_len'], config['hidden_dim']),
        name='W1',
    )
    b1 = ad.variable(
        initializer=ad.inits.zeros,
        shape=config['hidden_dim'],
        name='b1',
    )

    v = ad.acts.leaky_relu(ad.dot(x, w1) + b1)

    w2 = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=(config['hidden_dim'], 2),
        name='W2',
    )
    b2 = ad.variable(
        initializer=ad.inits.zeros,
        shape=2,
        name='b2',
    )

    y_pred = ad.acts.softmax(ad.dot(v, w2) + b2)

    loss = ad.square(y - y_pred).mean()

    if verbose:
        print('Loss:', loss)

    return y_pred, loss, [x, y], [w1, b1, w2, b2]
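A hypothetical call site for the generator above; the config values are illustrative, and only the two keys read inside the function are required:

config = {'input_len': 4, 'hidden_dim': 8}
y_pred, loss, placeholders, weights = gen_linear_model(config, verbose=True)
# placeholders == [x, y] and weights == [w1, b1, w2, b2], per the docstring.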
Example #29
 def test_backward_multi(self):
     val = np.ones((2, 1, 1, 3, 1))
     w = ad.variable(val)
     we = ad.squeeze(w, axis=(-1, -3, -4))
     self.numeric_gradient_check(we, {}, [w])
     we = w.squeeze(axis=(-4, -1, -3))
     self.numeric_gradient_check(we, {}, [w])
     we = w.squeeze(axis=(1, -1, 2))
     self.numeric_gradient_check(we, {}, [w])
     we = w.squeeze(axis=(1, 2, 4))
     self.numeric_gradient_check(we, {}, [w])
Example #30
 def add_weight(self,
                name,
                shape,
                initializer=None,
                trainable=True) -> ad.OpVariable:
     var = ad.variable(initializer, shape=shape, name=name)
     if trainable:
         self._trainable_weights.append(var)
     else:
         self._non_trainable_weights.append(var)
     return var
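A minimal sketch of a layer built on this method. The Dense class and its Layer base (assumed to define add_weight as above and to initialize the two weight lists) are hypothetical; the initializers mirror those used in the linear model example:

class Dense(Layer):  # Layer is assumed to provide add_weight and the weight lists.
    def __init__(self, input_dim, units):
        super().__init__()
        # Trainable kernel and bias, registered via add_weight.
        self.kernel = self.add_weight(
            name='kernel',
            shape=(input_dim, units),
            initializer=ad.inits.random_normal(),
        )
        self.bias = self.add_weight(
            name='bias',
            shape=(units, ),
            initializer=ad.inits.zeros,
        )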