Example #1
    def test_forward(self):
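        # Count 0 -> 10 one step at a time; the per-iteration counter values 1..10 are stacked.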
        y = ad.while_loop(
            cond=lambda inputs: ad.less(inputs[0], ad.constant(10)),
            body=lambda inputs: [inputs[0] + 1],
            loop_vars=[ad.variable(0.0)],
        )
        actual = y.forward()
        expect = np.arange(1, 11)
        self.assertEqual((None, ), y.shape)
        self.assertTrue(np.allclose(expect, actual), (expect, actual))

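        # The second loop variable accumulates (c + 1) * acc, so the collected
        # per-iteration values are the factorials 1!, 2!, ..., 5!.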
        y = ad.while_loop(
            cond=lambda inputs: ad.less(inputs[0], ad.constant(5)),
            body=lambda inputs: [inputs[0] + 1, (inputs[0] + 1) * inputs[1]],
            loop_vars=[ad.variable(0.0), ad.variable(1.0)],
            output_index=1,
        )
        actual = y.forward()
        expect = np.array([1, 2, 6, 24, 120])
        self.assertEqual((None, ), y.shape)
        self.assertTrue(np.allclose(expect, actual), (expect, actual))

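        # Repeated right-multiplication by the Fibonacci matrix [[1, 1], [1, 0]]:
        # the [0, 0] entries of the stacked powers walk 1, 2, 3, 5, 8, 13.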
        y = ad.while_loop(
            cond=lambda inputs: ad.less(inputs[0], ad.constant(64)),
            body=lambda inputs: [
                inputs[0] * 2,
                ad.dot(inputs[1], ad.variable([[1, 1], [1, 0]])),
            ],
            loop_vars=[ad.variable(1),
                       ad.variable([[1, 0], [0, 1]])],
            output_index=1,
        )
        actual = y.forward()
        expect = np.array([1, 2, 3, 5, 8, 13])
        self.assertEqual((None, 2, 2), y.shape)
        self.assertTrue(np.allclose(expect, actual[:, 0, 0]), (expect, actual))
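
The three expected sequences are easy to reproduce in plain NumPy. A minimal sketch, assuming ad.while_loop stacks the selected loop variable once per iteration (which the leading None in the reported shapes suggests); run_loop is a hypothetical stand-in, not part of the library:

import numpy as np

def run_loop(cond, body, loop_vars, output_index=0):
    # Collect the chosen loop variable after every iteration, then stack.
    outputs = []
    while cond(loop_vars):
        loop_vars = body(loop_vars)
        outputs.append(loop_vars[output_index])
    return np.stack(outputs)

# Counter 0 -> 10: the collected values are 1..10.
assert np.allclose(
    run_loop(lambda v: v[0] < 10, lambda v: [v[0] + 1], [0.0]),
    np.arange(1, 11))

# Factorials: acc <- (c + 1) * acc while c < 5, collecting 1!, ..., 5!.
assert np.allclose(
    run_loop(lambda v: v[0] < 5,
             lambda v: [v[0] + 1, (v[0] + 1) * v[1]],
             [0.0, 1.0], output_index=1),
    [1, 2, 6, 24, 120])

# Powers of the Fibonacci matrix; the [0, 0] entries walk 1, 2, 3, 5, 8, 13.
m = np.array([[1, 1], [1, 0]])
fib = run_loop(lambda v: v[0] < 64,
               lambda v: [v[0] * 2, v[1] @ m],
               [1, np.eye(2, dtype=int)], output_index=1)
assert np.allclose(fib[:, 0, 0], [1, 2, 3, 5, 8, 13])
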
Example #2
    def test_forward_only_condition(self):
        actual = ad.where(
            ad.equal(
                ad.constant([[1, 2], [3, 4]]),
                ad.constant([[2, 1], [3, 4]]),
            )).forward()
        expect = np.array([[0., 0.], [1., 1.]])
        self.assertTrue(np.allclose(expect, actual), (expect, actual))
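
The semantics are easiest to read off the expected array: with only a condition argument, ad.where appears to return the boolean mask cast to floats, unlike NumPy's np.where(cond), which returns indices. A quick NumPy check of that reading (an inference from the test, not documented API):

import numpy as np

cond = np.equal([[1, 2], [3, 4]], [[2, 1], [3, 4]])  # [[False, False], [True, True]]
assert np.allclose(cond.astype(float), [[0., 0.], [1., 1.]])
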
Example #3
    def test_backward(self):
        z, variables, _ = self._gen_random_and_result((4, ), (3, 4))
        self.numeric_gradient_check(z, {}, variables)
        # Constant base: d/dx a**x = a**x * ln(a).
        x = ad.variable(np.random.random((2, 3)), name='X')
        z = ad.power(ad.constant(2.0), x)
        self.numeric_gradient_check(z, {}, [x])
        # Constant exponent: d/dx x**a = a * x**(a - 1).
        z = ad.power(x, ad.constant(3.0))
        self.numeric_gradient_check(z, {}, [x])
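
The two power checks exercise the standard gradients d/dx a**x = a**x * ln(a) and d/dx x**a = a * x**(a - 1). A self-contained central-difference confirmation in plain NumPy, independent of the library:

import numpy as np

x = np.random.random((2, 3))
eps = 1e-6

# Exponent case: d/dx 2**x = 2**x * ln(2).
num = (np.power(2.0, x + eps) - np.power(2.0, x - eps)) / (2 * eps)
assert np.allclose(num, np.power(2.0, x) * np.log(2.0))

# Base case: d/dx x**3 = 3 * x**2.
num = (np.power(x + eps, 3.0) - np.power(x - eps, 3.0)) / (2 * eps)
assert np.allclose(num, 3.0 * np.power(x, 2.0))
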
Example #4
    def test_forward(self):
        x_val = np.random.random((3, 4))
        x = ad.variable(x_val)
        # Overwrite the element at (1, 2) with 5.0; the shape is unchanged.
        y = ad.setitem(x, (1, 2), ad.constant(5.0))
        actual = y.forward()[1, 2]
        expect = 5.0
        self.assertEqual(x.shape, y.shape)
        self.assertTrue(np.allclose(expect, actual), (expect, actual))
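
The forward behavior being tested matches NumPy's copy-then-assign idiom, which is presumably what setitem does under the hood:

import numpy as np

x_val = np.random.random((3, 4))
y_val = x_val.copy()
y_val[1, 2] = 5.0
assert y_val.shape == x_val.shape and y_val[1, 2] == 5.0
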
Example #5
    def test_backward(self):
        # Gradient check through the loop: the stacked outputs are x^1 .. x^6,
        # so the gradient flows back through six chained matrix products.
        x = ad.variable([[1, 1], [1, 0]])
        y = ad.while_loop(
            cond=lambda inputs: ad.less(inputs[0], ad.constant(64)),
            body=lambda inputs: [inputs[0] * 2,
                                 ad.dot(inputs[1], x)],
            loop_vars=[ad.variable(1),
                       ad.variable([[1, 0], [0, 1]])],
            output_index=1,
        )
        self.numeric_gradient_check(y, {}, [x])
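
For intuition, the same gradient can be derived by hand in NumPy. Assuming numeric_gradient_check reduces the stacked outputs x^1 .. x^6 by summing them, the product rule gives d sum(x^k)/dx = sum over m of (x^m)^T @ ones @ (x^(k-1-m))^T, and this agrees with central differences:

import numpy as np

X = np.array([[1., 1.], [1., 0.]])

def f(x):
    # Forward pass of the loop, reduced to a scalar: sum of x^1 .. x^6.
    return np.stack([np.linalg.matrix_power(x, k) for k in range(1, 7)]).sum()

# Analytic gradient via the product rule.
ones = np.ones((2, 2))
grad = np.zeros((2, 2))
for k in range(1, 7):
    for m in range(k):
        a = np.linalg.matrix_power(X, m)
        c = np.linalg.matrix_power(X, k - 1 - m)
        grad += a.T @ ones @ c.T

# Central-difference check.
eps = 1e-6
numeric = np.zeros((2, 2))
for i in range(2):
    for j in range(2):
        d = np.zeros((2, 2))
        d[i, j] = eps
        numeric[i, j] = (f(X + d) - f(X - d)) / (2 * eps)
assert np.allclose(grad, numeric)
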
Example #6
    def test_twice(self):
        # Running the same op through a session twice should not raise.
        sess = ad.Session()
        op = ad.constant(np.array(1.0))
        sess.run(op)
        sess.run(op)
Example #7
def relu(x: ad.Operation) -> ad.Operation:
    """ReLU"""
    return ad.maximum(x, ad.constant(0.0))
Example #8
def leaky_relu(x: ad.Operation, alpha=0.01) -> ad.Operation:
    """Leaky ReLU"""
    return (ad.maximum(x, ad.constant(0.0)) +
            ad.minimum(x, ad.constant(0.0)) * ad.constant(alpha))
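
Both helpers reduce to well-known identities, which a quick NumPy check confirms: relu(x) = max(x, 0), and max(x, 0) + alpha * min(x, 0) equals the usual piecewise form of leaky ReLU:

import numpy as np

x = np.linspace(-2.0, 2.0, 9)
alpha = 0.01

assert np.allclose(np.maximum(x, 0.0), np.where(x > 0, x, 0.0))
assert np.allclose(np.maximum(x, 0.0) + np.minimum(x, 0.0) * alpha,
                   np.where(x > 0, x, alpha * x))
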
Example #9
    def test_backward(self):
        x_val = np.random.random((3, 4))
        x = ad.variable(x_val)
        y = ad.setitem(x, (1, 2), ad.constant(5.0))
        # The backward pass of setitem is not implemented, so the check must raise.
        with self.assertRaises(NotImplementedError):
            self.numeric_gradient_check(y, {}, [x])
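
For context on what is missing: were the backward pass implemented, the overwritten slot would receive no gradient through x, since the assigned value takes it over. A hypothetical sketch of that gradient, checked against central differences; none of this is the library's API:

import numpy as np

x = np.random.random((3, 4))
v = 5.0

def loss(xx):
    # Forward pass: setitem followed by a sum reduction.
    y = xx.copy()
    y[1, 2] = v
    return y.sum()

# Expected gradient w.r.t. x: ones everywhere except the overwritten slot.
grad_x = np.ones_like(x)
grad_x[1, 2] = 0.0

eps = 1e-6
numeric = np.zeros_like(x)
for i in range(x.shape[0]):
    for j in range(x.shape[1]):
        d = np.zeros_like(x)
        d[i, j] = eps
        numeric[i, j] = (loss(x + d) - loss(x - d)) / (2 * eps)
assert np.allclose(grad_x, numeric)
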