Example #1
import theano
import theano.tensor
from theano import gradient

def test_op_removed():
    x = theano.tensor.matrix("x")
    y = x * gradient.consider_constant(x)
    f = theano.function([x], y)
    # need to refer to theano.gradient.consider_constant_ here,
    # theano.gradient.consider_constant is a wrapper function!
    assert gradient.consider_constant_ not in [
        node.op for node in f.maker.fgraph.toposort()
    ]
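Theano's graph optimizer removes the identity-like ConsiderConstant op during compilation, which is what the assertion above checks; the op's only job is to block gradient flow while it is still in the symbolic graph. A minimal sketch of that gradient-blocking behavior (the variable names and shapes below are illustrative, not from the original test suite):

    import numpy as np
    import theano
    import theano.tensor as T
    from theano import gradient

    x = T.matrix("x")
    # d/dx of (x * x).sum() is 2 * x, but with the second factor wrapped
    # in consider_constant only the first factor contributes, giving x
    cost = (x * gradient.consider_constant(x)).sum()
    f = theano.function([x], gradient.grad(cost, x))

    a = np.ones((2, 2), dtype=theano.config.floatX)
    print(f(a))  # a matrix of ones (i.e. x), not 2 * x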
Example #2
import numpy as np
import theano
from theano import config, gradient

def test_grad():
    T = theano.tensor
    # the original test class provided a seeded self.rng;
    # a seeded RandomState stands in for it here
    rng = np.random.RandomState(seed=42)
    a = np.asarray(rng.randn(5, 5), dtype=config.floatX)

    x = T.matrix("x")

    # pairs of (expression, expected gradient of expr.sum() w.r.t. x)
    expressions_gradients = [
        (x * gradient.consider_constant(x), x),
        (x * gradient.consider_constant(T.exp(x)), T.exp(x)),
        (gradient.consider_constant(x), T.constant(0.0)),
        (x ** 2 * gradient.consider_constant(x), 2 * x ** 2),
    ]

    for expr, expr_grad in expressions_gradients:
        g = gradient.grad(expr.sum(), x)
        # gradient according to theano
        f = theano.function([x], g, on_unused_input="ignore")
        # desired gradient
        f2 = theano.function([x], expr_grad, on_unused_input="ignore")

        assert np.allclose(f(a), f2(a))
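Each expected gradient above follows from treating the wrapped subexpression as a constant during differentiation; in the third pair, for instance, the whole expression is wrapped, so the gradient is identically zero. A quick standalone check of that case (the names and the 3x3 shape are illustrative):

    import numpy as np
    import theano
    import theano.tensor as T
    from theano import gradient

    x = T.matrix("x")
    g = gradient.grad(gradient.consider_constant(x).sum(), x)
    f = theano.function([x], g, on_unused_input="ignore")

    a = np.random.randn(3, 3).astype(theano.config.floatX)
    assert np.allclose(f(a), 0.0)  # gradient is zero everywhere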