Example #1
# Imports inferred from the names used below (assuming Aesara):
import numpy as np

from aesara import config, function, shared
from aesara.gradient import UndefinedGrad, grad, zero_grad
from aesara.sandbox.rng_mrg import MRG_RandomStream
from aesara.tensor import cast
from aesara.tensor import sum as tt_sum


def test_undefined_grad_opt():
    # Make sure that undefined grads get removed from the optimized graph.
    random = MRG_RandomStream(np.random.randint(1, 2147462579))
    pvals = shared(np.random.rand(10, 20).astype(config.floatX))
    pvals = pvals / pvals.sum(axis=1)
    pvals = zero_grad(pvals)
    samples = random.multinomial(pvals=pvals, n=1)
    samples = cast(samples, pvals.dtype)
    samples = zero_grad(samples)
    cost = tt_sum(samples + pvals)
    grad_out = grad(cost, samples)
    f = function([], grad_out)
    # No UndefinedGrad node should survive compilation.
    assert not any(
        isinstance(node.op, UndefinedGrad) for node in f.maker.fgraph.apply_nodes
    )
Example #2
    # Assumes module-level imports: `import aesara`, `matrix` from
    # aesara.tensor, and `zero_grad`/`zero_grad_` from aesara.gradient.
    def test_op_removed(self):
        x = matrix("x")
        y = x * zero_grad(x)
        f = aesara.function([x], y)
        # Check for zero_grad_, the ZeroGrad Op instance in aesara.gradient;
        # zero_grad itself is just a wrapper function around that Op.
        assert zero_grad_ not in [node.op for node in f.maker.fgraph.toposort()]
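A quick check of the distinction that comment draws (a sketch that assumes the
aesara.gradient module layout, i.e. that ZeroGrad, zero_grad_, and zero_grad
all live there):

from aesara.gradient import ZeroGrad, zero_grad, zero_grad_

# zero_grad_ is the ZeroGrad Op instance the test searches the graph for;
# zero_grad is the wrapper function users actually call to apply that Op.
assert isinstance(zero_grad_, ZeroGrad)
assert callable(zero_grad)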
Example #3
# Assumes module-level imports of `numpy as np`, `aesara`, aesara's
# `gradient` module, and a `RandomStreams` class providing `multinomial`.
def test_undefined_grad_opt():
    # Make sure that undefined grads get removed from the optimized graph.
    random = RandomStreams(np.random.randint(1, 2147462579))
    pvals = aesara.shared(np.random.rand(10, 20).astype(aesara.config.floatX))
    pvals = pvals / pvals.sum(axis=1)
    pvals = gradient.zero_grad(pvals)
    samples = random.multinomial(pvals=pvals, n=1)
    samples = aesara.tensor.cast(samples, pvals.dtype)
    samples = gradient.zero_grad(samples)
    cost = aesara.tensor.sum(samples + pvals)
    grad_out = aesara.grad(cost, samples)  # renamed to avoid shadowing `grad`
    f = aesara.function([], grad_out)
    aesara.printing.debugprint(f)
    assert not any(
        isinstance(node.op, gradient.UndefinedGrad)
        for node in f.maker.fgraph.apply_nodes
    )
Example #4
# Imports as in Example #1, plus (assumed): `import aesara`,
# `import aesara.tensor as at`, and `from aesara.tensor import sum as at_sum`.
def test_undefined_grad_opt():
    # Make sure that undefined grads get removed from the optimized graph.
    random = MRG_RandomStream(np.random.default_rng().integers(1, 2147462579))

    pvals = aesara.shared(np.random.random((10, 20)).astype(config.floatX))
    pvals = pvals / pvals.sum(axis=1)
    pvals = zero_grad(pvals)

    samples = random.multinomial(pvals=pvals, n=1)
    samples = at.cast(samples, pvals.dtype)
    samples = zero_grad(samples)

    cost = at_sum(samples + pvals)
    grad_res = grad(cost, samples)

    f = aesara.function([], grad_res)
    assert not any(
        isinstance(node.op, UndefinedGrad) for node in f.maker.fgraph.apply_nodes
    )
Example #5
    # Assumes `self.rng` is a np.random.RandomState created in the test's
    # setup, plus module-level imports of `aesara`, `numpy as np`, `config`,
    # `matrix`, `exp`, `grad`, `zero_grad`, and `aesara.tensor as aet`.
    def test_grad(self):
        a = np.asarray(self.rng.randn(5, 5), dtype=config.floatX)

        x = matrix("x")

        # Pairs of (expression, expected gradient of expression.sum() wrt x);
        # zero_grad makes its argument act as a constant under differentiation.
        expressions_gradients = [
            (x * zero_grad(x), x),
            (x * zero_grad(exp(x)), exp(x)),
            (zero_grad(x), aet.constant(0.0)),
            (x**2 * zero_grad(x), 2 * x**2),
        ]

        for expr, expr_grad in expressions_gradients:
            g = grad(expr.sum(), x)
            # gradient according to aesara
            f = aesara.function([x], g, on_unused_input="ignore")
            # desired gradient
            f2 = aesara.function([x], expr_grad, on_unused_input="ignore")

            assert np.allclose(f(a), f2(a))
Example #6
    # Assumes `Rop` and `zero_grad` from aesara.gradient, `vector` from
    # aesara.tensor, and the same `self.rng` as in the previous example.
    def test_rop(self):
        x = vector()
        v = vector()
        y = zero_grad(x)

        # The R-operator through zero_grad is identically zero, since the
        # output is treated as constant with respect to x.
        rop = Rop(y, x, v)
        f = aesara.function([x, v], rop, on_unused_input="ignore")

        a = np.asarray(self.rng.randn(5), dtype=config.floatX)
        u = np.asarray(self.rng.randn(5), dtype=config.floatX)

        assert np.count_nonzero(f(a, u)) == 0
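Taken together, these examples exercise both halves of zero_grad's contract: it
is the identity on the forward pass and contributes a zero gradient on the
backward pass. A minimal standalone sketch of that behavior (assuming Aesara is
installed; the variable names here are illustrative, not from the tests above):

import aesara
import aesara.tensor as at
import numpy as np
from aesara.gradient import zero_grad

x = at.vector("x")
# Forward: zero_grad(x) evaluates to x, so y == sum(x * x).
# Backward: the zero_grad factor is treated as a constant under
# differentiation, so dy/dx == x rather than 2 * x.
y = (x * zero_grad(x)).sum()
g = aesara.grad(y, x)

f = aesara.function([x], [y, g])
a = np.array([1.0, 2.0, 3.0], dtype=aesara.config.floatX)
y_val, g_val = f(a)
assert np.allclose(y_val, (a * a).sum())
assert np.allclose(g_val, a)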