Example #1
def test_stochasticoptimization():

    # This optimization alternates between triggering and not triggering on
    # successive calls, so repeated optimization passes produce different graphs.

    last_time_replaced = [False]

    @local_optimizer([add])
    def insert_broken_add_sometimes(fgraph, node):
        if node.op == add:
            last_time_replaced[0] = not last_time_replaced[0]
            if last_time_replaced[0]:
                return [off_by_half(*node.inputs)]
        return False

    edb = EquilibriumDB()
    edb.register("insert_broken_add_sometimes", insert_broken_add_sometimes,
                 "all")
    opt = edb.query("+all")

    a = dvector()
    b = dvector()

    with pytest.raises(StochasticOrder):
        aesara.function(
            [a, b],
            add(a, b),
            mode=DebugMode(
                optimizer=opt,
                check_c_code=True,
                stability_patience=max(2, config.DebugMode__patience),
            ),
        )
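
This test builds a deliberately non-deterministic local optimizer and expects DebugMode to raise StochasticOrder when re-running the optimization yields a different graph; forcing stability_patience to at least 2 is what makes DebugMode optimize more than once and compare the results. The snippet omits its imports; the sketch below is a plausible set, assuming the older Aesara module layout from before "optimizations" were renamed to "rewrites", so the exact paths may differ in other versions. Note that off_by_half is a deliberately broken add-like Op defined elsewhere in the test module; it is not part of Aesara's API.

import pytest

import aesara
from aesara import config
from aesara.compile.debugmode import DebugMode, StochasticOrder
from aesara.graph.opt import local_optimizer  # assumed pre-rename path
from aesara.graph.optdb import EquilibriumDB  # assumed pre-rename path
from aesara.tensor import add, dvector
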
Example #2
    def test_softmax_optimizations_w_bias2(self):
        x = matrix("x")
        b = vector("b")
        c = vector("c")
        one_of_n = lvector("one_of_n")
        op = crossentropy_categorical_1hot

        fgraph = FunctionGraph([x, b, c, one_of_n],
                               [op(softmax_legacy(add(x, b, c)), one_of_n)])
        assert fgraph.outputs[0].owner.op == op

        optdb.query(OPT_FAST_RUN).optimize(fgraph)

        assert len(fgraph.toposort()) == 2
        assert (
            fgraph.outputs[0].owner.op
            == crossentropy_softmax_argmax_1hot_with_bias
        )
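
After the fast_run rewrites, the graph of crossentropy_categorical_1hot applied to a biased softmax collapses to two apply nodes, with the fused crossentropy_softmax_argmax_1hot_with_bias op producing the output, which is exactly what the two asserts check. The imports the snippet assumes are sketched below; the module paths are a best guess for the same pre-rename Aesara layout and may vary by version.

from aesara.compile.mode import OPT_FAST_RUN, optdb  # assumed location of the global optimizer database
from aesara.graph.fg import FunctionGraph
from aesara.tensor import add
from aesara.tensor.nnet.basic import (  # assumed path; these ops have moved between modules
    crossentropy_categorical_1hot,
    crossentropy_softmax_argmax_1hot_with_bias,
    softmax_legacy,
)
from aesara.tensor.type import lvector, matrix, vector
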
Example #3
    def test_logsoftmax_grad_true_div_elemwise(self):
        # Check that the gradient of an expression similar to log(softmax(x))
        # is not optimized when the elemwise operation feeding the softmax
        # gradient is something other than true_div.

        x = matrix("x")
        y = log(softmax(x))
        g = grad(y.sum(), x)

        softmax_grad_node = g.owner
        assert softmax_grad_node.op == softmax_grad_legacy
        true_div_node = softmax_grad_node.inputs[0].owner
        assert true_div_node.op == true_div

        # Replace the elemwise true_div op with an elemwise add so the
        # gradient no longer matches the log-softmax rewrite pattern.
        new_g = softmax_grad_legacy(add(*true_div_node.inputs),
                                    softmax_grad_node.inputs[1])

        fgraph = FunctionGraph([x], [new_g])
        optdb.query(OPT_FAST_RUN).optimize(fgraph)

        assert softmax_grad_legacy in [n.op for n in fgraph.toposort()]
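
Because the rewritten expression no longer matches the true_div pattern, the specialized log-softmax gradient rewrite leaves softmax_grad_legacy in the optimized graph, which the final assert verifies. Beyond FunctionGraph, optdb, and OPT_FAST_RUN from the previous example, the snippet assumes imports along these lines (again, module paths are assumptions for the pre-rename Aesara layout):

from aesara.gradient import grad
from aesara.tensor import add, log, true_div
from aesara.tensor.nnet.basic import softmax, softmax_grad_legacy  # assumed path
from aesara.tensor.type import matrix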