Example #1
    def test_borrow_output(self):
        a = dmatrix()
        f = function([a], Out(a, borrow=False))
        o = np.ones((3, 3))
        assert o is not f(o)  # function no longer permits aliasing outputs to inputs

        f = function([a], Out(a * 4, borrow=False))
        o = np.ones((3, 3))
        four = f(o)
        assert np.all(four == 4)
        f(o + 0.1)  # should not clobber the memory used to store four
        assert np.all(four == 4)

        f = function([a],
                     Out(a * 4, borrow=True),
                     mode=Mode("c|py_nogc", "fast_run"))
        o = np.ones((3, 3))
        four = f(o)
        assert np.all(four == 4)
        f(o + 0.1)  # should clobber the memory used to store four
        if config.cxx:
            assert not np.all(four == 4)
        else:
            # The Elemwise.perform method doesn't reuse memory,
            # because some NumPy versions don't support that correctly.
            assert np.all(four == 4)
Example #2
def test_bug_2009_07_17_borrowed_output():
    # Regression test for a bug where output was borrowed by mistake.
    a = dmatrix()
    b = dmatrix()
    # The output should *NOT* be borrowed.
    g = function([a, b], Out(dot(a, b), borrow=False))

    x = np.zeros((1, 2))
    y = np.ones((2, 5))

    z = g(x, y)
    # print(z)  # Should be zero.
    x.fill(1)
    # print(g(x, y))  # Should be non-zero.
    # print(z)  # Should still be zero.
    assert np.linalg.norm(z) == 0

    # The code above was supposed to fail when it was written (or, more
    # accurately, on the next revision, i.e. when it was merged with the
    # rest of the code, i.e. on revision cac9c9e9f08e).
    # However, for some reason, it no longer fails at this revision.
    # Thus, a new test (below) was added that exhibits the same issue. Note
    # that it may be better to move it into the test_nnet.py test file if it
    # turns out the bug was caused by 'crossentropy_softmax_argmax_1hot_with_bias'
    # and was not a more general issue.
    test_output_activation_no_bias = dmatrix()
    test_b2 = dvector()
    test_target = ivector()
    nll_softmax_argmax = crossentropy_softmax_argmax_1hot_with_bias(
        test_output_activation_no_bias, test_b2, test_target)
    output = nll_softmax_argmax[1]
    g = function(
        [test_output_activation_no_bias, test_b2, test_target],
        Out(output, borrow=False),
    )

    a = np.zeros((1, 5))
    b = np.ones(5)
    c = np.zeros(1, dtype=np.int32)

    z = g(a, b, c)
    z_backup = copy.copy(z)
    id_z = id(z)
    # print(f"Output z after first call: {z}")
    a[0, 0] = 1
    id_other = id(g(a, b, c))
    # print(f"Output z after second call: {z}")
    # Ensure that calling the function again returns a pointer towards a new
    # array.
    assert id_z != id_other
    # Just to be 100% sure, ensure that z was not altered.
    assert (z == z_backup).all()
Example #3
    def test_constant_output(self):
        # Test that if the output is a constant, we respect the aesara memory interface
        f = function([], aet.constant([4]))
        # print f.maker.fgraph.toposort()
        out = f()
        assert (out == 4).all()
        out[0] = 3
        out2 = f()
        # If the following 2 asserts fail, it means Aesara broke its memory contract.
        assert out2 is not out
        assert (out2 == 4).all()

        # Test that if the output is a constant and borrow=True, we respect the Aesara memory interface
        f = function([], Out(aet.constant([4]), borrow=True))
        # print f.maker.fgraph.toposort()
        out = f()
        assert (out == 4).all()
        out[0] = 3
        out2 = f()

        if isinstance(get_default_mode(), DebugMode):
            # In DebugMode, optimizations based on borrowing the output are not implemented.
            assert (out2 == 4).all()
        else:
            assert out2 is out
            assert (out2 == 3).all()
Example #4
    def test_borrow_input(self):
        # Tests that the contract for io.In is respected. When borrow=False, it should be
        # impossible for outputs to be aliased to the input variables provided by the user,
        # either through a view-map or a destroy map. New tests should be added in the future
        # when borrow=True is implemented.

        a = dmatrix()
        aval = np.random.rand(3, 3)

        # when borrow=False, test that a destroy map cannot alias output to input
        f = function([In(a, borrow=False)], Out(a + 1, borrow=True))
        assert np.all(f(aval) == aval + 1)
        assert not np.may_share_memory(aval, f(aval))

        # when borrow=False, test that a viewmap cannot alias output to input
        f = function([In(a, borrow=False)], Out(a[0, :], borrow=True))
        assert np.all(f(aval) == aval[0, :])
        assert not np.may_share_memory(aval, f(aval))
Example #5
def rebuild_collect_shared(
    outputs,
    inputs=None,
    replace=None,
    updates=None,
    rebuild_strict=True,
    copy_inputs_over=True,
    no_default_updates=False,
    clone_inner_graphs=False,
):
    r"""Replace subgraphs of a computational graph.

    It returns a set of dictionaries and lists that collect information about
    the shared variables used in the graph; this info is required by `pfunc`.
    (A brief usage sketch appears after this function.)

    Parameters
    ----------
    outputs : list of Aesara Variables (or Aesara expressions)
        List of Aesara variables or expressions representing the outputs of the
        computational graph.
    inputs : list of Aesara Variables (or Aesara expressions)
        List of Aesara variables or expressions representing the inputs of the
        computational graph (or None).
    replace : dict
        Dictionary describing which subgraphs should be replaced by what.
        orig_value => new_value
    updates : dict
        Dictionary describing updates expressions for shared variables.
    rebuild_strict : bool
        Flag; if True, the type of all inputs must match the type of the
        original node's inputs.
    copy_inputs_over : bool
        Flag; if False it will clone inputs.
    no_default_updates : either bool or list of Variables
        If True, do not perform any automatic update on Variables.
        If False (default), perform them all.
        Else, perform automatic updates on all Variables that are neither in
        "updates" nor in "no_default_updates".
    clone_inner_graphs : bool
        If ``True``, clone `Op`\s that are subclasses of `HasInnerGraph` and their
        inner-graphs.

    """

    if isinstance(outputs, tuple):
        outputs = list(outputs)

    # This function implements functionality similar to `graph.clone`
    # and should eventually be merged with it
    clone_d = {}
    update_d = {}
    update_expr = []
    # list of shared inputs that are used as inputs of the graph
    shared_inputs = []

    def clone_v_get_shared_updates(v, copy_inputs_over):
        """
        Clones a variable and its inputs recursively until all are in clone_d.
        Also appends all shared variables met along the way to shared inputs,
        and their default_update (if applicable) to update_d and update_expr.

        """
        # this co-recurses with clone_a
        assert v is not None
        if v in clone_d:
            return clone_d[v]
        if v.owner:
            owner = v.owner
            if owner not in clone_d:
                for i in owner.inputs:
                    clone_v_get_shared_updates(i, copy_inputs_over)
                clone_node_and_cache(
                    owner,
                    clone_d,
                    strict=rebuild_strict,
                    clone_inner_graphs=clone_inner_graphs,
                )
            return clone_d.setdefault(v, v)
        elif isinstance(v, SharedVariable):
            if v not in shared_inputs:
                shared_inputs.append(v)
            if hasattr(v, "default_update"):
                # Check that v should not be excluded from the default
                # updates list
                if no_default_updates is False or (
                        isinstance(no_default_updates, list)
                        and v not in no_default_updates):
                    # Do not use default_update if a "real" update was
                    # provided
                    if v not in update_d:
                        v_update = v.type.filter_variable(v.default_update,
                                                          allow_convert=False)
                        if not v.type.is_super(v_update.type):
                            raise TypeError(
                                "An update must have a type compatible with "
                                "the original shared variable")
                        update_d[v] = v_update
                        update_expr.append((v, v_update))
        if not copy_inputs_over:
            return clone_d.setdefault(v, v.clone())
        else:
            return clone_d.setdefault(v, v)

    # initialize the clone_d mapping with the replace dictionary
    if replace is None:
        replace = []
    try:
        replace_pairs = list(replace.items())
    except Exception:
        replace_pairs = replace

    for v_orig, v_repl in replace_pairs:
        if not isinstance(v_orig, Variable):
            raise TypeError("`givens` keys must be Variables")
        if not isinstance(v_repl, Variable):
            v_repl = shared(v_repl)

        if v_orig in clone_d:
            raise AssertionError(
                "When using 'givens' or 'replace' with several "
                "(old_v, new_v) replacement pairs, you can not have a "
                "new_v variable depend on an old_v one. For instance, "
                "givens = {a:b, b:(a+1)} is not allowed. Here, the old_v "
                f"{v_orig} is used to compute other new_v's, but it is scheduled "
                f"to be replaced by {v_repl}.")

        clone_d[v_orig] = clone_v_get_shared_updates(v_repl, copy_inputs_over)

    if inputs is None:
        inputs = []

    def clone_inputs(i):
        if not copy_inputs_over:
            return clone_d.setdefault(i, i.clone())
        else:
            return clone_d.setdefault(i, i)

    input_variables = [clone_inputs(i) for i in inputs]

    # It was decided, as a first step, to prevent shared variables from
    # being used as function inputs. Although it is technically possible,
    # it is also not clear when/how to use the value of that shared
    # variable (is it a default? is it ignored? if the shared variable changes,
    # does that function default also change?).
    for v in input_variables:
        if isinstance(v, SharedVariable):
            raise TypeError(f"Cannot use a shared variable ({v}) as explicit "
                            "input. Consider substituting a non-shared"
                            " variable via the `givens` parameter")

    # Fill update_d and update_expr with provided updates
    if updates is None:
        updates = []
    for (store_into, update_val) in iter_over_pairs(updates):
        if not isinstance(store_into, SharedVariable):
            raise TypeError("update target must be a SharedVariable",
                            store_into)
        if store_into in update_d:
            raise ValueError(
                "this shared variable already has an update "
                "expression",
                (store_into, update_d[store_into]),
            )

        # filter_variable ensures smooth conversion of CPU types
        try:
            update_val = store_into.type.filter_variable(update_val,
                                                         allow_convert=False)
        except TypeError:
            err_msg = (
                "An update must have the same type as the"
                f" original shared variable (shared_var={store_into},"
                f" shared_var.type={store_into.type},"
                f" update_val={update_val}, update_val.type={getattr(update_val, 'type', None)})."
            )
            err_sug = ("If the difference is related to the broadcast pattern,"
                       " you can call the"
                       " tensor.unbroadcast(var, axis_to_unbroadcast[, ...])"
                       " function to remove broadcastable dimensions.")

            raise TypeError(err_msg, err_sug)
        assert store_into.type.is_super(update_val.type)

        update_d[store_into] = update_val
        update_expr.append((store_into, update_val))

    # Elements of "outputs" are here cloned to "cloned_outputs"
    if isinstance(outputs, list):
        cloned_outputs = []
        for v in outputs:
            if isinstance(v, Variable):
                cloned_v = clone_v_get_shared_updates(v, copy_inputs_over)
                cloned_outputs.append(cloned_v)
            elif isinstance(v, Out):
                cloned_v = clone_v_get_shared_updates(v.variable,
                                                      copy_inputs_over)
                cloned_outputs.append(Out(cloned_v, borrow=v.borrow))
            else:
                raise TypeError("Outputs must be aesara Variable or "
                                "Out instances. Received " + str(v) +
                                " of type " + str(type(v)))
            # computed_list.append(cloned_v)
    else:
        if isinstance(outputs, Variable):
            cloned_v = clone_v_get_shared_updates(outputs, copy_inputs_over)
            cloned_outputs = cloned_v
            # computed_list.append(cloned_v)
        elif isinstance(outputs, Out):
            cloned_v = clone_v_get_shared_updates(outputs.variable,
                                                  copy_inputs_over)
            cloned_outputs = Out(cloned_v, borrow=outputs.borrow)
            # computed_list.append(cloned_v)
        elif outputs is None:
            cloned_outputs = []  # TODO: get Function.__call__ to return None
        else:
            raise TypeError(
                "output must be an Aesara Variable or Out "
                "instance (or list of them)",
                outputs,
            )

    # Iterate over update_expr, cloning its elements, and updating
    # shared_inputs, update_d and update_expr from the SharedVariables
    # we discover.
    # If the variable to be updated is a shared variable not already
    # in shared_inputs, add it.
    # Note: we extend update_expr while iterating over it.

    i = 0
    while i < len(update_expr):
        v, v_update = update_expr[i]
        cloned_v_update = clone_v_get_shared_updates(v_update,
                                                     copy_inputs_over)
        update_d[v] = cloned_v_update
        if isinstance(v, SharedVariable) and v not in shared_inputs:
            shared_inputs.append(v)
        i += 1

    return (
        input_variables,
        cloned_outputs,
        [clone_d, update_d, update_expr, shared_inputs],
    )
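
A minimal usage sketch of `rebuild_collect_shared` (an illustration, not part of the original examples; it assumes `rebuild_collect_shared` is importable from `aesara.compile.function.pfunc`, and the variable names `x`, `s`, and `y` are made up):

import aesara
import aesara.tensor as aet
from aesara.compile.function.pfunc import rebuild_collect_shared

# Build a tiny graph with one explicit input and one shared variable.
x = aet.dscalar("x")
s = aesara.shared(1.0, name="s")
y = x + s

# Clone the graph and collect the shared-variable bookkeeping that `pfunc` needs.
inputs, outputs, (clone_d, update_d, update_expr, shared_inputs) = rebuild_collect_shared(
    [y], inputs=[x], updates=[(s, s + 1.0)]
)
# `shared_inputs` should now contain `s`, and `update_d[s]` should hold the
# cloned update expression for `s + 1.0`.
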
Example #6
def fn():
    x, s = scalars("xs")
    # `x` is required to compute the output but is not listed as an input,
    # so `function` is expected to raise a missing-input error
    function([s], Out(x))
Example #7
def fn():
    x, s = scalars("xs")
    # Ignore the unused input `s`, as it hides the other error:
    # `x` is still required by the output but is not provided as an input
    function([s], Out(x), on_unused_input="ignore")