Example #1
def test_reallocation():
    x = scalar("x")
    y = scalar("y")
    z = tanh(3 * x + y) + cosh(x + 5 * y)
    # The functionality is currently implemented only for the non-lazy, non-C VM.
    for linker in [
            VMLinker(allow_gc=False, lazy=False, use_cloop=False),
            VMLinker(allow_gc=True, lazy=False, use_cloop=False),
    ]:
        m = get_mode(Mode(linker=linker))
        m = m.excluding("fusion", "inplace")

        f = function([x, y], z, name="test_reduce_memory", mode=m)
        output = f(1, 2)
        assert output
        storage_map = f.fn.storage_map

        def check_storage(storage_map):
            for i in storage_map:
                if not isinstance(i, TensorConstant):
                    keys_copy = list(storage_map.keys())
                    keys_copy.remove(i)
                    for o in keys_copy:
                        if storage_map[i][0] and storage_map[i][0] is storage_map[o][0]:
                            return [True, storage_map[o][0]]
            return [False, None]

        assert check_storage(storage_map)[0]
        assert len({id(v) for v in storage_map.values()}) < len(storage_map)
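The assertions hold because, with "fusion" and "inplace" excluded, the VM can reuse the storage of intermediate results, so two distinct variables end up pointing at the same buffer. A minimal sketch of inspecting shared storage on a compiled function (assuming the same imports as the test above; the graph is illustrative):

    x = scalar("x")
    y = scalar("y")
    f = function([x, y], tanh(x) + tanh(y),
                 mode=Mode(linker=VMLinker(allow_gc=True, lazy=False, use_cloop=False)))
    f(1.0, 2.0)
    # Entries that print the same id() share one underlying storage cell.
    for var, cell in f.fn.storage_map.items():
        print(var, id(cell[0]))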
Example #2
    def test_not_lazy_if_inplace(self):
        # Tests that when the graph is big, the inplace optimization is
        # disabled to speed up graph optimization.
        x = vector("x", dtype=self.dtype)
        y = vector("y", dtype=self.dtype)
        c = iscalar("c")
        mode = get_mode(self.mode).excluding(
            # Exclude many optimizations to keep the graph big enough that
            # the inplace optimization gets disabled.
            "fusion",
            "local_add_canonizer",
            "inplace",
            "constant_folding",
            "constant_folding",
        )
        y2 = reduce(lambda x, y: x + y, [y] + list(range(200)))
        f = function([c, x, y], ifelse(c, x, y2), mode=mode)
        # Check that the ifelse node is not inplace (as_view is False)
        ifnode = [n for n in f.maker.fgraph.toposort() if isinstance(n.op, IfElse)]
        assert len(ifnode) == 1
        assert not ifnode[0].op.as_view
        rng = np.random.RandomState(utt.fetch_seed())

        xlen = rng.randint(200)
        ylen = rng.randint(200)

        vx = np.asarray(rng.uniform(size=(xlen,)), self.dtype)
        vy = np.asarray(rng.uniform(size=(ylen,)), self.dtype)

        assert np.allclose(vx, f(1, vx, vy))
        assert np.allclose(vy + sum(range(200)), f(0, vx, vy))
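For reference, the reduce call folds [y, 0, 1, ..., 199] with +, so y2 is simply y + sum(range(200)); building it as a 200-term chain is what keeps the graph large. A plain-Python sketch of the same left fold (self-contained):

    from functools import reduce

    # ((5 + 0) + 1) + ... + 199 is the same as 5 + sum(range(200))
    assert reduce(lambda a, b: a + b, [5] + list(range(200))) == 5 + sum(range(200))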
Example #3
def compile_rv_inplace(inputs, outputs, mode=None, **kwargs):
    """Use ``aesara.function`` with the random_make_inplace optimization always enabled.

    Using this function ensures that compiled functions containing random
    variables will produce new samples on each call.
    """
    mode = get_mode(mode)
    opt_qry = mode.provided_optimizer.including("random_make_inplace")
    mode = Mode(linker=mode.linker, optimizer=opt_qry)
    aesara_function = aesara.function(inputs, outputs, mode=mode, **kwargs)
    return aesara_function
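A minimal usage sketch (the graph below is an assumption for illustration; at is aesara.tensor): with random_make_inplace enabled, the RNG state held in the graph's shared variable is updated in place, so each call yields a fresh draw.

    import aesara.tensor as at

    rv = at.random.normal(0, 1)
    f = compile_rv_inplace([], rv)
    # Each call should return a new sample rather than repeating the first one.
    print(f(), f())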
Example #4
def compile_pymc(inputs, outputs, mode=None, **kwargs):
    """Use ``aesara.function`` with specialized pymc rewrites always enabled.

    Included rewrites
    -----------------
    random_make_inplace
        Ensures that compiled functions containing random variables will produce new
        samples on each call.
    local_check_parameter_to_ninf_switch
        Replaces Aeppl's CheckParameterValue assertions in logp expressions with Switches
        that return -inf in place of the assert.

    Optional rewrites
    -----------------
    local_remove_check_parameter
        Removes Aeppl's CheckParameterValue assertions from logp expressions. This is used
        as an alternative to the default local_check_parameter_to_ninf_switch whenever
        this function is called within a model context and the model `check_bounds` flag
        is set to False.
    """

    # Avoid circular dependency
    from pymc.distributions import NoDistribution

    # Set the default update of a NoDistribution RNG so that it is automatically
    # updated after every function call
    output_to_list = outputs if isinstance(outputs, list) else [outputs]
    for rv in (
        node
        for node in walk_model(output_to_list, walk_past_rvs=True)
        if node.owner and isinstance(node.owner.op, NoDistribution)
    ):
        rng = rv.owner.inputs[0]
        if not hasattr(rng, "default_update"):
            rng.default_update = rv.owner.outputs[0]

    # If called inside a model context, see if check_bounds flag is set to False
    try:
        from pymc.model import modelcontext

        model = modelcontext(None)
        check_bounds = model.check_bounds
    except TypeError:
        check_bounds = True
    check_parameter_opt = (
        "local_check_parameter_to_ninf_switch" if check_bounds else "local_remove_check_parameter"
    )

    mode = get_mode(mode)
    opt_qry = mode.provided_optimizer.including("random_make_inplace", check_parameter_opt)
    mode = Mode(linker=mode.linker, optimizer=opt_qry)
    aesara_function = aesara.function(inputs, outputs, mode=mode, **kwargs)
    return aesara_function
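A hedged usage sketch (pm.Normal.dist() is used here as an illustrative random variable): when no model context is active, modelcontext(None) raises TypeError, so check_bounds defaults to True and local_check_parameter_to_ninf_switch is selected.

    import pymc as pm

    x = pm.Normal.dist()
    f = compile_pymc([], x)  # outside a model context: check_bounds is True
    print(f(), f())          # fresh draws on each call via random_make_inplace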
Example #5
    def test_composite_c_code(self):
        """Make sure this `Op`'s `c_code` works within a `Composite`."""
        x = matrix("x")
        mode = get_mode("FAST_RUN").including("local_ultra_fast_sigmoid")
        f = aesara.function([x], sigmoid(x) + sigmoid(x + 1), mode=mode)
        topo = f.maker.fgraph.toposort()

        assert isinstance(topo[0].op, Elemwise)
        assert isinstance(topo[0].op.scalar_op, Composite)
        assert ultra_fast_scalar_sigmoid in {
            node.op for node in topo[0].op.scalar_op.fgraph.toposort()
        }
        assert len(topo) == 1
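For context, including does not mutate the mode it is called on; it returns a new mode whose optimizer query additionally applies the named rewrites. A minimal sketch with the same names as the test:

    base = get_mode("FAST_RUN")
    fast_mode = base.including("local_ultra_fast_sigmoid")
    # base is unchanged; fast_mode carries the extra rewrite.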
Example #6
def result(inp):
    dtype = inp.dtype
    ctx_name = _name_for_ctx(inp.context)
    key = (dtype, ctx_name)
    f = result.cache.get(key, None)
    if f is None:
        guard_in = GpuArrayType(str(dtype), (False,), context_name=ctx_name)()
        mode = get_mode("FAST_RUN").including("gpuarray")
        f = aesara.function([guard_in], op(guard_in), mode=mode, profile=False)
        result.cache[key] = f
    return f(inp)
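Note that result.cache is assumed to be initialized on the function object before the first call, e.g. right after the definition:

    # One compiled function is memoized per (dtype, context) pair.
    result.cache = {}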
Example #7
    def get_mode(self, excluding=None):
        """
        Return appropriate mode for the tests.

        :param excluding: List of optimizations to exclude.

        :return: The current default mode, unless the `config.mode` option is
            set to 'FAST_COMPILE' (in which case it is replaced by the
            'FAST_RUN' mode), without the optimizations specified in
            `excluding`.
        """
        if excluding is None:
            excluding = []
        m = config.mode
        if m == "FAST_COMPILE":
            mode = get_mode("FAST_RUN")
        else:
            mode = get_default_mode()
        if excluding:
            return mode.excluding(*excluding)
        else:
            return mode
Example #8
def compile_rv_inplace(inputs, outputs, mode=None, **kwargs):
    """Use ``aesara.function`` with the random_make_inplace optimization always enabled.

    Using this function ensures that compiled functions containing random
    variables will produce new samples on each call.
    """

    # Avoid circular dependency
    from pymc.distributions import NoDistribution

    # Set the default update of a NoDistribution RNG so that it is automatically
    # updated after every function call
    output_to_list = outputs if isinstance(outputs, list) else [outputs]
    for rv in (node for node in walk_model(output_to_list, walk_past_rvs=True)
               if node.owner and isinstance(node.owner.op, NoDistribution)):
        rng = rv.owner.inputs[0]
        if not hasattr(rng, "default_update"):
            rng.default_update = rv.owner.outputs[0]

    mode = get_mode(mode)
    opt_qry = mode.provided_optimizer.including("random_make_inplace")
    mode = Mode(linker=mode.linker, optimizer=opt_qry)
    aesara_function = aesara.function(inputs, outputs, mode=mode, **kwargs)
    return aesara_function
Example #9
def compile_pymc(
    inputs,
    outputs,
    random_seed: SeedSequenceSeed = None,
    mode=None,
    **kwargs,
) -> Callable[..., Union[np.ndarray, List[np.ndarray]]]:
    """Use ``aesara.function`` with specialized pymc rewrites always enabled.

    This function also ensures shared RandomState/Generator used by RandomVariables
    in the graph are updated across calls, to ensure independent draws.

    Parameters
    ----------
    inputs: list of TensorVariables, optional
        Inputs of the compiled Aesara function
    outputs: list of TensorVariables, optional
        Outputs of the compiled Aesara function
    random_seed: int, array-like of int or SeedSequence, optional
        Seed used to override any RandomState/Generator shared variables in the graph.
        If not specified, the value of original shared variables will still be overwritten.
    mode: optional
        Aesara mode used to compile the function

    Included rewrites
    -----------------
    random_make_inplace
        Ensures that compiled functions containing random variables will produce new
        samples on each call.
    local_check_parameter_to_ninf_switch
        Replaces Aeppl's CheckParameterValue assertions in logp expressions with Switches
        that return -inf in place of the assert.

    Optional rewrites
    -----------------
    local_remove_check_parameter
        Removes Aeppl's CheckParameterValue assertions from logp expressions. This is used
        as an alternative to the default local_check_parameter_to_ninf_switch whenever
        this function is called within a model context and the model `check_bounds` flag
        is set to False.
    """
    # Create an update mapping of RandomVariable's RNG so that it is automatically
    # updated after every function call
    rng_updates = {}
    output_to_list = outputs if isinstance(outputs, (list, tuple)) else [outputs]
    for random_var in (
        var
        for var in vars_between(inputs, output_to_list)
        if var.owner
        and isinstance(var.owner.op, (RandomVariable, MeasurableVariable))
        and var not in inputs
    ):
        if isinstance(random_var.owner.op, RandomVariable):
            rng = random_var.owner.inputs[0]
            if not hasattr(rng, "default_update"):
                rng_updates[rng] = random_var.owner.outputs[0]
            else:
                rng_updates[rng] = rng.default_update
        else:
            update_fn = getattr(random_var.owner.op, "update", None)
            if update_fn is not None:
                rng_updates.update(update_fn(random_var.owner))

    # We always reseed random variables as this provides RNGs with no chances of collision
    if rng_updates:
        reseed_rngs(rng_updates.keys(), random_seed)

    # If called inside a model context, see if check_bounds flag is set to False
    try:
        from pymc.model import modelcontext

        model = modelcontext(None)
        check_bounds = model.check_bounds
    except TypeError:
        check_bounds = True
    check_parameter_opt = ("local_check_parameter_to_ninf_switch"
                           if check_bounds else "local_remove_check_parameter")

    mode = get_mode(mode)
    opt_qry = mode.provided_optimizer.including("random_make_inplace", check_parameter_opt)
    mode = Mode(linker=mode.linker, optimizer=opt_qry)
    aesara_function = aesara.function(
        inputs,
        outputs,
        updates={**rng_updates, **kwargs.pop("updates", {})},
        mode=mode,
        **kwargs,
    )
    return aesara_function
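A hedged sketch of the seeding behavior (pm.Normal.dist() is again illustrative): within one compiled function the RNG update mapping yields a new draw per call, while recompiling with the same random_seed reseeds the shared RNG to the same starting state.

    import pymc as pm

    x = pm.Normal.dist()
    f = compile_pymc([], x, random_seed=42)
    a, b = f(), f()  # two distinct draws
    g = compile_pymc([], x, random_seed=42)
    print(g(), a)    # reseeding should reproduce the first draw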
Example #10
def compile_pymc(
    inputs, outputs, mode=None, **kwargs
) -> Callable[..., Union[np.ndarray, List[np.ndarray]]]:
    """Use ``aesara.function`` with specialized pymc rewrites always enabled.

    Included rewrites
    -----------------
    random_make_inplace
        Ensures that compiled functions containing random variables will produce new
        samples on each call.
    local_check_parameter_to_ninf_switch
        Replaces Aeppl's CheckParameterValue assertions in logp expressions with Switches
        that return -inf in place of the assert.

    Optional rewrites
    -----------------
    local_remove_check_parameter
        Removes Aeppl's CheckParameterValue assertions from logp expressions. This is used
        as an alternative to the default local_check_parameter_to_ninf_switch whenever
        this function is called within a model context and the model `check_bounds` flag
        is set to False.
    """
    # Create an update mapping of RandomVariable's RNG so that it is automatically
    # updated after every function call
    # TODO: This won't work for variables with InnerGraphs (Scan and OpFromGraph)
    rng_updates = {}
    output_to_list = outputs if isinstance(outputs, (list, tuple)) else [outputs]
    for random_var in (
        var
        for var in vars_between(inputs, output_to_list)
        if var.owner
        and isinstance(var.owner.op, (RandomVariable, MeasurableVariable))
        and var not in inputs
    ):
        if isinstance(random_var.owner.op, RandomVariable):
            rng = random_var.owner.inputs[0]
            if not hasattr(rng, "default_update"):
                rng_updates[rng] = random_var.owner.outputs[0]
        else:
            update_fn = getattr(random_var.owner.op, "update", None)
            if update_fn is not None:
                rng_updates.update(update_fn(random_var.owner))

    # If called inside a model context, see if check_bounds flag is set to False
    try:
        from pymc.model import modelcontext

        model = modelcontext(None)
        check_bounds = model.check_bounds
    except TypeError:
        check_bounds = True
    check_parameter_opt = (
        "local_check_parameter_to_ninf_switch" if check_bounds else "local_remove_check_parameter"
    )

    mode = get_mode(mode)
    opt_qry = mode.provided_optimizer.including("random_make_inplace", check_parameter_opt)
    mode = Mode(linker=mode.linker, optimizer=opt_qry)
    aesara_function = aesara.function(
        inputs,
        outputs,
        updates={**rng_updates, **kwargs.pop("updates", {})},
        mode=mode,
        **kwargs,
    )
    return aesara_function