Code Example #1
File: model_graph.py  Project: sthagen/pymc3
    def vars_to_plot(
            self,
            var_names: Optional[Iterable[VarName]] = None) -> List[VarName]:
        if var_names is None:
            return self._all_var_names

        selected_names = set(var_names)

        # .copy() because sets cannot change in size during iteration
        for var_name in selected_names.copy():
            if var_name not in self._all_var_names:
                raise ValueError(f"{var_name} is not in this model.")

            for model_var in self.var_list:
                if hasattr(model_var.tag, "observations"):
                    if model_var.tag.observations == self.model[var_name]:
                        selected_names.add(model_var.name)

        selected_ancestors = set(
            filter(
                lambda rv: rv.name in self._all_var_names,
                list(
                    ancestors([
                        self.model[var_name] for var_name in selected_names
                    ])),
            ))

        for var in selected_ancestors.copy():
            if hasattr(var.tag, "observations"):
                selected_ancestors.add(var.tag.observations)

        # ordering of self._all_var_names is important
        return [var.name for var in selected_ancestors]
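Every snippet on this page uses ancestors from aesara.graph.basic (or its theano-era equivalent), which walks a graph upward from the given output variables and yields each variable it reaches, the outputs themselves included. A minimal sketch of that behavior, assuming only that aesara is installed:

import aesara.tensor as at
from aesara.graph.basic import ancestors

x = at.scalar("x")
y = at.scalar("y")
z = (x + y) * 2

# `ancestors` yields named inputs, unnamed intermediates, and constants alike;
# filtering on `name` keeps just the leaf variables created above.
named = {v.name for v in ancestors([z]) if v.name is not None}
assert named == {"x", "y"}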
Code Example #2
File: test_smc.py  Project: t-triobox/pymc3
def count_rvs(end_node):
    return len(
        [
            node
            for node in ancestors([end_node])
            if node.owner is not None and isinstance(node.owner.op, RandomVariable)
        ]
    )
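For illustration, a hypothetical check of what count_rvs measures, assuming aesara's builtin normal RandomVariable (count_rvs itself needs ancestors and RandomVariable in scope, as in the test module above):

from aesara.tensor.random.basic import normal

x = normal(0, 1)
y = normal(x, 1)  # the second draw depends on the first

# Both draw nodes sit among the ancestors of the sum, so both are counted.
assert count_rvs(y + 1) == 2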
Code Example #3
File: test_basic.py  Project: mgorny/aesara
def test_ancestors():

    r1, r2, r3 = MyVariable(1), MyVariable(2), MyVariable(3)
    o1 = MyOp(r1, r2)
    o1.name = "o1"
    o2 = MyOp(r3, o1)
    o2.name = "o2"

    res = ancestors([o2], blockers=None)
    res_list = list(res)
    assert res_list == [o2, r3, o1, r1, r2]

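    # `ancestors` returns a generator: the membership test below consumes it up
    # to and including `r3`, so only the ancestors yielded after `r3` remain.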
    res = ancestors([o2], blockers=None)
    assert r3 in res
    res_list = list(res)
    assert res_list == [o1, r1, r2]

    res = ancestors([o2], blockers=[o1])
    res_list = list(res)
    assert res_list == [o2, r3, o1]
Code Example #4
    def run(replay, log=None):

        if not replay:
            log = StringIO()
        else:
            log = StringIO(log)
        record = Record(replay=replay, file_object=log)

        disturb_mem()

        mode = RecordMode(record=record)

        b = sharedX(np.zeros((2, )), name="b")
        channels = OrderedDict()

        disturb_mem()

        v_max = b.max(axis=0)
        v_min = b.min(axis=0)
        v_range = v_max - v_min

        updates = []
        for i, val in enumerate([
                v_max.max(),
                v_max.min(),
                v_range.max(),
        ]):
            disturb_mem()
            s = sharedX(0.0, name="s_" + str(i))
            updates.append((s, val))

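        # Drop every ancestor's name except "b" and two-character names starting
        # with "s", so stray names cannot perturb the recorded graph.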
        for var in basic.ancestors(update for _, update in updates):
            if var.name is not None and var.name != "b":
                if var.name[0] != "s" or len(var.name) != 2:
                    var.name = None

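        # `channels` was left empty above, so this loop appends nothing.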
        for key in channels:
            updates.append((s, channels[key]))
        f = function([],
                     mode=mode,
                     updates=updates,
                     on_unused_input="ignore",
                     name="f")
        for output in f.maker.fgraph.outputs:
            mode.record.handle_line(var_descriptor(output) + "\n")
        disturb_mem()
        f()

        mode.record.f.flush()

        if not replay:
            return log.getvalue()
Code Example #5
def test_change_rv_size():
    loc = at.as_tensor_variable([1, 2])
    rv = normal(loc=loc)
    assert rv.ndim == 1
    assert tuple(rv.shape.eval()) == (2, )

    with pytest.raises(ShapeError, match="must be ≤1-dimensional"):
        change_rv_size(rv, new_size=[[2, 3]])
    with pytest.raises(ShapeError, match="must be ≤1-dimensional"):
        change_rv_size(rv, new_size=at.as_tensor_variable([[2, 3], [4, 5]]))

    rv_new = change_rv_size(rv, new_size=(3, ), expand=True)
    assert rv_new.ndim == 2
    assert tuple(rv_new.shape.eval()) == (3, 2)

    # Make sure that the shape used to determine the expanded size doesn't
    # depend on the old `RandomVariable`.
    rv_new_ancestors = set(ancestors((rv_new, )))
    assert loc in rv_new_ancestors
    assert rv not in rv_new_ancestors

    rv_newer = change_rv_size(rv_new, new_size=(4, ), expand=True)
    assert rv_newer.ndim == 3
    assert tuple(rv_newer.shape.eval()) == (4, 3, 2)

    # Make sure we avoid introducing a `Cast` by converting the new size before
    # constructing the new `RandomVariable`
    rv = normal(0, 1)
    new_size = np.array([4, 3], dtype="int32")
    rv_newer = change_rv_size(rv, new_size=new_size, expand=False)
    assert rv_newer.ndim == 2
    assert isinstance(rv_newer.owner.inputs[1], Constant)
    assert tuple(rv_newer.shape.eval()) == (4, 3)

    rv = normal(0, 1)
    new_size = at.as_tensor(np.array([4, 3], dtype="int32"))
    rv_newer = change_rv_size(rv, new_size=new_size, expand=True)
    assert rv_newer.ndim == 2
    assert tuple(rv_newer.shape.eval()) == (4, 3)

    rv = normal(0, 1)
    new_size = at.as_tensor(2, dtype="int32")
    rv_newer = change_rv_size(rv, new_size=new_size, expand=True)
    assert rv_newer.ndim == 1
    assert tuple(rv_newer.shape.eval()) == (2, )
Code Example #6
    def test_upstream_rngs_not_in_compiled_logp(self):
        smc = IMH(model=self.SMABC_test)
        smc.initialize_population()
        smc._initialize_kernel()
        likelihood_func = smc.likelihood_logp_func

        # Test graph is stochastic
        inarray = floatX(np.array([0, 0]))
        assert likelihood_func(inarray) != likelihood_func(inarray)

        # Test only one shared RNG is present
        compiled_graph = likelihood_func.maker.fgraph.outputs
        shared_rng_vars = [
            node for node in ancestors(compiled_graph)
            if isinstance(node, (RandomStateSharedVariable,
                                 RandomGeneratorSharedVariable))
        ]
        assert len(shared_rng_vars) == 1
Code Example #7
def assert_no_rvs(var):
    assert not any(
        isinstance(v.owner.op, RandomVariable) for v in ancestors([var]) if v.owner
    )
    return var
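A hypothetical usage sketch, again assuming aesara's builtin normal RandomVariable; the helper passes RV-free graphs through unchanged and trips on graphs that still contain a draw:

import aesara.tensor as at
from aesara.tensor.random.basic import normal

x = at.scalar("x")
assert_no_rvs(x + 1)  # passes: no RandomVariable among the ancestors

try:
    assert_no_rvs(normal(0, 1) + x)
except AssertionError:
    print("graph still contains a RandomVariable")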
Code Example #8
def compile_pymc(
        inputs,
        outputs,
        mode=None,
        **kwargs) -> Callable[..., Union[np.ndarray, List[np.ndarray]]]:
    """Use ``aesara.function`` with specialized pymc rewrites always enabled.

    Included rewrites
    -----------------
    random_make_inplace
        Ensures that compiled functions containing random variables will produce new
        samples on each call.
    local_check_parameter_to_ninf_switch
        Replaces Aeppl's CheckParameterValue assertions in logp expressions with
        switches that return -inf in place of the assert.

    Optional rewrites
    -----------------
    local_remove_check_parameter
        Removes Aeppl's CheckParameterValue assertions from logp expressions. This
        is used as an alternative to the default local_check_parameter_to_ninf_switch
        whenever this function is called within a model context and the model's
        `check_bounds` flag is set to False.
    """
    # Create an update mapping of RandomVariable's RNG so that it is automatically
    # updated after every function call
    # TODO: This won't work for variables with InnerGraphs (Scan and OpFromGraph)
    rng_updates = {}
    output_to_list = outputs if isinstance(outputs,
                                           (list, tuple)) else [outputs]
    for rv in (node for node in ancestors(output_to_list)
               if node.owner and isinstance(node.owner.op, RandomVariable)):
        rng = rv.owner.inputs[0]
        if not hasattr(rng, "default_update"):
            rng_updates[rng] = rv.owner.outputs[0]

    # If called inside a model context, see if check_bounds flag is set to False
    try:
        from pymc.model import modelcontext

        model = modelcontext(None)
        check_bounds = model.check_bounds
    except TypeError:
        check_bounds = True
    check_parameter_opt = ("local_check_parameter_to_ninf_switch"
                           if check_bounds else "local_remove_check_parameter")

    mode = get_mode(mode)
    opt_qry = mode.provided_optimizer.including("random_make_inplace",
                                                check_parameter_opt)
    mode = Mode(linker=mode.linker, optimizer=opt_qry)
    aesara_function = aesara.function(
        inputs,
        outputs,
        updates={
            **rng_updates,
            **kwargs.pop("updates", {})
        },
        mode=mode,
        **kwargs,
    )
    return aesara_function
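A minimal usage sketch, assuming a PyMC v4 model context; the model and variable names are illustrative, not part of the source above:

import numpy as np
import pymc as pm

with pm.Model(check_bounds=False) as model:
    x = pm.Normal("x")
    # Inside the context, compile_pymc picks up check_bounds=False and applies
    # local_remove_check_parameter; the RNG update it registers makes every
    # call produce a fresh draw.
    draw_fn = compile_pymc([], x)

draws = [draw_fn() for _ in range(3)]
assert len(np.unique(draws)) > 1  # stochastic across calls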