Example 1
def rec_conv_to_rv(v, replacements, model, rand_state=None):
    """Recursively convert a PyMC3 random variable to a Theano graph."""
    if v in replacements:
        # Already converted; follow the substitution chain.
        return walk(v, replacements)
    elif v.name and pm.util.is_transformed_name(v.name):
        # Transformed variables are converted via their untransformed
        # counterparts.
        untrans_name = pm.util.get_untransformed_name(v.name)
        v_untrans = getattr(model, untrans_name)

        rv_new = rec_conv_to_rv(v_untrans, replacements, model, rand_state=rand_state)
        replacements[v] = rv_new
        return rv_new
    elif hasattr(v, "distribution"):
        rv = pymc3_var_to_rv(v, rand_state=rand_state)

        rv_ins = []
        for i in tt_inputs([rv]):
            i_rv = rec_conv_to_rv(i, replacements, model, rand_state=rand_state)

            if i_rv is not None:
                replacements[i] = i_rv
                rv_ins.append(i_rv)
            else:
                rv_ins.append(i)

        _ = replace_input_nodes(rv_ins, [rv], memo=replacements, clone_inputs=False)

        rv_new = walk(rv, replacements)

        replacements[v] = rv_new

        return rv_new
    else:
        return None
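
For context, every example on this page leans on the same `walk` operation: follow a chain of substitutions until a non-variable (or unbound) term is reached. A minimal, self-contained sketch of that semantics:

# Minimal sketch of the chain-following `walk` used throughout
# these examples (miniKanren-style substitution lookup).
def walk(key, d):
    # Follow substitutions until `key` is no longer bound in `d`.
    while key in d:
        key = d[key]
    return key

s = {'x': 'y', 'y': 3}
assert walk('x', s) == 3      # 'x' -> 'y' -> 3
assert walk('z', s) == 'z'    # unbound: returned unchanged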
Example 2
def model_graph(pymc_model,
                output_vars=None,
                rand_state=None,
                attach_memo=True):
    """Convert a PyMC3 model into a Theano `FunctionGraph`.

    Parameters
    ----------
    pymc_model: `Model`
        A PyMC3 model object.
    output_vars: list (optional)
        Variables to use as `FunctionGraph` outputs.  If not specified,
        the model's observed random variables are used.
    rand_state: numpy RandomState (optional)
        When converting to `RandomVariable`s, use this random state object.
    attach_memo: boolean (optional)
        Add a property named `memo` to the returned `FunctionGraph` that
        contains the mappings between PyMC3 and `RandomVariable` terms.

    Returns
    -------
    out: `FunctionGraph`

    """
    model = pm.modelcontext(pymc_model)

    if output_vars is None:
        output_vars = list(model.observed_RVs)
    if rand_state is None:
        rand_state = theano.shared(np.random.RandomState())

    replacements = {}
    # First pass: convert each output, accumulating the replacements.
    for o in output_vars:
        _ = rec_conv_to_rv(o, replacements, model, rand_state=rand_state)

    output_vars = [walk(o, replacements) for o in output_vars]

    fg_features = [tt.opt.ShapeFeature()]
    model_fg = FunctionGraph(
        [i for i in tt_inputs(output_vars) if not isinstance(i, tt.Constant)],
        output_vars,
        clone=True,
        memo=replacements,
        features=fg_features,
    )
    if attach_memo:
        model_fg.memo = replacements

    return model_fg
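
A hypothetical usage sketch, assuming the symbolic-pymc project this function comes from and a trivial two-variable model:

# Hypothetical usage sketch for `model_graph`.
import numpy as np
import pymc3 as pm

with pm.Model() as model:
    mu = pm.Normal('mu', 0., 1.)
    pm.Normal('Y', mu, 1., observed=np.r_[1., 2.])

# With no `output_vars`, the model's observed RVs are used.
fgraph = model_graph(model)
print(fgraph.outputs)
print(fgraph.memo)  # PyMC3 term -> `RandomVariable` mappings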
Example 3
def constant_neq_goal(s):
    # `lvar` and `val` are closed over by the enclosing goal constructor.
    lvar_val = walk(lvar, s)
    if isinstance(lvar_val, (tt.Constant, TheanoMetaConstant)):
        # Succeed only when the constant's data differs from `val`.
        if lvar_val.data != val:
            yield s
    else:
        # Not (yet) a constant: the constraint can't fail here.
        yield s
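
Goals like this follow the raw miniKanren protocol: a goal maps a substitution to a stream of substitutions. A hypothetical sketch of the enclosing constructor this closure implies (the name `constant_neq` is an assumption), and of stepping the stream by hand:

# Hypothetical sketch of the enclosing goal constructor.
def constant_neq(lvar, val):
    def constant_neq_goal(s):
        # ... body as in the example above ...
        yield s
    return constant_neq_goal

goal = constant_neq('q', 1)
for s in goal({}):  # each yielded `s` is a consistent state
    print(s)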
Example 4
def _reify_ConstrainedState(u, S):
    u_res = walk(u, S.data)

    if u_res is u:
        # `u` is unbound; re-wrap it with its constraints.
        yield ConstrainedVar(u_res, S)
    else:
        # Delegate to the dispatched reifier for the walked result.
        yield _reify(u_res, S)
Example 5
def _goal(s):
    # `lvar` and `val` are closed over by the enclosing goal constructor.
    lvar_val = walk(lvar, s)
    if isinstance(lvar_val, (tt.Constant, MetaConstant)):
        data = lvar_val.data
        # Succeed only when the constant's data differs from `val`,
        # handling both array- and scalar-valued comparisons.
        if ((isinstance(val, np.ndarray) and not np.array_equal(data, val))
                or not all(np.atleast_1d(data) == val)):
            yield s
    else:
        # Not (yet) a constant: the constraint can't fail here.
        yield s
Example 6
def reify_stack(u, s):

    u_ = walk(u, s)

    if u_ is not u:
        # `u` was bound; keep resolving until a fixed point is reached.
        return reify_stack(u_, s)

    if isinstance(u_, (tuple, list)):
        # Reify every element of a nested structure.
        return type(u_)(reify_stack(i_u, s) for i_u in u_)

    return u_
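
A quick, hypothetical demonstration, assuming logic variables from the `unification` package:

# Hypothetical demonstration of `reify_stack`.
from unification import var

x, y = var('x'), var('y')
s = {x: (1, y), y: 2}

# Nested structures are rebuilt with every variable resolved.
assert reify_stack(x, s) == (1, 2)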
Example 7
def unify_stack(u, v, s):

    u = walk(u, s)
    v = walk(v, s)

    if u == v:
        return s
    if isvar(u):
        return assoc(s, u, v)
    if isvar(v):
        return assoc(s, v, u)

    if isinstance(u, (tuple, list)) and type(u) == type(v):
        # Sequences of different lengths can never unify.
        if len(u) != len(v):
            return False

        for i_u, i_v in zip(u, v):
            s = unify_stack(i_u, i_v, s)
            if s is False:
                return s

        return s

    return False
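
A quick, hypothetical demonstration, assuming `var`/`isvar` from the `unification` package and `assoc` from `toolz`:

# Hypothetical demonstration of `unify_stack`.
from unification import var

x, y = var('x'), var('y')

s = unify_stack((1, x, 3), (1, 2, y), {})
assert s[x] == 2 and s[y] == 3

# Mismatched constants cannot unify.
assert unify_stack((1, 2), (1, 3), {}) is False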
Example 8
def _unify_ConstrainedVar_object(u, v, s):
    u_w = walk(u, s)

    if isvar(v):
        v_w = walk(v, s)
    else:
        v_w = v

    if u_w == v_w:
        yield s
    elif isvar(u_w):
        # Binding a `ConstrainedVar` must not violate its constraint.
        if (not isvar(v_w) and isinstance(u_w, ConstrainedVar)
                and not u_w.constraint(eval_if_etuple(v_w))):
            yield False
            return
        yield assoc(s, u_w, v_w)
    elif isvar(v_w):
        if (not isvar(u_w) and isinstance(v_w, ConstrainedVar)
                and not v_w.constraint(eval_if_etuple(u_w))):
            yield False
            return
        yield assoc(s, v_w, u_w)
    else:
        # Neither side is a variable; defer to the generic `_unify`.
        yield _unify(u_w, v_w, s)
Example 9
def assocunify(u, v, s, eq=core.eq, n=None):
    """ Associative Unification

    See Also:
        eq_assoccomm
    """
    uop, uargs = op_args(u)
    vop, vargs = op_args(v)

    if not uop and not vop:
        res = unify(u, v, s)
        if res is not False:
            return (res, )  # TODO: iterate through all possibilities
        # Neither term has an operator and they don't unify: fail with
        # an empty stream instead of falling through (which would leave
        # `op`, `tail`, and `b` undefined below).
        return iter(())

    if uop and vop:
        s = unify(uop, vop, s)
        if s is False:
            return iter(())
        op = walk(uop, s)

        sm, lg = (uargs, vargs) if len(uargs) <= len(vargs) else (vargs, uargs)
        ops = assocsized(op, lg, len(sm))
        goal = condeseq([(eq, a, b) for a, b in zip(sm, lg2)] for lg2 in ops)
        return goaleval(goal)(s)

    if uop:
        op, tail = uop, uargs
        b = v
    if vop:
        op, tail = vop, vargs
        b = u

    ns = [n] if n else range(2, len(tail) + 1)
    knowns = (build(op, x) for n in ns for x in assocsized(op, tail, n))

    goal = condeseq([(core.eq, b, k)] for k in knowns)
    return goaleval(goal)(s)
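
The user-facing goal built on this machinery is `eq_assoccomm`; the classic example from the kanren README:

# Associative/commutative matching via the public API.
from kanren import run, var, fact
from kanren.assoccomm import eq_assoccomm as eq
from kanren.assoccomm import commutative, associative

add, mul = 'add', 'mul'
fact(commutative, mul)
fact(commutative, add)
fact(associative, mul)
fact(associative, add)

x, y = var('x'), var('y')
pattern = (mul, (add, 1, x), y)  # (1 + x) * y
expr = (mul, 2, (add, 3, 1))     # 2 * (3 + 1)
print(run(0, (x, y), eq(pattern, expr)))  # -> ((3, 2),)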
Example 10
def test_pymc_normal_model():
    """Conduct a more in-depth test of PyMC3/Theano conversions for a specific model."""
    tt.config.compute_test_value = 'ignore'

    mu_X = tt.dscalar('mu_X')
    sd_X = tt.dscalar('sd_X')
    mu_Y = tt.dscalar('mu_Y')
    mu_X.tag.test_value = np.array(0., dtype=tt.config.floatX)
    sd_X.tag.test_value = np.array(1., dtype=tt.config.floatX)
    mu_Y.tag.test_value = np.array(1., dtype=tt.config.floatX)

    # We need something that uses transforms...
    with pm.Model() as model:
        X_rv = pm.Normal('X_rv', mu_X, sd=sd_X)
        S_rv = pm.HalfCauchy('S_rv',
                             beta=np.array(0.5, dtype=tt.config.floatX))
        Y_rv = pm.Normal('Y_rv', X_rv * S_rv, sd=S_rv)
        Z_rv = pm.Normal('Z_rv', X_rv + Y_rv, sd=sd_X, observed=10.)

    fgraph = model_graph(model, output_vars=[Z_rv])

    Z_rv_tt = canonicalize(fgraph, return_graph=False)

    # Reuse the model's RNG object; otherwise the graph comparison
    # below will fail.
    rng = Z_rv_tt.owner.inputs[1].owner.inputs[-1]

    mu_X_ = mt.dscalar('mu_X')
    sd_X_ = mt.dscalar('sd_X')
    tt.config.compute_test_value = 'ignore'
    X_rv_ = mt.NormalRV(mu_X_, sd_X_, None, rng, name='X_rv')
    S_rv_ = mt.HalfCauchyRV(np.array(0., dtype=tt.config.floatX),
                            np.array(0.5, dtype=tt.config.floatX),
                            None,
                            rng,
                            name='S_rv')
    Y_rv_ = mt.NormalRV(mt.mul(X_rv_, S_rv_), S_rv_, None, rng, name='Y_rv')
    Z_rv_ = mt.NormalRV(mt.add(X_rv_, Y_rv_), sd_X, None, rng, name='Z_rv')
    obs_ = mt(Z_rv.observations)
    Z_rv_obs_ = mt.observed(obs_, Z_rv_)

    Z_rv_meta = mt(canonicalize(Z_rv_obs_.reify(), return_graph=False))

    assert mt(Z_rv_tt) == Z_rv_meta

    # Now, let's try that with multiple outputs.
    fgraph.disown()
    fgraph = model_graph(model, output_vars=[Y_rv, Z_rv])

    assert len(fgraph.variables) == 25

    Y_new_rv = walk(Y_rv, fgraph.memo)
    S_new_rv = walk(S_rv, fgraph.memo)
    X_new_rv = walk(X_rv, fgraph.memo)
    Z_new_rv = walk(Z_rv, fgraph.memo)

    # Make sure our new vars are actually in the graph and where
    # they should be.
    assert Y_new_rv == fgraph.outputs[0]
    assert Z_new_rv == fgraph.outputs[1]
    assert X_new_rv in fgraph.variables
    assert S_new_rv in fgraph.variables
    assert isinstance(Z_new_rv.owner.op, Observed)

    # Let's only look at the variables involved in the `Z_rv` subgraph.
    Z_vars = theano.gof.graph.variables(theano.gof.graph.inputs([Z_new_rv]),
                                        [Z_new_rv])

    # Let's filter for only the `RandomVariables` with names.
    Z_vars_count = Counter([
        n.name for n in Z_vars
        if n.name and n.owner and isinstance(n.owner.op, RandomVariable)
    ])

    # Each new RV should be present and only occur once.
    assert Y_new_rv.name in Z_vars_count.keys()
    assert X_new_rv.name in Z_vars_count.keys()
    assert Z_new_rv.owner.inputs[1].name in Z_vars_count.keys()
    assert all(v == 1 for v in Z_vars_count.values())
Example 11
def replace_input_nodes(inputs,
                        outputs,
                        replacements=None,
                        memo=None,
                        clone_inputs=True):
    """Recreate a graph, replacing input variables according to a given map.

    This is helpful if you want to replace the variable dependencies of
    an existing variable according to a `clone_get_equiv` map and/or
    replacement variables that already exist within a `FunctionGraph`.

    The latter is especially annoying, because you can't simply make a
    `FunctionGraph` for the variable to be adjusted and then use that to
    perform the replacement; if the variables to be replaced are already in
    a `FunctionGraph`, any such replacement will error out with "...these
    variables are already owned by another graph...".

    Parameters
    ----------
    inputs: list
        List of input nodes.
    outputs: list
        List of output nodes.  Everything between `inputs` and these `outputs`
        is the graph under consideration.
    replacements: dict (optional)
        A dictionary mapping existing nodes to their new ones.
        The values in this map are used instead of newly generated
        clones.  This dict is not altered.
    memo: dict (optional)
        A dictionary that is updated with the initial `replacements` and
        with the old-to-new mappings arising from each replacement.
        It serves the same role as `replacements`, but it is updated
        as elements are cloned.
    clone_inputs: bool (optional)
        If enabled, clone all the input nodes that aren't mapped in
        `replacements`.  These cloned nodes are mapped in `memo`, as well.

    Returns
    -------
    out: dict
        The updated `memo`.

    """
    if memo is None:
        memo = {}
    if replacements is not None:
        memo.update(replacements)
    for node in io_toposort(inputs, outputs):

        walked_inputs = []
        for i in node.inputs:
            if clone_inputs:
                # TODO: What if all the inputs are in the memo?
                walked_inputs.append(memo.setdefault(i, i.clone()))
            else:
                walked_inputs.append(walk(i, memo))

        if any(w != i for w, i in zip(walked_inputs, node.inputs)):
            new_node = node.clone_with_new_inputs(walked_inputs)

            memo.setdefault(node, new_node)
            for output, new_output in zip(node.outputs, new_node.outputs):
                memo.setdefault(output, new_output)
    return memo
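
A hypothetical sketch of using this to swap one input of an existing graph (all variable names here are assumptions):

# Hypothetical usage sketch for `replace_input_nodes`.
import theano.tensor as tt

x = tt.dscalar('x')
y = tt.dscalar('y')
z = x + y

x_new = tt.dscalar('x_new')
memo = replace_input_nodes([x, y], [z], replacements={x: x_new},
                           clone_inputs=False)
z_new = memo[z]  # the rebuilt output now depends on `x_new`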