Example #1
def __init__(self, raw_dist, raw_params, expected_value_domain, xfail_reason=""):
    self.raw_dist = raw_dist
    self.raw_params = raw_params
    self.expected_value_domain = expected_value_domain
    for name, raw_param in self.raw_params:
        if get_backend() != "numpy":
            # we need direct access to these tensors for gradient tests
            setattr(self, name, eval(raw_param))
    TEST_CASES.append(self if not xfail_reason else xfail_param(self, reason=xfail_reason))
Example #2
EINSUM_EXAMPLES = [
    ('i->', 'i'),
    (',i->', 'i'),
    ('ai->', 'i'),
    (',ai,abij->', 'ij'),
    ('a,ai,bij->', 'ij'),
    ('ai,abi,bci,cdi->', 'i'),
    ('aij,abij,bcij->', 'ij'),
    ('a,abi,bcij,cdij->', 'ij'),
]


@pytest.mark.parametrize('equation,plates', EINSUM_EXAMPLES)
@pytest.mark.parametrize('backend', ['torch', 'pyro.ops.einsum.torch_log'])
@pytest.mark.parametrize('einsum_impl,same_lazy', [
    (einsum, True),
    (einsum, xfail_param(False, reason="nested interpreters?")),
    (naive_plated_einsum, True),
    (naive_plated_einsum, False)
])
def test_einsum_complete_sharing(equation, plates, backend, einsum_impl, same_lazy):
    inputs, outputs, sizes, operands, funsor_operands = make_einsum_example(equation)

    with interpretation(reflect):
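        # reflect delays evaluation, so these einsum calls only build lazy expressions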
        lazy_expr1 = einsum_impl(equation, *funsor_operands, backend=backend, plates=plates)
        lazy_expr2 = lazy_expr1 if same_lazy else \
            einsum_impl(equation, *funsor_operands, backend=backend, plates=plates)

    with memoize():
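        # memoize caches reinterpretation, so shared subexpressions are reused between expr1 and expr2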
        expr1 = reinterpret(lazy_expr1)
        expr2 = reinterpret(lazy_expr2)
    expr3 = reinterpret(lazy_expr1)
Example #3
    call_count += 1
    batch_shape = gate_rate.shape[:-1]
    event_shape = (2, num_origins, num_destins)
    gate, rate = gate_rate.reshape(batch_shape + event_shape).unbind(-3)
    rate = bounded_exp(rate, bound=1e4)
    gate = torch.stack((torch.zeros_like(gate), gate), dim=-1)
    return gate, rate


unpack_gate_rate_0 = unpack_gate_rate[0].fn
unpack_gate_rate_1 = unpack_gate_rate[1].fn


@pytest.mark.parametrize('analytic_kl', [
    False,
    xfail_param(True, reason="missing pattern"),
], ids=['monte-carlo-kl', 'analytic-kl'])
def test_bart(analytic_kl):
    global call_count
    call_count = 0

    with interpretation(reflect):
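        # build the guide's distribution as a lazy (reflected) funsor expression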
        q = Independent(
            Independent(
                Contraction(
                    ops.nullop,
                    ops.add,
                    frozenset(),
                    (
                        Tensor(
Example #4
    "a,b->",
    "ab,a->",
    "a,b,c->",
    "a,a->",
    "a,a,a,ab->",
    "abc,bcd,cde->",
    "ab,bc,cd->",
    "ab,b,bc,c,cd,d->",
]


@pytest.mark.parametrize('einsum_impl', [naive_einsum, einsum])
@pytest.mark.parametrize('equation', EINSUM_EXAMPLES)
@pytest.mark.parametrize('backend', [
    'pyro.ops.einsum.torch_marginal',
    xfail_param('pyro.ops.einsum.torch_map', reason="wrong adjoint"),
])
def test_einsum_adjoint(einsum_impl, equation, backend):
    inputs, outputs, sizes, operands, funsor_operands = make_einsum_example(
        equation)
    sum_op, prod_op = BACKEND_ADJOINT_OPS[backend]

    with AdjointTape() as tape:  # interpretation(reflect):
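        # AdjointTape records the forward einsum so per-operand adjoints can be computed below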
        fwd_expr = einsum_impl(equation, *funsor_operands, backend=backend)
    actuals = tape.adjoint(sum_op, prod_op, fwd_expr, funsor_operands)

    for operand in operands:
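        # mark this operand so Pyro's einsum records backward (adjoint) results for it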
        pyro_require_backward(operand)
    expected_out = pyro_einsum(equation,
                               *operands,
                               modulo_total=True,
Example #5
    def guide():
        q = pyro.param("q",
                       torch.randn(3).exp(),
                       constraint=constraints.simplex)
        pyro.sample("x", dist.Categorical(q))

    with pyro_backend(backend):
        Elbo = infer.JitTrace_ELBO if jit else infer.Trace_ELBO
        elbo = Elbo(ignore_jit_warnings=True)
        assert_ok(model, guide, elbo)


@pytest.mark.parametrize("backend", [
    "pyro",
    xfail_param("funsor", reason="missing patterns"),
])
def test_mean_field_ok(backend):
    def model():
        x = pyro.sample("x", dist.Normal(0., 1.))
        pyro.sample("y", dist.Normal(x, 1.))

    def guide():
        loc = pyro.param("loc", torch.tensor(0.))
        x = pyro.sample("x", dist.Normal(loc, 1.))
        pyro.sample("y", dist.Normal(x, 1.))

    with pyro_backend(backend):
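        # the mean-field ELBO should run without error under this backend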
        elbo = infer.TraceMeanField_ELBO()
        assert_ok(model, guide, elbo)
Example #6

def backend_to_einsum_backends(backend):
    backends = [
        BACKEND_TO_EINSUM_BACKEND[get_backend()],
        BACKEND_TO_LOGSUMEXP_BACKEND[get_backend()]
    ]
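    # NOTE: the backend argument is unused; only the first (plain einsum) backend is exercised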
    return backends[:1]


@pytest.mark.parametrize('equation,plates', EINSUM_EXAMPLES)
@pytest.mark.parametrize('backend', backend_to_einsum_backends(get_backend()))
@pytest.mark.parametrize(
    'einsum_impl,same_lazy',
    [(einsum, True),
     (einsum, xfail_param(False, reason="nested interpreters?")),
     (naive_plated_einsum, True), (naive_plated_einsum, False)])
def test_einsum_complete_sharing(equation, plates, backend, einsum_impl,
                                 same_lazy):
    inputs, outputs, sizes, operands, funsor_operands = make_einsum_example(
        equation)

    with interpretation(reflect):
        lazy_expr1 = einsum_impl(equation,
                                 *funsor_operands,
                                 backend=backend,
                                 plates=plates)
        lazy_expr2 = lazy_expr1 if same_lazy else \
            einsum_impl(equation, *funsor_operands, backend=backend, plates=plates)

    with memoize():
Example #7
    inputs = OrderedDict((k, bint(v)) for k, v in zip(batch_dims, batch_shape))

    concentration = Tensor(torch.rand(batch_shape), inputs)
    rate = Tensor(torch.rand(batch_shape), inputs)
    funsor_dist = (dist.Gamma if reparametrized else
                   dist.NonreparameterizedGamma)(concentration, rate)

    _check_sample(funsor_dist,
                  sample_inputs,
                  inputs,
                  num_samples=200000,
                  atol=5e-2 if reparametrized else 1e-1)


@pytest.mark.parametrize(
    "with_lazy", [True, xfail_param(False, reason="missing pattern")])
@pytest.mark.parametrize('sample_inputs', [(), ('ii', ), ('ii', 'jj'),
                                           ('ii', 'jj', 'kk')])
@pytest.mark.parametrize('batch_shape', [(), (5, ), (2, 3)], ids=str)
@pytest.mark.parametrize('reparametrized', [True, False])
def test_normal_sample(with_lazy, batch_shape, sample_inputs, reparametrized):
    batch_dims = ('i', 'j', 'k')[:len(batch_shape)]
    inputs = OrderedDict((k, bint(v)) for k, v in zip(batch_dims, batch_shape))

    loc = Tensor(torch.randn(batch_shape), inputs)
    scale = Tensor(torch.rand(batch_shape), inputs)
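    # with_lazy controls whether the distribution is constructed lazily or eagerly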
    with interpretation(lazy if with_lazy else eager):
        funsor_dist = (dist.Normal if reparametrized else
                       dist.NonreparameterizedNormal)(loc, scale)

    _check_sample(funsor_dist,
Example #8
@pytest.mark.parametrize('batch_shape', [(), (5,), (2, 3)], ids=str)
@pytest.mark.parametrize('reparametrized', [True, False])
def test_gamma_sample(batch_shape, sample_inputs, reparametrized):
    batch_dims = ('i', 'j', 'k')[:len(batch_shape)]
    inputs = OrderedDict((k, Bint[v]) for k, v in zip(batch_dims, batch_shape))

    concentration = rand(batch_shape)
    rate = rand(batch_shape)
    funsor_dist_class = (dist.Gamma if reparametrized else dist.NonreparameterizedGamma)
    params = (concentration, rate)

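    # draw many samples and compare their statistics against the distribution within atol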
    _check_sample(funsor_dist_class, params, sample_inputs, inputs, num_samples=200000,
                  atol=5e-2 if reparametrized else 1e-1)


@pytest.mark.parametrize("with_lazy", [True, xfail_param(False, reason="missing pattern")])
@pytest.mark.parametrize('sample_inputs', [(), ('ii',), ('ii', 'jj'), ('ii', 'jj', 'kk')])
@pytest.mark.parametrize('batch_shape', [(), (5,), (2, 3)], ids=str)
@pytest.mark.parametrize('reparametrized', [True, False])
def test_normal_sample(with_lazy, batch_shape, sample_inputs, reparametrized):
    batch_dims = ('i', 'j', 'k')[:len(batch_shape)]
    inputs = OrderedDict((k, Bint[v]) for k, v in zip(batch_dims, batch_shape))

    loc = randn(batch_shape)
    scale = rand(batch_shape)
    funsor_dist_class = (dist.Normal if reparametrized else dist.NonreparameterizedNormal)
    params = (loc, scale)

    _check_sample(funsor_dist_class, params, sample_inputs, inputs, num_samples=200000,
                  atol=1e-2 if reparametrized else 1e-1, with_lazy=with_lazy)