Example #1
def model_5(sequences, lengths, args, batch_size=None, include_prior=True):
    with ignore_jit_warnings():
        num_sequences, max_length, data_dim = map(int, sequences.shape)
        assert lengths.shape == (num_sequences, )
        assert lengths.max() <= max_length

    # Initialize a global module instance if needed.
    global tones_generator
    if tones_generator is None:
        tones_generator = TonesGenerator(args, data_dim)
    pyro.module("tones_generator", tones_generator)

    with handlers.mask(mask=include_prior):
        probs_x = pyro.sample(
            "probs_x",
            dist.Dirichlet(0.9 * torch.eye(args.hidden_dim) + 0.1).to_event(1))
    with pyro.plate("sequences", num_sequences, batch_size, dim=-2) as batch:
        lengths = lengths[batch]
        x = 0
        y = torch.zeros(data_dim)
        for t in pyro.markov(range(max_length if args.jit else lengths.max())):
            with handlers.mask(mask=(t < lengths).unsqueeze(-1)):
                x = pyro.sample("x_{}".format(t),
                                dist.Categorical(probs_x[x]),
                                infer={"enumerate": "parallel"})
                # Note that since each tone depends on all tones at the previous time step,
                # the tones at different time steps now need to live in separate plates.
                with pyro.plate("tones_{}".format(t), data_dim, dim=-1):
                    y = pyro.sample(
                        "y_{}".format(t),
                        dist.Bernoulli(logits=tones_generator(x, y)),
                        obs=sequences[batch, t])
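The model above assumes a TonesGenerator module defined elsewhere; in Pyro's examples/hmm.py it is a small neural network mapping (x, y) to Bernoulli logits. A minimal sketch of a compatible module, assuming a hypothetical args.nn_dim hyperparameter (the original also convolves over y, omitted here):

import torch
import torch.nn as nn

class TonesGenerator(nn.Module):
    def __init__(self, args, data_dim):
        super().__init__()
        self.x_to_hidden = nn.Linear(args.hidden_dim, args.nn_dim)
        self.y_to_hidden = nn.Linear(data_dim, args.nn_dim)
        self.hidden_to_logits = nn.Linear(args.nn_dim, data_dim)

    def forward(self, x, y):
        # One-hot encode x so both the initial int 0 and enumerated long
        # tensors of any batch shape broadcast correctly against y.
        x = torch.as_tensor(x)
        x_onehot = nn.functional.one_hot(x, self.x_to_hidden.in_features).float()
        hidden = torch.relu(self.x_to_hidden(x_onehot) + self.y_to_hidden(y))
        return self.hidden_to_logits(hidden)  # shape (..., data_dim)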
Example #2
def model_2(sequences, lengths, args, batch_size=None, include_prior=True):
    with ignore_jit_warnings():
        num_sequences, max_length, data_dim = map(int, sequences.shape)
        assert lengths.shape == (num_sequences, )
        assert lengths.max() <= max_length
    with handlers.mask(mask=include_prior):
        probs_x = pyro.sample(
            "probs_x",
            dist.Dirichlet(0.9 * torch.eye(args.hidden_dim) + 0.1).to_event(1))
        probs_y = pyro.sample(
            "probs_y",
            dist.Beta(0.1, 0.9).expand([args.hidden_dim, 2,
                                        data_dim]).to_event(3))
    tones_plate = pyro.plate("tones", data_dim, dim=-1)
    with pyro.plate("sequences", num_sequences, batch_size, dim=-2) as batch:
        lengths = lengths[batch]
        x, y = 0, 0
        for t in pyro.markov(range(max_length if args.jit else lengths.max())):
            with handlers.mask(mask=(t < lengths).unsqueeze(-1)):
                x = pyro.sample("x_{}".format(t),
                                dist.Categorical(probs_x[x]),
                                infer={"enumerate": "parallel"})
                # Note the broadcasting tricks here: to index probs_y on tensors x and y,
                # we also need a final tensor for the tones dimension. This is conveniently
                # provided by the plate associated with that dimension.
                with tones_plate as tones:
                    y = pyro.sample("y_{}".format(t),
                                    dist.Bernoulli(probs_y[x, y, tones]),
                                    obs=sequences[batch, t]).long()
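Models in this family are typically fit with SVI and TraceEnum_ELBO, which marginalizes the enumerated x_t sites exactly. A hedged driver sketch, assuming sequences, lengths and args as in the model signature and a MAP (AutoDelta) guide over the global probs_* sites, as in Pyro's hmm.py example:

import pyro
from pyro import poutine
from pyro.infer import SVI, TraceEnum_ELBO
from pyro.infer.autoguide import AutoDelta
from pyro.optim import Adam

# Expose only the global sites to the guide; x_t is enumerated and y_t is observed.
guide = AutoDelta(
    poutine.block(model_2, expose_fn=lambda msg: msg["name"].startswith("probs_")))
svi = SVI(model_2, guide, Adam({"lr": 0.05}),
          TraceEnum_ELBO(max_plate_nesting=2))
for step in range(101):
    loss = svi.step(sequences, lengths, args, batch_size=20)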
Example #3
def model_3(sequences, lengths, args, batch_size=None, include_prior=True):
    with ignore_jit_warnings():
        num_sequences, max_length, data_dim = map(int, sequences.shape)
        assert lengths.shape == (num_sequences, )
        assert lengths.max() <= max_length
    hidden_dim = int(args.hidden_dim**0.5)  # split between w and x
    with handlers.mask(mask=include_prior):
        probs_w = pyro.sample(
            "probs_w",
            dist.Dirichlet(0.9 * torch.eye(hidden_dim) + 0.1).to_event(1))
        probs_x = pyro.sample(
            "probs_x",
            dist.Dirichlet(0.9 * torch.eye(hidden_dim) + 0.1).to_event(1))
        probs_y = pyro.sample(
            "probs_y",
            dist.Beta(0.1, 0.9).expand([hidden_dim, hidden_dim,
                                        data_dim]).to_event(3))
    tones_plate = pyro.plate("tones", data_dim, dim=-1)
    with pyro.plate("sequences", num_sequences, batch_size, dim=-2) as batch:
        lengths = lengths[batch]
        w, x = 0, 0
        for t in pyro.markov(range(max_length if args.jit else lengths.max())):
            with handlers.mask(mask=(t < lengths).unsqueeze(-1)):
                w = pyro.sample("w_{}".format(t),
                                dist.Categorical(probs_w[w]),
                                infer={"enumerate": "parallel"})
                x = pyro.sample("x_{}".format(t),
                                dist.Categorical(probs_x[x]),
                                infer={"enumerate": "parallel"})
                with tones_plate as tones:
                    pyro.sample("y_{}".format(t),
                                dist.Bernoulli(probs_y[w, x, tones]),
                                obs=sequences[batch, t])
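The probs_y[w, x, tones] access relies on ordinary advanced-indexing broadcasting: the tones arange provided by the plate fills the final dimension while w and x broadcast against it. A standalone shape sketch with hypothetical sizes:

import torch

hidden_dim, data_dim = 4, 88
probs_y = torch.rand(hidden_dim, hidden_dim, data_dim)
w = torch.randint(hidden_dim, (3, 1))  # batched (or enumerated) hidden states
x = torch.randint(hidden_dim, (3, 1))
tones = torch.arange(data_dim)         # what `with tones_plate as tones` yields
print(probs_y[w, x, tones].shape)      # torch.Size([3, 88])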
Example #4
    def model():
        x_plate = pyro.plate("x_plate",
                             5,
                             subsample_size=2 if subsampling else None,
                             dim=-1)
        y_plate = pyro.plate("y_plate",
                             6,
                             subsample_size=3 if subsampling else None,
                             dim=-2)
        with pyro.plate("num_particles", 50, dim=-3):
            with x_plate:
                b = pyro.sample(
                    "b", dist.Beta(torch.tensor(1.1), torch.tensor(1.1)))
            with y_plate:
                c = pyro.sample("c", dist.Bernoulli(0.5))
            with x_plate, y_plate:
                d = pyro.sample("d", dist.Bernoulli(b))

        # check shapes
        if enumerate_ == "parallel":
            assert b.shape == (50, 1, x_plate.subsample_size)
            assert c.shape == (2, 1, 1, 1)
            assert d.shape == (2, 1, 1, 1, 1)
        elif enumerate_ == "sequential":
            assert b.shape == (50, 1, x_plate.subsample_size)
            assert c.shape in ((), (1, 1, 1))  # both are valid
            assert d.shape in ((), (1, 1, 1))  # both are valid
        else:
            assert b.shape == (50, 1, x_plate.subsample_size)
            assert c.shape == (50, y_plate.subsample_size, 1)
            assert d.shape == (50, y_plate.subsample_size,
                               x_plate.subsample_size)
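The shape assertions only hold under a matching inference setup. A hedged harness, assuming the test's closure variables are pinned as shown and an empty guide (config_enumerate leaves the continuous Beta site alone and enumerates the Bernoulli sites):

import pyro
from pyro.infer import TraceEnum_ELBO, config_enumerate

subsampling, enumerate_ = False, "parallel"  # closure variables the test varies

def guide():
    pass

elbo = TraceEnum_ELBO(max_plate_nesting=3)
elbo.loss(config_enumerate(model, "parallel"), guide)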
Example #5
def model_0(data, history, vectorized):
    x_dim = 3
    init = pyro.param("init",
                      lambda: torch.rand(x_dim),
                      constraint=constraints.simplex)
    trans = pyro.param("trans",
                       lambda: torch.rand((x_dim, x_dim)),
                       constraint=constraints.simplex)
    locs = pyro.param("locs", lambda: torch.rand(x_dim))

    with pyro.plate("sequences", data.shape[0], dim=-3) as sequences:
        sequences = sequences[:, None]
        x_prev = None
        markov_loop = (
            pyro.vectorized_markov(name="time", size=data.shape[1], dim=-2, history=history)
            if vectorized
            else pyro.markov(range(data.shape[1]), history=history))
        for i in markov_loop:
            x_curr = pyro.sample(
                "x_{}".format(i),
                dist.Categorical(
                    init if isinstance(i, int) and i < 1 else trans[x_prev]))
            with pyro.plate("tones", data.shape[2], dim=-1):
                pyro.sample("y_{}".format(i),
                            dist.Normal(Vindex(locs)[..., x_curr], 1.),
                            obs=Vindex(data)[sequences, i])
            x_prev = x_curr
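When vectorized=True, pyro.vectorized_markov yields tensor-valued time indices rather than ints, so Vindex(data)[sequences, i] gathers a whole time slice at once. A shape sketch with hypothetical sizes:

import torch
from pyro.ops.indexing import Vindex

data = torch.rand(5, 7, 88)            # (sequences, time steps, tones)
sequences = torch.arange(5)[:, None]   # as produced by the plate above
i = torch.arange(1, 7)                 # a vectorized time slice
print(Vindex(data)[sequences, i].shape)  # torch.Size([5, 6, 88])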
Example #6
    def model(data):
        T, N, D = data.shape  # time steps, individuals, features

        # Gaussian initial distribution.
        init_loc = pyro.param("init_loc", torch.zeros(D))
        init_scale = pyro.param("init_scale", 1e-2 * torch.eye(D),
                                constraint=constraints.lower_cholesky)

        # Linear dynamics with Gaussian noise.
        trans_const = pyro.param("trans_const", torch.zeros(D))
        trans_coeff = pyro.param("trans_coeff", torch.eye(D))
        noise = pyro.param("noise", 1e-2 * torch.eye(D),
                           constraint=constraints.lower_cholesky)

        obs_plate = pyro.plate("channel", D, dim=-1)
        with pyro.plate("data", N, dim=-2):
            state = None
            for t in range(T):
                # Transition.
                if t == 0:
                    loc = init_loc
                    scale_tril = init_scale
                else:
                    loc = trans_const + funsor.torch.torch_tensordot(trans_coeff, state, 1)
                    scale_tril = noise
                state = pyro.sample("state_{}".format(t),
                                    dist.MultivariateNormal(loc, scale_tril=scale_tril),
                                    infer={"exact": exact})

                # Factorial probit likelihood model.
                with obs_plate:
                    pyro.sample("obs_{}".format(t),
                                dist.Bernoulli(logits=state["channel"]),
                                obs=data[t])
Example #7
def model_0(sequences, lengths, args, batch_size=None, include_prior=True):
    assert not torch._C._get_tracing_state()
    num_sequences, max_length, data_dim = sequences.shape
    with handlers.mask(mask=include_prior):
        # Our prior on transition probabilities will be:
        # stay in the same state with 90% probability; uniformly jump to another
        # state with 10% probability.
        probs_x = pyro.sample(
            "probs_x",
            dist.Dirichlet(0.9 * torch.eye(args.hidden_dim) + 0.1).to_event(1))
        # We put a weak prior on the conditional probability of a tone sounding.
        # We know that on average about 4 of 88 tones are active, so we'll set a
        # rough weak prior of 10% of the notes being active at any one time.
        probs_y = pyro.sample(
            "probs_y",
            dist.Beta(0.1, 0.9).expand([args.hidden_dim,
                                        data_dim]).to_event(2))
    # In this first model we'll sequentially iterate over sequences in a
    # minibatch; this will make it easy to reason about tensor shapes.
    tones_plate = pyro.plate("tones", data_dim, dim=-1)
    for i in pyro.plate("sequences", len(sequences), batch_size):
        length = lengths[i]
        sequence = sequences[i, :length]
        x = 0
        for t in pyro.markov(range(length)):
            # On the next line, we'll overwrite the value of x with an updated
            # value. If we wanted to record all x values, we could instead
            # write x[t] = pyro.sample(...x[t-1]...).
            x = pyro.sample("x_{}_{}".format(i, t),
                            dist.Categorical(probs_x[x]),
                            infer={"enumerate": "parallel"})
            with tones_plate:
                pyro.sample("y_{}_{}".format(i, t),
                            dist.Bernoulli(probs_y[x.squeeze(-1)]),
                            obs=sequence[t])
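The x.squeeze(-1) above strips the trailing plate dimension that parallel enumeration leaves on x: with max_plate_nesting=1 an enumerated x has shape (hidden_dim, 1), and squeezing lets the indexed probs_y broadcast over the tones plate. A small shape sketch:

import torch

hidden_dim, data_dim = 16, 88
probs_y = torch.rand(hidden_dim, data_dim)
x = torch.arange(hidden_dim).unsqueeze(-1)  # enumerated values, shape (16, 1)
print(probs_y[x.squeeze(-1)].shape)         # torch.Size([16, 88])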
Example #8
    def model(data=None):
        probs_a = torch.tensor([0.45, 0.55])
        probs_b = torch.tensor([[0.6, 0.4], [0.4, 0.6]])
        probs_c = torch.tensor([[0.75, 0.25], [0.55, 0.45]])
        probs_d = torch.tensor([[[0.4, 0.6], [0.3, 0.7]],
                                [[0.3, 0.7], [0.2, 0.8]]])

        b_axis = pyro.plate("b_axis", 2)
        c_axis = pyro.plate("c_axis", 2)
        a = pyro.sample("a", dist.Categorical(probs_a))
        b = [
            pyro.sample("b_{}".format(i), dist.Categorical(probs_b[a]))
            for i in b_axis
        ]
        c = [
            pyro.sample("c_{}".format(j), dist.Categorical(probs_c[a]))
            for j in c_axis
        ]
        for i in b_axis:
            for j in c_axis:
                pyro.sample(
                    "d_{}_{}".format(i, j),
                    dist.Categorical(Vindex(probs_d)[b[i], c[j]]),
                    obs=data[i, j],
                )
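Vindex(probs_d)[b[i], c[j]] broadcasts the two index tensors against each other and leaves the final outcome dimension intact. A standalone sketch with toy index shapes:

import torch
from pyro.ops.indexing import Vindex

probs_d = torch.rand(2, 2, 2)
b = torch.tensor([0, 1]).unsqueeze(-1)  # shape (2, 1), e.g. an enumeration dim
c = torch.tensor([0, 1])                # shape (2,)
print(Vindex(probs_d)[b, c].shape)      # torch.Size([2, 2, 2])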
Example #9
 def model(z1=None, z2=None):
     p = pyro.param("p", torch.tensor([0.25, 0.75]))
     loc = pyro.param("loc", torch.tensor([-1.0, 1.0]))
     z1 = pyro.sample("z1", dist.Categorical(p), obs=z1)
     with pyro.plate("data[0]", 3):
         pyro.sample("x1", dist.Normal(loc[z1], 1.0), obs=data[0])
     with pyro.plate("data[1]", 2):
         z2 = pyro.sample("z2", dist.Categorical(p), obs=z2)
         pyro.sample("x2", dist.Normal(loc[z2], 1.0), obs=data[1])
Example #10
 def model():
     p = torch.tensor(0.5, requires_grad=True)
     with pyro.plate("plate_outer", 5, dim=plate_dims[0]):
         pyro.sample("x", dist.Bernoulli(p))
         with pyro.plate("plate_inner_1", 6, dim=plate_dims[1]):
             pyro.sample("y", dist.Bernoulli(p))
             with pyro.plate("plate_inner_2", 7, dim=plate_dims[2]):
                 pyro.sample("z", dist.Bernoulli(p))
                 with pyro.plate("plate_inner_3", 8, dim=plate_dims[3]):
                     pyro.sample("q", dist.Bernoulli(p))
    def model():
        pyro.sample("w", dist.Bernoulli(0.5), infer={'enumerate': 'parallel'})

        with pyro.plate("non_enum", 2):
            a = pyro.sample("a", dist.Bernoulli(0.5), infer={'enumerate': None})

        p = (1.0 + a.sum(-1)) / (2.0 + a.shape[0])  # introduce dependency of b on a

        with pyro.plate("enum_1", 3):
            pyro.sample("b", dist.Bernoulli(p), infer={'enumerate': enumerate_})
Example #12
 def model():
     x = pyro.sample("x0", dist.Categorical(pyro.param("q0")))
     with pyro.plate("local", 3):
         for i in range(1, depth):
             x = pyro.sample(
                 "x{}".format(i),
                 dist.Categorical(pyro.param("q{}".format(i))[..., x, :]))
         with pyro.plate("data", 4):
             pyro.sample("y",
                         dist.Bernoulli(pyro.param("qy")[..., x]),
                         obs=data)
Example #13
 def model():
     d = dist.Categorical(p)
     context1 = pyro.plate("outer", outer_dim, dim=-1)
     context2 = pyro.plate("inner", inner_dim, dim=-2)
     pyro.sample("w", d)
     with context1:
         pyro.sample("x", d)
     with context2:
         pyro.sample("y", d)
     with context1, context2:
         pyro.sample("z", d)
Example #14
 def guide():
     d = dist.Categorical(pyro.param("q"))
     context1 = pyro.plate("outer", outer_dim, dim=-1)
     context2 = pyro.plate("inner", inner_dim, dim=-2)
     pyro.sample("w", d, infer={"enumerate": "parallel"})
     with context1:
         pyro.sample("x", d, infer={"enumerate": "parallel"})
     with context2:
         pyro.sample("y", d, infer={"enumerate": "parallel"})
     with context1, context2:
         pyro.sample("z", d, infer={"enumerate": "parallel"})
Example #15
def model_6(sequences, lengths, args, batch_size=None, include_prior=False):
    num_sequences, max_length, data_dim = sequences.shape
    assert lengths.shape == (num_sequences, )
    assert lengths.max() <= max_length
    hidden_dim = args.hidden_dim

    if not args.raftery_parameterization:
        # Explicitly parameterize the full tensor of transition probabilities, which
        # has hidden_dim cubed entries.
        probs_x = pyro.param("probs_x",
                             torch.rand(hidden_dim, hidden_dim, hidden_dim),
                             constraint=constraints.simplex)
    else:
        # Use the more parsimonious "Raftery" parameterization of
        # the tensor of transition probabilities. See reference:
        # Raftery, A. E. A model for high-order Markov chains.
        # Journal of the Royal Statistical Society. 1985.
        probs_x1 = pyro.param("probs_x1",
                              torch.rand(hidden_dim, hidden_dim),
                              constraint=constraints.simplex)
        probs_x2 = pyro.param("probs_x2",
                              torch.rand(hidden_dim, hidden_dim),
                              constraint=constraints.simplex)
        mix_lambda = pyro.param("mix_lambda",
                                torch.tensor(0.5),
                                constraint=constraints.unit_interval)
        # We use broadcasting to combine two tensors of shape (hidden_dim, hidden_dim)
        # and (hidden_dim, 1, hidden_dim) into a tensor of shape
        # (hidden_dim, hidden_dim, hidden_dim).
        probs_x = mix_lambda * probs_x1 + (1.0 - mix_lambda) * probs_x2.unsqueeze(-2)

    probs_y = pyro.param("probs_y",
                         torch.rand(hidden_dim, data_dim),
                         constraint=constraints.unit_interval)
    tones_plate = pyro.plate("tones", data_dim, dim=-1)
    with pyro.plate("sequences", num_sequences, batch_size, dim=-2) as batch:
        lengths = lengths[batch]
        x_curr, x_prev = torch.tensor(0), torch.tensor(0)
        # We need to pass `history=2` to `pyro.markov()` since our model
        # is now second-order Markov.
        for t in pyro.markov(range(lengths.max()), history=2):
            with handlers.mask(mask=(t < lengths).unsqueeze(-1)):
                probs_x_t = Vindex(probs_x)[x_prev, x_curr]
                x_prev, x_curr = x_curr, pyro.sample(
                    "x_{}".format(t),
                    dist.Categorical(probs_x_t),
                    infer={"enumerate": "parallel"})
                with tones_plate:
                    probs_y_t = probs_y[x_curr.squeeze(-1)]
                    pyro.sample("y_{}".format(t),
                                dist.Bernoulli(probs_y_t),
                                obs=sequences[batch, t])
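The Raftery mixture above assembles the full second-order transition tensor by broadcasting. A quick check with a hypothetical hidden_dim; when both factors have simplex rows (as the constrained params do), every output row is again a valid distribution:

import torch

hidden_dim = 4
probs_x1 = torch.rand(hidden_dim, hidden_dim).softmax(-1)  # rows on the simplex
probs_x2 = torch.rand(hidden_dim, hidden_dim).softmax(-1)
mix_lambda = torch.tensor(0.5)
probs_x = mix_lambda * probs_x1 + (1.0 - mix_lambda) * probs_x2.unsqueeze(-2)
print(probs_x.shape)    # torch.Size([4, 4, 4])
print(probs_x.sum(-1))  # all ones: each (x_prev, x_curr) row is a distribution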
Example #16
 def auto_model():
     probs_a = pyro.param("probs_a")
     probs_b = pyro.param("probs_b")
     probs_c = pyro.param("probs_c")
     probs_d = pyro.param("probs_d")
     with pyro.plate("a_axis", 2, dim=-1):
         a = pyro.sample("a", dist.Categorical(probs_a),
                         infer={"enumerate": "parallel"})
         pyro.sample("b", dist.Categorical(probs_b[a]), obs=b_data)
     with pyro.plate("c_axis", 3, dim=-1):
         c = pyro.sample("c", dist.Categorical(probs_c),
                         infer={"enumerate": "parallel"})
         pyro.sample("d", dist.Categorical(probs_d[c]), obs=d_data)
 def model():
     pyro.sample("w", dist.Bernoulli(0.5), infer={'enumerate': 'parallel'})
     inner_plate = pyro.plate("plate",
                              10,
                              subsample_size=4 if subsampling else None)
     for i in pyro.plate(
             "iplate", 10,
             subsample=torch.arange(3) if subsampling else None):
         pyro.sample("y_{}".format(i), dist.Bernoulli(0.5))
         with inner_plate:
             pyro.sample("x_{}".format(i),
                         dist.Bernoulli(0.5),
                         infer={'enumerate': enumerate_})
Example #18
    def model():
        x_plate = pyro.plate("x_plate", 10, dim=-1)
        y_plate = pyro.plate("y_plate", 11, dim=-2)
        q = pyro.param("q", torch.tensor([0.5, 0.5]))
        pyro.sample("a", dist.Bernoulli(0.5))
        with x_plate:
            b = pyro.sample("b", dist.Bernoulli(0.5)).long()
        with y_plate:
            # Note that it is difficult to check that c does not depend on b.
            c = pyro.sample("c", dist.Bernoulli(0.5)).long()
        with x_plate, y_plate:
            pyro.sample("d", dist.Bernoulli(Vindex(q)[b] if reuse_plate else 0.5))

        assert c.shape != b.shape or enumerate_ == "sequential"
Example #19
    def model():
        p = pyro.param("p", torch.ones(3, 3))
        q = pyro.param("q", torch.tensor([0.5, 0.5]))
        plate_x = pyro.plate("plate_x",
                             4,
                             subsample_size=3 if subsampling else None,
                             dim=-1)
        plate_y = pyro.plate("plate_y",
                             5,
                             subsample_size=3 if subsampling else None,
                             dim=-1)
        plate_z = pyro.plate("plate_z",
                             6,
                             subsample_size=3 if subsampling else None,
                             dim=-2)

        a = pyro.sample("a", dist.Bernoulli(q[0])).long()
        w = 0
        for i in pyro.markov(range(4)):
            w = pyro.sample("w_{}".format(i), dist.Categorical(p[w]))

        with plate_x:
            b = pyro.sample("b", dist.Bernoulli(q[a])).long()
            x = 0
            for i in pyro.markov(range(4)):
                x = pyro.sample("x_{}".format(i), dist.Categorical(p[x]))

        with plate_y:
            c = pyro.sample("c", dist.Bernoulli(q[a])).long()
            y = 0
            for i in pyro.markov(range(4)):
                y = pyro.sample("y_{}".format(i), dist.Categorical(p[y]))

        with plate_z:
            d = pyro.sample("d", dist.Bernoulli(q[a])).long()
            z = 0
            for i in pyro.markov(range(4)):
                z = pyro.sample("z_{}".format(i), dist.Categorical(p[z]))

        with plate_x, plate_z:
            # this part is tricky: how do we know to preserve b's dimension?
            # also, how do we know how to make b and d have different dimensions?
            e = pyro.sample("e",
                            dist.Bernoulli(q[b if reuse_plate else a])).long()
            xz = 0
            for i in pyro.markov(range(4)):
                xz = pyro.sample("xz_{}".format(i), dist.Categorical(p[xz]))

        return a, b, c, d, e
Example #20
def model2():

    data = [torch.tensor([-1.0, -1.0, 0.0]), torch.tensor([-1.0, 1.0])]
    p = pyro.param("p", torch.tensor([0.25, 0.75]))
    loc = pyro.sample("loc", dist.Normal(0, 1).expand([2]).to_event(1))
    # FIXME results in infinite loop in transformeddist_to_funsor.
    # scale = pyro.sample("scale", dist.LogNormal(0, 1))
    z1 = pyro.sample("z1", dist.Categorical(p))
    scale = pyro.sample("scale", dist.Normal(torch.tensor([0.0, 1.0])[z1],
                                             1)).exp()
    with pyro.plate("data[0]", 3):
        pyro.sample("x1", dist.Normal(loc[z1], scale), obs=data[0])
    with pyro.plate("data[1]", 2):
        z2 = pyro.sample("z2", dist.Categorical(p))
        pyro.sample("x2", dist.Normal(loc[z2], scale), obs=data[1])
Example #21
def model_6(data, history, vectorized):
    x_dim = 3
    x_init = pyro.param("x_init",
                        lambda: torch.rand(x_dim),
                        constraint=constraints.simplex)
    x_trans = pyro.param("x_trans",
                         lambda: torch.rand((len(data) - 1, x_dim, x_dim)),
                         constraint=constraints.simplex)
    locs = pyro.param("locs", lambda: torch.rand(x_dim))

    x_prev = None
    markov_loop = (
        pyro.vectorized_markov(name="time", size=len(data), dim=-2, history=history)
        if vectorized
        else pyro.markov(range(len(data)), history=history))
    for i in markov_loop:
        if isinstance(i, int) and i < 1:
            x_probs = x_init
        elif isinstance(i, int):
            x_probs = x_trans[i - 1, x_prev]
        else:
            x_probs = Vindex(x_trans)[(i - 1)[:, None], x_prev]

        x_curr = pyro.sample("x_{}".format(i), dist.Categorical(x_probs))
        with pyro.plate("tones", data.shape[-1], dim=-1):
            pyro.sample("y_{}".format(i),
                        dist.Normal(Vindex(locs)[..., x_curr], 1.),
                        obs=data[i])
        x_prev = x_curr
Example #22
def model_5(data, history, vectorized):
    x_dim, y_dim = 3, 2
    x_init = pyro.param("x_init",
                        lambda: torch.rand(x_dim),
                        constraint=constraints.simplex)
    x_init_2 = pyro.param("x_init_2",
                          lambda: torch.rand(x_dim, x_dim),
                          constraint=constraints.simplex)
    x_trans = pyro.param(
        "x_trans",
        lambda: torch.rand((x_dim, x_dim, x_dim)),
        constraint=constraints.simplex,
    )
    y_probs = pyro.param("y_probs",
                         lambda: torch.rand(x_dim, y_dim),
                         constraint=constraints.simplex)

    x_prev = x_prev_2 = None
    markov_loop = (pyro.vectorized_markov(
        name="time", size=len(data), dim=-2, history=history) if vectorized
                   else pyro.markov(range(len(data)), history=history))
    for i in markov_loop:
        if isinstance(i, int) and i == 0:
            x_probs = x_init
        elif isinstance(i, int) and i == 1:
            x_probs = Vindex(x_init_2)[x_prev]
        else:
            x_probs = Vindex(x_trans)[x_prev_2, x_prev]

        x_curr = pyro.sample("x_{}".format(i), dist.Categorical(x_probs))
        with pyro.plate("tones", data.shape[-1], dim=-1):
            pyro.sample("y_{}".format(i),
                        dist.Categorical(Vindex(y_probs)[x_curr]),
                        obs=data[i])
        x_prev_2, x_prev = x_prev, x_curr
Example #23
 def model(z=None):
     p = pyro.param("p", torch.tensor([0.75, 0.25]))
     iz = pyro.sample("z", dist.Categorical(p), obs=z)
     z = torch.tensor([0.0, 1.0])[iz]
     logger.info("z.shape = {}".format(z.shape))
     with pyro.plate("data", 3):
         pyro.sample("x", dist.Normal(z, 1.0), obs=data)
def model_1(data, history, vectorized):
    x_dim = 3
    init = pyro.param("init",
                      lambda: torch.rand(x_dim),
                      constraint=constraints.simplex)
    trans = pyro.param("trans",
                       lambda: torch.rand((x_dim, x_dim)),
                       constraint=constraints.simplex)
    locs = pyro.param("locs", lambda: torch.rand(x_dim))

    x_prev = None
    markov_loop = (pyro.vectorized_markov(
        name="time", size=len(data), dim=-2, history=history) if vectorized
                   else pyro.markov(range(len(data)), history=history))
    for i in markov_loop:
        x_curr = pyro.sample(
            "x_{}".format(i),
            dist.Categorical(
                init if isinstance(i, int) and i < 1 else trans[x_prev]),
        )
        with pyro.plate("tones", data.shape[-1], dim=-1):
            pyro.sample(
                "y_{}".format(i),
                dist.Normal(Vindex(locs)[..., x_curr], 1.0),
                obs=data[i],
            )
        x_prev = x_curr
Example #25
 def auto_guide(data):
     probs_a = pyro.param("guide_probs_a")
     probs_c = pyro.param("guide_probs_c")
     a = pyro.sample("a", dist.Categorical(probs_a),
                     infer={"enumerate": "parallel"})
     with pyro.plate("data", 2, dim=-1):
         pyro.sample("c", dist.Categorical(probs_c[a]))
Example #26
def model_2(data, history, vectorized):
    x_dim, y_dim = 3, 2
    x_init = pyro.param("x_init",
                        lambda: torch.rand(x_dim),
                        constraint=constraints.simplex)
    x_trans = pyro.param("x_trans",
                         lambda: torch.rand((x_dim, x_dim)),
                         constraint=constraints.simplex)
    y_init = pyro.param("y_init",
                        lambda: torch.rand(x_dim, y_dim),
                        constraint=constraints.simplex)
    y_trans = pyro.param("y_trans",
                         lambda: torch.rand((x_dim, y_dim, y_dim)),
                         constraint=constraints.simplex)

    x_prev = y_prev = None
    markov_loop = (
        pyro.vectorized_markov(name="time", size=len(data), dim=-2, history=history)
        if vectorized
        else pyro.markov(range(len(data)), history=history))
    for i in markov_loop:
        x_curr = pyro.sample(
            "x_{}".format(i),
            dist.Categorical(
                x_init if isinstance(i, int) and i < 1 else x_trans[x_prev]))
        with pyro.plate("tones", data.shape[-1], dim=-1):
            y_curr = pyro.sample(
                "y_{}".format(i),
                dist.Categorical(y_init[x_curr] if isinstance(i, int) and i < 1
                                 else Vindex(y_trans)[x_curr, y_prev]),
                obs=data[i])
        x_prev, y_prev = x_curr, y_curr
Example #27
def model_4(sequences, lengths, args, batch_size=None, include_prior=True):
    with ignore_jit_warnings():
        num_sequences, max_length, data_dim = map(int, sequences.shape)
        assert lengths.shape == (num_sequences,)
        assert lengths.max() <= max_length
    hidden_dim = int(args.hidden_dim**0.5)  # split between w and x
    with handlers.mask(mask=include_prior):
        probs_w = pyro.sample(
            "probs_w", dist.Dirichlet(0.9 * torch.eye(hidden_dim) + 0.1).to_event(1)
        )
        probs_x = pyro.sample(
            "probs_x",
            dist.Dirichlet(0.9 * torch.eye(hidden_dim) + 0.1)
            .expand_by([hidden_dim])
            .to_event(2),
        )
        probs_y = pyro.sample(
            "probs_y",
            dist.Beta(0.1, 0.9).expand([hidden_dim, hidden_dim, data_dim]).to_event(3),
        )
    tones_plate = pyro.plate("tones", data_dim, dim=-1)
    with pyro.plate("sequences", num_sequences, batch_size, dim=-2) as batch:
        lengths = lengths[batch]
        # Note the broadcasting tricks here: we declare a hidden torch.arange and
        # ensure that w and x are always tensors so we can unsqueeze them below,
        # thus ensuring that the x sample sites have correct distribution shape.
        w = x = torch.tensor(0, dtype=torch.long)
        for t in pyro.markov(range(max_length if args.jit else lengths.max())):
            with handlers.mask(mask=(t < lengths).unsqueeze(-1)):
                w = pyro.sample(
                    "w_{}".format(t),
                    dist.Categorical(probs_w[w]),
                    infer={"enumerate": "parallel"},
                )
                x = pyro.sample(
                    "x_{}".format(t),
                    dist.Categorical(Vindex(probs_x)[w, x]),
                    infer={"enumerate": "parallel"},
                )
                with tones_plate as tones:
                    pyro.sample(
                        "y_{}".format(t),
                        dist.Bernoulli(probs_y[w, x, tones]),
                        obs=sequences[batch, t],
                    )
Example #28
def double_exp_model(data):
    k1 = pyro.param("k1", lambda: torch.tensor(0.01), constraint=constraints.positive)
    k2 = pyro.param("k2", lambda: torch.tensor(0.05), constraint=constraints.positive)
    A = pyro.param("A", lambda: torch.tensor(0.5), constraint=constraints.unit_interval)
    k = torch.stack([k1, k2])

    with pyro.plate("data", len(data)):
        m = pyro.sample("m", dist.Bernoulli(A), infer={"enumerate": "parallel"})
        pyro.sample("obs", dist.Exponential(k[m.long()]), obs=data)
Example #29
 def model():
     with pyro.plate("plate", 10, subsample_size=subsample_size, dim=None):
         p0 = torch.tensor(0.)
         p0 = pyro.subsample(p0, event_dim=0)
         assert p0.shape == ()
         p = 0.5 * torch.ones(10)
         p = pyro.subsample(p, event_dim=0)
         assert len(p) == (subsample_size if subsample_size else 10)
         pyro.sample("x", dist.Bernoulli(p))
    def model(data=None):
        probs_a = torch.tensor([0.45, 0.55])
        probs_b = torch.tensor([[0.6, 0.4], [0.4, 0.6]])
        probs_c = torch.tensor([[0.75, 0.25], [0.55, 0.45]])
        probs_d = torch.tensor([[[0.4, 0.6], [0.3, 0.7]],
                                [[0.3, 0.7], [0.2, 0.8]]])

        b_axis = pyro.plate("b_axis", 2)
        c_axis = pyro.plate("c_axis", 2)
        a = pyro.sample("a", dist.Categorical(probs_a))
        with c_axis:
            c = pyro.sample("c", dist.Categorical(probs_c[a]))
        for i in b_axis:
            b_i = pyro.sample("b_{}".format(i), dist.Categorical(probs_b[a]))
            with c_axis:
                pyro.sample("d_{}".format(i),
                            dist.Categorical(Vindex(probs_d)[b_i, c]),
                            obs=data[i])