import logging

import torch
from torch.distributions import constraints
from torch.testing import assert_close

import pyro
import pyro.distributions as dist
from pyro.distributions import Poisson, ZeroInflatedPoisson
from pyro.infer import SVI, Trace_ELBO
from pyro.optim import ClippedAdam


# Earlier revision of the test, written against the older ZeroInflatedPoisson
# signature (gate as the first positional argument). assert_tensors_equal is a
# helper from Pyro's test suite (tests/common.py), not a public API.
def test_zip_0_gate(rate):
    # if gate is 0, ZIP reduces to a plain Poisson
    zip_ = ZeroInflatedPoisson(torch.zeros(1), torch.tensor(rate))
    pois = Poisson(torch.tensor(rate))
    s = pois.sample((20,))
    zip_prob = zip_.log_prob(s)
    pois_prob = pois.log_prob(s)
    assert_tensors_equal(zip_prob, pois_prob)
# Current revision of the same test (supersedes the one above): rate is the
# first argument and the gate is passed by keyword, either directly or as
# logits. gate_logits = -99.9 gives gate = sigmoid(-99.9) ~= 0, so both
# variants should match a plain Poisson.
def test_zip_0_gate(rate):
    # if gate is 0, ZIP is Poisson
    zip1 = ZeroInflatedPoisson(torch.tensor(rate), gate=torch.zeros(1))
    zip2 = ZeroInflatedPoisson(torch.tensor(rate), gate_logits=torch.tensor(-99.9))
    pois = Poisson(torch.tensor(rate))
    s = pois.sample((20,))
    zip1_prob = zip1.log_prob(s)
    zip2_prob = zip2.log_prob(s)
    pois_prob = pois.log_prob(s)
    assert_close(zip1_prob, pois_prob)
    assert_close(zip2_prob, pois_prob)
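# A companion check for the opposite limit, sketched here as an assumption
# rather than taken from the original suite: when gate is 1, every draw from
# the zero-inflated distribution is an inflated zero. The parametrized rate
# values are illustrative.
import pytest


@pytest.mark.parametrize("rate", [0.5, 1.0, 5.5])
def test_zip_1_gate_sketch(rate):
    zip_ = ZeroInflatedPoisson(torch.tensor(rate), gate=torch.ones(1))
    s = zip_.sample((20,))
    assert_close(s, torch.zeros_like(s))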
def train_model(data, n_steps, pmf):
    # Uniform prior bounds for the latent Poisson rate: mu +/- num_sigmas * sigma.
    # (The pmf argument is accepted but unused in this version.)
    mu = 2.8
    num_sigmas = 4
    sigma = 0.3
    low = mu - num_sigmas * sigma
    high = mu + num_sigmas * sigma

    def model(data):
        f = pyro.sample("latent", dist.Uniform(low, high))  # sample f from the prior
        # each observed count is Poisson-distributed given the latent rate f
        for i in range(len(data)):
            pyro.sample("obs_{}".format(i), Poisson(f), obs=data[i])

    def guide(data):
        # The original guide sampled "latent" from a fixed Poisson(2.0), which
        # ignores the learnable parameter "lam" (so it never receives a
        # gradient) and has discrete support mismatching the continuous
        # Uniform prior. A Delta guide at the learnable point "lam" (i.e. MAP
        # estimation) is the minimal correct replacement; "lam" is constrained
        # to the prior's support so its log-density under the model stays finite.
        lam = pyro.param(
            "lam", torch.tensor(2.0), constraint=constraints.interval(low, high)
        )
        pyro.sample("latent", dist.Delta(lam))

    adam_params = {"lr": 0.0005, "betas": (0.90, 0.999)}
    optimizer = ClippedAdam(adam_params)
    svi = SVI(model, guide, optimizer, loss=Trace_ELBO())
    for step in range(n_steps):
        loss = svi.step(data)
        if step % 100 == 0:
            logging.info("step {} ELBO loss: {}".format(step, loss))

    # grab the learned variational parameter and sample from the posterior
    # predictive Poisson it parametrizes
    lam = pyro.param("lam").item()
    logging.info("lam = {}".format(lam))
    posterior = Poisson(lam)
    logging.info("Sampling: {}".format(posterior.sample()))
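# Hypothetical usage sketch, not part of the original: fit to synthetic counts
# drawn from a Poisson whose rate (2.8) matches the prior mean above. The data
# size, step count, and the pmf=None placeholder are illustrative assumptions.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    pyro.clear_param_store()
    data = Poisson(torch.tensor(2.8)).sample((100,))
    train_model(data, n_steps=2000, pmf=None)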