Example #1
0
    def setUp(self):
        """Build a three-step binary HMM with Bernoulli emissions for the tests."""

        def model():
            # transition / emission probability tables, one row per state
            p_latent = pyro.param("p1", Variable(torch.Tensor([[0.7], [0.3]])))
            p_obs = pyro.param("p2", Variable(torch.Tensor([[0.9], [0.1]])))

            # index 0 holds the fixed initial state
            chain = [Variable(torch.ones(1, 1))]
            emissions = []
            for step in range(self.model_steps):
                prev = chain[-1].view(-1).long()
                chain.append(
                    pyro.sample(
                        "latent_{}".format(str(step)),
                        Bernoulli(torch.index_select(p_latent, 0, prev))))

                # emission is conditioned on the state just sampled
                cur = chain[-1].view(-1).long()
                emissions.append(
                    pyro.observe(
                        "observe_{}".format(str(step)),
                        Bernoulli(torch.index_select(p_obs, 0, cur)),
                        self.data[step]))
            return torch.sum(torch.cat(chain))

        self.model_steps = 3
        self.data = [pyro.ones(1, 1) for _ in range(self.model_steps)]
        self.model = model
Example #2
0
File: gamma.py Project: zaxtax/pyro
 def log_pdf(self, x):
     """
     Gamma log-likelihood of ``x``:
     -beta*x + (alpha - 1)*log(x) + alpha*log(beta) - lgamma(alpha).
     """
     shape = self.alpha
     rate = self.beta
     # the four terms of the log-density, summed in the same order
     return (-rate * x
             + (shape - pyro.ones([1])) * torch.log(x)
             + shape * torch.log(rate)
             + -log_gamma(shape))
Example #3
0
File: kl_qp.py Project: zaxtax/pyro
    def _enter_poutine(self, *args, **kwargs):
        """
        Reset per-execution bookkeeping when model execution begins.
        """
        super(VIGuideCo, self)._enter_poutine(*args, **kwargs)

        # Fresh state for this run.  Each trace entry has the form
        # {"sample": sample, "logq": logq, "reparam": reparam}.
        # TODO: make this cleaner via a trace data structure
        self.score_multiplier = pyro.ones(1)
        self.batch = []
        self.trace = OrderedDict()
Example #4
0
File: gamma.py Project: zaxtax/pyro
 def batch_log_pdf(self, x, batch_size=1):
     """
     Batched gamma log-likelihood.

     A 1-d ``x`` with 1-d ``beta`` and ``batch_size == 1`` falls back to
     :meth:`log_pdf`; any other 1-d ``x`` is tiled to ``(batch_size, len(x))``
     before the density is evaluated elementwise.
     """
     if x.dim() == 1:
         if self.beta.dim() == 1 and batch_size == 1:
             return self.log_pdf(x)
         x = x.expand(batch_size, x.size(0))
     # -beta*x + (alpha-1)*log(x) + alpha*log(beta) - lgamma(alpha)
     ones = pyro.ones(x.size())
     return (-self.beta * x
             + (self.alpha - ones) * torch.log(x)
             + self.alpha * torch.log(self.beta)
             + -log_gamma(self.alpha))
Example #5
0
def log_gamma(xx):
    """
    Lanczos approximation of log(Gamma(xx)), ported from webppl.
    """
    lanczos_coeffs = (
        76.18009172947146, -86.50532032941677, 24.01409824083091,
        -1.231739572450155, 0.1208650973866179e-2, -0.5395239384953e-5,
    )
    series_base = 1.000000000190015
    sqrt_two_pi = 2.5066282746310005
    x = xx - 1.0
    t = x + 5.5
    t = t - (x + 0.5) * torch.log(t)
    # accumulate the Lanczos series: base + sum_j c_j / (x + j)
    ser = pyro.ones(x.size()) * series_base
    for coeff in lanczos_coeffs:
        x = x + 1.0
        ser = ser + torch.pow(x / coeff, -1)
    return torch.log(ser * sqrt_two_pi) - t
Example #6
0
        def model():
            # two-row parameter tables indexed by the previous latent state
            ps = pyro.param("ps", Variable(torch.Tensor([[0.8], [0.3]])))
            mu = pyro.param("mu", Variable(torch.Tensor([[-0.1], [0.9]])))
            sigma = Variable(torch.ones(1, 1))

            # states[0] is the fixed initial state
            states = [Variable(torch.ones(1))]
            emissions = []
            for step in range(3):
                prev_idx = states[-1][0].long().data
                states.append(
                    pyro.sample("latent_{}".format(str(step)),
                                Bernoulli(ps[prev_idx])))

                # emit from the state just sampled, observing all-ones data
                cur_idx = states[-1][0].long().data
                emissions.append(
                    pyro.observe("observe_{}".format(str(step)),
                                 Normal(mu[cur_idx], sigma),
                                 pyro.ones(1)))
            return states
Example #7
0
        def model():
            # transition probabilities and emission means, one row per state
            trans_p = pyro.param("ps", Variable(torch.Tensor([[0.8], [0.3]])))
            emit_mu = pyro.param("mu", Variable(torch.Tensor([[-0.1], [0.9]])))
            emit_sigma = Variable(torch.ones(1, 1))

            chain = [Variable(torch.ones(1))]
            observed = []
            t = 0
            while t < 3:
                new_state = pyro.sample(
                    "latent_{}".format(str(t)),
                    Bernoulli(trans_p[chain[-1][0].long().data]))
                chain.append(new_state)

                # Gaussian emission conditioned on the new state
                observed.append(pyro.observe(
                    "observe_{}".format(str(t)),
                    Normal(emit_mu[new_state[0].long().data], emit_sigma),
                    pyro.ones(1)))
                t += 1
            return chain
Example #8
0
    def setUp(self):
        """Configure a simple three-step Bernoulli-emission HMM model."""

        def model():
            # transition table (p1) and emission table (p2), one row per state
            trans = pyro.param("p1", Variable(torch.Tensor([[0.7], [0.3]])))
            emit = pyro.param("p2", Variable(torch.Tensor([[0.9], [0.1]])))

            states = [Variable(torch.ones(1, 1))]
            obs = []
            for step in range(self.model_steps):
                state = pyro.sample(
                    "latent_{}".format(str(step)),
                    Bernoulli(torch.index_select(
                        trans, 0, states[-1].view(-1).long())))
                states.append(state)

                # observe the data point under the newly sampled state
                obs.append(pyro.observe(
                    "observe_{}".format(str(step)),
                    Bernoulli(torch.index_select(
                        emit, 0, state.view(-1).long())),
                    self.data[step]))
            return torch.sum(torch.cat(states))

        self.model_steps = 3
        self.data = [pyro.ones(1, 1) for _ in range(self.model_steps)]
        self.model = model