Example #1
def forward(self):
    # Logistic-regression log-likelihood: y'Xb - sum_i log(1 + exp(x_i'b)),
    # with the log(1 + exp(.)) term computed stably as logsumexp(0, Xb).
    beta = self.beta_obj.get_val()
    likelihood = torch.dot(beta, torch.mv(torch.t(self.X), self.y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(self.num_ob)),
                                           torch.mv(self.X, beta)))
    # Log-prior supplied by the beta object.
    prior = self.beta_obj.get_out()
    posterior = prior + likelihood
    # Return the negative log-posterior (the potential energy).
    return -posterior
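Every snippet in this section relies on a logsumexp_torch helper that is never shown. From how it is called, with a tensor of zeros as the first argument, it appears to compute an elementwise, numerically stable log(exp(a) + exp(b)), so the summed term equals sum_i log(1 + exp(x_i'beta)), the logistic log-partition. A minimal sketch under that assumption:

import torch

def logsumexp_torch(a, b):
    # Elementwise stable log(exp(a) + exp(b)): factor out the larger
    # value so neither exponential can overflow.
    m = torch.max(a, b)
    return m + torch.log(torch.exp(a - m) + torch.exp(b - m))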
Example #2
def forward(self):
    # Logistic-regression log-likelihood, as in Example #1.
    likelihood = torch.dot(self.beta, torch.mv(torch.t(self.X), self.y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(self.num_ob)),
                                           torch.mv(self.X, self.beta)))
    # Gaussian prior on beta with standard deviation sigma
    # (normalizing constant dropped, which is fine for MAP/MCMC).
    prior = -torch.dot(self.beta, self.beta) / (self.sigma * self.sigma) * 0.5
    posterior = prior + likelihood
    return -posterior
Example #3
def forward(self):
    # The last component of self.beta carries log(sigma); the rest are
    # the regression coefficients.
    beta = self.beta[:(self.dim - 1)]
    sigma = torch.exp(self.beta[self.dim - 1])
    likelihood = torch.dot(beta, torch.mv(torch.t(self.X), self.y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(self.num_ob)),
                                           torch.mv(self.X, beta)))
    # Gaussian prior on beta with scale sigma, plus an Exponential(lamb)
    # prior on sigma.
    prior = -torch.dot(beta, beta) / (sigma * sigma) * 0.5 \
            - self.num_ob * 0.5 * torch.log(sigma * sigma) \
            - sigma * self.lamb
    # Adjustment for sampling sigma on the log scale (see the note below).
    hessian_term = -self.beta[self.dim - 1]
    posterior = prior + likelihood + hessian_term
    return -posterior
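The hessian_term above has the form of the log-Jacobian correction for sampling sigma on the log scale: with phi = log(sigma), p(phi) = p(sigma) * sigma, so the log-density gains an additive log(sigma) = phi term (Example #6 adds the analogous self.log_sigma2 with a positive sign). A small check of that identity using torch.distributions, with a hypothetical Exponential prior standing in for the one above:

import torch
from torch.distributions import Exponential, ExpTransform, TransformedDistribution

lamb = 2.0
base = Exponential(lamb)                                      # sigma ~ Exponential(lamb)
phi_dist = TransformedDistribution(base, ExpTransform().inv)  # phi = log(sigma)

phi = torch.tensor(0.3)
# Change of variables by hand: log p(sigma) + log|dsigma/dphi| = ... + phi.
manual = base.log_prob(torch.exp(phi)) + phi
assert torch.allclose(phi_dist.log_prob(phi), manual)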
Example #4
def log_p_y_given_theta(observed_point):
    # Score an observed batch under a globally defined beta.
    X = Variable(observed_point["input"],
                 requires_grad=False).type(precision_type)
    y = Variable(observed_point["target"],
                 requires_grad=False).type(precision_type)
    num_ob = X.shape[0]
    likelihood = torch.dot(beta, torch.mv(torch.t(X), y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(num_ob)),
                                           torch.mv(X, beta)))
    # .data[0] extracts a Python float (pre-0.4 PyTorch idiom).
    return likelihood.data[0]
Example #5
def log_p_y_given_theta(self, observed_point, posterior_point):
    # Load the sampled parameters, then score the observed data under them.
    self.load_point(posterior_point)
    X = Variable(observed_point["input"],
                 requires_grad=False).type(self.precision_type)
    y = Variable(observed_point["target"],
                 requires_grad=False).type(self.precision_type)
    num_ob = X.shape[0]
    likelihood = torch.dot(self.beta, torch.mv(torch.t(X), y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(num_ob)),
                                           torch.mv(X, self.beta)))
    return likelihood.data[0]
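Examples #4 and #5 use the pre-0.4 PyTorch API: Variable wrappers and .data[0] to pull out a Python float. In current PyTorch, Variable is merged into Tensor and .item() replaces .data[0]; a modernized sketch of the same method (still assuming the logsumexp_torch helper sketched under Example #1):

import torch

def log_p_y_given_theta(self, observed_point, posterior_point):
    # Same computation without Variable wrappers.
    self.load_point(posterior_point)
    X = observed_point["input"].type(self.precision_type)
    y = observed_point["target"].type(self.precision_type)
    likelihood = torch.dot(self.beta, X.t().mv(y)) - \
                 torch.sum(logsumexp_torch(torch.zeros(X.shape[0]),
                                           X.mv(self.beta)))
    return likelihood.item()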
Example #6
def forward(self):
    beta = self.beta
    # sigma2 is either held fixed by a Gibbs update or parameterized on
    # the log scale and sampled jointly.
    if self.gibbs:
        sigma2 = self.sigma2
    else:
        sigma2 = torch.exp(self.log_sigma2)
    print("sigma2 {}".format(sigma2))
    likelihood = torch.dot(beta, torch.mv(torch.t(self.X), self.y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(self.num_ob)),
                                           torch.mv(self.X, beta)))
    # Gaussian prior on each coefficient with variance sigma2.
    prior = (-(beta * beta) / sigma2 - torch.log(sigma2)).sum() * 0.5
    if not self.gibbs:
        # Inverse-Gamma(0.5, 0.5) prior on sigma2, plus the log-Jacobian
        # of the log reparameterization.
        prior += log_inv_gamma_density(x=sigma2, alpha=0.5, beta=0.5)
        prior += self.log_sigma2
    posterior = prior + likelihood
    return -posterior
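log_inv_gamma_density is another helper that is not shown. Assuming the standard Inverse-Gamma(alpha, beta) parameterization, its log-density is alpha*log(beta) - log Gamma(alpha) - (alpha + 1)*log(x) - beta/x, which could be written as:

import math
import torch

def log_inv_gamma_density(x, alpha, beta):
    # log Inverse-Gamma(alpha, beta) density at x > 0:
    # alpha*log(beta) - lgamma(alpha) - (alpha + 1)*log(x) - beta/x
    const = alpha * math.log(beta) - math.lgamma(alpha)
    return const - (alpha + 1.0) * torch.log(x) - beta / x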
Example #7
def forward(self, input=None):
    # Use the stored training data unless a batch is supplied explicitly.
    if input is None:
        X = self.X
        y = self.y
    else:
        X = Variable(input["input"],
                     requires_grad=False).type(self.precision_type)
        y = Variable(input["target"],
                     requires_grad=False).type(self.precision_type)
    num_ob = X.shape[0]
    likelihood = torch.dot(self.beta, torch.mv(torch.t(X), y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(num_ob)),
                                           torch.mv(X, self.beta)))
    # Gaussian prior on beta with standard deviation sigma.
    prior = -torch.dot(self.beta, self.beta) / (self.sigma * self.sigma) * 0.5
    posterior = prior + likelihood
    return -posterior
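A hypothetical call pattern for the optional input argument: the same objective can be evaluated on the training data held in the object or on a supplied batch, e.g. a held-out set:

heldout = {"input": X_test, "target": y_test}  # hypothetical held-out tensors
nlp_train = model.forward()                    # uses self.X and self.y
nlp_test = model.forward(input=heldout)        # uses the supplied batch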
Example #8
def V(beta):
    # Negative log-posterior (potential energy) with a standard-normal
    # prior on beta; X, y, and num_ob are globals here.
    likelihood = torch.dot(beta, torch.mv(torch.t(X), y)) - \
                 torch.sum(logsumexp_torch(Variable(torch.zeros(num_ob)),
                                           torch.mv(X, beta)))
    prior = -torch.dot(beta, beta) * 0.5
    posterior = prior + likelihood
    return -posterior
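V is the standard potential-energy form used in Hamiltonian Monte Carlo (here with a standard-normal prior), and its gradient is what a leapfrog integrator consumes. A minimal autograd sketch, assuming X, y, num_ob, and logsumexp_torch are already in scope:

import torch

beta = torch.zeros(X.shape[1], requires_grad=True)
energy = V(beta)
# dV/dbeta, e.g. for the momentum half-steps of a leapfrog update.
grad_beta, = torch.autograd.grad(energy, beta)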