Example #1
    def get_vlb(self,
                vlb_c=True,
                vlb_p=True,
                vlb_v=True,
                vlb_m=True):
        vlb = 0

        # Get the VLB of the expected class assignments
        if vlb_c:
            E_ln_m = self.expected_log_m()
            for k in range(self.K):
                # Add the cross entropy of p(c | m)
                vlb += Discrete().negentropy(E_x=self.mf_m[k,:], E_ln_p=E_ln_m)

                # Subtract the negative entropy of q(c)
                vlb -= Discrete(self.mf_m[k,:]).negentropy()

        # Get the VLB of the connection probability matrix
        # Add the cross entropy of p(p | tau1, tau0)
        if vlb_p:
            vlb += Beta(self.tau1, self.tau0).\
                negentropy(E_ln_p=(psi(self.mf_tau1) - psi(self.mf_tau0 + self.mf_tau1)),
                           E_ln_notp=(psi(self.mf_tau0) - psi(self.mf_tau0 + self.mf_tau1))).sum()

            # Subtract the negative entropy of q(p)
            vlb -= Beta(self.mf_tau1, self.mf_tau0).negentropy().sum()

        # Get the VLB of the weight scale matrix, v
        # Add the cross entropy of p(v | alpha, beta)
        if vlb_v:
            vlb += Gamma(self.alpha, self.beta).\
                negentropy(E_lambda=self.mf_alpha/self.mf_beta,
                           E_ln_lambda=psi(self.mf_alpha) - np.log(self.mf_beta)).sum()

            # Subtract the negative entropy of q(v)
            vlb -= Gamma(self.mf_alpha, self.mf_beta).negentropy().sum()

        # Get the VLB of the block probability vector, m
        # Add the cross entropy of p(m | pi)
        if vlb_m:
            vlb += Dirichlet(self.pi).negentropy(E_ln_g=self.expected_log_m())

            # Subtract the negative entropy of q(m)
            vlb -= Dirichlet(self.mf_pi).negentropy()

        return vlb
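Each guarded block above follows the same pattern: add the cross entropy E_q[ln p(x | prior)] and subtract the negative entropy E_q[ln q(x)], so each block contributes one term of the evidence lower bound. A minimal sketch of that pattern for the Dirichlet term on m, using scipy directly in place of the repository's Dirichlet wrapper (the values of pi and mf_pi below are made up for illustration):

import numpy as np
from scipy.special import psi, gammaln

def dirichlet_negentropy(params, E_ln_g):
    # E[ln Dir(g | params)] with E[ln g] plugged in; when params are
    # q's own parameters this is the negative entropy of q, matching
    # the cross-entropy / negentropy pattern in get_vlb above.
    return (gammaln(params.sum()) - gammaln(params).sum()
            + ((params - 1.0) * E_ln_g).sum())

pi = np.ones(3)                         # hypothetical prior concentration
mf_pi = np.array([2.0, 5.0, 1.5])       # hypothetical variational parameters
E_ln_m = psi(mf_pi) - psi(mf_pi.sum())  # E_q[ln m] under q(m) = Dir(mf_pi)

vlb_m = dirichlet_negentropy(pi, E_ln_m)      # + E_q[ln p(m | pi)]
vlb_m -= dirichlet_negentropy(mf_pi, E_ln_m)  # - E_q[ln q(m)]
print(vlb_m)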
Example #2
    def get_vlb(self):
        """
        Variational lower bound for \lambda_k^0
        E[LN p(g | \gamma)] -
        E[LN q(g | \tilde{\gamma})]
        :return:
        """
        vlb = 0

        # First term
        # E[LN p(g | \gamma)]
        E_ln_g = self.expected_log_g()
        vlb += Dirichlet(self.gamma[None, None, :]).negentropy(E_ln_g=E_ln_g).sum()

        # Second term
        # E[LN q(g | \tilde{\gamma})]
        vlb -= Dirichlet(self.mf_gamma).negentropy().sum()

        return vlb
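Both terms hinge on expected_log_g, the standard Dirichlet expectation E[ln g_b] = psi(gamma_b) - psi(sum_b' gamma_b'). A minimal sketch of that helper under an assumed (K, K, B) parameter layout (the shape and values are made up):

import numpy as np
from scipy.special import psi

def expected_log_g(mf_gamma):
    # E_q[ln g] under q(g) = Dir(mf_gamma), taken along the last axis
    return psi(mf_gamma) - psi(mf_gamma.sum(axis=-1, keepdims=True))

mf_gamma = np.random.rand(2, 2, 4) + 1.0  # hypothetical (K, K, B) parameters
E_ln_g = expected_log_g(mf_gamma)
# Jensen's inequality: exp(E[ln g]) <= E[g], so these sums stay below 1
assert (np.exp(E_ln_g).sum(axis=-1) <= 1.0).all()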
Example #3
    def log_likelihood(self, x):
        """
        Compute the log likelihood of a set of SBM parameters

        :param x:    (m,p,v) tuple
        :return:
        """
        m, p, v = x

        lp = 0
        lp += Dirichlet(self.pi).log_probability(m)
        lp += Gamma(self.alpha, self.beta).log_probability(v).sum()
        return lp
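Note that p is unpacked but never scored here; this variant evaluates only the Dirichlet prior on m and the Gamma prior on v. The same two terms can be reproduced with scipy.stats directly (the hyperparameters and shapes below are illustrative):

import numpy as np
from scipy.stats import dirichlet, gamma

pi = np.ones(3)         # hypothetical Dirichlet concentration
alpha, beta = 2.0, 1.0  # hypothetical Gamma shape and rate

m = np.random.dirichlet(pi)
v = np.random.gamma(alpha, 1.0 / beta, size=(3, 3))

lp = dirichlet(pi).logpdf(m)
lp += gamma(alpha, scale=1.0 / beta).logpdf(v).sum()  # rate -> scale = 1/beta
print(lp)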
Example #4
    def log_likelihood(self, x):
        """
        Compute the log likelihood of a set of SBM parameters

        :param x:    (m,p,v,c) tuple
        :return:
        """
        m, p, v, c = x

        lp = 0
        lp += Dirichlet(self.pi).log_probability(m)
        lp += Beta(self.tau1 * np.ones((self.C, self.C)),
                   self.tau0 * np.ones((self.C, self.C))).log_probability(p).sum()
        lp += Gamma(self.alpha, self.beta).log_probability(v).sum()
        # Add the categorical likelihood of the block labels, ln p(c | m)
        lp += (np.log(m)[c]).sum()
        return lp
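Relative to Example #3, this variant adds a Beta prior on every entry of the C x C connection matrix p and the categorical likelihood of the block labels, since np.log(m)[c].sum() is sum_n ln m_{c_n} = ln p(c | m). A standalone sketch of those two extra terms (C, N, and the hyperparameters are made up):

import numpy as np
from scipy.stats import beta as beta_dist

C, N = 3, 10
tau1, tau0 = 1.0, 1.0  # hypothetical Beta hyperparameters

p = np.random.beta(tau1, tau0, size=(C, C))  # connection probabilities
m = np.random.dirichlet(np.ones(C))          # block probability vector
c = np.random.randint(C, size=N)             # block label per node

lp = beta_dist(tau1, tau0).logpdf(p).sum()   # ln p(p | tau1, tau0)
lp += np.log(m)[c].sum()                     # ln p(c | m) = sum_n ln m_{c_n}
print(lp)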