Example #1
    def _post_sample(self,
                     f_mean,
                     f_var=None,
                     expectedlog=False,
                     K_star=None,
                     v=None,
                     u=None):

        if v is None:
            v = self.pref_v
        if u is None:
            u = self.pref_u

        if np.isscalar(f_mean):
            N = 1
        else:
            N = f_mean.shape[0]

        # We do not have the full posterior covariance f_cov here, so approximate the
        # posterior over the preference probabilities by sampling.
        if K_star is not None and self.use_svi:
            # Sample at the inducing points, since the full covariance matrix is not available.
            # In this case, K_star should be the cross-covariance Ks_nm between the test points
            # and the inducing points.
            f_samples = mvn.rvs(mean=self.um_minus_mu0.flatten(),
                                cov=self.uS,
                                size=1000).T
            f_samples = K_star.dot(self.invK_mm).dot(f_samples) + f_mean
        elif K_star is not None:
            f_samples = mvn.rvs(mean=f_mean.flatten(), cov=K_star, size=1000).T
        else:
            f_samples = np.random.normal(loc=f_mean,
                                         scale=np.sqrt(f_var),
                                         size=(N, 1000))

        # g_f = (f_samples[v, :] - f_samples[u, :]) / np.sqrt(2)
        # phi = norm.cdf(g_f)  # the probability of the actual observation, which takes g_f as a parameter

        if N == 1:
            phi = self.forward_model(f_samples, v=[0], u=[0])
        else:
            phi = self.forward_model(f_samples, v=v, u=u)

        phi = temper_extreme_probs(phi)
        if expectedlog:
            phi = np.log(phi)
            notphi = np.log(1 - phi)
        else:
            notphi = 1 - phi

        m_post = np.mean(phi, axis=1)[:, np.newaxis]
        not_m_post = np.mean(notphi, axis=1)[:, np.newaxis]
        v_post = np.var(phi, axis=1)[:, np.newaxis]
        v_post = temper_extreme_probs(v_post, zero_only=True)
        # Clamp the variance to a small value where the predictive probability is extreme;
        # this guards against numerical problems, though it may not be needed and can lower
        # the variance.
        v_post[m_post * (1 - not_m_post) <= 1e-7] = 1e-8

        return m_post, not_m_post, v_post
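The method above approximates the posterior over preference probabilities by Monte Carlo: latent function samples are drawn from a Gaussian posterior (projected through the inducing points in the SVI case), pushed through the preference forward model, and averaged. The following minimal sketch illustrates the same idea in isolation; the probit form Phi((f_v - f_u) / sqrt(2)) and all names here are assumptions standing in for self.forward_model, not the library's API.

import numpy as np
from scipy.stats import multivariate_normal as mvn, norm

def mc_pref_probs(f_mean, f_cov, v, u, n_samples=1000):
    # Draw samples of the latent function from its Gaussian posterior: shape (N, n_samples).
    f_samples = mvn.rvs(mean=f_mean, cov=f_cov, size=n_samples).T
    # Probability that item v is preferred to item u under each sample (probit model).
    phi = norm.cdf((f_samples[v, :] - f_samples[u, :]) / np.sqrt(2))
    # Monte Carlo estimates of the predictive mean and variance of the preference probability.
    return phi.mean(axis=1), phi.var(axis=1)

# Toy usage: three items, one preference pair (item 0 vs item 2).
f_mean = np.array([0.5, 0.0, -0.5])
f_cov = 0.1 * np.eye(3)
m, var = mc_pref_probs(f_mean, f_cov, v=np.array([0]), u=np.array([2]))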
Example #2
    def _post_rough(self, f_mean, f_cov=None, pref_v=None, pref_u=None):
        '''
        When making predictions, we want to predict the probability of each listed preference pair.
        Apply the forward model to the mean of the latent function: if no covariance is given,
        the uncertainty in f itself is ignored and only the uncertainty due to the noise sigma
        is considered; otherwise, the variance of the difference f_v - f_u is also passed in.
        '''
        if pref_v is None:
            pref_v = self.pref_v
        if pref_u is None:
            pref_u = self.pref_u

        # Note: it is unclear whether a covariance correction is needed here; previously the
        # maths seemed to show it wasn't needed at all.
        if f_cov is None:
            m_post = self.forward_model(f_mean,
                                        None,
                                        v=pref_v,
                                        u=pref_u,
                                        return_g_f=False)
        else:
            # Since we take the difference of the two f-values, the cross-covariance terms enter
            # with a negative sign, hence the '-' in the last two covariance terms here:
            # Var(f_v - f_u) = C[v,v] + C[u,u] - C[v,u] - C[u,v].
            m_post = self.forward_model(
                f_mean,
                f_cov[pref_v, pref_v] + f_cov[pref_u, pref_u] -
                f_cov[pref_v, pref_u] - f_cov[pref_u, pref_v],
                v=pref_v,
                u=pref_u,
                return_g_f=False)
        m_post = temper_extreme_probs(m_post)
        not_m_post = 1 - m_post

        return m_post, not_m_post
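The '-' signs in the covariance expression above follow from the variance of a difference of jointly Gaussian values: Var(f_v - f_u) = C[v,v] + C[u,u] - C[v,u] - C[u,v]. A quick illustrative check (the covariance matrix below is made up):

import numpy as np

C = np.array([[1.0, 0.6],
              [0.6, 1.5]])  # joint covariance of (f_v, f_u)
v, u = 0, 1
var_diff = C[v, v] + C[u, u] - C[v, u] - C[u, v]  # 1.0 + 1.5 - 0.6 - 0.6 = 1.3

# Monte Carlo sanity check: the empirical variance should be close to var_diff.
samples = np.random.multivariate_normal(np.zeros(2), C, size=200000)
print(var_diff, np.var(samples[:, v] - samples[:, u]))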
Example #3
    def _logpt(self):
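        # Log-likelihood (and log of its complement) of each observed preference pair,
        # evaluated at the current estimate of the latent function values obs_f.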
        rho = pref_likelihood(self.obs_f, v=self.pref_v, u=self.pref_u)
        rho = temper_extreme_probs(rho)

        return np.log(rho), np.log(1 - rho)
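_logpt evaluates the preference likelihood at the current latent function estimate and returns its log and the log of its complement. Below is a minimal sketch of what a probit preference likelihood such as pref_likelihood typically computes; the sqrt(2) noise scaling is an assumption here, not necessarily the library's exact definition.

import numpy as np
from scipy.stats import norm

def probit_pref_likelihood(f, v, u):
    # Probability of preferring item v over item u, given latent utilities f.
    return norm.cdf((f[v] - f[u]) / np.sqrt(2))

f = np.array([1.2, 0.3, -0.4])
rho = probit_pref_likelihood(f, v=np.array([0, 1]), u=np.array([1, 2]))
logp, lognotp = np.log(rho), np.log(1 - rho)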