Example #1
    def predict_density(self, Fmus, Fvars, Y):
        """
        Given a Normal distribution for the latent function, and a datum Y, compute the log predictive density of Y.
        i.e. if :math:`p(f_* | y) = \\mathcal{N}(Fmu, Fvar)` and :math:`p(y_*|f_*)` is the likelihood, then this
        method computes the log predictive density :math:`\\log \\int p(y_*|f_*)\\, p(f_* | y)\\, df_*`. Here, we implement a
        Monte-Carlo routine.

        Parameters
        ----------
        Fmus : array/tensor, shape=(N, K)
            Mean(s) of Gaussian density.
        Fvars : array/tensor, shape=(N, K(, K))
            Covariance(s) of Gaussian density.
        Y : array/tensor, shape=(N(, K))
            Observed data at which the log predictive density is evaluated.

        Returns
        -------
        log_density : array/tensor, shape=(N(, K))
            Log predictive density.
        """
        if isinstance(self.invlink, SoftArgMax):
            return ndiag_mc(self.logp,
                            self.num_monte_carlo_points,
                            Fmus,
                            Fvars,
                            logspace=True,
                            epsilon=None,
                            Y=Y)
        else:
            raise NotImplementedError
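
The Monte-Carlo routine mentioned in the docstring reduces to sampling f_* from N(Fmu, Fvar), evaluating the likelihood at each sample, and averaging in log space with log-sum-exp. A minimal standalone sketch in NumPy; the function name log_predictive_density_mc and the diagonal-Gaussian assumption are illustrative, not part of the snippet above:

import numpy as np
from scipy.special import logsumexp

def log_predictive_density_mc(logp, Fmu, Fvar, Y, num_samples=1000, rng=None):
    """Monte-Carlo estimate of log \\int p(y|f) N(f | Fmu, Fvar) df, elementwise.

    Fmu, Fvar, Y : arrays of shape (N, K); the Gaussian is diagonal.
    logp(F, Y) must return the elementwise log-likelihood, shape (S, N, K).
    """
    rng = np.random.default_rng() if rng is None else rng
    S = num_samples
    # Draw S samples from the diagonal Gaussian q(f) = N(Fmu, Fvar).
    eps = rng.standard_normal((S,) + Fmu.shape)
    F = Fmu[None, ...] + np.sqrt(Fvar)[None, ...] * eps            # (S, N, K)
    # Average p(y|f) over samples in log space: log(1/S * sum_s exp(logp_s)).
    logps = logp(F, np.broadcast_to(Y, F.shape))                   # (S, N, K)
    return logsumexp(logps, axis=0) - np.log(S)                    # (N, K)

For a Gaussian likelihood this converges to the analytic log N(Y | Fmu, Fvar + noise_variance), which is a convenient sanity check for the estimator.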
Example #2
    def variational_expectations(self, Fmu, Fvar, Y, Y_var, freq):
        r"""
        Compute the expected log density of the data, given a Gaussian
        distribution for the function values.
        if
            q(f) = N(Fmu, Fvar)
        and this object represents
            p(y|f)
        then this method computes
           \int (\log p(y|f)) q(f) df.
        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (Gaussian, Poisson) will implement specific cases.
        """

        if self.use_mc:
            # Monte-Carlo estimate of the variational expectations.
            return ndiag_mc(self.logp,
                            self.num_mc_samples,
                            Fmu,
                            Fvar,
                            Y=Y,
                            Y_var=Y_var,
                            freq=freq)
        # Default: Gauss-Hermite quadrature.
        return ndiagquad(self.logp,
                         self.num_gauss_hermite_points,
                         Fmu,
                         Fvar,
                         Y=Y,
                         Y_var=Y_var,
                         freq=freq)
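
The quadrature branch approximates \int \log p(y|f) q(f) df with a fixed set of Gauss-Hermite nodes and weights. A self-contained sketch of that rule for a diagonal q(f), using numpy.polynomial.hermite.hermgauss; the helper name ve_gauss_hermite and the keyword handling are illustrative assumptions, not library API:

import numpy as np

def ve_gauss_hermite(log_density, Fmu, Fvar, num_points=20, **Ys):
    """Gauss-Hermite estimate of E_{N(f|Fmu,Fvar)}[log p(y|f)], elementwise.

    Fmu, Fvar and every array in Ys share the same shape, e.g. (N,) or (N, K).
    """
    # Physicists' Hermite nodes/weights: \int e^{-x^2} g(x) dx ~= sum_i w_i g(x_i).
    x, w = np.polynomial.hermite.hermgauss(num_points)
    # Change of variables f = Fmu + sqrt(2 * Fvar) * x turns the Gaussian
    # expectation into the e^{-x^2}-weighted integral above.
    F = Fmu[..., None] + np.sqrt(2.0 * Fvar)[..., None] * x        # (..., P)
    Ys_exp = {k: v[..., None] for k, v in Ys.items()}              # broadcast data
    vals = log_density(F, **Ys_exp)                                # (..., P)
    return (vals * w).sum(axis=-1) / np.sqrt(np.pi)

The 1/sqrt(pi) factor comes from the substitution above; with 20 nodes the rule is exact for log-densities that are polynomials of degree up to 39 in f.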
Example #3
    def _mc_quadrature(self,
                       funcs,
                       Fmu,
                       Fvar,
                       logspace: bool = False,
                       epsilon=None,
                       **Ys):
        # Thin wrapper: Monte-Carlo quadrature over the diagonal Gaussian N(Fmu, Fvar).
        return ndiag_mc(funcs, self.num_monte_carlo_points, Fmu, Fvar,
                        logspace=logspace, epsilon=epsilon, **Ys)
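
For orientation, the helper being wrapped has roughly the shape sketched below: draw S samples from the diagonal Gaussian N(Fmu, Fvar), evaluate one or several functions on them, and average (in log space via log-sum-exp when requested). This is an illustrative NumPy re-implementation under assumed conventions, not the library's actual ndiag_mc; the name ndiag_mc_sketch and the (S, N, K) sample layout are assumptions:

import numpy as np
from scipy.special import logsumexp

def ndiag_mc_sketch(funcs, S, Fmu, Fvar, logspace=False, epsilon=None, **Ys):
    """Monte-Carlo averages of funcs under the diagonal Gaussian N(Fmu, Fvar)."""
    if epsilon is None:
        epsilon = np.random.default_rng().standard_normal((S,) + np.shape(Fmu))
    F = Fmu[None, ...] + np.sqrt(Fvar)[None, ...] * epsilon        # (S, ...)
    Ys_exp = {k: np.broadcast_to(v, F.shape) for k, v in Ys.items()}
    funcs = funcs if isinstance(funcs, (list, tuple)) else [funcs]
    if logspace:
        # Average in log space: log(1/S * sum_s exp(f_s)).
        results = [logsumexp(f(F, **Ys_exp), axis=0) - np.log(S) for f in funcs]
    else:
        results = [f(F, **Ys_exp).mean(axis=0) for f in funcs]
    return results if len(results) > 1 else results[0]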
Example #4
File: svgp.py Project: Mr-G1998/mogpe
    def single_predict_mean(args):
        Fmu, Fvar = args
        # Second-moment integrand: E[y^2 | f] = Var[y | f] + E[y | f]^2.
        integrand2 = lambda *X: self.likelihood.conditional_variance(*X) + tf.square(
            self.likelihood.conditional_mean(*X))
        E_y, E_y2 = ndiag_mc(
            [self.likelihood.conditional_mean, integrand2],
            S=self.likelihood.num_monte_carlo_points,
            Fmu=Fmu,
            Fvar=Fvar,
            epsilon=None)
        # Only the predictive mean is returned; E_y2 - E_y**2 would give the variance.
        return E_y
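
This snippet estimates two integrands in one pass: the conditional mean gives E[y], and conditional variance plus squared conditional mean gives E[y^2], from which the predictive variance follows as Var[y] = E[y^2] - E[y]^2. A standalone NumPy sketch of that moment computation (the function name and signature are placeholders, not from mogpe):

import numpy as np

def predict_mean_and_var_mc(cond_mean, cond_var, Fmu, Fvar, num_samples=1000):
    """Predictive mean/variance of y by sampling f ~ N(Fmu, Fvar) (diagonal)."""
    rng = np.random.default_rng()
    F = Fmu[None, ...] + np.sqrt(Fvar)[None, ...] * rng.standard_normal(
        (num_samples,) + Fmu.shape)
    E_y = cond_mean(F).mean(axis=0)                          # E[y] = E_f[E[y|f]]
    E_y2 = (cond_var(F) + cond_mean(F) ** 2).mean(axis=0)    # E[y^2]
    return E_y, E_y2 - E_y ** 2                              # Var[y] = E[y^2] - E[y]^2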
Example #5
    def variational_expectations(self,
                                 Fmu,
                                 Fvar,
                                 Y,
                                 Y_var,
                                 freq,
                                 mc=False,
                                 mvn=False):
        r"""
        Compute the expected log density of the data, given a Gaussian
        distribution for the function values.
        if
            q(f) = N(Fmu, Fvar)
        and this object represents
            p(y|f)
        then this method computes
           \int (\log p(y|f)) q(f) df.
        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (Gaussian, Poisson) will implement specific cases.
        """
        if mvn:
            assert len(Fvar.shape) == 3

        if not mvn:
            if not mc:
                return ndiagquad(self.logp,
                                 self.num_gauss_hermite_points,
                                 Fmu,
                                 Fvar,
                                 Y=Y,
                                 Y_var=Y_var,
                                 freq=freq)
            else:
                return ndiag_mc(self.logp,
                                self.num_mc_samples,
                                Fmu,
                                Fvar,
                                Y=Y,
                                Y_var=Y_var,
                                freq=freq)
        else:
            if not mc:
                raise ValueError(
                    "Quadrature with a full-covariance Fvar (mvn=True) is "
                    "prohibitively slow; set mc=True to use Monte-Carlo instead.")
            else:
                return mvn_mc(self.logp,
                              self.num_mc_samples,
                              Fmu,
                              Fvar,
                              Y=Y,
                              Y_var=Y_var,
                              freq=freq)
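
The mvn=True path needs samples from a full-covariance Gaussian, which is why only the Monte-Carlo route is offered: tensor-product Gauss-Hermite over K correlated dimensions scales exponentially in K. A minimal sketch of the correlated sampling step via a Cholesky factor (mvn_mc_sketch is an assumed name and signature, not the project's mvn_mc):

import numpy as np

def mvn_mc_sketch(func, S, Fmu, Fcov, **Ys):
    """Monte-Carlo E[func(f, **Ys)] with f ~ N(Fmu, Fcov), Fcov of shape (N, K, K)."""
    rng = np.random.default_rng()
    L = np.linalg.cholesky(Fcov)                              # (N, K, K)
    eps = rng.standard_normal((S,) + Fmu.shape)               # (S, N, K)
    # Correlated samples: f = Fmu + L @ eps for each data point n.
    F = Fmu[None, ...] + np.einsum('nij,snj->sni', L, eps)    # (S, N, K)
    Ys_exp = {k: np.broadcast_to(v, (S,) + np.shape(v)) for k, v in Ys.items()}
    return func(F, **Ys_exp).mean(axis=0)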