def predict_density(self, Fmus, Fvars, Y):
    """
    Given a Normal distribution for the latent function and a datum Y,
    compute the log predictive density of Y, i.e. if
    :math:`p(f_* | y) = \\mathcal{N}(Fmus, Fvars)` and :math:`p(y_*|f_*)` is
    the likelihood, then this method computes the log predictive density
    :math:`\\log \\int p(y_*|f) p(f_* | y) df`.

    Here, we implement a Monte-Carlo routine.

    Parameters
    ----------
    Fmus : array/tensor, shape=(N, K)
        Mean(s) of the Gaussian density.
    Fvars : array/tensor, shape=(N, K(, K))
        Covariance(s) of the Gaussian density.
    Y : array/tensor, shape=(N(, K))
        Observed data at which the log predictive density is evaluated;
        passed by name to the log-likelihood.

    Returns
    -------
    log_density : array/tensor, shape=(N(, K))
        Log predictive density.
    """
    if isinstance(self.invlink, SoftArgMax):
        return ndiag_mc(self.logp, self.num_monte_carlo_points, Fmus, Fvars,
                        logspace=True, epsilon=None, Y=Y)
    raise NotImplementedError(
        "predict_density is only implemented for the SoftArgMax inverse link")
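
# A minimal NumPy sketch (not part of the codebase) of what the Monte-Carlo
# routine above estimates, assuming a diagonal Gaussian q(f_*) and a
# log-likelihood `logp(F, Y)` that broadcasts over a leading sample axis.
# The helper name is hypothetical; ndiag_mc with logspace=True performs the
# same log-mean-exp averaging in its own batched way.
import numpy as np


def _mc_log_predictive_density_sketch(logp, Fmu, Fvar, Y, num_samples=1000, seed=0):
    rng = np.random.default_rng(seed)
    # Draw S samples f ~ N(Fmu, Fvar) with diagonal covariance; shape (S, N, K).
    eps = rng.standard_normal((num_samples,) + np.shape(Fmu))
    F = Fmu[None, ...] + np.sqrt(Fvar)[None, ...] * eps
    logps = logp(F, Y[None, ...])
    # Average in log-space (log-mean-exp) for numerical stability:
    # log (1/S) sum_s exp(log p(y | f_s)).
    m = logps.max(axis=0)
    return m + np.log(np.mean(np.exp(logps - m[None, ...]), axis=0))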
def variational_expectations(self, Fmu, Fvar, Y, Y_var, freq):
    r"""
    Compute the expected log density of the data, given a Gaussian
    distribution for the function values.

    If q(f) = N(Fmu, Fvar) and this object represents p(y|f), then this
    method computes

        \int (\log p(y|f)) q(f) df.

    Here, we implement a default Gauss-Hermite quadrature routine and fall
    back to Monte-Carlo when `use_mc` is set. Some likelihoods (Gaussian,
    Poisson) will implement specific analytic cases.
    """
    if self.use_mc:
        return ndiag_mc(self.logp, self.num_mc_samples, Fmu, Fvar,
                        Y=Y, Y_var=Y_var, freq=freq)
    return ndiagquad(self.logp, self.num_gauss_hermite_points, Fmu, Fvar,
                     Y=Y, Y_var=Y_var, freq=freq)
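
# A minimal, self-contained sketch (assumed, not from the source) of the
# Gauss-Hermite route taken by ndiagquad above, for a single scalar
# q(f) = N(mu, var) and a log-likelihood `logp(f, y)` that broadcasts over f.
import numpy as np


def _gh_expected_log_density_sketch(logp, mu, var, y, num_points=20):
    # Nodes/weights for \int exp(-x^2) g(x) dx; substitute f = mu + sqrt(2 var) x
    # so E_{N(mu, var)}[log p(y|f)] ~= (1/sqrt(pi)) sum_i w_i log p(y | f_i).
    x, w = np.polynomial.hermite.hermgauss(num_points)
    f = mu + np.sqrt(2.0 * var) * x
    return np.sum(w * logp(f, y)) / np.sqrt(np.pi)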
def _mc_quadrature(self, funcs, Fmu, Fvar, logspace: bool = False, epsilon=None, **Ys):
    # Thin wrapper around ndiag_mc; the **Ys kwargs are forwarded by name to `funcs`.
    return ndiag_mc(funcs, self.num_monte_carlo_points, Fmu, Fvar,
                    logspace=logspace, epsilon=epsilon, **Ys)
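
# A self-contained toy (names hypothetical, not library code) illustrating the
# **Ys convention assumed above: every keyword is forwarded by name to the
# integrand, so extra observed quantities (Y, Y_var, ...) ride along.
import numpy as np


def _toy_ndiag_mc(func, S, Fmu, Fvar, **Ys):
    F = Fmu + np.sqrt(Fvar) * np.random.randn(S, *np.shape(Fmu))
    return np.mean(func(F, **Ys), axis=0)


def _toy_logp(F, Y):
    # Unit-noise Gaussian log-density, broadcast over the leading sample axis.
    return -0.5 * (np.log(2.0 * np.pi) + (Y - F) ** 2)


# Example: E_q[log p(Y | f)] with q = N(0, 1) at three points.
# _toy_ndiag_mc(_toy_logp, S=500, Fmu=np.zeros(3), Fvar=np.ones(3), Y=np.ones(3))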
def single_predict_mean(args):
    # Per-point predictive mean: integrate the likelihood's conditional mean
    # against the Gaussian N(Fmu, Fvar) by Monte Carlo.
    Fmu, Fvar = args
    # E[y^2 | f] = Var[y|f] + E[y|f]^2, estimated alongside the mean so the
    # predictive variance could be formed as E_y2 - E_y^2 if needed.
    integrand2 = lambda *X: self.likelihood.conditional_variance(*X) \
        + tf.square(self.likelihood.conditional_mean(*X))
    E_y, E_y2 = ndiag_mc(
        [self.likelihood.conditional_mean, integrand2],
        S=self.likelihood.num_monte_carlo_points,
        Fmu=Fmu,
        Fvar=Fvar,
        epsilon=None)
    # Only the predictive mean is returned; E_y2 is discarded here.
    return E_y
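
# A self-contained sketch (hypothetical names) of the identity behind
# `integrand2` above: by the law of total variance,
#   Var[y] = E_f[Var[y|f]] + E_f[E[y|f]^2] - (E_f[E[y|f]])^2,
# so estimating E_y2 = E_f[Var[y|f] + E[y|f]^2] alongside E_y is enough to
# recover the predictive variance as E_y2 - E_y^2.
import numpy as np


def _mc_predict_mean_and_var_sketch(cond_mean, cond_var, Fmu, Fvar, S=2000, seed=0):
    rng = np.random.default_rng(seed)
    F = Fmu + np.sqrt(Fvar) * rng.standard_normal((S,) + np.shape(Fmu))
    E_y = np.mean(cond_mean(F), axis=0)
    E_y2 = np.mean(cond_var(F) + cond_mean(F) ** 2, axis=0)
    return E_y, E_y2 - E_y ** 2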
def variational_expectations(self, Fmu, Fvar, Y, Y_var, freq, mc=False, mvn=False):
    r"""
    Compute the expected log density of the data, given a Gaussian
    distribution for the function values.

    If q(f) = N(Fmu, Fvar) and this object represents p(y|f), then this
    method computes

        \int (\log p(y|f)) q(f) df.

    Here, we implement a default Gauss-Hermite quadrature routine, but some
    likelihoods (Gaussian, Poisson) will implement specific cases. Set
    `mc=True` for a Monte-Carlo estimate and `mvn=True` when Fvar is a full
    covariance of shape (N, K, K).
    """
    if not mvn:
        if mc:
            return ndiag_mc(self.logp, self.num_mc_samples, Fmu, Fvar,
                            Y=Y, Y_var=Y_var, freq=freq)
        return ndiagquad(self.logp, self.num_gauss_hermite_points, Fmu, Fvar,
                         Y=Y, Y_var=Y_var, freq=freq)

    assert len(Fvar.shape) == 3, "mvn=True expects a full covariance of shape (N, K, K)"
    if not mc:
        raise ValueError(
            "Gauss-Hermite quadrature over a full covariance is too slow; use mc=True")
    return mvn_mc(self.logp, self.num_mc_samples, Fmu, Fvar,
                  Y=Y, Y_var=Y_var, freq=freq)
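
# A minimal sketch (assumed, not from the source) of the full-covariance
# sampling that a routine like mvn_mc relies on when Fvar has shape (N, K, K):
# correlated samples f = mu + L eps, with L the Cholesky factor of Fvar.
import numpy as np


def _sample_full_cov_sketch(Fmu, Fvar, S, seed=0):
    rng = np.random.default_rng(seed)
    L = np.linalg.cholesky(Fvar)                             # (N, K, K)
    eps = rng.standard_normal((S,) + np.shape(Fmu) + (1,))   # (S, N, K, 1)
    return Fmu[None, ...] + (L[None, ...] @ eps)[..., 0]     # (S, N, K)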