def test_diagquad_1d(mu, var):
    num_gauss_hermite_points = 25
    quad = ndiagquad([lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu],
                     [var])
    quad_old = ndiagquad_old([lambda *X: tf.exp(X[0])],
                             num_gauss_hermite_points, [mu], [var])
    assert_allclose(quad[0], quad_old[0])
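For reference, a minimal NumPy sketch (not GPflow's ndiagquad implementation) of the 1-D Gauss-Hermite rule these tests exercise: rescale the standard Hermite nodes to N(mu, var) and take a normalised weighted sum.

import numpy as np

def gauss_hermite_expectation(f, mu, var, num_points=25):
    # Nodes and weights for the weight function exp(-x^2).
    x, w = np.polynomial.hermite.hermgauss(num_points)
    X = mu + np.sqrt(2.0 * var) * x         # change of variables to N(mu, var)
    return (w / np.sqrt(np.pi)) @ f(X)      # weighted sum approximates E[f(X)]

# For f = np.exp and scalar mu, var this reproduces the closed form
# np.exp(mu + var / 2) used as the reference value elsewhere in these tests.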
Example #2
    def predict_density(self, Fmu, Fvar, Y):
        r"""
        Given a Normal distribution for the latent function, and a datum Y,
        compute the log predictive density of Y.

        i.e. if
            q(f) = N(Fmu, Fvar)

        and this object represents

            p(y|f)

        then this method computes the predictive density

            \log \int p(y=Y|f)q(f) df

        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (Gaussian, Poisson) will implement specific cases.
        """
        return ndiagquad(self.logp,
                         self.num_gauss_hermite_points,
                         Fmu,
                         Fvar,
                         logspace=True,
                         Y=Y)
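A hedged scalar sketch of what logspace=True amounts to here (not GPflow internals): evaluate log p(y|f) at the quadrature nodes and combine them with a log-sum-exp rather than exponentiating and taking the log afterwards.

import numpy as np
from scipy.special import logsumexp

def log_predictive_density_1d(logp, Fmu, Fvar, Y, num_points=25):
    x, w = np.polynomial.hermite.hermgauss(num_points)
    F = Fmu + np.sqrt(2.0 * Fvar) * x          # nodes for q(f) = N(Fmu, Fvar)
    log_w = np.log(w) - 0.5 * np.log(np.pi)    # log of the normalised weights
    return logsumexp(logp(F, Y) + log_w)       # log sum_i w_i p(y=Y | f_i)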
Example #3
def func_ndiagquad_autograph_false():
    mu = np.array([1.0, 1.3])
    var = np.array([3.0, 3.5])
    num_gauss_hermite_points = 25
    return quadrature.ndiagquad(
        [lambda *X: tf.exp(X[0])], num_gauss_hermite_points, [mu], [var]
    )
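Presumably the enclosing test compiles this helper with autograph disabled and compares against the eager result; a hedged sketch of that wrapping (only the helper name comes from the snippet above, the tf.function call is assumed):

import tensorflow as tf

compiled = tf.function(func_ndiagquad_autograph_false, autograph=False)
result = compiled()  # same quadrature values as calling the helper eagerly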
Example #4
    def variational_expectations(self, Fmu, Fvar, Y, Y_var, freq):
        r"""
        Compute the expected log density of the data, given a Gaussian
        distribution for the function values.
        if
            q(f) = N(Fmu, Fvar)
        and this object represents
            p(y|f)
        then this method computes
           \int (\log p(y|f)) q(f) df.
        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (Gaussian, Poisson) will implement specific cases.
        """

        if self.use_mc:
            # Monte Carlo estimate of the expectation when use_mc is set ...
            return ndiag_mc(self.logp,
                            self.num_mc_samples,
                            Fmu,
                            Fvar,
                            Y=Y,
                            Y_var=Y_var,
                            freq=freq)
        # ... otherwise fall back to Gauss-Hermite quadrature.
        return ndiagquad(self.logp,
                         self.num_gauss_hermite_points,
                         Fmu,
                         Fvar,
                         Y=Y,
                         Y_var=Y_var,
                         freq=freq)
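A minimal NumPy sketch of the diagonal Monte Carlo estimator that ndiag_mc stands in for when use_mc is set (assumed behaviour, not GPflow's implementation): sample f elementwise from q(f) and average log p(y|f) over the samples.

import numpy as np

def diag_mc_expectation(logp, num_samples, Fmu, Fvar, **Ys):
    eps = np.random.randn(num_samples, *np.shape(Fmu))  # [S, ...] standard normals
    F = Fmu + np.sqrt(Fvar) * eps                        # elementwise samples from q(f)
    return np.mean(logp(F, **Ys), axis=0)                # Monte Carlo average over S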
Example #5
    def predict_mean_and_var(self, Fmu, Fvar):
        r"""
        Given a Normal distribution for the latent function,
        return the mean of Y

        if
            q(f) = N(Fmu, Fvar)

        and this object represents

            p(y|f)

        then this method computes the predictive mean

           \int\int y p(y|f)q(f) df dy

        and the predictive variance

           \int\int y^2 p(y|f)q(f) df dy  - [ \int\int y p(y|f)q(f) df dy ]^2

        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (e.g. Gaussian) will implement specific cases.
        """
        integrand2 = lambda *X: self.conditional_variance(*X) + tf.square(self.conditional_mean(*X))
        E_y, E_y2 = ndiagquad([self.conditional_mean, integrand2],
                              self.num_gauss_hermite_points,
                              Fmu, Fvar)
        V_y = E_y2 - tf.square(E_y)
        return E_y, V_y
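A hedged scalar check of the moment identity above: with a Gaussian likelihood (conditional mean f, constant conditional variance), the quadrature should recover E[y] = Fmu and V[y] = Fvar plus the noise variance (values below are hypothetical).

import numpy as np

Fmu, Fvar, noise_var = 0.4, 0.3, 0.1
x, w = np.polynomial.hermite.hermgauss(25)
w = w / np.sqrt(np.pi)                        # normalised weights
F = Fmu + np.sqrt(2 * Fvar) * x               # quadrature nodes for q(f)
E_y = w @ F                                   # E[conditional mean]
E_y2 = w @ (noise_var + F ** 2)               # E[cond. variance + cond. mean^2]
assert np.isclose(E_y, Fmu)
assert np.isclose(E_y2 - E_y ** 2, Fvar + noise_var)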
Example #6
def test_diagquad_with_kwarg(mu1, var1):
    alpha = np.array([2.5, -1.3])
    num_gauss_hermite_points = 25
    quad = quadrature.ndiagquad(
        lambda X, Y: tf.exp(X * Y), num_gauss_hermite_points, mu1, var1, Y=alpha
    )
    expected = np.exp(alpha * mu1 + alpha ** 2 * var1 / 2)
    assert_allclose(quad, expected)
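The expected value in this test is the Gaussian moment-generating function, E[exp(alpha X)] = exp(alpha mu + alpha^2 var / 2) for X ~ N(mu, var); a quick hedged NumPy check with hypothetical scalars:

import numpy as np

mu, var, alpha = 0.7, 0.2, -1.3
x, w = np.polynomial.hermite.hermgauss(25)
approx = (w / np.sqrt(np.pi)) @ np.exp(alpha * (mu + np.sqrt(2 * var) * x))
assert np.isclose(approx, np.exp(alpha * mu + alpha ** 2 * var / 2))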
Example #7
def test_diagquad_logspace(mu1, var1, mu2, var2):
    alpha = 2.5
    num_gauss_hermite_points = 25
    quad = quadrature.ndiagquad(lambda *X: (X[0] + alpha * X[1]),
                                num_gauss_hermite_points, [mu1, mu2],
                                [var1, var2],
                                logspace=True)
    expected = mu1 + var1 / 2 + alpha * mu2 + alpha**2 * var2 / 2
    assert_allclose(quad, expected)
Example #8
def test_diagquad_2d(mu1, var1, mu2, var2):
    alpha = 2.5
    # using logspace=True we can reduce this, see test_diagquad_logspace
    num_gauss_hermite_points = 35
    quad = quadrature.ndiagquad(lambda *X: tf.exp(X[0] + alpha * X[1]),
                                num_gauss_hermite_points, [mu1, mu2],
                                [var1, var2])
    expected = np.exp(mu1 + var1 / 2 + alpha * mu2 + alpha**2 * var2 / 2)
    assert_allclose(quad, expected)
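A hedged NumPy sketch of the 2-D tensor-product rule this test exercises (not GPflow's vectorised implementation): rescale each dimension independently and sum the integrand over the grid of node pairs with product weights.

import numpy as np

def diag_quad_2d(f, mu1, var1, mu2, var2, num_points=35):
    x, w = np.polynomial.hermite.hermgauss(num_points)
    X1 = mu1 + np.sqrt(2 * var1) * x
    X2 = mu2 + np.sqrt(2 * var2) * x
    W = np.outer(w, w) / np.pi                      # normalised product weights
    return np.sum(W * f(X1[:, None], X2[None, :]))  # sum over the node grid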
Example #9
    def variational_expectations(self,
                                 Fmu,
                                 Fvar,
                                 Y,
                                 Y_var,
                                 freq,
                                 mc=False,
                                 mvn=False):
        r"""
        Compute the expected log density of the data, given a Gaussian
        distribution for the function values.
        if
            q(f) = N(Fmu, Fvar)
        and this object represents
            p(y|f)
        then this method computes
           \int (\log p(y|f)) q(f) df.
        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (Gaussian, Poisson) will implement specific cases.
        """
        if mvn:
            assert len(Fvar.shape) == 3

        if not mvn:
            if not mc:
                return ndiagquad(self.logp,
                                 self.num_gauss_hermite_points,
                                 Fmu,
                                 Fvar,
                                 Y=Y,
                                 Y_var=Y_var,
                                 freq=freq)
            else:
                return ndiag_mc(self.logp,
                                self.num_mc_samples,
                                Fmu,
                                Fvar,
                                Y=Y,
                                Y_var=Y_var,
                                freq=freq)
        else:
            if not mc:
                raise ValueError("Too slow to do this")
            else:
                return mvn_mc(self.logp,
                              self.num_mc_samples,
                              Fmu,
                              Fvar,
                              Y=Y,
                              Y_var=Y_var,
                              freq=freq)
def test_diagquad_with_kwarg(mu1, var1):
    alpha = np.array([2.5, -1.3])
    num_gauss_hermite_points = 25
    quad = ndiagquad(lambda X, Y: tf.exp(X * Y),
                     num_gauss_hermite_points,
                     mu1,
                     var1,
                     Y=alpha)
    quad_old = ndiagquad_old(lambda X, Y: tf.exp(X * Y),
                             num_gauss_hermite_points,
                             mu1,
                             var1,
                             Y=alpha)
    assert_allclose(quad, quad_old)
Example #11
def test_quadrature_variational_expectation(likelihood_setup, mu, var):
    """
    Where quadrature methods have been overwritten, make sure the new code
    does something close to the quadrature.
    """
    likelihood, y = likelihood_setup.likelihood, likelihood_setup.Y
    F1 = likelihood.variational_expectations(mu, var, y)
    F2 = ndiagquad(likelihood.log_prob,
                   likelihood.num_gauss_hermite_points,
                   mu,
                   var,
                   Y=y)
    assert_allclose(F1,
                    F2,
                    rtol=likelihood_setup.rtol,
                    atol=likelihood_setup.atol)
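For context, the Gaussian likelihood is one case where variational_expectations is overwritten with a closed form that the quadrature should match; a hedged sketch (sigma2 is a hypothetical noise variance, not a name from the snippet):

import numpy as np

def gaussian_variational_expectations(Fmu, Fvar, Y, sigma2):
    # E_q[log N(Y; f, sigma2)] for q(f) = N(Fmu, Fvar)
    return (-0.5 * np.log(2 * np.pi * sigma2)
            - 0.5 * ((Y - Fmu) ** 2 + Fvar) / sigma2)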
Example #12
    def predict_density(self, Fmu, Fvar, Y, hetero_variance=None, **args):
        r"""
        Given a Normal distribution for the latent function, and a datum Y,
        compute the (log) predictive density of Y.
        i.e. if
            q(f) = N(Fmu, Fvar)
        and this object represents
            p(y|f)
        then this method computes the predictive density
            \int p(y=Y|f)q(f) df
        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (Gaussian, Poisson) will implement specific cases.
        """
        exp_p = ndiagquad(lambda X, Y, **Ys: tf.exp(self.logp(X, Y, **Ys)),
                          self.num_gauss_hermite_points,
                          Fmu, Fvar, Y=Y, hetero_variance=hetero_variance, **args)
        return tf.log(exp_p)
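For comparison, Examples #2 and #7 pass logspace=True so that ndiagquad works with log p(y|f) directly; a hedged sketch of an equivalent method body (same arguments as above, API usage as in those snippets):

    def predict_density_logspace(self, Fmu, Fvar, Y, hetero_variance=None, **args):
        # Let ndiagquad combine the log-densities itself instead of
        # exponentiating logp and applying tf.log afterwards.
        return ndiagquad(self.logp, self.num_gauss_hermite_points,
                         Fmu, Fvar, logspace=True,
                         Y=Y, hetero_variance=hetero_variance, **args)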
def test_diagquad_2d(mu1, var1, mu2, var2):
    alpha = 2.5
    # using logspace=True we can reduce this, see test_diagquad_logspace
    num_gauss_hermite_points = 35
    quad = ndiagquad(
        lambda *X: tf.exp(X[0] + alpha * X[1]),
        num_gauss_hermite_points,
        [mu1, mu2],
        [var1, var2],
    )
    quad_old = ndiagquad_old(
        lambda *X: tf.exp(X[0] + alpha * X[1]),
        num_gauss_hermite_points,
        [mu1, mu2],
        [var1, var2],
    )
    assert_allclose(quad, quad_old)
def test_diagquad_logspace(mu1, var1, mu2, var2):
    alpha = 2.5
    num_gauss_hermite_points = 25
    quad = ndiagquad(
        lambda *X: (X[0] + alpha * X[1]),
        num_gauss_hermite_points,
        [mu1, mu2],
        [var1, var2],
        logspace=True,
    )
    quad_old = ndiagquad_old(
        lambda *X: (X[0] + alpha * X[1]),
        num_gauss_hermite_points,
        [mu1, mu2],
        [var1, var2],
        logspace=True,
    )
    assert_allclose(quad, quad_old)
Example #15
def test_quadrature_variational_expectation(likelihood_setup, mu, var):
    """
    Where quadrature methods have been overwritten, make sure the new code
    does something close to the quadrature.
    """
    likelihood, y = likelihood_setup.likelihood, likelihood_setup.Y
    if isinstance(likelihood, MultiClass):
        pytest.skip(
            "Test fails due to issue with ndiagquad (github issue #1091)")
    F1 = likelihood.variational_expectations(mu, var, y)
    F2 = ndiagquad(likelihood.log_prob,
                   likelihood.num_gauss_hermite_points,
                   mu,
                   var,
                   Y=y)
    assert_allclose(F1,
                    F2,
                    rtol=likelihood_setup.rtol,
                    atol=likelihood_setup.atol)
Example #16
    def variational_expectations(self, Fmu, Fvar, Y, weights, **kwargs):
        r"""
        Compute the expected log density of the data, given a Gaussian
        distribution for the function values.
        if
            q(f) = N(Fmu, Fvar)
        and this object represents
            p(y|f)
        then this method computes
            \int (\log p(y|f)) q(f) df.
        Here, we implement a default Gauss-Hermite quadrature routine, but some
        likelihoods (Gaussian, Poisson) will implement specific cases.
        """
        Y_burst = {"Y_{}".format(i): Y[:, :, i] for i in range(self.Nf)}
        weights_burst = {
            "W_{}".format(i): weights[:, :, i]
            for i in range(self.Nf)
        }
        return ndiagquad(self.logp, self.num_gauss_hermite_points, Fmu, Fvar,
                         **Y_burst, **weights_burst, **kwargs)
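Hypothetical shapes for the keyword packing above (N and Nf are illustrative): each Y_i and W_i slice is [N, 1], which ndiagquad receives as an ordinary per-point observation keyword.

import numpy as np

N, Nf = 4, 2
Y = np.zeros((N, 1, Nf))
weights = np.ones((N, 1, Nf))
Y_burst = {"Y_{}".format(i): Y[:, :, i] for i in range(Nf)}              # keys Y_0, Y_1
weights_burst = {"W_{}".format(i): weights[:, :, i] for i in range(Nf)}  # keys W_0, W_1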
Example #17
def test_diagquad_1d(mu, var):
    num_gauss_hermite_points = 25
    quad = quadrature.ndiagquad([lambda *X: tf.exp(X[0])],
                                num_gauss_hermite_points, [mu], [var])
    expected = np.exp(mu + var / 2)
    assert_allclose(quad[0], expected)