Example #1
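A binary gating network from a mixture-of-GP-experts model: a single SVGP gating function is pushed through a Bernoulli likelihood to produce the mixing probabilities of two experts. A minimal import sketch for reading the snippet; the SVGP* gating classes are assumed to be defined elsewhere in the surrounding project, not in GPflow itself:

import tensorflow as tf
from gpflow.likelihoods import Bernoulli
# Type aliases from GPflow 2.x (assumed import path):
from gpflow.models.model import InputData, MeanAndVariance
# SVGPGatingNetworkBase and SVGPGatingFunction are project-specific base
# classes, assumed importable from the surrounding package.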
class SVGPGatingNetworkBinary(SVGPGatingNetworkBase):
    def __init__(self,
                 gating_function: SVGPGatingFunction = None,
                 name='GatingNetwork'):
        assert isinstance(gating_function, SVGPGatingFunction)
        gating_function_list = [gating_function]
        super().__init__(gating_function_list, name=name)
        # self.gating_function = gating_function
        self.likelihood = Bernoulli()
        self.num_experts = 2

    # def prior_kls(self) -> tf.Tensor:
    #     """Returns the set of experts KL divergences as a batched tensor.

    #     :returns: a Tensor with shape [num_experts,]
    #     """
    #     return tf.convert_to_tensor(self.gating_function.prior_kl())

    def predict_fs(self,
                   Xnew: InputData,
                   num_inducing_samples: int = None) -> MeanAndVariance:
        f_mu, f_var = self.gating_function_list[0].predict_f(
            Xnew, num_inducing_samples)
        # A single latent function governs both experts: expert 0 uses
        # logits f and expert 1 uses -f, so the two Bernoulli
        # probabilities sum to one.
        Fmu = tf.stack([f_mu, -f_mu], -1)
        Fvar = tf.stack([f_var, f_var], -1)
        return Fmu, Fvar

    def predict_mixing_probs(self,
                             Xnew: InputData,
                             num_inducing_samples: int = None):
        """Compute mixing probabilities.

        Returns a tensor with dimensions,
            [num_inducing_samples,num_data, output_dim, num_experts]
        if num_inducing_samples=None otherwise a tensor with dimensions,
            [num_data, output_dim, num_experts]

        .. math::
            \\mathbf{u}_h \sim \mathcal{N}(q\_mu, q\_sqrt \cdot q\_sqrt^T) \\\\
            \\Pr(\\alpha=k | \\mathbf{Xnew}, \\mathbf{u}_h)

        :param Xnew: test input(s) [num_data, input_dim]
        :param num_inducing_samples: how many samples to draw from inducing points
        """
        h_mu, h_var = self.gating_function_list[0].predict_f(
            Xnew, num_inducing_samples, full_cov=False)

        def single_predict_mean(args):
            h_mu, h_var = args
            return self.likelihood.predict_mean_and_var(h_mu, h_var)[0]

        if num_inducing_samples is None:
            prob_a_0 = self.likelihood.predict_mean_and_var(h_mu, h_var)[0]
        else:
            # Map over the leading samples dimension: apply the Bernoulli
            # likelihood separately to each inducing-point sample.
            prob_a_0 = tf.map_fn(single_predict_mean, (h_mu, h_var),
                                 dtype=tf.float64)

        prob_a_1 = 1 - prob_a_0
        mixing_probs = tf.stack([prob_a_0, prob_a_1], -1)
        return mixing_probs
Example #2
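A unit test from a deep-GP test suite: models with a Bernoulli likelihood and labels in {-1, 1} are compared against a single-layer baseline for depths L = 1 and L = 2. self.X, self.Xs, self.D_Y and compare_to_single_layer are fixtures of the surrounding test class; numpy (as np) and gpflow.likelihoods.Bernoulli are assumed imported.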
    def test_bernoulli(self):
        lik = Bernoulli()
        N, Ns, D_Y = self.X.shape[0], self.Xs.shape[0], self.D_Y
        Y = np.random.choice([-1., 1.], N * D_Y).reshape(N, D_Y)
        Ys = np.random.choice([-1., 1.], Ns * D_Y).reshape(Ns, D_Y)
        for L in [1, 2]:
            self.compare_to_single_layer(Y, Ys, lik, L)
Example #4
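A GPflow test asserting that a two-class Softmax likelihood, with its second latent function pinned to zero, matches a Bernoulli likelihood with a sigmoid inverse link. A sketch of the imports it relies on, assuming GPflow 2.x:

import numpy as np
import tensorflow as tf
from numpy.testing import assert_allclose
from gpflow.likelihoods import Bernoulli, Softmax
from gpflow.utilities import to_default_float, to_default_int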
def test_softmax_bernoulli_equivalence(num, dimF, dimY):
    dF = np.vstack((
        np.random.randn(num - 3, dimF),
        np.array([[-3.0, 0.0], [3.0, 0.0], [0.0, 0.0]]),
    ))
    dY = np.vstack((np.random.randn(num - 3, dimY), np.ones((3, dimY)))) > 0
    F = to_default_float(dF)
    # Pin the second latent function to mean zero with negligible variance
    # (exp(-10)), so the two-class softmax collapses to a sigmoid of the
    # first latent function.
    Fvar = tf.exp(
        tf.stack([F[:, 1], -10.0 + tf.zeros(F.shape[0], dtype=F.dtype)],
                 axis=1))
    F = tf.stack([F[:, 0], tf.zeros(F.shape[0], dtype=F.dtype)], axis=1)
    Y = to_default_int(dY)
    Ylabel = 1 - Y

    softmax_likelihood = Softmax(dimF)
    bernoulli_likelihood = Bernoulli(invlink=tf.sigmoid)
    softmax_likelihood.num_monte_carlo_points = int(
        0.3e7)  # Minimum number of points to pass the test on CircleCI
    bernoulli_likelihood.num_gauss_hermite_points = 40

    assert_allclose(
        softmax_likelihood.conditional_mean(F)[:, :1],
        bernoulli_likelihood.conditional_mean(F[:, :1]),
    )

    assert_allclose(
        softmax_likelihood.conditional_variance(F)[:, :1],
        bernoulli_likelihood.conditional_variance(F[:, :1]),
    )

    assert_allclose(
        softmax_likelihood.log_prob(F, Ylabel),
        bernoulli_likelihood.log_prob(F[:, :1], Y.numpy()),
    )

    mean1, var1 = softmax_likelihood.predict_mean_and_var(F, Fvar)
    mean2, var2 = bernoulli_likelihood.predict_mean_and_var(
        F[:, :1], Fvar[:, :1])

    assert_allclose(mean1[:, 0, None], mean2, rtol=2e-3)
    assert_allclose(var1[:, 0, None], var2, rtol=2e-3)

    ls_ve = softmax_likelihood.variational_expectations(F, Fvar, Ylabel)
    lb_ve = bernoulli_likelihood.variational_expectations(
        F[:, :1], Fvar[:, :1], Y.numpy())
    assert_allclose(ls_ve, lb_ve, rtol=5e-3)
Example #5
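The constructor of a scikit-learn-style binary GP classifier wrapper: it builds a stationary kernel (with ARD length scales for multi-dimensional inputs) and a Bernoulli likelihood with a sigmoid inverse link, and defers constructing the SVGP model itself. Likely imports, inferred from the names used:

import numpy as np
import tensorflow as tf
from gpflow.kernels import SquaredExponential, Stationary
from gpflow.likelihoods import Bernoulli
from gpflow.models import SVGP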
    def __init__(self,
                 input_dim,
                 kernel_cls=SquaredExponential,
                 use_ard=True,
                 vgp_cls=SVGP,
                 num_inducing_points=None,
                 inducing_index_points_initializer=None,
                 whiten=True,
                 jitter=1e-6,
                 seed=None,
                 dtype=tf.float64):

        # TODO: We only need this check to avoid passing unexpected
        # `lengthscales` kwarg. The correct thing to do is add
        # logic to not pass this kwarg for non-stationary kernels...
        assert issubclass(kernel_cls, Stationary), \
            "Currently only support stationary kernels."

        if input_dim > 1 and use_ard:
            length_scales = np.ones(input_dim)
        else:
            length_scales = 1.0

        self.kernel = kernel_cls(lengthscales=length_scales)
        self.likelihood = Bernoulli(invlink=tf.sigmoid)

        if inducing_index_points_initializer is not None:
            assert num_inducing_points is not None, \
                "Must also specify `num_inducing_points` when providing " \
                "`inducing_index_points_initializer`."
            self.inducing_index_points_initial = (
                inducing_index_points_initializer(shape=(num_inducing_points,
                                                         input_dim),
                                                  dtype=dtype))

        self.whiten = whiten
        self.vgp_cls = vgp_cls

        self._vgp = None
        self.optimizer = None

        self.jitter = jitter
        self.seed = seed
Example #6
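An excerpt from a parametrised likelihood test: each LikelihoodSetup pairs a likelihood with randomly generated targets Y of a compatible type and range. The opening of the list and its earlier entries are cut off in this excerpt; Datum and LikelihoodSetup are helpers from the surrounding test module, and the likelihoods and config helpers would come from GPflow, e.g.:

import numpy as np
import pytest
import tensorflow as tf
from gpflow.config import default_float, default_int
from gpflow.likelihoods import (Bernoulli, Beta, Exponential, Gamma,
                                MultiClass, Ordinal, Poisson)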
    LikelihoodSetup(
        Beta(),
        Y=tf.random.uniform(Datum.Yshape, dtype=default_float()),
    ),
    LikelihoodSetup(
        Ordinal(np.array([-1, 1])),
        Y=tf.random.uniform(Datum.Yshape, 0, 3, dtype=default_int()),
    ),
    LikelihoodSetup(
        Poisson(invlink=tf.square),
        Y=tf.random.poisson(Datum.Yshape, 1.0, dtype=default_float()),
    ),
    LikelihoodSetup(
        Exponential(invlink=tf.square),
        Y=tf.random.uniform(Datum.Yshape, dtype=default_float()),
    ),
    LikelihoodSetup(
        Gamma(invlink=tf.square),
        Y=tf.random.uniform(Datum.Yshape, dtype=default_float()),
    ),
    LikelihoodSetup(
        Bernoulli(invlink=tf.sigmoid),
        Y=tf.random.uniform(Datum.Yshape, dtype=default_float()),
    ),
    pytest.param(
        LikelihoodSetup(
            MultiClass(2),
            Y=tf.argmax(Datum.Y, 1).numpy().reshape(-1, 1),
            rtol=1e-3,
            atol=1e-3,
        ),
        marks=pytest.mark.skip,
    ),
]


def get_likelihood(likelihood_setup):
    if not isinstance(likelihood_setup, LikelihoodSetup):
        # Unwrap a pytest.param(...) wrapper to recover the LikelihoodSetup.
        likelihood_setup, = likelihood_setup.values
    return likelihood_setup.likelihood
Example #7
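A newer revision of the same parametrisation: the scalar likelihoods are collected first, then MultiClass is appended with looser tolerances. The excerpt again starts mid-list, so the likelihood of the first entry shown is cut off.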
        Y=tf.random.uniform(Datum.Yshape, 0, 3, dtype=default_int()),
    ),
    LikelihoodSetup(
        Poisson(invlink=tf.square),
        Y=tf.random.poisson(Datum.Yshape, 1.0, dtype=default_float()),
    ),
    LikelihoodSetup(
        Exponential(invlink=tf.square),
        Y=tf.random.uniform(Datum.Yshape, dtype=default_float()),
    ),
    LikelihoodSetup(
        Gamma(invlink=tf.square),
        Y=tf.random.uniform(Datum.Yshape, dtype=default_float()),
    ),
    LikelihoodSetup(
        Bernoulli(invlink=tf.sigmoid),
        Y=tf.random.uniform(Datum.Yshape, dtype=default_float()),
    ),
]

likelihood_setups = scalar_likelihood_setups + [
    LikelihoodSetup(
        MultiClass(3),
        Y=tf.argmax(Datum.Y, 1).numpy().reshape(-1, 1),
        rtol=1e-3,
        atol=1e-3,
    ),
]


def filter_analytic_scalar_likelihood(method_name):
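The final definition above is cut off in the excerpt. For reference, a minimal self-contained sketch of the Bernoulli likelihood on its own, using the two-argument predict_mean_and_var signature the examples above rely on (older GPflow 2.x releases; newer releases also take X as a first argument):

import tensorflow as tf
from gpflow.likelihoods import Bernoulli

lik = Bernoulli()  # default inverse link is the probit (standard-normal CDF)
f_mu = tf.constant([[0.0], [1.5]], dtype=tf.float64)   # latent means
f_var = tf.constant([[0.1], [0.2]], dtype=tf.float64)  # latent variances
p_mean, p_var = lik.predict_mean_and_var(f_mu, f_var)
print(p_mean)  # probability of class 1 under the latent Gaussian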