Example #1
# Assumed imports (GPflow 1.x): gpflow.multioutput.kernels as mk,
# gpflow.multioutput.features as mf, RBF from gpflow.kernels, Gaussian from
# gpflow.likelihoods, SVGP from gpflow.models, InducingPoints from gpflow.features.
# DataMixedKernel and check_equality_predictions are fixtures from GPflow's
# multi-output test suite.
def test_compare_mixed_kernel(session_tf):
    data = DataMixedKernel  # namespace class holding the test constants (D, L, M, X, Y, W, ...)

    kern_list = [RBF(data.D) for _ in range(data.L)]
    k1 = mk.SeparateMixedMok(kern_list, W=data.W)
    f1 = mf.SharedIndependentMof(InducingPoints(data.X[:data.M, ...].copy()))
    m1 = SVGP(data.X,
              data.Y,
              k1,
              Gaussian(),
              feat=f1,
              q_mu=data.mu_data,
              q_sqrt=data.sqrt_data)

    kern_list = [RBF(data.D) for _ in range(data.L)]
    k2 = mk.SeparateMixedMok(kern_list, W=data.W)
    f2 = mf.MixedKernelSharedMof(InducingPoints(data.X[:data.M, ...].copy()))
    m2 = SVGP(data.X,
              data.Y,
              k2,
              Gaussian(),
              feat=f2,
              q_mu=data.mu_data,
              q_sqrt=data.sqrt_data)

    check_equality_predictions(session_tf, [m1, m2])
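
Both models encode the same mixed multi-output GP; MixedKernelSharedMof merely exploits the W-mixing structure for cheaper conditionals. check_equality_predictions comes from GPflow's test utilities; a minimal sketch of the kind of comparison it performs, assuming the models built above (the Xtest slice is illustrative):

    Xtest = data.X[:5, ...]            # hypothetical held-out inputs
    mean1, var1 = m1.predict_f(Xtest)  # posterior mean/variance from model 1
    mean2, var2 = m2.predict_f(Xtest)  # posterior mean/variance from model 2
    np.testing.assert_allclose(mean1, mean2)
    np.testing.assert_allclose(var1, var2)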
Example #2
    def fit(self, X, y):
        # Assumes: numpy as np, gpflow as gpf, gpflow.multioutput.features as mf,
        # and sklearn's StandardScaler are imported at module level;
        # find_starting_z is a project-specific helper that picks initial
        # inducing-point locations from X.

        self.scaler = StandardScaler()

        X = self.scaler.fit_transform(X)
        M = self.n_inducing
        L = self.n_latent

        self.Z = find_starting_z(X, self.n_inducing, use_minibatching=False)

        q_mu = np.zeros((M, L))
        q_sqrt = np.repeat(np.eye(M)[None, ...], L, axis=0) * 1.0

        feature = mf.MixedKernelSharedMof(gpf.features.InducingPoints(self.Z))

        self.m = gpf.models.SVGP(X,
                                 y,
                                 self.kernel,
                                 gpf.likelihoods.Bernoulli(),
                                 feat=feature,
                                 q_mu=q_mu,
                                 q_sqrt=q_sqrt,
                                 whiten=self.whiten,
                                 mean_function=self.mean_function())

        self.m.feature.set_trainable(self.train_inducing_points)

        opt = gpf.train.ScipyOptimizer(options={'maxfun': self.maxiter})
        opt.minimize(self.m, disp=self.verbose_fit, maxiter=self.maxiter)
        self.is_fit = True
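
A hedged companion sketch: prediction would reuse the fitted scaler and GPflow 1.x's predict_y; the predict method below is an assumption, not part of the original class.

    def predict(self, X):
        # Scale with the statistics learned in fit, then return the
        # predictive mean and variance of y under the Bernoulli likelihood.
        X = self.scaler.transform(X)
        mean, var = self.m.predict_y(X)
        return mean, var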
Example #3
# Assumed imports (GPflow 1.x): numpy as np, gpflow.multioutput.kernels as mk,
# gpflow.multioutput.features as mf, RBF from gpflow.kernels, InducingPoints from
# gpflow.features, sample_conditional from gpflow.conditionals. Data, float_type
# and the _create_placeholder_dict/_create_feed_dict helpers are test-module fixtures.
def test_sample_conditional_mixedkernel(session_tf):
    q_mu = np.random.randn(Data.M, Data.L)  # M x L
    q_sqrt = np.array([
        np.tril(np.random.randn(Data.M, Data.M)) for _ in range(Data.L)
    ])  # L x M x M
    Z = Data.X[:Data.M, ...]  # M x D
    N = int(10e5)
    Xs = np.ones((N, Data.D), dtype=float_type)

    values = {"Xnew": Xs, "q_mu": q_mu, "q_sqrt": q_sqrt}
    placeholders = _create_placeholder_dict(values)
    feed_dict = _create_feed_dict(placeholders, values)

    # Path 1: mixed kernel: most efficient route
    W = np.random.randn(Data.P, Data.L)
    mixed_kernel = mk.SeparateMixedMok([RBF(Data.D) for _ in range(Data.L)], W)
    mixed_feature = mf.MixedKernelSharedMof(InducingPoints(Z.copy()))

    sample = sample_conditional(placeholders["Xnew"],
                                mixed_feature,
                                mixed_kernel,
                                placeholders["q_mu"],
                                q_sqrt=placeholders["q_sqrt"],
                                white=True)
    value = session_tf.run(sample, feed_dict=feed_dict)

    # Path 2: independent kernels, mixed later
    separate_kernel = mk.SeparateIndependentMok(
        [RBF(Data.D) for _ in range(Data.L)])
    shared_feature = mf.SharedIndependentMof(InducingPoints(Z.copy()))
    sample2 = sample_conditional(placeholders["Xnew"],
                                 shared_feature,
                                 separate_kernel,
                                 placeholders["q_mu"],
                                 q_sqrt=placeholders["q_sqrt"],
                                 white=True)
    value2 = session_tf.run(sample2, feed_dict=feed_dict)
    value2 = np.matmul(value2, W.T)
    # check if mean and covariance of samples are similar
    np.testing.assert_array_almost_equal(np.mean(value, axis=0),
                                         np.mean(value2, axis=0),
                                         decimal=1)
    np.testing.assert_array_almost_equal(np.cov(value, rowvar=False),
                                         np.cov(value2, rowvar=False),
                                         decimal=1)
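
Both paths sample the same process because the mixed outputs are a linear map of the latent GPs, f(x) = W g(x), so E[f] = W E[g] and Cov[f] = W Cov[g] Wᵀ; that identity is why Path 2 multiplies its samples by W.T before comparing moments. A standalone numpy sketch of the identity (all names here are illustrative):

    import numpy as np

    S, L, P = 100000, 3, 5
    W = np.random.randn(P, L)
    g = np.random.randn(S, L)  # samples of the latent processes, [S, L]
    f = g @ W.T                # mixed samples, f = W g, [S, P]
    # The empirical covariance transforms exactly as W Cov[g] W^T:
    np.testing.assert_allclose(np.cov(f, rowvar=False),
                               W @ np.cov(g, rowvar=False) @ W.T,
                               rtol=1e-7, atol=1e-10)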
Example #4
# full_cov, white and conditional_type are pytest parameters/fixtures in the
# original test module; assert_allclose is numpy.testing.assert_allclose and
# tensorflow is imported as tf.
def test_conditional_broadcasting(session_tf, full_cov, white,
                                  conditional_type):
    """
    Test that `conditional` and `sample_conditional` broadcast correctly
    over the leading dimensions of Xnew: Xnew may have shape [..., N, D],
    and the conditional should broadcast over the leading [...] dimensions.
    """
    X_ = tf.placeholder(tf.float64, [None, None])
    q_mu = np.random.randn(Data.M, Data.Dy)
    q_sqrt = np.tril(np.random.randn(Data.Dy, Data.M, Data.M), -1)

    if conditional_type == "Z":
        feat = Data.Z
        kern = gpflow.kernels.Matern52(Data.Dx, lengthscales=0.5)
    elif conditional_type == "inducing_points":
        feat = gpflow.features.InducingPoints(Data.Z)
        kern = gpflow.kernels.Matern52(Data.Dx, lengthscales=0.5)
    elif conditional_type == "mixing":
        # variational params have different output dim in this case
        q_mu = np.random.randn(Data.M, Data.L)
        q_sqrt = np.tril(np.random.randn(Data.L, Data.M, Data.M), -1)
        feat = mf.MixedKernelSharedMof(gpflow.features.InducingPoints(Data.Z))
        kern = mk.SeparateMixedMok(kernels=[
            gpflow.kernels.Matern52(Data.Dx, lengthscales=0.5)
            for _ in range(Data.L)
        ],
                                   W=Data.W)

    if conditional_type == "mixing" and full_cov:
        pytest.skip("combination is not implemented")

    num_samples = 5
    sample_tf, mean_tf, cov_tf = sample_conditional(
        X_,
        feat,
        kern,
        tf.convert_to_tensor(q_mu),
        q_sqrt=tf.convert_to_tensor(q_sqrt),
        white=white,
        full_cov=full_cov,
        num_samples=num_samples)

    ss, ms, vs = [], [], []
    for X in Data.SX:
        s, m, v = session_tf.run([sample_tf, mean_tf, cov_tf], {X_: X})
        ms.append(m)
        vs.append(v)
        ss.append(s)

    ms = np.array(ms)
    vs = np.array(vs)
    ss = np.array(ss)

    ss_S12, ms_S12, vs_S12 = session_tf.run(
        sample_conditional(Data.SX,
                           feat,
                           kern,
                           tf.convert_to_tensor(q_mu),
                           q_sqrt=tf.convert_to_tensor(q_sqrt),
                           white=white,
                           full_cov=full_cov,
                           num_samples=num_samples))

    ss_S1_S2, ms_S1_S2, vs_S1_S2 = session_tf.run(
        sample_conditional(Data.S1_S2_X,
                           feat,
                           kern,
                           tf.convert_to_tensor(q_mu),
                           q_sqrt=tf.convert_to_tensor(q_sqrt),
                           white=white,
                           full_cov=full_cov,
                           num_samples=num_samples))

    assert_allclose(ss_S12.shape, ss.shape)
    assert_allclose(ms_S12, ms)
    assert_allclose(vs_S12, vs)
    assert_allclose(ms_S1_S2.reshape(Data.S1 * Data.S2, Data.N, Data.Dy), ms)
    assert_allclose(ss_S1_S2.shape,
                    [Data.S1, Data.S2, num_samples, Data.N, Data.Dy])

    if full_cov:
        assert_allclose(
            vs_S1_S2.reshape(Data.S1 * Data.S2, Data.Dy, Data.N, Data.N), vs)
    else:
        assert_allclose(vs_S1_S2.reshape(Data.S1 * Data.S2, Data.N, Data.Dy),
                        vs)
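
The shape conventions the assertions rely on, spelled out in a small sketch (values are illustrative; the Data attributes are fixtures from the original test module):

    import numpy as np

    S1, S2, N, Dx = 2, 3, 4, 5
    SX = np.random.randn(S1 * S2, N, Dx)  # batch of input sets, [S1*S2, N, Dx]
    S1_S2_X = SX.reshape(S1, S2, N, Dx)   # same inputs with two leading batch dims
    # conditional/sample_conditional broadcast over the leading dims, so the
    # [S1, S2, ...] results must flatten back to the [S1*S2, ...] ones, which
    # is exactly what the reshape-based assert_allclose checks above verify.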
Example #5
# start = start[:, 0:2]
# predict = predict[:, 0:2]

# Assumes: numpy as np, gpflow, gpflow.multioutput.kernels as mk and
# gpflow.multioutput.features as mf are imported; `start` and `predict` hold the
# training inputs and targets loaded earlier in the script.
D = start.shape[1]  # number of input dimensions
M = 20  # number of inducing points
L = 1  # number of latent GPs
P = predict.shape[1]  # number of output dimensions
MAXITER = gpflow.test_util.notebook_niter(int(1e100))  # effectively unbounded; capped during notebook testing

q_mu = np.zeros((M, L))
q_sqrt = np.repeat(np.eye(M)[None, ...], L, axis=0) * 1.0

kern_list = [
    gpflow.kernels.RBF(D) + gpflow.kernels.Linear(D) for _ in range(L)
]
kernel = mk.SeparateMixedMok(kern_list, W=np.random.randn(P, L))
feature = mf.MixedKernelSharedMof(
    gpflow.features.InducingPoints(start[:M, ...].copy()))
m = gpflow.models.SVGP(start,
                       predict,
                       kernel,
                       gpflow.likelihoods.Gaussian(),
                       feat=feature,
                       q_mu=q_mu,
                       q_sqrt=q_sqrt)
opt = gpflow.train.ScipyOptimizer()
opt.minimize(m, disp=True, maxiter=MAXITER)

saver = gpflow.Saver()
saver.save('./model/multioutput.mdl', m)
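
To reuse the trained model elsewhere, GPflow 1.x's Saver can restore it from the same path; a minimal sketch (Xnew is a placeholder for new inputs):

    saver = gpflow.Saver()
    m_restored = saver.load('./model/multioutput.mdl')
    mean, var = m_restored.predict_y(Xnew)  # predictive mean and variance at Xnew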
Example #6
    # make_ip is a helper from the surrounding test module that builds an
    # InducingPoints object.
    def mixed_shared(self):
        return mf.MixedKernelSharedMof(make_ip())