Example 1
def test_MixedKernelSeparateMof():
    data = DataMixedKernel

    kern_list = [SquaredExponential() for _ in range(data.L)]
    inducing_variable_list = [
        InducingPoints(data.X[:data.M, ...]) for _ in range(data.L)
    ]
    k1 = mk.LinearCoregionalization(kern_list, W=data.W)
    f1 = mf.SeparateIndependentInducingVariables(inducing_variable_list)
    model_1 = SVGP(k1,
                   Gaussian(),
                   inducing_variable=f1,
                   q_mu=data.mu_data,
                   q_sqrt=data.sqrt_data)

    kern_list = [SquaredExponential() for _ in range(data.L)]
    inducing_variable_list = [
        InducingPoints(data.X[:data.M, ...]) for _ in range(data.L)
    ]
    k2 = mk.LinearCoregionalization(kern_list, W=data.W)
    f2 = mf.SeparateIndependentInducingVariables(inducing_variable_list)
    model_2 = SVGP(k2,
                   Gaussian(),
                   inducing_variable=f2,
                   q_mu=data.mu_data,
                   q_sqrt=data.sqrt_data)

    check_equality_predictions(Data.data, [model_1, model_2])
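This example and the next rely on an import preamble and module-level fixture classes (`Data`, `DataMixedKernel`) that the extract does not show. Below is a minimal, hypothetical stand-in, with names and shapes inferred from how the tests consume them; the `mf`/`mk` aliases follow the gpflow test suite's convention. Treat it as a sketch, not the original fixture.

import numpy as np
import gpflow
import gpflow.inducing_variables.multioutput as mf
import gpflow.kernels.multioutput as mk
from gpflow.inducing_variables import InducingPoints
from gpflow.kernels import SquaredExponential
from gpflow.likelihoods import Gaussian
from gpflow.models import SVGP
from gpflow.utilities import set_trainable

rng = np.random.RandomState(0)


class Data:
    N, M, D, P = 100, 5, 1, 3  # data points, inducing points, input dim, outputs
    X = rng.randn(N, D)
    Y = rng.randn(N, P)
    data = (X, Y)


class DataMixedKernel(Data):
    L = 2  # number of latent GPs
    W = rng.randn(Data.P, L)  # mixing matrix for LinearCoregionalization
    mu_data = rng.randn(Data.M, L)  # q_mu: one column per latent GP
    sqrt_data = np.stack(
        [np.tril(rng.randn(Data.M, Data.M)) for _ in range(L)])  # L x M x M

Note that `check_equality_predictions(Data.data, ...)` evaluates both models on the base fixture's dataset, even though the variational parameters come from `DataMixedKernel`.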
Example 2
def test_separate_independent_conditional_with_q_sqrt_none():
    """
    In response to bug #1523, this test checks that separate_independent_conditional
    does not fail when q_sqrt=None.
    """
    q_sqrt = None
    data = DataMixedKernel

    kern_list = [SquaredExponential() for _ in range(data.L)]
    kernel = gpflow.kernels.SeparateIndependent(kern_list)
    inducing_variable_list = [
        InducingPoints(data.X[:data.M, ...]) for _ in range(data.L)
    ]
    inducing_variable = mf.SeparateIndependentInducingVariables(
        inducing_variable_list)

    mu_1, var_1 = gpflow.conditionals.conditional(
        data.X,
        inducing_variable,
        kernel,
        data.mu_data,
        full_cov=False,
        full_output_cov=False,
        q_sqrt=q_sqrt,
        white=True,
    )
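    # Hedged follow-up (not in the original test): with full_cov=False and
    # full_output_cov=False, conditional returns marginal means and variances
    # with one column per latent GP, i.e. shape [N, L].
    assert tuple(mu_1.shape) == (data.X.shape[0], data.L)
    assert tuple(var_1.shape) == (data.X.shape[0], data.L)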
Example 3
def test_separate_independent_mof():
    """
    Same test as above, but using different (i.e. separate) inducing points
    for each of the output dimensions.
    """
    np.random.seed(0)

    # Model 1 (inefficient): a single joint q(u) over all M * P inducing outputs
    q_mu_1 = np.random.randn(Data.M * Data.P, 1)
    q_sqrt_1 = np.tril(
        np.random.randn(Data.M * Data.P, Data.M * Data.P))[None, ...]  # 1 x MP x MP

    kernel_1 = mk.SharedIndependent(
        SquaredExponential(variance=0.5, lengthscales=1.2), Data.P)
    inducing_variable_1 = InducingPoints(Data.X[:Data.M, ...])
    model_1 = SVGP(kernel_1,
                   Gaussian(),
                   inducing_variable_1,
                   q_mu=q_mu_1,
                   q_sqrt=q_sqrt_1)
    set_trainable(model_1, False)
    set_trainable(model_1.q_sqrt, True)
    set_trainable(model_1.q_mu, True)

    gpflow.optimizers.Scipy().minimize(
        model_1.training_loss_closure(Data.data),
        variables=model_1.trainable_variables,
        method="BFGS",
        compile=True,
    )

    # Model 2 (efficient): independent q(u) per output, with P separate M x M covariances
    q_mu_2 = np.random.randn(Data.M, Data.P)
    q_sqrt_2 = np.array([
        np.tril(np.random.randn(Data.M, Data.M)) for _ in range(Data.P)
    ])  # P x M x M
    kernel_2 = mk.SharedIndependent(
        SquaredExponential(variance=0.5, lengthscales=1.2), Data.P)
    inducing_variable_list_2 = [
        InducingPoints(Data.X[:Data.M, ...]) for _ in range(Data.P)
    ]
    inducing_variable_2 = mf.SeparateIndependentInducingVariables(
        inducing_variable_list_2)
    model_2 = SVGP(kernel_2,
                   Gaussian(),
                   inducing_variable_2,
                   q_mu=q_mu_2,
                   q_sqrt=q_sqrt_2)
    set_trainable(model_2, False)
    set_trainable(model_2.q_sqrt, True)
    set_trainable(model_2.q_mu, True)

    gpflow.optimizers.Scipy().minimize(
        model_2.training_loss_closure(Data.data),
        variables=model_2.trainable_variables,
        method="BFGS",
        compile=True,
    )

    # Model 3 (inefficient): an identical inducing variable is used P times,
    # and treated as a separate one.
    q_mu_3 = np.random.randn(Data.M, Data.P)
    q_sqrt_3 = np.array([
        np.tril(np.random.randn(Data.M, Data.M)) for _ in range(Data.P)
    ])  # P x M x M
    kern_list = [
        SquaredExponential(variance=0.5, lengthscales=1.2)
        for _ in range(Data.P)
    ]
    kernel_3 = mk.SeparateIndependent(kern_list)
    inducing_variable_list_3 = [
        InducingPoints(Data.X[:Data.M, ...]) for _ in range(Data.P)
    ]
    inducing_variable_3 = mf.SeparateIndependentInducingVariables(
        inducing_variable_list_3)
    model_3 = SVGP(kernel_3,
                   Gaussian(),
                   inducing_variable_3,
                   q_mu=q_mu_3,
                   q_sqrt=q_sqrt_3)
    set_trainable(model_3, False)
    set_trainable(model_3.q_sqrt, True)
    set_trainable(model_3.q_mu, True)

    gpflow.optimizers.Scipy().minimize(
        model_3.training_loss_closure(Data.data),
        variables=model_3.trainable_variables,
        method="BFGS",
        compile=True,
    )

    check_equality_predictions(Data.data, [model_1, model_2, model_3])
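`check_equality_predictions` is another helper defined outside this extract. A minimal sketch of what it plausibly verifies follows; the real helper in the gpflow test suite may also compare ELBOs and full covariances, so this is an illustration of the idea, not its implementation.

def check_equality_predictions(data, models, decimal=3):
    # All parametrizations should agree, up to `decimal` places, on the
    # posterior mean and variance at the training inputs.
    X, _ = data
    reference_mean, reference_var = models[0].predict_f(X)
    for model in models[1:]:
        mean, var = model.predict_f(X)
        np.testing.assert_array_almost_equal(mean, reference_mean, decimal=decimal)
        np.testing.assert_array_almost_equal(var, reference_var, decimal=decimal)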
Example 4

class Datum:
    D = 1
    L = 2
    P = 3
    M = 10
    N = 100
    W = rng.randn(P, L)
    X = rng.randn(N)[:, None]
    Xnew = rng.randn(N)[:, None]


multioutput_inducing_variable_list = [
    mf.SharedIndependentInducingVariables(make_ip()),
    mf.SeparateIndependentInducingVariables(make_ips(Datum.P)),
]

multioutput_fallback_inducing_variable_list = [
    mf.FallbackSharedIndependentInducingVariables(make_ip()),
    mf.FallbackSeparateIndependentInducingVariables(make_ips(Datum.P)),
]

multioutput_kernel_list = [
    mk.SharedIndependent(make_kernel(), Datum.P),
    mk.SeparateIndependent(make_kernels(Datum.L)),
    mk.LinearCoregionalization(make_kernels(Datum.L), Datum.W),
]
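The snippet above uses helpers (`rng`, `make_ip`, `make_ips`, `make_kernel`, `make_kernels`) that are not part of this extract; `rng` would have to be defined before `Datum`, and the `make_*` helpers before the lists above. Plausible minimal stand-ins, fully determined by how they are called:

rng = np.random.RandomState(1)


def make_kernel():
    return SquaredExponential()


def make_kernels(num):
    return [make_kernel() for _ in range(num)]


def make_ip():
    # One set of M inducing points in the D-dimensional input space.
    return InducingPoints(rng.randn(Datum.M, Datum.D))


def make_ips(num):
    return [make_ip() for _ in range(num)]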


@pytest.mark.parametrize("inducing_variable",