Example #1
from gpflow.kernels import RBF, Matern12
from gpflow.likelihoods import Gaussian
from gpflow.mean_functions import Zero
from gpflux.helpers import construct_basic_kernel, construct_basic_inducing_variables
from gpflux.layers import GPLayer
from gpflux.models import DeepGP


def build_deep_gp(input_dim, num_data):
    layers = [input_dim, 2, 2, 1]
    # Below are different ways to build layers

    # 1. Pass in Lists:
    kernel_list = [RBF(), Matern12()]
    num_inducing = [25, 25]
    l1_kernel = construct_basic_kernel(kernels=kernel_list)
    l1_inducing = construct_basic_inducing_variables(num_inducing=num_inducing, input_dim=layers[0])

    # 2. Pass in kernels, specify output dims (shared hyperparams/variables)
    l2_kernel = construct_basic_kernel(kernels=RBF(), output_dim=layers[2], share_hyperparams=True)
    l2_inducing = construct_basic_inducing_variables(
        num_inducing=25, input_dim=layers[1], share_variables=True
    )

    # 3. Pass in kernels, specify output dims (independent hyperparams/vars)
    # By default, the constructor will make independent copies
    l3_kernel = construct_basic_kernel(kernels=RBF(), output_dim=layers[3])
    l3_inducing = construct_basic_inducing_variables(
        num_inducing=25, input_dim=layers[2], output_dim=layers[3]
    )

    # Assemble at the end
    gp_layers = [
        GPLayer(l1_kernel, l1_inducing, num_data),
        GPLayer(l2_kernel, l2_inducing, num_data),
        GPLayer(l3_kernel, l3_inducing, num_data, mean_function=Zero()),
    ]
    return DeepGP(gp_layers, Gaussian(0.1))
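For context, a minimal training sketch for the model returned above; it assumes gpflux's DeepGP.as_training_model() Keras wrapper and hypothetical arrays X and Y of shape [num_data, input_dim] and [num_data, 1]:

# Sketch only: X and Y are hypothetical training arrays, not part of the example above.
deep_gp = build_deep_gp(input_dim=X.shape[1], num_data=X.shape[0])
training_model = deep_gp.as_training_model()
training_model.compile(tf.optimizers.Adam(0.01))
training_model.fit({"inputs": X, "targets": Y}, epochs=100, verbose=0)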
Example #2
def build_gp_layers(layer_sizes, num_data):
    gp_layers = []
    for input_dim, output_dim in zip(layer_sizes[:-1], layer_sizes[1:]):

        kernel = construct_basic_kernel(kernels=RBF(), output_dim=output_dim)
        inducing_vars = construct_basic_inducing_variables(
            num_inducing=25, input_dim=input_dim, output_dim=output_dim)

        layer = GPLayer(kernel, inducing_vars, num_data)
        gp_layers.append(layer)

    # Give the final layer a zero mean function
    gp_layers[-1].mean_function = Zero()

    return gp_layers
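A possible way to use these layers, assuming gpflux's DeepGP model class, a GPflow Gaussian likelihood, and a hypothetical data matrix X:

# Sketch only: X is a hypothetical [num_data, input_dim] array.
layers = build_gp_layers(layer_sizes=[X.shape[1], 2, 1], num_data=X.shape[0])
model = DeepGP(layers, Gaussian(0.01))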
Example #3
def setup_gp_layer_and_data(num_inducing: int, **gp_layer_kwargs):
    input_dim = 30
    output_dim = 5
    num_data = 100
    data = make_data(input_dim, output_dim, num_data=num_data)

    kernel = construct_basic_kernel(RBF(), output_dim)
    inducing_vars = construct_basic_inducing_variables(num_inducing, input_dim,
                                                       output_dim)
    mean_function = Zero(output_dim)

    gp_layer = GPLayer(kernel,
                       inducing_vars,
                       num_data,
                       mean_function=mean_function,
                       **gp_layer_kwargs)
    return gp_layer, data
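A hedged usage sketch: a GPLayer acts as a Keras layer, and calling it on a batch of inputs yields a distribution-like output; this assumes make_data returns an (X, Y) tuple:

# Sketch only: assumes the returned data is an (X, Y) tuple.
gp_layer, (X, Y) = setup_gp_layer_and_data(num_inducing=10)
f_distribution = gp_layer(X)            # distribution over the 5 output dimensions
f_mean = f_distribution.loc             # [num_data, output_dim]
f_var = f_distribution.scale.diag ** 2  # [num_data, output_dim]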
Example #4
    def __init__(
        self,
        likelihood: gpflow.likelihoods.Likelihood = gpflow.likelihoods.Gaussian(0.01),
    ) -> None:
        kernel = construct_basic_kernel(gpflow.kernels.SquaredExponential(),
                                        output_dim=1,
                                        share_hyperparams=True)
        inducing_var = construct_basic_inducing_variables(
            num_inducing=5,
            input_dim=1,
            share_variables=True,
            z_init=tf.random.normal([5, 1], dtype=gpflow.default_float()),
        )

        gp_layer = GPLayer(kernel, inducing_var, 10)

        super().__init__(
            [gp_layer],  # not actually used
            likelihood,
        )
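A hedged instantiation sketch; the enclosing class name is not shown in the excerpt, so DummyDeepGP below is hypothetical and stands in for whatever DeepGP subclass defines this __init__:

# Sketch only: DummyDeepGP is a hypothetical name for the class defined above.
model = DummyDeepGP()                                               # default Gaussian(0.01) likelihood
model = DummyDeepGP(likelihood=gpflow.likelihoods.Gaussian(0.05))   # custom noise variance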
Example #5
M = Z.shape[0]

# Layer 1
inducing_var1 = construct_basic_inducing_variables(M,
                                                   D,
                                                   D,
                                                   share_variables=True,
                                                   z_init=Z.copy())
kernel1 = construct_basic_kernel(
    gpflow.kernels.SquaredExponential(lengthscales=0.15),
    output_dim=D,
    share_hyperparams=True,
)
layer1 = GPLayer(kernel1,
                 inducing_var1,
                 num_data,
                 full_cov=True,
                 num_samples=num_samples)

# Layer 2
inducing_var2 = construct_basic_inducing_variables(M,
                                                   D,
                                                   D,
                                                   share_variables=True,
                                                   z_init=Z.copy())
kernel2 = construct_basic_kernel(
    gpflow.kernels.SquaredExponential(lengthscales=0.8, variance=0.1),
    output_dim=D,
    share_hyperparams=True,
)
layer2 = GPLayer(kernel2,
                 inducing_var2,
                 num_data,
                 full_cov=True,
                 num_samples=num_samples)
Example #6
# Z_aug = np.random.rand(3 * 5,).astype(default_float())
# num_inducing_points = Z.shape[0]
# axs[0].plot(Z, q, 'o', color='black')

# Shallow sparse GP
Z1 = np.linspace(min(X), max(X),
                 num=num_inducing_points)[..., None].astype(default_float())
# Z1 = Z[..., None]
feat1 = SharedIndependentInducingVariables(InducingPoints(Z1))
kern1 = SharedIndependent(Kernel(lengthscales=lengthscale,
                                 variance=outer_variance),
                          output_dim=1)
layer1 = GPLayer(kern1, feat1, X.shape[0], mean_function=Zero(), white=False)
# layer1.q_mu.assign(value=q[..., None])

lik_layer = LikelihoodLayer(Gaussian(variance=likelihood_variance))

model = DeepGP([layer1], lik_layer, input_dim=1, output_dim=1)
model.compile(tf.optimizers.Adam(learning_rate=learning_rate))
callbacks = [
    tf.keras.callbacks.ReduceLROnPlateau(
        monitor="loss",
        patience=patience,
        factor=factor,
        verbose=verbose,
        min_lr=min_learning_rate,
    )
]
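Since the model is compiled through the Keras interface above, training would presumably proceed with fit(); the sketch below assumes arrays X and Y and an epoch count num_epochs defined elsewhere in the original script:

# Sketch only: X, Y, and num_epochs are assumed to exist in the surrounding script.
history = model.fit(X, Y, epochs=num_epochs, callbacks=callbacks, verbose=verbose)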
Example #7
# %%
Z = X.copy()
M = Z.shape[0]

# Layer 1
inducing_var1 = construct_basic_inducing_variables(M,
                                                   D,
                                                   D,
                                                   share_variables=True,
                                                   z_init=Z.copy())
kernel1 = construct_basic_kernel(
    gpflow.kernels.SquaredExponential(lengthscales=0.15),
    output_dim=D,
    share_hyperparams=True,
)
layer1 = GPLayer(kernel1, inducing_var1, Ns)

# Layer 2
inducing_var2 = construct_basic_inducing_variables(M,
                                                   D,
                                                   D,
                                                   share_variables=True,
                                                   z_init=Z.copy())
kernel2 = construct_basic_kernel(
    gpflow.kernels.SquaredExponential(lengthscales=0.8, variance=0.1),
    output_dim=D,
    share_hyperparams=True,
)
layer2 = GPLayer(kernel2, inducing_var2, Ns)

# Layer 3