Example #1
def monitored_training_loop(model: gpflow.models.SVGP,
                            logdir: str,
                            epochs: int = 1,
                            logging_epoch_freq: int = 10,
                            num_samples: int = 10):
    summary_writer = tf.summary.create_file_writer(logdir)
    # `optimization_step` and `train_dataset` are defined elsewhere in the
    # surrounding script; the step function is compiled into a TF graph here.
    tf_optimization_step = tf.function(optimization_step)
    batches = iter(train_dataset)

    with summary_writer.as_default():
        for epoch in range(epochs):
            for _ in range(num_batches_per_epoch):
                tf_optimization_step(model, next(batches))

            epoch_id = epoch + 1
            if epoch_id % logging_epoch_freq == 0:
                tf.print(f"Epoch {epoch_id}: ELBO (train) {model.elbo(data)}")

                mean, var = model.predict_f(samples_input)
                samples = model.predict_f_samples(samples_input, num_samples)
                fig = plotting_regression(X, Y, samples_input, mean, var,
                                          samples)

                summary_matplotlib_image(dict(model_samples=fig), step=epoch)
                tf.summary.scalar('elbo', data=model.elbo(data), step=epoch)
                tf.summary.scalar('likelihood/variance',
                                  data=model.likelihood.variance,
                                  step=epoch)
                tf.summary.scalar('kernel/lengthscale',
                                  data=model.kernel.lengthscale,
                                  step=epoch)
                tf.summary.scalar('kernel/variance',
                                  data=model.kernel.variance,
                                  step=epoch)
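Example #1 calls two helpers that are defined elsewhere in its source project: plotting_regression and summary_matplotlib_image. As a rough sketch (not the project's actual implementation), summary_matplotlib_image could render each matplotlib figure to PNG and log it with tf.summary.image:

import io

import matplotlib.pyplot as plt
import tensorflow as tf


def summary_matplotlib_image(figures: dict, step: int):
    # Render each matplotlib figure to a PNG buffer and log it as an image summary.
    for name, fig in figures.items():
        buf = io.BytesIO()
        fig.savefig(buf, format="png")
        buf.seek(0)
        image = tf.image.decode_png(buf.getvalue(), channels=4)
        tf.summary.image(name, tf.expand_dims(image, 0), step=step)
        plt.close(fig)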
Example #2
def optimization_step(optimizer, model: gpflow.models.SVGP, batch):
    with tf.GradientTape(watch_accessed_variables=False) as tape:
        tape.watch(model.trainable_variables)
        objective = -model.elbo(*batch)
    # Compute gradients outside the tape context, then apply them.
    grads = tape.gradient(objective, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    return objective
Example #3
def step(model: gpf.models.SVGP, batch: Tuple[tf.Tensor, tf.Tensor]):
    with tf.GradientTape(watch_accessed_variables=False) as tape:
        tape.watch(model.trainable_variables)
        loss = model.training_loss(batch)
    grads = tape.gradient(loss, model.trainable_variables)
    # `optimizer` is expected to be defined in the enclosing scope.
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    return loss
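For context, here is a minimal driver sketch for a step function like the one above. The toy data, SVGP model construction, Adam optimizer, and tf.data pipeline are illustrative assumptions and do not appear in the original example:

import numpy as np
import tensorflow as tf
import gpflow as gpf

# Toy 1-D regression data and a small SVGP model (illustrative only).
X = np.random.rand(200, 1)
Y = np.sin(12.0 * X) + 0.1 * np.random.randn(200, 1)
model = gpf.models.SVGP(
    kernel=gpf.kernels.SquaredExponential(),
    likelihood=gpf.likelihoods.Gaussian(),
    inducing_variable=X[::20].copy(),
    num_data=len(X),
)

optimizer = tf.optimizers.Adam(0.01)  # the enclosing-scope optimizer used by `step`

dataset = tf.data.Dataset.from_tensor_slices((X, Y)).repeat().shuffle(200).batch(32)
batches = iter(dataset)

compiled_step = tf.function(step)  # compile the eager step into a TF graph
for _ in range(500):
    compiled_step(model, next(batches))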
Example #4
def optimization_step(model: gpflow.models.SVGP,
                      batch: Tuple[tf.Tensor, tf.Tensor]):
    with tf.GradientTape(watch_accessed_variables=False) as tape:
        tape.watch(model.trainable_variables)
        obj = -model.elbo(batch)
    # Compute gradients outside the tape context; `optimizer` is expected to be
    # defined in the enclosing scope.
    grads = tape.gradient(obj, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))