    def find_best_model_initialization(self, num_kernel_samples: int) -> None:
        """
        Test `num_kernel_samples` models with sampled kernel parameters. The model's kernel
        parameters are then set to the sample achieving maximal likelihood.

        :param num_kernel_samples: Number of randomly sampled kernels to evaluate.
        """

        @tf.function
        def evaluate_loss_of_model_parameters() -> tf.Tensor:
            randomize_hyperparameters(self.model)
            return self.model.training_loss()

        squeeze_hyperparameters(self.model)
        current_best_parameters = read_values(self.model)
        min_loss = self.model.training_loss()

        for _ in tf.range(num_kernel_samples):
            try:
                train_loss = evaluate_loss_of_model_parameters()
            except tf.errors.InvalidArgumentError:  # allow badly specified kernel params
                train_loss = 1e100

            if train_loss < min_loss:  # only keep best kernel params
                min_loss = train_loss
                current_best_parameters = read_values(self.model)

        multiple_assign(self.model, current_best_parameters)
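For context, here is a minimal standalone sketch of the same sample-and-keep-best pattern on a plain `gpflow.models.GPR`. The `sample_kernel_parameters` helper is hypothetical and stands in for `randomize_hyperparameters`; the squeezing step is omitted.

# Minimal sketch: sample kernel hyperparameters, keep the set with the lowest
# training loss. `sample_kernel_parameters` is a hypothetical stand-in for
# `randomize_hyperparameters`; the squeezing step is omitted for brevity.
import numpy as np
import gpflow
from gpflow.utilities import multiple_assign, read_values

X = np.random.rand(20, 1)
Y = np.sin(10 * X) + 0.1 * np.random.randn(20, 1)
model = gpflow.models.GPR((X, Y), kernel=gpflow.kernels.RBF())


def sample_kernel_parameters(m: gpflow.models.GPR) -> None:
    # Hypothetical helper: draw lengthscales and variance log-uniformly in [0.1, 10].
    m.kernel.lengthscales.assign(10.0 ** np.random.uniform(-1.0, 1.0))
    m.kernel.variance.assign(10.0 ** np.random.uniform(-1.0, 1.0))


best_parameters = read_values(model)
min_loss = model.training_loss()

for _ in range(10):
    sample_kernel_parameters(model)
    loss = model.training_loss()
    if loss < min_loss:  # keep only the best sample
        min_loss = loss
        best_parameters = read_values(model)

multiple_assign(model, best_parameters)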
import gpflow
import tensorflow as tf
from gpflow.utilities import read_values


def test_dict_utilities() -> None:
    """Test both `parameter_dict()` and `read_values()`."""

    class SubModule(tf.Module):
        def __init__(self):
            super().__init__()
            self.parameter = gpflow.Parameter(1.0)
            self.variable = tf.Variable(1.0)

    class Module(tf.Module):
        def __init__(self):
            super().__init__()
            self.submodule = SubModule()
            self.top_parameter = gpflow.Parameter(3.0)

    m = Module()
    params = gpflow.utilities.parameter_dict(m)
    # {
    #     ".submodule.parameter": <parameter object>,
    #     ".submodule.variable": <variable object>,
    #     ".top_parameter": <parameter object>,
    # }
    assert list(params.keys()) == [
        ".submodule.parameter",
        ".submodule.variable",
        ".top_parameter",
    ]
    assert list(params.values()) == [
        m.submodule.parameter,
        m.submodule.variable,
        m.top_parameter,
    ]

    for k, v in read_values(m).items():
        assert params[k].numpy() == v
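A small usage sketch (outside the test) of how these utilities fit together: `read_values()` returns plain numpy values keyed by the same paths as `parameter_dict()`, and `multiple_assign()` accepts such a dict to restore a module's state.

# Usage sketch: save parameter values, perturb one, then restore the saved state.
import numpy as np
import gpflow
from gpflow.utilities import multiple_assign, read_values

kernel = gpflow.kernels.RBF()
saved = read_values(kernel)  # e.g. {".variance": 1.0, ".lengthscales": 1.0}
kernel.lengthscales.assign(5.0)  # perturb a parameter
multiple_assign(kernel, saved)  # restore the saved values
assert np.isclose(kernel.lengthscales.numpy(), 1.0)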