# NOTE(review): this line is a whitespace-mangled chunk of a larger script and is
# TRUNCATED — the final `acquisition(...)` call for acq_sphere1 is cut off mid-argument
# list, so the code is left byte-identical rather than reformatted.
# What the visible statements do: initialize hyperparameters of three inference objects
# (rect, sphere1, sphere2), fit rect and sphere1 via `learning(n_restarts=10)`, copy
# sphere1's fitted parameter vector into sphere2 via `matrix_update`, and — when the
# input dimension is 2 — build a 50x50 grid on [-1, 1]^2 as a torch Variable, compute
# predictive mean/variance (std = variance**0.5) for each inference object, and evaluate
# the acquisition function on the grid (presumably for plotting/comparison — confirm
# against the surrounding file).
inference_rect.model_param_init() inference_sphere1.model_param_init() inference_sphere2.model_param_init() params_rect = inference_rect.learning(n_restarts=10) params_sphere1 = inference_sphere1.learning(n_restarts=10) inference_sphere2.matrix_update(model_sphere1.param_to_vec()) if ndim == 2: x1_grid, x2_grid = np.meshgrid(np.linspace(-1, 1, 50), np.linspace(-1, 1, 50)) x_pred_points = Variable( torch.from_numpy( np.vstack([x1_grid.flatten(), x2_grid.flatten()]).astype(np.float32)).t()) pred_mean_rect, pred_var_rect = inference_rect.predict(x_pred_points) pred_std_rect = pred_var_rect**0.5 acq_rect = acquisition(x_pred_points, inference_rect, params_rect, reference=reference) pred_mean_sphere1, pred_var_sphere1 = inference_sphere1.predict( x_pred_points) pred_mean_sphere2, pred_var_sphere2 = inference_sphere2.predict( x_pred_points) pred_std_sphere1 = pred_var_sphere1**0.5 pred_std_sphere2 = pred_var_sphere2**0.5 acq_sphere1 = acquisition(x_pred_points, inference_sphere1, params_sphere1,
# Build a plain Inference and a ShadowInference over the same (x_input, output)
# data, fit the plain model's hyperparameters, and copy them into the shadow
# model so both are compared with identical parameters.
inference_normal = Inference((x_input, output), model_normal)
inference_shadow = ShadowInference((x_input, output), model_shadow)
inference_normal.init_parameters()
inference_shadow.init_parameters()
params_normal = inference_normal.learning(n_restarts=5)
inference_shadow.cholesky_update(model_normal.param_to_vec())
if ndim == 2:
    # 50x50 evaluation grid on [-1, 1]^2, flattened to a (2500, 2) float32
    # torch Variable (rows are points, columns are the two input dimensions).
    x1_grid, x2_grid = np.meshgrid(np.linspace(-1, 1, 50),
                                   np.linspace(-1, 1, 50))
    x_pred_points = Variable(
        torch.from_numpy(
            np.vstack([x1_grid.flatten(),
                       x2_grid.flatten()]).astype(np.float32)).t())
    # Predictive mean/std and acquisition values from the plain inference.
    pred_mean_normal, pred_var_normal = inference_normal.predict(
        x_pred_points)
    pred_std_normal = pred_var_normal**0.5
    acq_normal = acquisition(x_pred_points,
                             deepcopy_inference(inference_normal,
                                                params_normal),
                             reference=reference)
    # Same quantities from the shadow inference, reusing the plain model's
    # learned parameters for an apples-to-apples comparison.
    pred_mean_shadow, pred_var_shadow = inference_shadow.predict(
        x_pred_points)
    pred_std_shadow = pred_var_shadow**0.5
    acq_shadow = acquisition(x_pred_points,
                             deepcopy_inference(inference_shadow,
                                                params_normal),
                             reference=reference)
# ShadowInference unit test