def get_joint_instance(self, query_points):
    x = var2link(query_points)
    return MultivariateNormalVariable(loc=self.mean_function(x),
                                      covariance_matrix=self.covariance_function(x),
                                      name=self.name + "(" + query_points.name + ")")
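# A minimal numpy sketch of the idea behind get_joint_instance (hypothetical
# RBF mean/covariance functions, not the Brancher API): the joint distribution
# over a set of query points is one multivariate normal whose mean and
# covariance come from evaluating the process's mean and covariance functions
# on those points.
import numpy as np

def rbf_covariance(x, y, scale=1.):
    return np.exp(-0.5*(x[:, None] - y[None, :])**2/scale**2)

query_points = np.linspace(0., 1., 5)
loc = np.zeros_like(query_points)                            # zero mean function
cov = rbf_covariance(query_points, query_points) + 1e-6*np.eye(5)  # kernel + jitter
joint_sample = np.random.multivariate_normal(loc, cov)       # one draw from the joint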
h_mean2.append(h_mean)
z_mean2.append(z_mean)
lower_bound2.append(mean - sd)
upper_bound2.append(mean + sd)

# Mean squared error and average Gaussian negative log-likelihood of the
# mean-field posterior. The bounds are mean +/- sd, so sd = (upper - lower)/2
# and the predictive variance is ((upper - lower)/2)**2.
MSE = np.mean((np.array(ground_truth) - np.array(x_mean2))**2)
var = (0.5*(np.array(upper_bound2) - np.array(lower_bound2)))**2
Lk = np.mean(0.5*(np.array(ground_truth) - np.array(x_mean2))**2/var
             + 0.5*np.log(var) + 0.5*np.log(2*np.pi))
print("MF MSE {}".format(MSE))
print("MF lk {}".format(Lk))
MSE2.append(MSE)
Lk2.append(Lk)

# Multivariate normal variational distribution over all 3*T latent time steps:
# the first T components of QV parametrize x, the next T parametrize h and the
# last T parametrize z.
QV = MultivariateNormalVariable(loc=np.zeros((3*T,)),
                                scale_tril=0.1*np.identity(3*T),
                                name="V",
                                learnable=True)
Qx = [DeterministicVariable(QV[0], 'x0')]
Qh = [DeterministicVariable(QV[T], 'h0')]
Qz = [DeterministicVariable(QV[2*T], 'z0')]
for t in range(1, T):
    Qx.append(DeterministicVariable(x_mean2[t] + QV[t], x_names[t]))
    Qh.append(DeterministicVariable(h_mean2[t] + QV[T + t], h_names[t]))
    Qz.append(DeterministicVariable(z_mean2[t] + QV[2*T + t], z_names[t]))
variational_posterior = ProbabilisticModel(Qx + Qh + Qz)
AR_model.set_posterior_model(variational_posterior)

# Inference #
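# Sanity check (sketch, not part of the original script): the per-point term
# in Lk above is the Gaussian negative log-density, so it should equal
# -scipy.stats.norm.logpdf up to floating point error.
import numpy as np
from scipy.stats import norm

y, mu, sd = 1.3, 1.0, 0.5
manual = 0.5*(y - mu)**2/sd**2 + 0.5*np.log(sd**2) + 0.5*np.log(2*np.pi)
assert np.isclose(manual, -norm.logpdf(y, loc=mu, scale=sd))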
inference.perform_inference(AR_model,
                            number_iterations=N_itr,
                            number_samples=N_smpl,
                            optimizer=optimizer,
                            lr=lr)
loss_list2 = AR_model.diagnostics["loss curve"]

# ELBO
ELBO2.append(float(AR_model.estimate_log_model_evidence(N_ELBO_smpl).detach().numpy()))
print("MF {}".format(ELBO2[-1]))

# Multivariate normal variational distribution #
QV = MultivariateNormalVariable(loc=np.zeros((T,)),
                                scale_tril=np.identity(T),
                                name="V",
                                learnable=True)
Qx = [NormalVariable(QV[0], 0.1, 'x0', learnable=True)]
for t in range(1, T):
    Qx.append(NormalVariable(QV[t], 0.1, x_names[t], learnable=True))
variational_posterior = ProbabilisticModel(Qx)
AR_model.set_posterior_model(variational_posterior)

# Inference #
inference.perform_inference(AR_model,
                            number_iterations=N_itr,
                            number_samples=N_smpl,
                            optimizer=optimizer,
                            lr=lr)
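# Structure sketch (plain numpy, not Brancher): the T components of one
# correlated Gaussian V act as the means of per-timestep Normals, so the
# variational posterior is correlated across time even though each Qx[t] is
# conditionally a univariate Normal.
import numpy as np

T = 4
L = np.tril(np.random.randn(T, T)) + T*np.eye(T)   # stand-in for the learnable scale_tril
V = L @ np.random.randn(T)                          # one joint draw of QV
x = V + 0.1*np.random.randn(T)                      # Qx[t] ~ Normal(V[t], 0.1)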
weights = NormalVariable(np.zeros((1, number_regressors)),
                         0.5*np.ones((1, number_regressors)),
                         "weights")
x = DeterministicVariable(input_variable, "x", is_observed=True)
logit_p = BF.matmul(weights, x)
k = BinomialVariable(1, logit_p=logit_p, name="k")
model = ProbabilisticModel([k])

samples = model._get_sample(300)

# Observations
k.observe(labels)

# Variational model (a mean-field alternative is left commented out)
# Qweights = NormalVariable(np.zeros((1, number_regressors)),
#                           np.ones((1, number_regressors)), "weights", learnable=True)
Qweights = MultivariateNormalVariable(loc=np.zeros((1, number_regressors)),
                                      covariance_matrix=np.identity(number_regressors),
                                      name="weights",
                                      learnable=True)
variational_model = ProbabilisticModel([Qweights])
model.set_posterior_model(variational_model)

# Inference
inference.perform_inference(model,
                            number_iterations=3000,
                            number_samples=50,
                            optimizer='Adam',
                            lr=0.001)
loss_list = model.diagnostics["loss curve"]

# Statistics
posterior_samples = model._get_posterior_sample(1000)
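# Hypothetical follow-up (sketch): summarize the sampled posterior weights.
# Assumes the sample dictionary can be indexed by variable, as in the other
# snippets, and that 1000 samples were drawn above.
weight_samples = posterior_samples[Qweights].detach().numpy().reshape(1000, -1)
posterior_mean = weight_samples.mean(axis=0)
posterior_cov = np.cov(weight_samples, rowvar=False)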
import numpy as np
import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import MultivariateNormalVariable

# Multivariate normal parametrized by the lower-triangular Cholesky factor of
# its covariance matrix.
mean = np.zeros((2, 1))
chol_cov = np.array([[1., 0.], [-1., 4.]])
x = MultivariateNormalVariable(mean, chol_cov=chol_cov)

number_samples = 500
samples = x._get_sample(number_samples)
for sample in samples[x].data:
    plt.scatter(sample[0, 0, 0], sample[0, 1, 0], c="b")
plt.show()

# Multivariate normal parametrized by a diagonal covariance.
mean = np.zeros((1, 2, 1))
diag_cov = np.ones((1, 2, 1))
y = MultivariateNormalVariable(mean, diag_cov=diag_cov)

number_samples = 500
samples = y._get_sample(number_samples)
for sample in samples[y].data:
    plt.scatter(sample[0, 0, 0], sample[0, 1, 0], c="b")
plt.show()
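# Relation being used above (plain numpy sketch): a Cholesky factor L
# represents the covariance L @ L.T, and a sample is obtained as
# mean + L @ z with z standard normal.
L = np.array([[1., 0.], [-1., 4.]])
implied_cov = L @ L.T                  # [[1., -1.], [-1., 17.]]
z = np.random.randn(2)
zero_mean_sample = L @ z               # has covariance implied_cov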
for xt in x:
    x_posterior_samples2 = posterior_samples2[xt].detach().numpy().flatten()
    mean2 = np.mean(x_posterior_samples2)
    sd2 = np.sqrt(np.var(x_posterior_samples2))
    x_mean2.append(mean2)
    lower_bound2.append(mean2 - sd2)
    upper_bound2.append(mean2 + sd2)

# Multivariate normal variational distribution with a low-rank plus diagonal
# covariance: cov_shift keeps the matrix positive definite while cov_factor
# contributes a learnable rank-5 correlation structure.
rank = 5
cov_factor = RootVariable(np.random.normal(0., 0.5, (T, rank)), "cov_factor", learnable=True)
cov_shift = RootVariable(0.01*np.identity(T), "cov_shift", learnable=False)
mean_shift = RootVariable(np.zeros((T,)), "mean_shift", learnable=True)
QV = MultivariateNormalVariable(loc=mean_shift,
                                covariance_matrix=cov_shift + BF.matmul(cov_factor, BF.transpose(cov_factor, 2, 1)),
                                name="V",
                                learnable=True)
Qomega = NormalVariable(2*np.pi*8, 5., 'omega', learnable=True)
Qdrift = NormalVariable(0., 1., 'drift', learnable=True)
Qx = [NormalVariable(QV[0], 0.1, 'x0', learnable=True)]
for t in range(1, T):
    Qx.append(NormalVariable(QV[t], 0.1, x_names[t], learnable=True))
variational_posterior = ProbabilisticModel([Qomega, Qdrift] + Qx)
AR_model.set_posterior_model(variational_posterior)

# Inference #
inference.perform_inference(AR_model,
                            number_iterations=N_itr,
                            number_samples=N_smpl,
                            optimizer=optimizer,
                            lr=lr)
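# Shape of that covariance (plain numpy sketch): W @ W.T + eps*I is positive
# definite by construction and needs only T*rank factor entries instead of
# the T*(T+1)/2 entries of a full covariance matrix.
import numpy as np

T_, rank_ = 10, 5
W = np.random.normal(0., 0.5, (T_, rank_))
low_rank_cov = 0.01*np.eye(T_) + W @ W.T
assert np.all(np.linalg.eigvalsh(low_rank_cov) > 0.)   # strictly positive definite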
import numpy as np
import matplotlib.pyplot as plt

from brancher.variables import ProbabilisticModel
from brancher.standard_variables import MultivariateNormalVariable

# Multivariate normal parametrized directly by its covariance matrix.
mean = np.zeros((2, 1))
covariance_matrix = np.array([[1., -0.3], [-0.3, 1.]])
x = MultivariateNormalVariable(mean, covariance_matrix=covariance_matrix)

number_samples = 500
samples = x._get_sample(number_samples)
for sample in samples[x].data:
    plt.scatter(sample[0, 0, 0], sample[0, 1, 0], c="b")
plt.show()
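# Quick check (plain numpy sketch, independent of Brancher): draws with this
# covariance should have an empirical covariance close to [[1., -0.3], [-0.3, 1.]].
raw = np.random.multivariate_normal(np.zeros(2), covariance_matrix, size=5000)
print(np.cov(raw, rowvar=False))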