def forward(self, position, heterogeneity, from_adjacency):
    # update products
    self.products = self.product.to_product(position) * self.concat.message_to_x["s_uv"]
    # update vectors: broadcast the (N, 1) heterogeneity into the (N, N, 1) row/column blocks
    x = {
        "a_u": GaussianArray(
            tf.tile(tf.expand_dims(heterogeneity.precision(), 0), [self.N, 1, 1]),
            tf.tile(tf.expand_dims(heterogeneity.mean_times_precision(), 0), [self.N, 1, 1])),
        "a_v": GaussianArray(
            tf.tile(tf.expand_dims(heterogeneity.precision(), 1), [1, self.N, 1]),
            tf.tile(tf.expand_dims(heterogeneity.mean_times_precision(), 1), [1, self.N, 1])),
        "s_uv": self.products
    }
    self.vectors = self.concat.to_v(x) * self.sum.message_to_x
    # update linear predictors
    self.linear_predictors = self.sum.to_sum(self.vectors) * from_adjacency
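# -----------------------------------------------------------------------------
# Illustration of the broadcasting used in forward() above, with plain tensors only
# (the GaussianArray wrapper is omitted, so this is a sketch of the shapes, not of
# the message algebra): an (N, 1) heterogeneity array is tiled into two (N, N, 1)
# blocks, one varying along the columns and one along the rows.
import tensorflow as tf

N = 4
h = tf.random.normal((N, 1))
a_u = tf.tile(tf.expand_dims(h, 0), [N, 1, 1])   # (N, N, 1), entry [i, j, 0] = h[j, 0]
a_v = tf.tile(tf.expand_dims(h, 1), [1, N, 1])   # (N, N, 1), entry [i, j, 0] = h[i, 0]
assert a_u.shape == (N, N, 1) and a_v.shape == (N, N, 1)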
def __init__(self, N, mean=0., variance=1.):
    self.N = N
    self.mean = mean
    self.variance = variance
    self.prior = Prior(GaussianArray.from_shape((N, 1), mean, variance))
    self.marginal = GaussianArray.uniform((N, 1))
    self.from_adjacency = GaussianArray.uniform((N, 1))
def __init__(self, N, K):
    self.N = N
    self.K = K
    # incoming messages
    self.from_position = GaussianArray.uniform((N, K))
    self.from_heterogeneity = GaussianArray.uniform((N, 1))
    self.from_adjacency = GaussianArray.uniform((N, N))
    # nodes
    self.products = GaussianArray.uniform((N, N, K))
    self.vectors = GaussianArray.uniform((N, N, K + 2))
    self.linear_predictors = GaussianArray.uniform((N, N))
    # factors
    self.product = Product((N, K), (N, N, K))
    self.concat = Concatenate(
        {"a_u": (N, N, 1), "a_v": (N, N, 1), "s_uv": (N, N, K)},
        (N, N, K + 2))
    self.sum = Sum((N, N, K + 2), (N, N))
import tensorflow as tf
import numpy as np

from models.distributions.gaussianarray import GaussianArray
from models.distributions.bernoulliarray import BernoulliArray

# -----------------------------------------------------------------------------
# PRIOR
from models.vmp.vmp_factors2 import Prior

child = GaussianArray.uniform((3, 3))
self = Prior(child, 0., 1.)
self.forward()
self.to_elbo()

# -----------------------------------------------------------------------------
# ADD VARIANCE
from models.vmp.vmp_factors2 import AddVariance

parent = GaussianArray.from_shape((3, 3), 0., 1.)
child = GaussianArray.from_shape((3, 3), 0., 1.)
self = AddVariance(child, parent, 1.)
self.to_child()
print(child)
self.to_parent()
print(parent)
self.to_elbo()

# -----------------------------------------------------------------------------
# PROBIT
from models.vmp.vmp_factors2 import Probit

parent = GaussianArray.from_shape((5, 5), 0., 1.)
A = tf.where(tf.random.normal((5, 5)) > 0., 1., 0.)
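# -----------------------------------------------------------------------------
# Hand computation of the moment rule AddVariance is presumably applying in to_child()
# (an assumption, not read off the implementation): if child = parent + independent
# zero-mean noise with the given variance, the mean passes through unchanged and the
# noise variance is added. Plain tensors only, mirroring the (3, 3) test above.
parent_mean = tf.zeros((3, 3))
parent_variance = tf.ones((3, 3))
noise_variance = 1.
child_mean = parent_mean                           # zero-mean noise leaves the mean unchanged
child_variance = parent_variance + noise_variance  # variances of independent terms add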
import tensorflow as tf
import sys
sys.path.append("/NNVI")

from models.distributions.gaussianarray import GaussianArray
from NNVI.vmp_tf.vmp.vmp_factors import Sum, WeightedSum, Probit

# Sum factor
factor = Sum()
x = GaussianArray.from_array(
    tf.random.normal((3, 2, 3), 0.0, 1.0),
    tf.random.normal((3, 2, 3), 0.0, 1.0)**2)
sum = factor.to_sum(x)
new_x = factor.to_x(x, sum)

# WeightedSum factor
x = GaussianArray.from_array(
    tf.random.normal((3, 2), 0.0, 1.0),
    tf.random.normal((3, 2), 0.0, 1.0)**2)
B = tf.random.normal((2, 5), 0.0, 1.0)
B0 = tf.random.normal((1, 5), -1.0, 1.0)
factor = WeightedSum()
result = factor.to_result(x, B, B0)
new_x = factor.to_x(x, result, B, B0)

# hand check of the weighted-sum moments
m = tf.tensordot(x.mean(), B, 1) + B0
# note: the weighted-sum variance should use the variances, not the log-variances;
# x.log_var() here looks like a slip if this check is meant to reproduce to_result()
v = tf.tensordot(x.log_var(), B**2, 1)
result = GaussianArray.from_array(m, v)

# Probit factor
x = GaussianArray.from_array(
    tf.random.normal((3, 3), 0.0, 1.0),
    tf.random.normal((3, 3), 0.0, 1.0)**2)
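# Standalone restatement of the WeightedSum hand check with plain tensors and fresh names
# (no GaussianArray): for independent entries x ~ N(m, v) and result = x @ B + B0, the
# result means are m @ B + B0 and the result variances are v @ (B ** 2).
m_chk = tf.random.normal((3, 2))
v_chk = tf.random.normal((3, 2))**2
B_chk = tf.random.normal((2, 5))
B0_chk = tf.random.normal((1, 5))
m_out = tf.tensordot(m_chk, B_chk, 1) + B0_chk   # (3, 5) means
v_out = tf.tensordot(v_chk, B_chk**2, 1)         # (3, 5) variances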
def initialize(self):
    # note: tf.random.normal takes a standard deviation as its third argument,
    # so self.variance is used here as a standard deviation
    self.prior.message_to_x = GaussianArray.from_array(
        mean=tf.random.normal((self.N, 1), self.mean, self.variance),
        variance=tf.ones((self.N, 1)) * self.variance)
    self.marginal = self.prior.to_x()
def initialize(self):
    # TODO: Check this; I think it does nothing.
    self.prior.message_to_x = GaussianArray.from_array(
        mean=tf.random.normal((self.N, self.K), self.mean, self.variance),
        variance=tf.ones((self.N, self.K)) * self.variance)
    self.marginal = self.prior.to_x()
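# -----------------------------------------------------------------------------
# Minimal sketch of the natural-parameter representation that GaussianArray appears to use
# elsewhere in this code (it is constructed from a precision and a mean-times-precision,
# and messages are combined with *). This toy container is NOT the repo's class; it only
# makes the message products used in the updates above concrete.
import tensorflow as tf

class ToyGaussianArray:
    def __init__(self, precision, mean_times_precision):
        self.p = precision
        self.mtp = mean_times_precision

    @classmethod
    def from_array(cls, mean, variance):
        # natural parameters: precision = 1 / variance, mean_times_precision = mean / variance
        return cls(1. / variance, mean / variance)

    def mean(self):
        return self.mtp / self.p

    def __mul__(self, other):
        # multiplying Gaussian densities adds their natural parameters
        return ToyGaussianArray(self.p + other.p, self.mtp + other.mtp)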
missing
tf.where(missing, self.nodes["linear_predictor_covariate"].mean(), 0.)
tf.where(missing, self.factors["weighted_sum"].message_to_result.mean(), 0.)
tf.where(missing, X_complete, 0.)

# factor tests --------------------------------
# gaussian comparison
variance = tf.random.normal((1, p), 0., 1.)**2
self = GaussianComparison((N, p))
self.to_mean(X, variance)

# weighted sum
self = WeightedSum((N, K), (N, p))
x = GaussianArray.from_array(tf.random.normal((N, K), 0., 1.), tf.ones((N, K)))
result = GaussianArray.from_array(
    tf.random.normal((N, p), 0., 1.),
    1. * tf.ones((N, p)))
self.to_result(x, B, B0)
self.to_x(x, result, B, B0)

# ---------------------------------------
# Missing values
GaussianArray.observed(X)
mean = self.nodes["linear_predictor_covariate"]
x = self.nodes["covariates_continuous"]
variance = self.parameters["noise_covariate"].value()
self.nodes["heterogeneity"].mean(), transpose_b=True) self.factors["noise"].message_to_x.mean() tf.reduce_sum( tf.where(self.nodes["noisy_linear_predictor"].mean() > 0., 1, 0) - A) tf.reduce_sum( tf.where(self.factors["noise"].message_to_x.mean() > 0., 1, 0) - A) x = self.nodes["vector"] sum = self.nodes["linear_predictor"] # factor tests -------------------------------- # sum self = Sum((N, K + 2), (N, )) x = GaussianArray.from_array(tf.random.normal((N, K + 2), 0., 1.), tf.ones((N, K + 2))) sum = GaussianArray.from_array(tf.random.normal((N, ), 0., 1.), 1.2 * tf.ones( (N, ))) self.to_x(x, sum) self.to_sum(x) print((self.message_to_x * x).mean()) print((self.message_to_sum * sum).mean()) # product self = Product((N, K), (N, N, K)) x = GaussianArray.from_array(tf.random.normal((N, K), 0., 1.), tf.ones((N, K))) product = GaussianArray.from_array(tf.random.normal((N, N, K), 0., 1.), 0.1 * tf.ones((N, N, K)))
import tensorflow as tf
import numpy as np

from models.distributions.gaussianarray import GaussianArray
from models.vmp.vmp_factors2 import Product

N = 3
K = 1
parent = GaussianArray.from_shape((N, K), 1.414, 1.)
upper = tf.linalg.band_part(tf.ones((N, N)), -1, 0) == 0
mean = tf.where(tf.expand_dims(upper, 2), 2., 0.)
variance = tf.where(tf.expand_dims(upper, 2), 5., np.inf)
child = GaussianArray.from_array(mean, variance)
self = Product(child, parent)
print(parent)
self.forward()
self.backward()
print(parent)
print(child)
self.to_child()
print(child.mean())
self.to_elbo()
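# -----------------------------------------------------------------------------
# Hand computation of the moments of a product of two independent Gaussians, the rule a
# Product factor typically uses in its forward pass (an assumption here, not read off the
# implementation): E[zu * zv] = mu * mv and Var[zu * zv] = vu * vv + vu * mv**2 + mu**2 * vv.
mu, vu = 1.414, 1.0   # matches the parent mean / variance used above
mv, vv = 1.414, 1.0
mean_prod = mu * mv
var_prod = vu * vv + vu * mv**2 + mu**2 * vv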
print(m)
print(parameters[0])
print(v)
print(parameters[1])

# ------------------------------
# Logistic factor
import tensorflow as tf
import numpy as np

from models.distributions.gaussianarray import GaussianArray
from models.distributions.bernoulliarray import BernoulliArray
from models.vmp.vmp_factors2 import Logistic

shape = (5, 5)
parent = GaussianArray.from_array(tf.random.normal(shape, 0., 1.), tf.ones(shape))
A = tf.where(parent.mean() + tf.random.normal(shape) > 0., 1., 0.)
lower = tf.ones_like(A)
upper = tf.linalg.band_part(lower, -1, 0) == 0
A_lower = tf.where(upper, A, np.nan)
child = BernoulliArray.observed(A_lower)
self = Logistic(child, parent)
self.to_elbo()
self.to_child()
print(self.message_to_child)
self.to_parent()
print(parent)
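# -----------------------------------------------------------------------------
# A common closed-form approximation for the forward message of a logistic link with a
# Gaussian parent: E[sigmoid(z)] for z ~ N(mu, sigma^2) is roughly
# sigmoid(mu / sqrt(1 + pi * sigma^2 / 8)). Whether Logistic.to_child() uses exactly this
# approximation is an assumption; the snippet only illustrates the quantity being passed on.
mu = parent.mean()
sigma2 = 1. / parent.precision()
p_child = tf.sigmoid(mu / tf.sqrt(1. + np.pi * sigma2 / 8.))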