def __init__(self, positions, heterogeneity, linear_predictor):
    """Composite factor tying positions and heterogeneity to the adjacency
    linear predictor via product -> expand/transpose -> concatenate -> sum.
    """
    super().__init__()
    self._deterministic = True
    N, K = positions.shape()  # number of nodes, latent dimensions
    # neighboring (visible) nodes
    self.positions = positions
    self.heterogeneity = heterogeneity
    self.linear_predictor = linear_predictor
    # hidden nodes, initialized to uniform (uninformative) Gaussians
    self._products = GaussianArray.uniform((N, N, K))
    self._heterogeneity_expanded = GaussianArray.uniform((N, N, 2))
    self._vector = GaussianArray.uniform((N, N, K + 2))
    self._nodes.update({
        "products": self._products,
        "heterogeneity_expanded": self._heterogeneity_expanded,
        "vector": self._vector,
    })
    # hidden factors connecting the nodes above
    self._product = Product(parent=self.positions, child=self._products)
    self._expand_transpose = ExpandTranspose(
        child=self._heterogeneity_expanded,
        parent=self.heterogeneity,
    )
    self._concatenate = Concatenate(
        parts={
            "products": self._products,
            "heterogeneity_expanded": self._heterogeneity_expanded,
        },
        vector=self._vector,
    )
    self._sum = Sum(parent=self._vector, child=self.linear_predictor)
    self._factors.update({
        "product": self._product,
        "expand_transpose": self._expand_transpose,
        "concatenate": self._concatenate,
        "sum": self._sum,
    })
def __init__(self, child, parent):
    """Deterministic factor linking *parent* to *child*, with uniform
    initial messages in both directions.
    """
    super().__init__()
    self._deterministic = True
    self.child = child
    self.parent = parent
    # messages start uninformative
    self.message_to_child = GaussianArray.uniform(child.shape())
    self.message_to_parent = GaussianArray.uniform(parent.shape())
def __init__(self, child: GaussianArray, parent: GaussianArray, variance: float = 1.):
    """Stochastic factor adding Gaussian noise with a log-scale variance
    parameter (not trained by default — the False flag presumably marks it
    fixed; confirm in ParameterArrayLogScale).
    """
    super().__init__()
    self._deterministic = False
    shape = child.shape()
    self.shape = shape
    self.child = child
    self.parent = parent
    # uninformative initial messages
    self.message_to_child = GaussianArray.uniform(shape)
    self.message_to_parent = GaussianArray.uniform(shape)
    # variance stored on the log scale
    self.variance = ParameterArrayLogScale(variance, False, name="AddVariance.variance")
    self._parameters = {"variance": self.variance}
def __init__(self, shape_in, shape_out):
    """Concatenation factor described purely by shapes: *shape_in* maps part
    names to shapes; *shape_out* is the concatenated shape (last axis).
    """
    super().__init__()
    d = len(shape_out)
    self.message_to_x = {name: GaussianArray.uniform(s) for name, s in shape_in.items()}
    self.message_to_v = GaussianArray.uniform(shape_out)
    # slice geometry: last-axis sizes of each part and their offsets
    sizes = [s[-1] for s in shape_in.values()]
    offsets = [0, *np.cumsum(sizes[:-1])]
    self._size = [(*shape_out[:-1], sz) for sz in sizes]
    self._begin = [(*(0,) * (d - 1), off) for off in offsets]
    self._name = list(shape_in)
def __init__(self, parent: GaussianArray, child_cts: GaussianArray, child_bin: BernoulliArray,
             variance_cts=None, variance_bin=None, bin_model="NoisyProbit"):
    """Composite GLM factor: splits a linear predictor into a continuous part
    (Gaussian noise model) and a binary part (NoisyProbit or Logistic).
    """
    super().__init__()
    self._deterministic = False
    # visible nodes
    self.parent = parent
    self.child_cts = child_cts
    self.child_bin = child_bin
    # dimensions
    N, _ = parent.shape()
    p_cts = child_cts.shape()[1]
    p_bin = child_bin.shape()[1]
    # unit variances unless provided
    if variance_cts is None:
        variance_cts = tf.ones((1, p_cts))
    if variance_bin is None:
        variance_bin = tf.ones((1, p_bin))
    # hidden nodes: the two halves of the linear predictor
    self._lin_pred_cts = GaussianArray.uniform((N, p_cts))
    self._lin_pred_bin = GaussianArray.uniform((N, p_bin))
    self._nodes.update({
        "lin_pred_cts": self._lin_pred_cts,
        "lin_pred_bin": self._lin_pred_bin,
    })
    # factors
    self._split = Split(
        parts={"cts": self._lin_pred_cts, "bin": self._lin_pred_bin},
        vector=self.parent,
    )
    self._model_cts = AddVariance(
        parent=self._lin_pred_cts,
        child=self.child_cts,
        variance=variance_cts,
    )
    if bin_model == "Logistic":
        self._model_bin = Logistic(
            parent=self._lin_pred_bin,
            child=self.child_bin,
        )
    else:
        self._model_bin = NoisyProbit(
            parent=self._lin_pred_bin,
            child=self.child_bin,
            variance=variance_bin,
        )
    self._factors.update({
        "split": self._split,
        "model_cts": self._model_cts,
        "model_bin": self._model_bin,
    })
def __init__(self, vector: GaussianArray, parts: Dict[str, GaussianArray]):
    """Deterministic concatenation of named part nodes into one vector node
    along the last axis.
    """
    super().__init__()
    self._deterministic = True
    self.vector = vector
    self.parts = parts
    shape_in = {name: node.shape() for name, node in parts.items()}
    shape_out = vector.shape()
    d = len(shape_out)
    # uninformative initial messages
    self.message_to_parts = {name: GaussianArray.uniform(s) for name, s in shape_in.items()}
    self.message_to_vector = GaussianArray.uniform(shape_out)
    # slice geometry: last-axis sizes of each part and their offsets
    sizes = [s[-1] for s in shape_in.values()]
    offsets = [0, *np.cumsum(sizes[:-1])]
    self._size = [(*shape_out[:-1], sz) for sz in sizes]
    self._begin = [(*(0,) * (d - 1), off) for off in offsets]
    self._name = list(shape_in)
def __init__(self, child: GaussianArray, parent: GaussianArray, bias=None, weight=None):
    """Deterministic weighted-sum factor with fixed bias and weight arrays.

    Defaults: zero bias (1, p_child) and unit weights (p_parent, p_child).
    """
    super().__init__()
    self._deterministic = True
    self.child = child
    self.parent = parent
    self.shape_child = child.shape()
    self.shape_parent = parent.shape()
    # uninformative initial messages
    self.message_to_child = GaussianArray.uniform(self.shape_child)
    self.message_to_parent = GaussianArray.uniform(self.shape_parent)
    if bias is None:
        bias = tf.zeros((1, self.shape_child[1]))
    if weight is None:
        weight = tf.ones((self.shape_parent[1], self.shape_child[1]))
    # NOTE(review): the False flag presumably marks these as non-trainable —
    # confirm against ParameterArray
    self.bias = ParameterArray(bias, False, name="WeightedSum.bias")
    self.weight = ParameterArray(weight, False, name="WeightedSum.weight")
    self._parameters = {"bias": self.bias, "weight": self.weight}
def __init__(self, child: BernoulliArray, parent: GaussianArray):
    """Deterministic probit-link factor between a Gaussian parent and a
    Bernoulli child of the same shape.
    """
    super().__init__()
    self._deterministic = True
    shape = child.shape()
    self.shape = shape
    self.child = child
    self.parent = parent
    # uninformative initial messages
    self.message_to_child = BernoulliArray.uniform(shape)
    self.message_to_parent = GaussianArray.uniform(shape)
def __init__(self, N, K, A):
    """Build the message-passing graph for the network model.

    N nodes, K latent dimensions, adjacency data A. Uses the shape-based
    (legacy) factor constructors.
    """
    self.A = A
    self.N = N
    self.K = K
    # free parameters
    self.parameters = {
        "noise": ParameterArray(1. * tf.ones((1, 1))),
    }
    # variational nodes
    self.nodes = {
        "latent": GaussianArray.uniform((N, K)),
        "heterogeneity": GaussianArray.uniform((N, 1)),
        "product": GaussianArray.uniform((N, N, K)),
        "vector": GaussianArray.uniform((N, N, K + 2)),
        "linear_predictor": GaussianArray.uniform((N, N)),
        "noisy_linear_predictor": GaussianArray.uniform((N, N)),
        "links": tf.zeros((N, N)),
    }
    # factors connecting the nodes
    self.factors = {
        "latent_prior": Prior(GaussianArray.from_shape((N, K), 0., 1.)),
        "heterogeneity_prior": Prior(GaussianArray.from_shape((N, 1), 0., 1.)),
        "product": Product((N, K), (N, N, K)),
        "concatenate": Concatenate(
            {"a_u": (N, N, 1), "a_v": (N, N, 1), "s_uv": (N, N, K)},
            (N, N, K + 2),
        ),
        "sum": Sum((N, N, K + 2), (N, N)),
        "noise": AddVariance((N, N)),
        "adjacency": Probit((N, N)),
    }
    self._current_iter = 0
    self._break_symmetry()
def __init__(self, child: BernoulliArray, parent: GaussianArray, variance=None):
    """Composite probit-with-noise factor: parent -> (+ noise) -> probit -> child.

    Fix: the previous default ``variance=tf.ones((1, 1))`` was evaluated once
    at function-definition time and shared by every instance; a ``None``
    sentinel now builds a fresh default per call (backward compatible).
    """
    super().__init__()
    self._deterministic = False
    self.shape = child.shape()
    # visible nodes
    self.child = child
    self.parent = parent
    # per-instance default noise variance
    if variance is None:
        variance = tf.ones((1, 1))
    # hidden node: the noisy linear predictor
    self._noisy = GaussianArray.uniform(self.shape)
    self._nodes.update({"noisy": self._noisy})
    # factors: add noise, then threshold through the probit link
    self._noise = AddVariance(parent=parent, child=self._noisy, variance=variance)
    self._probit = Probit(parent=self._noisy, child=child)
    self._factors.update({"noise": self._noise, "probit": self._probit})
def __init__(self, shape_in, shape_out):
    """Sum factor described by shapes; messages start uninformative."""
    super().__init__()
    self.message_to_x = GaussianArray.uniform(shape_in)
    self.message_to_sum = GaussianArray.uniform(shape_out)
def __init__(self, shape):
    """Stochastic factor over an array of the given shape; messages to the
    mean and to x start uninformative.
    """
    super().__init__()
    self._deterministic = False
    self.message_to_x = GaussianArray.uniform(shape)
    self.message_to_mean = GaussianArray.uniform(shape)
def __init__(self, shape, variance):
    """Stochastic factor mapping a Gaussian input to a Bernoulli result,
    with a fixed noise variance.
    """
    super().__init__()
    self._deterministic = False
    self.variance = variance
    # uninformative initial messages
    self.message_to_x = GaussianArray.uniform(shape)
    self.message_to_result = BernoulliArray.uniform(shape)
def __init__(self, shape):
    """Probit-link factor (shape-based legacy API); messages start uniform."""
    super().__init__()
    self.message_to_x = GaussianArray.uniform(shape)
    self.message_to_result = BernoulliArray.uniform(shape)
def __init__(self, K, A, X_cts=None, X_bin=None,
             link_model="NoisyProbit", bin_model="NoisyProbit",
             initial=None):
    """Assemble the full model: priors, inner-product adjacency model and
    (optionally) a covariate GLM.

    Parameters
    ----------
    K : latent dimension
    A : adjacency data (observed links)
    X_cts, X_bin : continuous / binary covariates, or None
    link_model, bin_model : "NoisyProbit" (default) or "Logistic"
    initial : optional dict with any of the keys "bias", "weights",
        "positions", "heterogeneity"; missing or None entries are filled
        with defaults.

    Fix: *initial* was a mutable default argument that was mutated in place,
    so filled-in values leaked across calls and into the caller's dict. A
    None sentinel plus a local copy removes both problems; partial dicts are
    now also accepted (backward compatible).
    """
    super().__init__()
    self._deterministic = False
    # sets self.N, self.K, self.p, self.p_cts, self.p_bin (see _check_input)
    self._check_input(K, A, X_cts, X_bin)
    # fill in missing initial values without mutating the caller's dict
    initial = dict(initial or {})
    if initial.get("bias") is None:
        initial["bias"] = tf.zeros((1, self.p))
    if initial.get("weights") is None:
        initial["weights"] = tf.ones((self.K, self.p))
    if initial.get("positions") is None:
        initial["positions"] = tf.random.normal((self.N, self.K), 0., 1.)
    if initial.get("heterogeneity") is None:
        initial["heterogeneity"] = tf.random.normal((self.N, 1), 0., 1.)
    # prepare nodes
    self.positions = GaussianArray.uniform((self.N, self.K))
    self.heterogeneity = GaussianArray.uniform((self.N, 1))
    self.covariate_mean = GaussianArray.uniform((self.N, self.p))
    self.adjacency_mean = GaussianArray.uniform((self.N, self.N))
    self.covariate_continuous = GaussianArray.observed(X_cts)
    self.covariate_binary = BernoulliArray.observed(X_bin)
    self.links = BernoulliArray.observed(A)
    self._nodes = {
        "positions": self.positions,
        "heterogeneity": self.heterogeneity,
        "covariate_mean": self.covariate_mean,
        "adjacency_mean": self.adjacency_mean,
        "covariate_continuous": self.covariate_continuous,
        "covariate_binary": self.covariate_binary,
        "links": self.links,
    }
    # prepare factors
    self.position_prior = Prior(
        child=self.positions,
        mean=0.,
        variance=1.,
        initial=initial["positions"],
        name="position_prior",
    )
    self.heterogeneity_prior = Prior(
        child=self.heterogeneity,
        mean=-2.,
        variance=1.,
        initial=initial["heterogeneity"],
        name="heterogeneity_prior",
    )
    self.inner_product_model = InnerProductModel(
        positions=self.positions,
        heterogeneity=self.heterogeneity,
        linear_predictor=self.adjacency_mean,
    )
    if self.p > 0:
        self.mean_model = WeightedSum(
            parent=self.positions,
            child=self.covariate_mean,
            bias=initial["bias"],
            weight=initial["weights"],
        )
        self.covariate_model = GLM(
            parent=self.covariate_mean,
            child_cts=self.covariate_continuous,
            child_bin=self.covariate_binary,
            variance_cts=tf.ones((1, self.p_cts)),
            variance_bin=tf.ones((1, self.p_bin)),
            bin_model=bin_model,
        )
    else:
        # no covariates: placeholder factors
        self.mean_model = VMPFactor()
        self.covariate_model = VMPFactor()
    if link_model == "Logistic":
        self.adjacency_model = Logistic(parent=self.adjacency_mean, child=self.links)
    else:
        self.adjacency_model = NoisyProbit(
            parent=self.adjacency_mean,
            child=self.links,
            variance=1.,
        )
    self._factors = {
        "position_prior": self.position_prior,
        "heterogeneity_prior": self.heterogeneity_prior,
        "mean_model": self.mean_model,
        "covariate_model": self.covariate_model,
        "inner_product_model": self.inner_product_model,
        "adjacency_model": self.adjacency_model,
    }
    self.elbo = -np.inf