def test_update(device, f, dtype):
    """Updating factor matrix `f` via PostU must change it away from its init.

    Builds a rank-K two-mode model with known ground-truth factors plus
    Gaussian noise, runs one posterior update on factor `f`, and checks
    that the updated matrix differs from the initial one.
    """
    numpy_dtype = dtype.as_numpy_dtype
    dims, n_latent, noise_prec, n_factors = (20, 30), 3, 0.1, 2

    # Ground-truth factor matrices, one per tensor mode.
    factors_np = (np.random.normal(size=(n_latent, dims[0])).astype(numpy_dtype),
                  np.random.normal(size=(n_latent, dims[1])).astype(numpy_dtype))
    factors = [tf.constant(factors_np[0]), tf.constant(factors_np[1])]

    # Observed data = low-rank product plus Gaussian noise.
    noise_np = np.random.normal(size=dims).astype(numpy_dtype)
    data_np = np.dot(factors_np[0].T, factors_np[1]) + noise_np
    data = tf.constant(data_np, dtype=dtype)

    likelihood = Normal2dLikelihood(M=dims, K=n_latent, tau=noise_prec, dtype=dtype)
    likelihood.init(data=data)

    props = Properties(persistent=True, dtype=dtype)
    prior = Uniform(dummy=tf.constant(np.random.random(n_latent).astype(numpy_dtype),
                                      dtype=dtype),
                    properties=props)
    post_uf = PostU(likelihood, prior, f)

    updated = post_uf.update(factors, data, transform=False)
    # NOTE(review): this loop re-checks the same graph-level dtype n_factors
    # times (loop variable unused) — possibly meant to index per factor; kept
    # as-is to preserve behavior.
    for g in range(n_factors):
        assert(updated.dtype == dtype)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        updated_np = sess.run(updated)
    assert(not np.allclose(factors_np[f], updated_np))
    tf.reset_default_graph()
def cond(self) -> Exponential:
    """Return the conditional distribution as an Exponential with rate tau.

    The conditional is non-persistent and inherits this distribution's
    draw/update behavior; its name is the parent name with a "Cond" suffix.
    """
    cond_props = Properties(name=self.name + "Cond",
                            drawType=self.drawType,
                            updateType=self.updateType,
                            persistent=False)
    # Exponential is parameterized by scale beta = 1 / rate.
    return Exponential(beta=1. / self.tau, properties=cond_props)
def cond(self) -> CenLaplace:
    """Return the conditional distribution as a centered Laplace.

    Non-persistent; shares this distribution's draw/update types and is
    named with a "Cond" suffix. Scale is beta = 1 / tau.
    """
    cond_props = Properties(name=self.name + "Cond",
                            drawType=self.drawType,
                            updateType=self.updateType,
                            persistent=False)
    return CenLaplace(beta=1. / self.tau, properties=cond_props)
def productParams(self, d0: Distribution, d1: Distribution):
    """Assemble the constructor kwargs for the product of two distributions.

    The product's name, draw type, and update type are each derived from
    the pair (d0, d1) via the corresponding combination hooks on self;
    products are always non-persistent.
    """
    combined = Properties(name=self.name(d0, d1),
                          drawType=self.drawType(d0, d1),
                          updateType=self.updateType(d0, d1),
                          persistent=False)
    return ({"properties": combined})
def __init__(self, M: Tuple[int, ...], K: int=1, tau: float = 1./1e2,
             drawType: DrawType = DrawType.SAMPLE,
             updateType: UpdateType = UpdateType.ALL,
             dtype=tf.float32) -> None:
    """Set up a 2-D normal likelihood over data of shape M with K latents.

    M: data dimensions per mode; K: latent dimensionality; tau: initial
    noise precision. drawType/updateType configure the noise parameter's
    inference behavior; dtype is the TF dtype for all graph tensors.
    """
    Likelihood.__init__(self, M, K)
    # Stash construction-time settings; tau is only the *initial* precision.
    self.__dtype = dtype
    self.__tauInit = tau
    self.__properties = Properties(name='likelihood',
                                   drawType=drawType,
                                   dtype=dtype,
                                   updateType=updateType,
                                   persistent=True)
def cond(self) -> Normal:
    """Return the conditional as a Normal with mu/Psi broadcast to tau's shape.

    mu and Psi are tiled (via ones_like) so they match tau elementwise;
    the conditional precision is tau / Psi. Non-persistent, "Cond"-suffixed.
    """
    tau = self.tau
    ones = tf.ones_like(tau)
    # Broadcast scalars/smaller tensors up to tau's shape.
    mu_full = ones * self.mu
    Psi_full = ones * self.Psi
    cond_props = Properties(name=self.name + "Cond",
                            drawType=self.drawType,
                            updateType=self.updateType,
                            persistent=False)
    return (Normal(mu=mu_full, tau=tau / Psi_full, properties=cond_props))
def lhUfk(self, Uf: Tensor, prepVars: Tuple[Tensor, ...], f: int,
          k: Tensor) -> Distribution:
    """Build the Normal likelihood of row k of factor matrix f.

    prepVars carries the precomputed sufficient statistics
    (XVT, VVT, alpha). The contribution of row k is removed from the
    full reconstruction and the residual yields the conditional mean;
    precision scales with vvT * alpha.
    """
    XVT, VVT, alpha = prepVars
    # Slice the statistics down to latent component k.
    XvT = XVT[:, k]
    VvT = VVT[..., k]
    vvT = VVT[..., k, k]
    # Reconstruction using all rows, then add back row k's own term so the
    # residual excludes every row except k.
    UVvT = tf.reduce_sum(tf.transpose(Uf) * VvT, axis=-1)
    uvvT = Uf[k] * vvT
    Xtildev = XvT - UVvT + uvvT
    cond_props = Properties(name=f"lhU{f}k",
                            drawType=self.noiseDistribution.drawType,
                            updateType=self.noiseDistribution.updateType,
                            persistent=False)
    return (Normal(mu=Xtildev / vvT, tau=vvT * alpha, properties=cond_props))
def cond(self) -> CenNnFullyElasticNetCond:
    """Return the conditional fully-elastic-net distribution.

    All scalar-ish parameters (b, mu, tau, betaExponential) are broadcast
    to tauLomax's shape; the Lomax part is converted to a scale via
    beta = 1 / tauLomax. Non-persistent, "Cond"-suffixed.
    """
    tauLomax = self.tauLomax
    ones = tf.ones_like(tauLomax)
    # Broadcast every parameter up to the Lomax precision's shape.
    b_full = ones * self.b
    mu_full = ones * self.mu
    tau_full = ones * self.tau
    betaExp_full = ones * self.betaExponential
    cond_props = Properties(name=self.name + "Cond",
                            drawType=self.drawType,
                            updateType=self.updateType,
                            persistent=False)
    return (CenNnFullyElasticNetCond(b=b_full,
                                     mu=mu_full,
                                     tau=tau_full,
                                     betaExponential=betaExp_full,
                                     beta=1. / tauLomax,
                                     properties=cond_props))