def _new_initial(self, size, deterministic, more_replacements=None):
    """Build the initial symbolic sample for this approximation.

    If ``deterministic`` is a symbolic ``Variable``, the choice between the
    repeated mean and a random histogram draw is deferred to graph
    evaluation via ``at.switch``; for a concrete Python truth value the
    branch is taken eagerly here.
    """
    # None means a single draw; used as the repeat count along the last axis.
    n_rep = 1 if size is None else size
    if isinstance(deterministic, Variable):
        # Symbolic flag: both branches must exist in the graph.
        return at.switch(
            deterministic,
            at.repeat(self.mean.dimshuffle("x", 0), n_rep, -1),
            self.histogram[self.randidx(size)],
        )
    if deterministic:
        return at.repeat(self.mean.dimshuffle("x", 0), n_rep, -1)
    return self.histogram[self.randidx(size)]
def __call__(self, X):
    """RBF (Gaussian) kernel with median-distance bandwidth.

    Computes the pairwise kernel matrix over the rows of ``X`` and the
    gradient term used by SVGD-style updates.

    Returns
    -------
    (Kxy, dxkxy) : pair of symbolic tensors
        The kernel matrix and its input-gradient aggregate.
    """
    # Pairwise squared Euclidean distances via the expansion
    # ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b
    gram = X.dot(X.T)
    sq_norms = at.sum(X**2, axis=1).dimshuffle(0, "x")
    sq_norms_mat = at.repeat(sq_norms, X.shape[0], axis=1)
    dist2 = sq_norms_mat + sq_norms_mat.T - 2.0 * gram

    # Median of all pairwise squared distances, handling even/odd-length
    # vectors symbolically since the size is unknown at graph-build time.
    flat_sorted = at.sort(dist2.flatten())
    n_entries = flat_sorted.shape[0]
    half = n_entries // 2
    median = at.switch(
        at.eq(n_entries % 2, 0),
        at.mean(flat_sorted[(half - 1):(half + 1)]),  # even: average middle pair
        flat_sorted[half],                            # odd: middle element
    )

    # Median heuristic for the RBF bandwidth.
    bandwidth = 0.5 * median / at.log(floatX(dist2.shape[0]) + floatX(1))

    # Kernel matrix and its derivative aggregate.
    kernel = at.exp(-dist2 / bandwidth / 2.0)
    row_sums = at.sum(kernel, axis=-1, keepdims=True)
    grad_term = at.add(-at.dot(kernel, X), at.mul(X, row_sums)) / bandwidth
    return kernel, grad_term
def _new_initial(self, size, deterministic, more_replacements=None):
    """Build the initial symbolic sample for this approximation.

    Parameters
    ----------
    size : int, tensor-like, or None
        Number of draws; ``None`` is treated as 1.
    deterministic : bool or Variable
        If truthy, use the repeated histogram mean instead of random draws.
        A symbolic ``Variable`` defers the choice to ``at.switch``.
    more_replacements : optional
        Unused here; kept for interface compatibility.

    Raises
    ------
    NotImplementedInference
        When ``deterministic`` is a concrete truthy value.
    """
    aesara_condition_is_here = isinstance(deterministic, Variable)
    if size is None:
        size = 1
    size = at.as_tensor(size)
    if aesara_condition_is_here:
        # Symbolic flag: keep both branches in the graph and select at runtime.
        return at.switch(
            deterministic,
            at.repeat(self.mean.reshape((1, -1)), size, -1),
            self.histogram[self.randidx(size)],
        )
    if deterministic:
        # Fail loudly.  (A `return at.repeat(...)` that previously followed
        # this raise was unreachable dead code and has been removed.)
        raise NotImplementedInference(
            "Deterministic sampling from a Histogram is broken in v4")
    return self.histogram[self.randidx(size)]
def logdet(self):
    """Log-determinant of the diagonal scale, broadcast over draws.

    A diagonal transform's determinant is the product of the scale
    entries, so its log is the sum of their logs; the scalar result is
    repeated once per row of ``z0``.
    """
    log_det_scalar = aet.sum(aet.log(self.scale))
    return aet.repeat(log_det_scalar, self.z0.shape[0])