def test_zeros():
    """Tests zeros"""

    # Scalar
    zeros = ops.zeros([1])
    assert isinstance(zeros, tf.Tensor)
    assert zeros.ndim == 1
    assert zeros.shape[0] == 1
    assert zeros.numpy() == 0.0

    # 1D
    zeros = ops.zeros([5])
    assert isinstance(zeros, tf.Tensor)
    assert zeros.ndim == 1
    assert zeros.shape[0] == 5
    assert all(zeros.numpy() == 0.0)

    # 2D
    zeros = ops.zeros([5, 4])
    assert isinstance(zeros, tf.Tensor)
    assert zeros.ndim == 2
    assert zeros.shape[0] == 5
    assert zeros.shape[1] == 4
    assert np.all(zeros.numpy() == 0.0)

    # 3D
    zeros = ops.zeros([5, 4, 3])
    assert isinstance(zeros, tf.Tensor)
    assert zeros.ndim == 3
    assert zeros.shape[0] == 5
    assert zeros.shape[1] == 4
    assert zeros.shape[2] == 3
    assert np.all(zeros.numpy() == 0.0)
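# The test above assumes a backend-dispatching `zeros` op. Below is a minimal
# sketch of that kind of dispatch, illustrative only and not the library's
# actual implementation; `get_backend` is assumed to be importable from the
# surrounding module, and the function name is hypothetical.
def _zeros_sketch(shape):
    """Return a tensor of zeros with the given shape in the active backend."""
    if get_backend() == "pytorch":
        import torch

        return torch.zeros(shape)
    else:
        import tensorflow as tf

        return tf.zeros(shape)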
def __init__(
    self,
    d: int = 1,
    prior=None,
    expand_dims: int = -1,
    name="MultivariateNormalParameter",
):

    # Prior
    if prior is None:
        prior = MultivariateNormal(O.zeros([d]), O.eye(d))

    # Transform
    if expand_dims is not None:
        transform = lambda x: O.expand_dims(x, expand_dims)
    else:
        transform = None

    # Initializer and variable transforms
    initializer = {
        "loc": lambda x: xavier([d]),
        "cov": lambda x: xavier([int(d * (d + 1) / 2)]),
    }
    var_transform = {"loc": None, "cov": O.log_cholesky_transform}

    super().__init__(
        posterior=MultivariateNormal,
        prior=prior,
        transform=transform,
        initializer=initializer,
        var_transform=var_transform,
        name=name,
    )
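# Hypothetical usage sketch of the constructor above. Only the constructor
# signature and the `kl_loss` method are confirmed by the surrounding source;
# the variable name is illustrative. For d = 3, the "cov" variable holds
# d * (d + 1) / 2 = 6 unconstrained values, which `O.log_cholesky_transform`
# maps to a 3 x 3 positive definite covariance matrix.
param = MultivariateNormalParameter(d=3, name="coefs")
kl = param.kl_loss()  # scalar KL between the posterior and the N(0, I) prior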
def kl_loss(self):
    """Compute the sum of the Kullback–Leibler divergences between this
    parameter's priors and its variational posteriors."""
    if self.prior is None:
        return O.zeros([])
    else:
        return O.sum(O.kl_divergence(self.posterior, self.prior), axis=None)
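# A minimal sketch of what `kl_loss` computes under the TensorFlow backend,
# assuming TensorFlow Probability is installed. The two distributions here
# stand in for `self.posterior` and `self.prior`; the real method goes through
# the backend-agnostic `O.kl_divergence` and `O.sum` ops instead.
import tensorflow as tf
import tensorflow_probability as tfp

tfd = tfp.distributions

posterior = tfd.MultivariateNormalDiag(loc=[0.1, -0.2], scale_diag=[0.9, 1.1])
prior = tfd.MultivariateNormalDiag(loc=[0.0, 0.0], scale_diag=[1.0, 1.0])

# Sum the element-wise KL divergences, mirroring O.sum(..., axis=None)
kl = tf.reduce_sum(tfd.kl_divergence(posterior, prior))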
def __init__(
    self,
    d: int = 1,
    prior=None,
    expand_dims: int = -1,
    name="MultivariateNormalParameter",
):

    # Transformation for scale parameters
    def log_cholesky_transform(x):
        if get_backend() == "pytorch":
            raise NotImplementedError
        else:
            import tensorflow as tf
            import tensorflow_probability as tfp

            E = tfp.math.fill_triangular(x)
            E = tf.linalg.set_diag(E, tf.exp(tf.linalg.tensor_diag_part(E)))
            return E @ tf.transpose(E)

    # Prior
    if prior is None:
        prior = MultivariateNormal(O.zeros([d]), O.eye(d))

    # Transform
    if expand_dims is not None:
        transform = lambda x: O.expand_dims(x, expand_dims)
    else:
        transform = None

    # Initializer and variable transforms
    initializer = {
        "loc": lambda x: xavier([d]),
        "cov": lambda x: xavier([int(d * (d + 1) / 2)]),
    }
    var_transform = {"loc": None, "cov": log_cholesky_transform}

    super().__init__(
        posterior=MultivariateNormal,
        prior=prior,
        transform=transform,
        initializer=initializer,
        var_transform=var_transform,
        name=name,
    )
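# A small standalone check of the log-Cholesky transform defined above,
# assuming the TensorFlow backend and TensorFlow Probability. For d = 3, a
# vector of d * (d + 1) / 2 = 6 unconstrained values is mapped to a symmetric
# positive definite 3 x 3 covariance matrix; the input values are arbitrary.
import tensorflow as tf
import tensorflow_probability as tfp

x = tf.constant([0.1, -0.3, 0.2, 0.5, -0.1, 0.4])  # 6 unconstrained values
E = tfp.math.fill_triangular(x)  # lower-triangular 3 x 3
E = tf.linalg.set_diag(E, tf.exp(tf.linalg.tensor_diag_part(E)))  # positive diag
cov = E @ tf.transpose(E)  # symmetric positive definite

assert cov.shape == (3, 3)
assert bool(tf.reduce_all(tf.linalg.eigvalsh(cov) > 0))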