def _block_indicator(self):
    # 0/1 mask of shape (num_repeat * dim, num_repeat * dim) whose (i, j)
    # entry is 1 iff i and j fall in the same dim x dim diagonal block.
    num_repeat = array.size_of_shape(
        self.distribution.batch_shape[-self.reinterpreted_batch_ndims:])
    dim = array.size_of_shape(self.distribution.event_shape)
    block_indicator = numpy.fromfunction(
        lambda i, j: i // dim == j // dim,
        (num_repeat * dim, num_repeat * dim)).astype(int)
    if self.xp is cuda.cupy:
        block_indicator = cuda.to_gpu(block_indicator)
    return block_indicator
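# A standalone NumPy sketch (not part of the Chainer source) of the mask
# built by _block_indicator above, using hypothetical sizes dim=2 and
# num_repeat=3 for illustration.
import numpy as np

dim, num_repeat = 2, 3
n = num_repeat * dim
mask = np.fromfunction(
    lambda i, j: i // dim == j // dim, (n, n)).astype(int)
# mask ==
# [[1 1 0 0 0 0]
#  [1 1 0 0 0 0]
#  [0 0 1 1 0 0]
#  [0 0 1 1 0 0]
#  [0 0 0 0 1 1]
#  [0 0 0 0 1 1]]
# Multiplying an (n, n) matrix elementwise by this mask zeroes every entry
# outside the diagonal blocks, which is how the covariance below is made
# block diagonal.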
def covariance(self):
    """The covariance of the independent distribution.

    By definition, the covariance of the new distribution becomes a block
    diagonal matrix. Let :math:`\\Sigma_{\\mathbf{x}}` be the covariance
    matrix of the original random variable
    :math:`\\mathbf{x} \\in \\mathbb{R}^d`, and
    :math:`\\mathbf{x}^{(1)}, \\mathbf{x}^{(2)}, \\cdots, \\mathbf{x}^{(m)}`
    be the :math:`m` i.i.d. random variables. Then the new covariance
    matrix :math:`\\Sigma_{\\mathbf{y}}` of
    :math:`\\mathbf{y} = [\\mathbf{x}^{(1)}, \\mathbf{x}^{(2)}, \\cdots,
    \\mathbf{x}^{(m)}] \\in \\mathbb{R}^{md}` can be written as

    .. math::
        \\left[\\begin{array}{ccc}
            \\Sigma_{\\mathbf{x}^{(1)}} & & 0 \\\\
            & \\ddots & \\\\
            0 & & \\Sigma_{\\mathbf{x}^{(m)}}
        \\end{array}\\right].

    Note that this relationship holds only if the covariance matrix of
    the original distribution is given analytically.

    Returns:
        ~chainer.Variable: The covariance of the distribution.
    """
    num_repeat = array.size_of_shape(
        self.distribution.batch_shape[-self.reinterpreted_batch_ndims:])
    dim = array.size_of_shape(self.distribution.event_shape)
    cov = repeat.repeat(
        reshape.reshape(
            self.distribution.covariance,
            self.batch_shape + (1, num_repeat, dim, dim)),
        num_repeat, axis=-4)
    cov = reshape.reshape(
        transpose.transpose(
            cov,
            axes=(tuple(range(len(self.batch_shape))) + (-4, -2, -3, -1))),
        self.batch_shape + (num_repeat * dim, num_repeat * dim))
    block_indicator = self.xp.reshape(
        self._block_indicator,
        tuple([1] * len(self.batch_shape)) + self._block_indicator.shape)
    return cov * block_indicator
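# A minimal NumPy sketch (independent of Chainer) of the block-diagonal
# structure described in the docstring above: m i.i.d. copies of a
# d-dimensional variable with covariance sigma_x yield an (m*d, m*d)
# covariance with sigma_x repeated along the diagonal. The values of d, m
# and sigma_x are made up for illustration.
import numpy as np

d, m = 2, 3
sigma_x = np.array([[1.0, 0.3],
                    [0.3, 2.0]])        # covariance of a single copy
sigma_y = np.kron(np.eye(m), sigma_x)   # block_diag(sigma_x, ..., sigma_x)

assert sigma_y.shape == (m * d, m * d)
# Off-diagonal blocks are exactly zero because the copies are independent.
assert np.allclose(sigma_y[:d, d:2 * d], 0.0)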
def test_size_of_shape(self):
    arr = numpy.empty(self.shape)
    size = array.size_of_shape(arr.shape)
    size_expect = arr.size
    assert type(size) == type(size_expect)
    assert size == size_expect
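# What the test above asserts, restated as a standalone sketch. The
# function below is a hypothetical stand-in for array.size_of_shape,
# assumed to return the product of the shape entries as a plain Python int
# (the same type and value as numpy.ndarray.size).
import numpy as np

def size_of_shape_ref(shape):
    size = 1
    for d in shape:
        size *= d
    return int(size)

for shape in [(), (3,), (2, 4), (0, 5)]:
    arr = np.empty(shape)
    assert size_of_shape_ref(shape) == arr.size
    assert type(size_of_shape_ref(shape)) == type(arr.size)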