Example #1
    # Module-level imports used here: numpy; from chainer.backends import
    # cuda; from chainer.functions.array import expand_dims, repeat.
    def sample_n(self, n):
        # Draw n standard-normal noise tensors shaped like loc.
        if self._is_gpu:
            eps = cuda.cupy.random.standard_normal(
                (n,) + self.loc.shape, dtype=self.loc.dtype)
        else:
            eps = numpy.random.standard_normal(
                (n,) + self.loc.shape).astype(numpy.float32)
        # Reparameterization trick: x = loc + scale * eps, with scale and
        # loc tiled n times along a new leading axis.
        noise = repeat.repeat(
            expand_dims.expand_dims(self.scale, axis=0), n, axis=0) * eps
        noise += repeat.repeat(expand_dims.expand_dims(
            self.loc, axis=0), n, axis=0)

        return noise
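This is the reparameterization trick for a diagonal Gaussian: draw eps ~ N(0, I), then return loc + scale * eps. A minimal NumPy sketch of the same idea, outside Chainer; the function name and test values are illustrative, not part of the snippet:

import numpy as np

def sample_n_numpy(loc, scale, n):
    # Reparameterization: eps ~ N(0, I), shifted and scaled elementwise.
    eps = np.random.standard_normal((n,) + loc.shape).astype(np.float32)
    return loc[None] + scale[None] * eps

loc = np.array([0.0, 1.0], dtype=np.float32)
scale = np.array([1.0, 0.5], dtype=np.float32)
samples = sample_n_numpy(loc, scale, 100000)
print(samples.mean(axis=0))  # ~ loc
print(samples.std(axis=0))   # ~ scale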
Example #2
    # Module-level imports used here: numpy; from chainer.backends import
    # cuda; from chainer.functions.array import expand_dims, repeat,
    # squeeze; from chainer.functions.math import matmul.
    def sample_n(self, n):
        # Draw n standard-normal vectors; the trailing (1,) axis makes each
        # one a column vector for the batched matmul below.
        if self._is_gpu:
            eps = cuda.cupy.random.standard_normal(
                (n,) + self.loc.shape + (1,), dtype=self.loc.dtype)
        else:
            eps = numpy.random.standard_normal(
                (n,) + self.loc.shape + (1,)).astype(numpy.float32)

        # Full-covariance reparameterization: x = loc + L @ eps, where
        # scale_tril is a lower-triangular factor L with cov = L @ L.T.
        noise = matmul.matmul(repeat.repeat(
            expand_dims.expand_dims(self.scale_tril, axis=0), n, axis=0), eps)
        noise = squeeze.squeeze(noise, axis=-1)
        noise += repeat.repeat(expand_dims.expand_dims(
            self.loc, axis=0), n, axis=0)

        return noise
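A NumPy sketch of the same sampler, assuming loc has shape (d,) and scale_tril is a Cholesky factor of the desired covariance; the names and test values are illustrative, not part of the snippet:

import numpy as np

def sample_n_mvn(loc, scale_tril, n):
    # x = loc + L @ eps with eps ~ N(0, I); the trailing axis makes eps a
    # column vector so the matmul applies L to each draw.
    eps = np.random.standard_normal((n,) + loc.shape + (1,)).astype(np.float32)
    return loc[None] + (scale_tril[None] @ eps)[..., 0]

loc = np.zeros(2, dtype=np.float32)
cov = np.array([[2.0, 0.6], [0.6, 1.0]], dtype=np.float32)
L = np.linalg.cholesky(cov)
samples = sample_n_mvn(loc, L, 100000)
print(np.cov(samples.T))  # ~ cov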
Example #3
    def covariance(self):
        """The covariance of the independent distribution.

        By definition, the covariance matrix of the new distribution is
        block diagonal. Let :math:`\\Sigma_{\\mathbf{x}}` be the covariance
        matrix of the original random variable
        :math:`\\mathbf{x} \\in \\mathbb{R}^d`, and let
        :math:`\\mathbf{x}^{(1)}, \\mathbf{x}^{(2)}, \\cdots,
        \\mathbf{x}^{(m)}` be :math:`m` i.i.d. copies of :math:`\\mathbf{x}`.
        The covariance matrix :math:`\\Sigma_{\\mathbf{y}}` of
        :math:`\\mathbf{y} = [\\mathbf{x}^{(1)}, \\mathbf{x}^{(2)}, \\cdots,
        \\mathbf{x}^{(m)}] \\in \\mathbb{R}^{md}` can then be written as

        .. math::
            \\Sigma_{\\mathbf{y}} =
            \\left[\\begin{array}{ccc}
                    \\Sigma_{\\mathbf{x}^{(1)}} & & 0 \\\\
                    & \\ddots & \\\\
                    0 & & \\Sigma_{\\mathbf{x}^{(m)}}
            \\end{array} \\right],

        where each :math:`\\Sigma_{\\mathbf{x}^{(i)}}` equals
        :math:`\\Sigma_{\\mathbf{x}}`.

        Note that this relationship holds only if the covariance matrix of
        the original distribution is given analytically.

        Returns:
            ~chainer.Variable: The covariance of the distribution.
        """
        # m: number of i.i.d. copies; d: dimensionality of one event.
        num_repeat = array.size_of_shape(
            self.distribution.batch_shape[-self.reinterpreted_batch_ndims:])
        dim = array.size_of_shape(self.distribution.event_shape)
        # Tile the per-copy (d, d) covariance blocks m times along a new
        # axis, giving shape batch_shape + (m, m, d, d).
        cov = repeat.repeat(
            reshape.reshape(
                self.distribution.covariance,
                self.batch_shape + (1, num_repeat, dim, dim)),
            num_repeat, axis=-4)
        # Interleave the copy and event axes and flatten to (md, md).
        cov = reshape.reshape(
            transpose.transpose(
                cov,
                axes=tuple(range(len(self.batch_shape))) + (-4, -2, -3, -1)),
            self.batch_shape + (num_repeat * dim, num_repeat * dim))
        # Keep only the diagonal blocks via a precomputed 0/1 mask.
        block_indicator = self.xp.reshape(
            self._block_indicator,
            tuple([1] * len(self.batch_shape)) + self._block_indicator.shape)
        return cov * block_indicator
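Since the copies are i.i.d. with a common per-copy covariance, the block-diagonal matrix the code assembles (for an empty batch shape) matches np.kron(np.eye(m), sigma_x). A small NumPy check with illustrative values:

import numpy as np

sigma_x = np.array([[2.0, 0.6], [0.6, 1.0]])  # per-copy covariance, d = 2
m = 3                                          # number of i.i.d. copies

# Block-diagonal covariance of y = [x1, x2, x3] in R^6.
sigma_y = np.kron(np.eye(m), sigma_x)
print(sigma_y.shape)  # (6, 6); all off-diagonal 2x2 blocks are zero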