def setUp(self):
    """Build a static-graph Exponential distribution and its feed dict.

    Creates a fresh Program/Executor pair, declares a `rate` data node
    matching a random numpy sample, and wraps it in `mock.Exponential`.
    """
    self.program = paddle.static.Program()
    self.executor = paddle.static.Executor()
    with paddle.static.program_guard(self.program):
        np_rate = config.xrand((100, 200, 99))
        rate_var = paddle.static.data('rate', np_rate.shape, np_rate.dtype)
        self.mock_dist = mock.Exponential(rate_var)
        self.feeds = {'rate': np_rate}
@config.place(config.DEVICES)
@config.parameterize(
    (config.TEST_CASE_NAME, 'dist'),
    [('test-mock-exp',
      mock.Exponential(rate=paddle.rand(
          [100, 200, 99], dtype=config.DEFAULT_DTYPE)))])
class TestExponentialFamily(unittest.TestCase):
    """Generic exponential-family entropy vs. the distribution's own."""

    def test_entropy(self):
        """Both entropy paths must agree within the configured tolerance."""
        closed_form = self.dist.entropy()
        generic = paddle.distribution.ExponentialFamily.entropy(self.dist)
        np.testing.assert_allclose(
            closed_form,
            generic,
            rtol=config.RTOL.get(config.DEFAULT_DTYPE),
            atol=config.ATOL.get(config.DEFAULT_DTYPE))


@config.place(config.DEVICES)
@config.parameterize(
    (config.TEST_CASE_NAME, 'dist'),
    [('test-dummy', mock.DummyExpFamily(0.5, 0.5)),
     ('test-dirichlet',
      paddle.distribution.Dirichlet(paddle.to_tensor(config.xrand()))),
     ('test-beta',
      paddle.distribution.Beta(
          paddle.to_tensor(config.xrand()),
          paddle.to_tensor(config.xrand())))])
class TestExponentialFamilyException(unittest.TestCase):
    """Distributions the generic entropy cannot handle must raise."""

    def test_entropy_exception(self):
        """Expect NotImplementedError from the base-class entropy."""
        with self.assertRaises(NotImplementedError):
            paddle.distribution.ExponentialFamily.entropy(self.dist)
def test_prob(self):
    """`prob` must raise NotImplementedError for this distribution."""
    probe = paddle.to_tensor(config.xrand())
    with self.assertRaises(NotImplementedError):
        self.dist.prob(probe)
def test_log_normalizer(self):
    """Every element of `_log_normalizer` output must be negative here."""
    sample = paddle.to_tensor(config.xrand((100, 100, 100)))
    log_norm = self._paddle_diric._log_normalizer(sample).numpy()
    self.assertTrue(np.all(log_norm < 0.0))
import unittest

import numpy as np
import paddle
import scipy.stats

import config
from config import (ATOL, DEVICES, RTOL, TEST_CASE_NAME, parameterize,
                    place, xrand)


@place(DEVICES)
@parameterize(
    (TEST_CASE_NAME, 'concentration'),
    [
        ('test-one-dim', config.xrand((89, ))),
        # ('test-multi-dim', config.xrand((10, 20, 30)))
    ])
class TestDirichlet(unittest.TestCase):
    """Compare paddle's Dirichlet against scipy.stats.dirichlet."""

    def setUp(self):
        """Create the distribution under test from the parameterized data."""
        concentration = paddle.to_tensor(self.concentration)
        self._paddle_diric = paddle.distribution.Dirichlet(concentration)

    def test_mean(self):
        """`mean` must match scipy within the dtype-dependent tolerance."""
        dtype_key = str(self.concentration.dtype)
        with paddle.fluid.dygraph.guard(self.place):
            np.testing.assert_allclose(
                self._paddle_diric.mean,
                scipy.stats.dirichlet.mean(self.concentration),
                rtol=RTOL.get(dtype_key),
                atol=ATOL.get(dtype_key))
# See the License for the specific language governing permissions and # limitations under the License. import numbers import unittest import numpy as np import paddle import scipy.stats from config import (ATOL, DEVICES, RTOL, TEST_CASE_NAME, parameterize, place, xrand) @place(DEVICES) @parameterize((TEST_CASE_NAME, 'alpha', 'beta'), [('test-scale', 1.0, 2.0), ('test-tensor', xrand(), xrand()), ('test-broadcast', xrand((2, 1)), xrand((2, 5)))]) class TestBeta(unittest.TestCase): def setUp(self): # scale no need convert to tensor for scale input unittest alpha, beta = self.alpha, self.beta if not isinstance(self.alpha, numbers.Real): alpha = paddle.to_tensor(self.alpha) if not isinstance(self.beta, numbers.Real): beta = paddle.to_tensor(self.beta) self._paddle_beta = paddle.distribution.Beta(alpha, beta) def test_mean(self): with paddle.fluid.dygraph.guard(self.place): np.testing.assert_allclose(