# Assumed imports for this snippet (module paths may vary across GluonTS
# versions; newer releases expose these under gluonts.mx.distribution):
from mxnet import nd

from gluonts.distribution import TransformedDistribution, Uniform, bijection


def test_transformed_distribution() -> None:
    zero = nd.zeros(1)
    one = nd.ones(1)

    # If Y = -log(U) with U ~ Uniform(0, 1), then Y ~ Exponential(1)
    exponential = TransformedDistribution(
        Uniform(zero, one),
        bijection.log,
        bijection.AffineTransformation(scale=-1 * one),
    )

    # For Y ~ Exponential(1), p(y) = e^{-y}  ==>  log p(y) = -y
    assert exponential.log_prob(1 * one).asscalar() == -1.0
    assert exponential.log_prob(2 * one).asscalar() == -2.0

    # If Y ~ Exponential(1), then U = 1 - e^{-Y} has Uniform(0, 1) distribution
    uniform = TransformedDistribution(
        exponential,
        bijection.AffineTransformation(scale=-1 * one),
        bijection.log.inverse_bijection(),  # == bijection.exp
        bijection.AffineTransformation(loc=one, scale=-1 * one),
    )
    # For U ~ Uniform(0, 1), log P(U) = 0
    assert uniform.log_prob(0.5 * one).asscalar() == 0
    assert uniform.log_prob(0.2 * one).asscalar() == 0
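The assertions above follow from the change-of-variables formula log p_Y(y) = log p_X(g^{-1}(y)) + log |d g^{-1}(y) / dy|, which is what a transformed distribution's log_prob amounts to. A minimal NumPy-only sketch (independent of GluonTS) that reproduces log p_Y(y) = -y for Y = -log(U):

import numpy as np

# Change of variables for Y = -log(U) with U ~ Uniform(0, 1):
#   u = g^{-1}(y) = exp(-y),  |du/dy| = exp(-y),  p_U(u) = 1 on (0, 1)
#   => log p_Y(y) = log p_U(exp(-y)) + log exp(-y) = 0 - y = -y
y = np.array([1.0, 2.0])
log_p_u = np.zeros_like(y)   # Uniform(0, 1) has log density 0 on its support
log_abs_jacobian = -y        # log |d exp(-y) / dy| = -y
log_p_y = log_p_u + log_abs_jacobian
assert np.allclose(log_p_y, [-1.0, -2.0])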
Example #2
def distribution(self, distr_args, loc=None, scale=None, dim=None):
    dist = LowrankMultivariateGaussian(dim, self.rank, *distr_args)
    if loc is None and scale is None:
        return dist
    else:
        return TransformedDistribution(
            dist, [bijection.AffineTransformation(loc=loc, scale=scale)]
        )
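Rather than building a distribution with the location and scale baked into its parameters, this output head wraps the base low-rank Gaussian in an affine bijection. The density bookkeeping behind that wrap is the usual affine change of variables, log p_Y(y) = log p_X((y - loc) / scale) - log|scale| per transformed coordinate. A small sketch of that identity on a univariate Gaussian (SciPy is used only to provide the reference density and is not part of the snippet above):

import numpy as np
from scipy.stats import norm

loc, scale = 2.0, 3.0
y = 5.0
# Direct evaluation of the N(loc, scale^2) log density ...
direct = norm(loc=loc, scale=scale).logpdf(y)
# ... versus the base N(0, 1) density pulled back through y = loc + scale * x
via_affine = norm().logpdf((y - loc) / scale) - np.log(scale)
assert np.isclose(direct, via_affine)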
Example #3
def distribution(self, distr_args, scale=None, **kwargs) -> Distribution:
    # TODO: dirty way of calling for now, this can be cleaned up
    distr = LowrankMultivariateGaussian(self.dim, self.rank, *distr_args)
    if scale is None:
        return distr
    else:
        return TransformedDistribution(
            distr, bijection.AffineTransformation(scale=scale)
        )
Example #4
def distribution(self, distr_args, scale=None, **kwargs) -> Distribution:
    distr = LowrankMultivariateGaussian(self.dim, self.rank, *distr_args)
    if scale is None:
        return distr
    else:
        return TransformedDistribution(
            distr, [bijection.AffineTransformation(scale=scale)]
        )
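The last two heads are almost identical; they differ only in whether the AffineTransformation is passed bare or wrapped in a list, which suggests the snippets target different versions of the TransformedDistribution signature. If one had to support both call styles, a small (purely hypothetical, not part of GluonTS) normalizer could look like this:

def as_transform_list(transforms):
    # Hypothetical helper: accept either a single bijection or an iterable of
    # bijections and always return the list form.
    if isinstance(transforms, (list, tuple)):
        return list(transforms)
    return [transforms]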
Example #5
def test_transformed_distribution(serialize_fn) -> None:
    zero = nd.zeros(1)
    one = nd.ones(1)

    # If Y = -log(U) with U ~ Uniform(0, 1), then Y ~ Exponential(1)
    exponential = TransformedDistribution(
        Uniform(zero, one),
        [bijection.log,
         bijection.AffineTransformation(scale=-1 * one)],
    )
    exponential = serialize_fn(exponential)

    # For Y ~ Exponential(1), p(y) = e^{-y}  ==>  log p(y) = -y
    assert exponential.log_prob(1 * one).asscalar() == -1.0
    assert exponential.log_prob(2 * one).asscalar() == -2.0

    v = np.linspace(0, 5, 101)
    assert np.allclose(exponential.cdf(nd.array(v)).asnumpy(), exp_cdf(v))

    level = np.linspace(1.0e-5, 1.0 - 1.0e-5, 101)

    qs_calc = exponential.quantile(nd.array(level)).asnumpy()[:, 0]
    qs_theo = exp_quantile(level)
    assert np.allclose(qs_calc, qs_theo, atol=1.0e-2)

    # If Y ~ Exponential(1), then U = 1 - e^{-Y} has Uniform(0, 1) distribution
    uniform = TransformedDistribution(
        exponential,
        [
            bijection.AffineTransformation(scale=-1 * one),
            bijection.log.inverse_bijection(),  # == bijection.exp
            bijection.AffineTransformation(loc=one, scale=-1 * one),
        ],
    )
    uniform = serialize_fn(uniform)
    # For U ~ Uniform(0, 1), log P(U) = 0
    assert uniform.log_prob(0.5 * one).asscalar() == 0
    assert uniform.log_prob(0.2 * one).asscalar() == 0

    v = np.linspace(0, 1, 101)
    assert np.allclose(uniform.cdf(nd.array(v)).asnumpy(), v)

    qs_calc = uniform.quantile(nd.array(level)).asnumpy()[:, 0]
    assert np.allclose(qs_calc, level, atol=1.0e-2)
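The helpers exp_cdf and exp_quantile are defined elsewhere in the original test module and do not appear in this snippet. For the Exponential(1) distribution being checked, plausible reference implementations are:

import numpy as np

def exp_cdf(x):
    # CDF of Exponential(1): F(x) = 1 - exp(-x)
    return 1.0 - np.exp(-x)

def exp_quantile(level):
    # Quantile (inverse CDF) of Exponential(1): F^{-1}(p) = -log(1 - p)
    return -np.log(1.0 - level)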
Example #6

DISTRIBUTIONS_WITH_QUANTILE_FUNCTION = (Gaussian, Uniform, Laplace, Binned)


@pytest.mark.parametrize(
    "distr",
    [
        TransformedDistribution(
            Gaussian(
                mu=mx.nd.random.uniform(shape=BATCH_SHAPE),
                sigma=mx.nd.ones(shape=BATCH_SHAPE),
            ),
            [
                bij.AffineTransformation(
                    scale=1e-1 + mx.nd.random.uniform(shape=BATCH_SHAPE)
                ),
                bij.softrelu,
            ],
        ),
        Binned(
            bin_log_probs=mx.nd.uniform(shape=BATCH_SHAPE + (23,)),
            bin_centers=mx.nd.array(np.logspace(-1, 1, 23))
            + mx.nd.zeros(BATCH_SHAPE + (23,)),
        ),
        TransformedDistribution(
            Binned(
                bin_log_probs=mx.nd.uniform(shape=BATCH_SHAPE + (23,)),
                bin_centers=mx.nd.array(np.logspace(-1, 1, 23))
                + mx.nd.zeros(BATCH_SHAPE + (23,)),
            ),