def test_transformed_distribution() -> None:
    zero = nd.zeros(1)
    one = nd.ones(1)

    # If Y = -log(U) with U ~ Uniform(0, 1), then Y ~ Exponential(1)
    exponential = TransformedDistribution(
        Uniform(zero, one),
        bijection.log,
        bijection.AffineTransformation(scale=-1 * one),
    )

    # For Y ~ Exponential(1), p(y) = e^{-y} ==> log p(y) = -y
    assert exponential.log_prob(1 * one).asscalar() == -1.0
    assert exponential.log_prob(2 * one).asscalar() == -2.0

    # If Y ~ Exponential(1), then U = 1 - e^{-Y} has Uniform(0, 1) distribution
    uniform = TransformedDistribution(
        exponential,
        bijection.AffineTransformation(scale=-1 * one),
        bijection.log.inverse_bijection(),  # == bijection.exp
        bijection.AffineTransformation(loc=one, scale=-1 * one),
    )
    # For U ~ Uniform(0, 1), log P(U) = 0
    assert uniform.log_prob(0.5 * one).asscalar() == 0
    assert uniform.log_prob(0.2 * one).asscalar() == 0
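The snippets in this listing omit their imports. A minimal setup that makes them runnable, assuming a recent GluonTS where the MXNet-based distributions live under gluonts.mx.distribution (older releases expose the same names under gluonts.distribution):

# Assumed imports; exact module paths depend on the GluonTS version.
import mxnet as mx
import numpy as np
import pandas as pd
from mxnet import nd

from gluonts.mx.distribution import TransformedDistribution, Uniform, bijection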
Example #2
def test_transformed_distribution(serialize_fn) -> None:
    zero = nd.zeros(1)
    one = nd.ones(1)

    # If Y = -log(U) with U ~ Uniform(0, 1), then Y ~ Exponential(1)
    exponential = TransformedDistribution(
        Uniform(zero, one),
        [bijection.log,
         bijection.AffineTransformation(scale=-1 * one)],
    )
    exponential = serialize_fn(exponential)

    # For Y ~ Exponential(1), p(y) = e^{-y} ==> log p(y) = -y
    assert exponential.log_prob(1 * one).asscalar() == -1.0
    assert exponential.log_prob(2 * one).asscalar() == -2.0

    v = np.linspace(0, 5, 101)
    assert np.allclose(exponential.cdf(nd.array(v)).asnumpy(), exp_cdf(v))

    level = np.linspace(1.0e-5, 1.0 - 1.0e-5, 101)

    qs_calc = exponential.quantile(nd.array(level)).asnumpy()[:, 0]
    qs_theo = exp_quantile(level)
    assert np.allclose(qs_calc, qs_theo, atol=1.0e-2)

    # If Y ~ Exponential(1), then U = 1 - e^{-Y} has Uniform(0, 1) distribution
    uniform = TransformedDistribution(
        exponential,
        [
            bijection.AffineTransformation(scale=-1 * one),
            bijection.log.inverse_bijection(),  # == bijection.exp
            bijection.AffineTransformation(loc=one, scale=-1 * one),
        ],
    )
    uniform = serialize_fn(uniform)
    # For U ~ Uniform(0, 1), log P(U) = 0
    assert uniform.log_prob(0.5 * one).asscalar() == 0
    assert uniform.log_prob(0.2 * one).asscalar() == 0

    v = np.linspace(0, 1, 101)
    assert np.allclose(uniform.cdf(nd.array(v)).asnumpy(), v)

    qs_calc = uniform.quantile(nd.array(level)).asnumpy()[:, 0]
    assert np.allclose(qs_calc, level, atol=1.0e-2)
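Here serialize_fn is a pytest fixture (defined elsewhere in the test suite) that round-trips the distribution through serialization and returns an equivalent object. The helpers exp_cdf and exp_quantile are also defined outside the snippet; a minimal sketch of what they compute for the unit Exponential distribution:

# Reference helpers for Exponential(1); not shown in the snippet above.
def exp_cdf(x: np.ndarray) -> np.ndarray:
    # CDF of Exponential(1): F(x) = 1 - exp(-x)
    return 1.0 - np.exp(-x)


def exp_quantile(level: np.ndarray) -> np.ndarray:
    # Quantile function of Exponential(1): F^{-1}(q) = -log(1 - q)
    return -np.log(1.0 - level)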
Example #3
def test_DistributionForecast():
    forecast = DistributionForecast(
        distribution=Uniform(low=mx.nd.array([0.0, 0.0]),
                             high=mx.nd.array([1.0, 2.0])),
        start_date=START_DATE,
        freq=FREQ,
    )

    def percentile(value):
        return f"p{int(round(value * 100)):02d}"

    for quantile in QUANTILES:
        test_cases = [quantile, str(quantile), percentile(quantile)]
        for quant_pred in map(forecast.quantile, test_cases):
            expected = quantile * np.array([1.0, 2.0])
            assert np.allclose(
                quant_pred, expected
            ), f"Expected {quantile} quantile {quantile}. Obtained {quant_pred}."

    pred_length = 2
    assert forecast.prediction_length == pred_length
    assert len(forecast.index) == pred_length
    assert forecast.index[0] == pd.Timestamp(START_DATE)
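START_DATE, FREQ, and QUANTILES are module-level constants defined outside this snippet. START_DATE and FREQ match the values shown in the next example; the QUANTILES below is only a plausible assumption. For Uniform(0, high), the q-quantile is q * high, which is why the expected value is quantile * [1.0, 2.0].

# START_DATE and FREQ as in the next example; QUANTILES is a hypothetical choice.
START_DATE = pd.Timestamp(2017, 1, 1, 12)
FREQ = "1D"
QUANTILES = np.arange(1, 100) / 100.0  # 0.01, 0.02, ..., 0.99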
Example #4
START_DATE = pd.Timestamp(2017, 1, 1, 12)
FREQ = "1D"

FORECASTS = {
    "QuantileForecast":
    QuantileForecast(
        forecast_arrays=QUANTILES.reshape(-1, 1),
        start_date=START_DATE,
        forecast_keys=np.array(QUANTILES, str),
        freq=FREQ,
    ),
    "SampleForecast":
    SampleForecast(samples=SAMPLES, start_date=START_DATE, freq=FREQ),
    "DistributionForecast":
    DistributionForecast(
        distribution=Uniform(low=mx.nd.zeros(1), high=mx.nd.ones(1)),
        start_date=START_DATE,
        freq=FREQ,
    ),
}


@pytest.mark.parametrize("name", FORECASTS.keys())
def test_Forecast(name):
    forecast = FORECASTS[name]

    def percentile(value):
        return f"p{int(round(value * 100)):02d}"

    num_samples, pred_length = SAMPLES.shape
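The snippet is cut off after unpacking SAMPLES.shape. A hypothetical continuation (not the original test body) that exercises the quantile lookup shared by all three forecast types, which accepts a float, its string form, or a "pXX" key:

    # Hypothetical continuation, not the original test body.
    for q in QUANTILES:
        for key in (q, str(q), percentile(q)):
            values = forecast.quantile(key)
            assert isinstance(values, np.ndarray)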
Example #5
     Laplace(mu=mx.nd.zeros(shape=(3, 4, 5)),
             b=mx.nd.ones(shape=(3, 4, 5))),
     (3, 4, 5),
     (),
 ),
 (
     NegativeBinomial(
         mu=mx.nd.zeros(shape=(3, 4, 5)),
         alpha=mx.nd.ones(shape=(3, 4, 5)),
     ),
     (3, 4, 5),
     (),
 ),
 (
     Uniform(
         low=-mx.nd.ones(shape=(3, 4, 5)),
         high=mx.nd.ones(shape=(3, 4, 5)),
     ),
     (3, 4, 5),
     (),
 ),
 (
     PiecewiseLinear(
         gamma=mx.nd.ones(shape=(3, 4, 5)),
         slopes=mx.nd.ones(shape=(3, 4, 5, 10)),
         knot_spacings=mx.nd.ones(shape=(3, 4, 5, 10)) / 10,
     ),
     (3, 4, 5),
     (),
 ),
 (
     MixtureDistribution(
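The fragment above is cut off inside a MixtureDistribution entry; it is part of a parametrized table of (distribution, expected_batch_shape, expected_event_shape) cases. A minimal sketch of the kind of shape check such a table can feed, assuming the standard batch_shape, event_shape, and sample() members of GluonTS distributions; `cases` is a stand-in name for the table above:

# Sketch only; `cases` stands for the (distribution, batch_shape, event_shape)
# tuples listed (partially) above.
def check_shapes(cases):
    for distr, expected_batch_shape, expected_event_shape in cases:
        assert distr.batch_shape == expected_batch_shape
        assert distr.event_shape == expected_event_shape
        samples = distr.sample()
        assert samples.shape == expected_batch_shape + expected_event_shape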
Example #7
 StudentT(
     mu=mx.nd.zeros(shape=BATCH_SHAPE),
     sigma=mx.nd.ones(shape=BATCH_SHAPE),
     nu=mx.nd.ones(shape=BATCH_SHAPE),
 ),
 Dirichlet(alpha=mx.nd.ones(shape=BATCH_SHAPE)),
 Laplace(
     mu=mx.nd.zeros(shape=BATCH_SHAPE), b=mx.nd.ones(shape=BATCH_SHAPE)
 ),
 NegativeBinomial(
     mu=mx.nd.zeros(shape=BATCH_SHAPE),
     alpha=mx.nd.ones(shape=BATCH_SHAPE),
 ),
 Poisson(rate=mx.nd.ones(shape=BATCH_SHAPE)),
 Uniform(
     low=-mx.nd.ones(shape=BATCH_SHAPE),
     high=mx.nd.ones(shape=BATCH_SHAPE),
 ),
 PiecewiseLinear(
     gamma=mx.nd.ones(shape=BATCH_SHAPE),
     slopes=mx.nd.ones(shape=(3, 4, 5, 10)),
     knot_spacings=mx.nd.ones(shape=(3, 4, 5, 10)) / 10,
 ),
 MixtureDistribution(
     mixture_probs=mx.nd.stack(
         0.2 * mx.nd.ones(shape=BATCH_SHAPE),
         0.8 * mx.nd.ones(shape=BATCH_SHAPE),
         axis=-1,
     ),
     components=[
         Gaussian(
             mu=mx.nd.zeros(shape=BATCH_SHAPE),
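This listing is likewise truncated, here inside the Gaussian component of a MixtureDistribution. BATCH_SHAPE is a module-level constant not shown in the snippet; the (3, 4, 5, 10) slopes passed to PiecewiseLinear suggest it is (3, 4, 5), but that is an assumption. A short sketch of drawing samples from one of the listed distributions:

# Assumed constant; the (3, 4, 5, 10) PiecewiseLinear slopes suggest this value.
BATCH_SHAPE = (3, 4, 5)

# Sketch: drawing a batch of samples from one of the distributions above.
distr = Uniform(
    low=-mx.nd.ones(shape=BATCH_SHAPE),
    high=mx.nd.ones(shape=BATCH_SHAPE),
)
samples = distr.sample(num_samples=100)
assert samples.shape == (100,) + BATCH_SHAPE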