def test_weibull_likelihood(rate: float, shape: float,
                            hybridize: bool) -> None:
    """
    Check that SGD maximum-likelihood estimation recovers the Weibull
    rate and shape parameters from data sampled at those parameters.
    """
    # Draw NUM_SAMPLES observations from a Weibull distribution whose
    # rate/shape are constant across the batch.
    rate_vec = mx.nd.zeros((NUM_SAMPLES,)) + rate
    shape_vec = mx.nd.zeros((NUM_SAMPLES,)) + shape
    samples = Weibull(rate_vec, shape_vec).sample()

    # Initialize the optimizer slightly below the true parameter values
    # (inverse softplus, since the output layer applies softplus).
    biases = [
        inv_softplus(param - START_TOL_MULTIPLE * TOL * param)
        for param in (rate, shape)
    ]

    rate_hat, shape_hat = maximum_likelihood_estimate_sgd(
        WeibullOutput(),
        samples,
        init_biases=biases,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.05),
        num_epochs=PositiveInt(10),
    )

    print("rate:", rate_hat, "shape:", shape_hat)
    # Estimates must land within a relative tolerance of the truth.
    assert (np.abs(rate_hat - rate) < TOL *
            rate), f"rate did not match: rate = {rate}, rate_hat = {rate_hat}"
    assert (np.abs(shape_hat - shape) < TOL * shape
            ), f"shape did not match: shape = {shape}, shape_hat = {shape_hat}"
Ejemplo n.º 2
0
                ],
            ),
            (3, 4, 5),
            (),
        ),
        (
            Loglogistic(
                mx.nd.zeros(shape=(3, 4, 5)),
                mx.nd.ones(shape=(3, 4, 5)),
            ),
            (3, 4, 5),
            (),
        ),
        (
            Weibull(
                mx.nd.ones(shape=(3, 4, 5)),
                mx.nd.ones(shape=(3, 4, 5)),
            ),
            (3, 4, 5),
            (),
        ),
    ],
)
def test_distribution_shapes(
    distr: Distribution,
    expected_batch_shape: Tuple,
    expected_event_shape: Tuple,
):
    assert distr.batch_shape == expected_batch_shape
    assert distr.event_shape == expected_event_shape

    x = distr.sample()