def test_loglogistic_likelihood(mu: float, sigma: float,
                                hybridize: bool) -> None:
    """
    Draw samples from a log-logistic distribution with known (mu, sigma)
    and check that SGD maximum-likelihood estimation recovers both
    parameters to within the configured tolerance.
    """

    # Build constant parameter vectors and sample NUM_SAMPLES observations
    # from the target distribution.
    mu_vec = mx.nd.ones((NUM_SAMPLES, )) * mu
    sigma_vec = mx.nd.ones((NUM_SAMPLES, )) * sigma
    samples = Loglogistic(mu_vec, sigma_vec).sample()

    # Initialize the optimizer slightly off the true values; sigma is
    # mapped through inv_softplus because the output layer constrains it
    # to be positive via softplus.
    init_biases = [
        mu - START_TOL_MULTIPLE * TOL * mu,
        inv_softplus(sigma - START_TOL_MULTIPLE * TOL * sigma),
    ]

    # Fit the distribution parameters by maximizing the likelihood.
    mu_hat, sigma_hat = maximum_likelihood_estimate_sgd(
        LoglogisticOutput(),
        samples,
        init_biases=init_biases,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.05),
        num_epochs=PositiveInt(10),
    )

    print("mu:", mu_hat, "sigma:", sigma_hat)

    # Both estimates must land within a relative tolerance of the truth.
    assert (np.abs(mu_hat - mu) <
            TOL * mu), f"mu did not match: mu = {mu}, mu_hat = {mu_hat}"
    assert (np.abs(sigma_hat - sigma) < TOL * sigma
            ), f"sigma did not match: sigma = {sigma}, sigma_hat = {sigma_hat}"
# Example #2
                    low=mx.nd.zeros(shape=(3, 4, 5)),
                    high=mx.nd.ones(shape=(3, 4, 5)),
                ),
                [
                    BoxCoxTransform(
                        lambda_1=mx.nd.ones(shape=(3, 4, 5)),
                        lambda_2=mx.nd.zeros(shape=(3, 4, 5)),
                    )
                ],
            ),
            (3, 4, 5),
            (),
        ),
        (
            Loglogistic(
                mx.nd.zeros(shape=(3, 4, 5)),
                mx.nd.ones(shape=(3, 4, 5)),
            ),
            (3, 4, 5),
            (),
        ),
        (
            Weibull(
                mx.nd.ones(shape=(3, 4, 5)),
                mx.nd.ones(shape=(3, 4, 5)),
            ),
            (3, 4, 5),
            (),
        ),
    ],
)
def test_distribution_shapes(