def test_genpareto_likelihood(xi: float, beta: float, hybridize: bool) -> None:
    """
    Check that maximizing the likelihood with SGD recovers the Generalized
    Pareto parameters (xi, beta) that generated the samples.

    Parameters
    ----------
    xi
        True shape parameter used to draw the samples.
    beta
        True scale parameter used to draw the samples.
    hybridize
        Whether to hybridize the MXNet network during estimation.
    """

    # generate NUM_SAMPLES i.i.d. samples, all with the same true parameters
    xis = mx.nd.zeros((NUM_SAMPLES, )) + xi
    betas = mx.nd.zeros((NUM_SAMPLES, )) + beta

    distr = GenPareto(xis, betas)
    samples = distr.sample()

    # initialize the optimization slightly off the true values (inverse of the
    # softplus projection used by the output layer) so that convergence back
    # to (xi, beta) is a meaningful signal
    init_biases = [
        inv_softplus(xi - START_TOL_MULTIPLE * TOL * xi),
        inv_softplus(beta - START_TOL_MULTIPLE * TOL * beta),
    ]

    xi_hat, beta_hat = maximum_likelihood_estimate_sgd(
        GenParetoOutput(),
        samples,
        init_biases=init_biases,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.05),
        num_epochs=PositiveInt(10),
    )

    print("XI:", xi_hat, "BETA:", beta_hat)
    # BUGFIX: the failure message previously said "alpha" although this
    # assertion checks the xi (shape) parameter
    assert (np.abs(xi_hat - xi) <
            TOL * xi), f"xi did not match: xi = {xi}, xi_hat = {xi_hat}"
    assert (np.abs(beta_hat - beta) < TOL *
            beta), f"beta did not match: beta = {beta}, beta_hat = {beta_hat}"
Example #2
0
                    Gaussian(mu=mx.nd.array([-1.0]), sigma=mx.nd.array([0.2])),
                    Gamma(alpha=mx.nd.array([2.0]), beta=mx.nd.array([0.5])),
                ],
            ),
            MixtureDistributionOutput([GaussianOutput(), GammaOutput()]),
            2_000,
        ),
        (
            MixtureDistribution(
                mixture_probs=mx.nd.array([[0.7, 0.3]]),
                components=[
                    Gaussian(mu=mx.nd.array([-1.0]), sigma=mx.nd.array([0.2])),
                    GenPareto(xi=mx.nd.array([0.6]), beta=mx.nd.array([1.0])),
                ],
            ),
            MixtureDistributionOutput([GaussianOutput(), GenParetoOutput()]),
            2_000,
        ),
    ],
)
@pytest.mark.parametrize("serialize_fn", serialize_fn_list)
@pytest.mark.skip("Skip test that takes long time to run")
def test_inference_mixture_different_families(
    mixture_distribution: MixtureDistribution,
    mixture_distribution_output: MixtureDistributionOutput,
    epochs: int,
    serialize_fn,
) -> None:
    # First sample from mixture distribution and then confirm the MLE are close to true parameters
    num_samples = 10_000
    samples = mixture_distribution.sample(num_samples=num_samples)
Example #3
0
                ],
            ),
            MixtureDistributionOutput([GaussianOutput(),
                                       GammaOutput()]),
            2_000,
        ),
        (
            MixtureDistribution(
                mixture_probs=mx.nd.array([[0.7, 0.3]]),
                components=[
                    Gaussian(mu=mx.nd.array([-1.0]), sigma=mx.nd.array([0.2])),
                    GenPareto(xi=mx.nd.array([0.6]), beta=mx.nd.array([1.0])),
                ],
            ),
            MixtureDistributionOutput([GaussianOutput(),
                                       GenParetoOutput()]),
            2_000,
        ),
    ],
)
@pytest.mark.parametrize("serialize_fn", serialize_fn_list)
@pytest.mark.skip("Skip test that takes long time to run")
def test_inference_mixture_different_families(
    mixture_distribution: MixtureDistribution,
    mixture_distribution_output: MixtureDistributionOutput,
    epochs: int,
    serialize_fn,
) -> None:
    # First sample from mixture distribution and then confirm the MLE are close to true parameters
    num_samples = 10_000
    samples = mixture_distribution.sample(num_samples=num_samples)
Example #4
0
)


@pytest.mark.parametrize(
    "distr_output",
    [
        BetaOutput(),
        CategoricalOutput(num_cats=3),
        DeterministicOutput(value=42.0),
        DirichletMultinomialOutput(dim=3, n_trials=5),
        DirichletOutput(dim=4),
        EmpiricalDistributionOutput(num_samples=10,
                                    distr_output=GaussianOutput()),
        GammaOutput(),
        GaussianOutput(),
        GenParetoOutput(),
        LaplaceOutput(),
        LogitNormalOutput(),
        LoglogisticOutput(),
        LowrankMultivariateGaussianOutput(dim=5, rank=2),
        MultivariateGaussianOutput(dim=4),
        NegativeBinomialOutput(),
        OneInflatedBetaOutput(),
        PiecewiseLinearOutput(num_pieces=10),
        PoissonOutput(),
        StudentTOutput(),
        UniformOutput(),
        WeibullOutput(),
        ZeroAndOneInflatedBetaOutput(),
        ZeroInflatedBetaOutput(),
        ZeroInflatedNegativeBinomialOutput(),