def test_genpareto_likelihood(xi: float, beta: float, hybridize: bool) -> None:
    """
    Check that maximum-likelihood estimation via SGD recovers the
    Generalized Pareto parameters (xi, beta) from sampled data.

    Parameters
    ----------
    xi
        True shape parameter used to generate samples.
    beta
        True scale parameter used to generate samples.
    hybridize
        Whether to hybridize the MXNet network during training.
    """
    # generate NUM_SAMPLES draws from GenPareto(xi, beta)
    xis = mx.nd.zeros((NUM_SAMPLES,)) + xi
    betas = mx.nd.zeros((NUM_SAMPLES,)) + beta
    distr = GenPareto(xis, betas)
    samples = distr.sample()

    # initialize the projection biases slightly below the true values so
    # that recovery requires actual optimization rather than luck
    init_biases = [
        inv_softplus(xi - START_TOL_MULTIPLE * TOL * xi),
        inv_softplus(beta - START_TOL_MULTIPLE * TOL * beta),
    ]

    xi_hat, beta_hat = maximum_likelihood_estimate_sgd(
        GenParetoOutput(),
        samples,
        init_biases=init_biases,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.05),
        num_epochs=PositiveInt(10),
    )

    print("XI:", xi_hat, "BETA:", beta_hat)
    # NOTE(review): original message said "alpha did not match" even though
    # this distribution has no alpha parameter — corrected to "xi".
    assert (
        np.abs(xi_hat - xi) < TOL * xi
    ), f"xi did not match: xi = {xi}, xi_hat = {xi_hat}"
    assert (
        np.abs(beta_hat - beta) < TOL * beta
    ), f"beta did not match: beta = {beta}, beta_hat = {beta_hat}"
MixtureDistribution( mixture_probs=mx.nd.array([[0.6, 0.4]]), components=[ Gaussian(mu=mx.nd.array([-1.0]), sigma=mx.nd.array([0.2])), Gamma(alpha=mx.nd.array([2.0]), beta=mx.nd.array([0.5])), ], ), MixtureDistributionOutput([GaussianOutput(), GammaOutput()]), 2_000, ), ( MixtureDistribution( mixture_probs=mx.nd.array([[0.7, 0.3]]), components=[ Gaussian(mu=mx.nd.array([-1.0]), sigma=mx.nd.array([0.2])), GenPareto(xi=mx.nd.array([0.6]), beta=mx.nd.array([1.0])), ], ), MixtureDistributionOutput([GaussianOutput(), GenParetoOutput()]), 2_000, ), ], ) @pytest.mark.parametrize("serialize_fn", serialize_fn_list) @pytest.mark.skip("Skip test that takes long time to run") def test_inference_mixture_different_families( mixture_distribution: MixtureDistribution, mixture_distribution_output: MixtureDistributionOutput, epochs: int, serialize_fn, ) -> None: