def test_multivariate_gaussian(hybridize: bool) -> None:
    num_samples = 2000
    dim = 2

    mu = np.arange(0, dim) / float(dim)

    L_diag = np.ones((dim,))
    L_low = 0.1 * np.ones((dim, dim)) * np.tri(dim, k=-1)
    L = np.diag(L_diag) + L_low
    Sigma = L.dot(L.transpose())

    distr = MultivariateGaussian(mu=mx.nd.array(mu), L=mx.nd.array(L))

    samples = distr.sample(num_samples)

    mu_hat, L_hat = maximum_likelihood_estimate_sgd(
        MultivariateGaussianOutput(dim=dim),
        samples,
        # TODO: we would need to rework biases a bit to use them in the
        # multivariate case
        init_biases=None,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.01),
        num_epochs=PositiveInt(10),
    )

    distr = MultivariateGaussian(
        mu=mx.nd.array([mu_hat]), L=mx.nd.array([L_hat])
    )

    Sigma_hat = distr.variance[0].asnumpy()

    assert np.allclose(
        mu_hat, mu, atol=0.1, rtol=0.1
    ), f"mu did not match: mu = {mu}, mu_hat = {mu_hat}"
    assert np.allclose(
        Sigma_hat, Sigma, atol=0.1, rtol=0.1
    ), f"Sigma did not match: sigma = {Sigma}, sigma_hat = {Sigma_hat}"
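# Aside: a minimal standalone sketch (numpy only, hypothetical helper not part
# of the test suite) of the parametrization the test above relies on. With L
# lower triangular and unit diagonal, Sigma = L @ L.T is symmetric positive
# definite by construction, which is why `distr.variance` can recover it from
# the fitted factor L_hat.
def _cholesky_parametrization_sketch(dim: int = 2) -> None:
    L = np.diag(np.ones(dim)) + 0.1 * np.ones((dim, dim)) * np.tri(dim, k=-1)
    Sigma = L.dot(L.T)
    assert np.allclose(Sigma, Sigma.T)  # symmetric
    assert np.all(np.linalg.eigvalsh(Sigma) > 0)  # positive definite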
        < 0.05
    )

    # can only calculate cdf for gaussians currently
    if isinstance(distr1, Gaussian) and isinstance(distr2, Gaussian):
        emp_cdf, edges = empirical_cdf(samples_mix.asnumpy())
        calc_cdf = mixture.cdf(mx.nd.array(edges)).asnumpy()
        assert np.allclose(calc_cdf[1:, :], emp_cdf, atol=1e-2)


@pytest.mark.parametrize(
    "distribution_outputs",
    [
        ((GaussianOutput(), GaussianOutput()),),
        ((GaussianOutput(), StudentTOutput(), LaplaceOutput()),),
        ((MultivariateGaussianOutput(3), MultivariateGaussianOutput(3)),),
    ],
)
@pytest.mark.parametrize("serialize_fn", serialize_fn_list)
def test_mixture_output(distribution_outputs, serialize_fn) -> None:
    mdo = MixtureDistributionOutput(*distribution_outputs)

    args_proj = mdo.get_args_proj()
    args_proj.initialize()

    input = mx.nd.ones(shape=(512, 30))

    distr_args = args_proj(input)
    d = mdo.distribution(distr_args)
    d = serialize_fn(d)
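# Hypothetical continuation sketch (not part of the original fragment): after
# the serialize_fn round-trip, the mixture should still expose consistent
# shapes through the usual gluonts Distribution API (`sample`, `log_prob`,
# `batch_shape`, `event_shape`). `_mixture_roundtrip_sketch` is an
# illustrative name introduced here, not a helper from the test suite.
def _mixture_roundtrip_sketch(d) -> None:
    samples = d.sample()
    assert samples.shape == d.batch_shape + d.event_shape
    log_prob = d.log_prob(samples)
    assert log_prob.shape == d.batch_shape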
            LowrankMultivariateGaussianOutput(dim=target_dim, rank=2),
            10,
            estimator,
            False,
            False,
        ),
        (
            LowrankMultivariateGaussianOutput(dim=target_dim, rank=2),
            10,
            estimator,
            True,
            False,
        ),
        (None, 10, estimator, True, True),
        (
            MultivariateGaussianOutput(dim=target_dim),
            10,
            estimator,
            False,
            True,
        ),
        (
            MultivariateGaussianOutput(dim=target_dim),
            10,
            estimator,
            True,
            True,
        ),
    ],
)
def test_deepvar(
            mx.nd.random.gamma(shape=(3, 4, 5, 6)),
            [None, mx.nd.ones(shape=(3, 4, 5))],
            [None, mx.nd.ones(shape=(3, 4, 5))],
            (3, 4, 5),
            (),
        ),
        (
            BetaOutput(),
            mx.nd.random.gamma(shape=(3, 4, 5, 6)),
            [None, mx.nd.ones(shape=(3, 4, 5))],
            [None, mx.nd.ones(shape=(3, 4, 5))],
            (3, 4, 5),
            (),
        ),
        (
            MultivariateGaussianOutput(dim=5),
            mx.nd.random.normal(shape=(3, 4, 10)),
            [None, mx.nd.ones(shape=(3, 4, 5))],
            [None, mx.nd.ones(shape=(3, 4, 5))],
            (3, 4),
            (5,),
        ),
        (
            LowrankMultivariateGaussianOutput(dim=5, rank=4),
            mx.nd.random.normal(shape=(3, 4, 10)),
            [None, mx.nd.ones(shape=(3, 4, 5))],
            [None, mx.nd.ones(shape=(3, 4, 5))],
            (3, 4),
            (5,),
        ),
        (
    [
        BetaOutput(),
        CategoricalOutput(num_cats=3),
        DeterministicOutput(value=42.0),
        DirichletMultinomialOutput(dim=3, n_trials=5),
        DirichletOutput(dim=4),
        EmpiricalDistributionOutput(
            num_samples=10, distr_output=GaussianOutput()
        ),
        GammaOutput(),
        GaussianOutput(),
        GenParetoOutput(),
        LaplaceOutput(),
        LogitNormalOutput(),
        LoglogisticOutput(),
        LowrankMultivariateGaussianOutput(dim=5, rank=2),
        MultivariateGaussianOutput(dim=4),
        NegativeBinomialOutput(),
        OneInflatedBetaOutput(),
        PiecewiseLinearOutput(num_pieces=10),
        PoissonOutput(),
        StudentTOutput(),
        UniformOutput(),
        WeibullOutput(),
        ZeroAndOneInflatedBetaOutput(),
        ZeroInflatedBetaOutput(),
        ZeroInflatedNegativeBinomialOutput(),
        ZeroInflatedPoissonOutput(),
    ],
)
def test_distribution_output_serde(distr_output: DistributionOutput):
    distr_output_copy = decode(encode(distr_output))
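# Hedged sketch of the round-trip property the truncated test above presumably
# asserts: `encode` / `decode` turn the output object into a serializable
# structure and back, and comparing the encodings of the original and the copy
# is one simple way to check the round-trip. The exact assertion in the full
# test may differ; `_serde_roundtrip_sketch` is an illustrative name only.
def _serde_roundtrip_sketch(distr_output: DistributionOutput) -> None:
    distr_output_copy = decode(encode(distr_output))
    assert encode(distr_output_copy) == encode(distr_output)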