def test_probability_model_mcmc():
    """Bayesian estimation of a Normal's (loc, scale) via Metropolis-Hastings MCMC.

    Draws 100 observations from N(10, 1), places an independent
    Uniform(0, 15) x Lognormal(s=1) prior on the two parameters, runs a
    seeded MH sampler, and checks a few posterior samples against pinned
    regression values.
    """
    mu, sigma = 10, 1  # true mean and standard deviation
    # NOTE: a redundant np.random.seed(100) was removed here; it was
    # overridden by the seed below before any random draw was made.
    np.random.seed(1)  # the pinned values below depend on this seed
    data = np.random.normal(mu, sigma, 100).reshape((-1, 1))

    # Independent priors: Uniform(0, 15) on loc, Lognormal(s=1) on scale.
    p0 = Uniform(loc=0., scale=15)
    p1 = Lognormal(s=1., loc=0., scale=1.)
    prior = JointIndependent(marginals=[p0, p1])

    # Normal model with both parameters unknown (loc=None, scale=None).
    candidate_model = DistributionModel(distributions=Normal(loc=None,
                                                             scale=None),
                                        n_parameters=2,
                                        prior=prior)

    sampling = MetropolisHastings(jump=10,
                                  burn_length=10,
                                  seed=[1.0, 0.2],
                                  random_state=1)
    bayes_estimator = BayesParameterEstimation(sampling_class=sampling,
                                               inference_model=candidate_model,
                                               data=data,
                                               nsamples=5)
    s = bayes_estimator.sampler.samples

    # Exact float equality is intentional: all RNG sources are seeded.
    assert s[0, 1] == 3.5196936384257835
    assert s[1, 0] == 11.143811671048994
    assert s[2, 0] == 10.162512455643435
    assert s[3, 1] == 0.8541521389437781
    assert s[4, 1] == 1.0095454025762525
def test_probability_model_importance_sampling():
    """Bayesian estimation of a Normal's (loc, scale) via importance sampling.

    Generates data from a Gaussian pdf, then learns its parameters (mean
    and scale) from this data with a Uniform x Lognormal prior. After
    resampling, the unweighted posterior samples are checked against
    pinned regression values.
    """
    mu, sigma = 10, 1  # true mean and standard deviation
    # NOTE: a redundant np.random.seed(100) was removed here; it was
    # overridden by the seed below before any random draw was made.
    np.random.seed(1)  # the pinned values below depend on this seed
    data = np.random.normal(mu, sigma, 100).reshape((-1, 1))

    # Independent priors: Uniform(0, 15) on loc, Lognormal(s=1) on scale.
    p0 = Uniform(loc=0., scale=15)
    p1 = Lognormal(s=1., loc=0., scale=1.)
    prior = JointIndependent(marginals=[p0, p1])

    # Normal model with both parameters unknown (loc=None, scale=None).
    candidate_model = DistributionModel(distributions=Normal(loc=None,
                                                             scale=None),
                                        n_parameters=2,
                                        prior=prior)

    sampling = ImportanceSampling(random_state=1)

    bayes_estimator = BayesParameterEstimation(sampling_class=sampling,
                                               inference_model=candidate_model,
                                               data=data,
                                               nsamples=10000)
    # Resampling converts weighted samples into unweighted posterior draws.
    bayes_estimator.sampler.resample()
    s_posterior = bayes_estimator.sampler.unweighted_samples

    # Exact float equality is intentional: all RNG sources are seeded.
    assert s_posterior[0, 1] == 0.8616126410951304
    assert s_posterior[9999, 0] == 10.02449120238032
# --- Ejemplo n.º 3 ---
def test_bic():
    """Model selection with the BIC criterion among three candidate distributions.

    Fits Gamma, Exponential and Chi-square models to seeded Gamma(a=2,
    scale=2) data via MLE, sorts by BIC, and checks the resulting model
    probabilities against pinned regression values.
    """
    data = Gamma(a=2, loc=0, scale=2).rvs(nsamples=500, random_state=12)
    m0 = DistributionModel(distributions=Gamma(a=None, loc=None, scale=None), n_parameters=3, name='gamma')
    m1 = DistributionModel(distributions=Exponential(loc=None, scale=None), n_parameters=2, name='exponential')
    m2 = DistributionModel(distributions=ChiSquare(df=None, loc=None, scale=None),
                           n_parameters=3, name='chi-square')

    # NOTE: removed unused local `candidate_models = [m0, m1, m2]`;
    # the selector receives the per-model MLE estimators directly.
    mle1 = MLE(inference_model=m0, random_state=0, data=data)
    mle2 = MLE(inference_model=m1, random_state=0, data=data)
    mle3 = MLE(inference_model=m2, random_state=0, data=data)
    selector = InformationModelSelection(parameter_estimators=[mle1, mle2, mle3], criterion=BIC(),
                                         n_optimizations=[5]*3)
    selector.sort_models()

    # Exact float equality is intentional: all RNG sources are seeded.
    assert 0.5000000575021204 == selector.probabilities[0]
    assert 0.4999999424978796 == selector.probabilities[1]
    assert 3.939737591540338e-18 == selector.probabilities[2]
# --- Ejemplo n.º 4 ---
def test_aic():
    """Model selection with the AIC criterion among three candidate distributions.

    Fits Gamma, Exponential and Chi-square models to seeded Gamma(a=2,
    scale=2) data via MLE, sorts by AIC, and checks the sorted criterion
    values against pinned regression values.
    """
    data = Gamma(a=2, loc=0, scale=2).rvs(nsamples=500, random_state=12)

    m0 = DistributionModel(distributions=Gamma(a=None, loc=None, scale=None), n_parameters=3, name='gamma')
    m1 = DistributionModel(distributions=Exponential(loc=None, scale=None), n_parameters=2, name='exponential')
    m2 = DistributionModel(distributions=ChiSquare(df=None, loc=None, scale=None),
                           n_parameters=3, name='chi-square')

    # NOTE: removed unused local `candidate_models = [m0, m1, m2]`;
    # the selector receives the per-model MLE estimators directly.
    mle1 = MLE(inference_model=m0, random_state=0, data=data)
    mle2 = MLE(inference_model=m1, random_state=0, data=data)
    mle3 = MLE(inference_model=m2, random_state=0, data=data)
    selector = InformationModelSelection(parameter_estimators=[mle1, mle2, mle3], criterion=AIC(),
                                         n_optimizations=[5]*3)
    selector.sort_models()

    # Exact float equality is intentional: all RNG sources are seeded.
    assert 2285.9685816790425 == selector.criterion_values[0]
    assert 2285.9685821390594 == selector.criterion_values[1]
    assert 2368.9477307542193 == selector.criterion_values[2]
# --- Ejemplo n.º 5 ---
def test_simple_probability_model():
    """MLE of a Normal's (loc, scale) from 1000 seeded samples.

    Both parameters are left unknown (None) and recovered from the data;
    the estimates are checked against pinned regression values.
    """
    np.random.seed(1)
    true_mean, true_std = 0, 0.1  # true mean and standard deviation
    observations = np.random.normal(true_mean, true_std, 1000).reshape((-1, 1))

    # Unknown parameters are flagged with None and learnt by the estimator.
    normal_dist = Normal(loc=None, scale=None)
    candidate_model = DistributionModel(distributions=normal_dist, n_parameters=2)

    ml_estimator = MLE(inference_model=candidate_model,
                       data=observations,
                       n_optimizations=3,
                       random_state=1)

    # Exact float equality is intentional: data draw and optimizer are seeded.
    assert ml_estimator.mle[0] == 0.003881247615960185
    assert ml_estimator.mle[1] == 0.09810041339322118
# --- Ejemplo n.º 6 ---
import numpy as np
from UQpy.inference import *
from UQpy.distributions import *
from UQpy.inference.inference_models.DistributionModel import DistributionModel

# data used throughout
from UQpy.inference.information_criteria import BIC, AICc
from UQpy.sampling.mcmc import MetropolisHastings

# Shared fixtures used by the tests below (e.g. test_mle).
data = [0., 1., -1.5, -0.2]
# First candidate model, 1-dimensional: Normal with fixed loc=0 and
# unknown scale, under a Lognormal prior on the single free parameter.
prior = Lognormal(s=1., loc=0., scale=1.)
dist = Normal(loc=0., scale=None)
candidate_model = DistributionModel(n_parameters=1,
                                    distributions=dist,
                                    prior=prior)
# Same model without a prior (for pure maximum-likelihood use).
candidate_model_no_prior = DistributionModel(n_parameters=1,
                                             distributions=dist)
# Second candidate model, 2-dimensional: Uniform with both parameters
# unknown, under an independent Uniform x Lognormal prior.
prior2 = JointIndependent(
    [Uniform(loc=0., scale=0.5),
     Lognormal(s=1., loc=0., scale=1.)])
dist2 = Uniform(loc=None, scale=None)
candidate_model2 = DistributionModel(n_parameters=2,
                                     distributions=dist2,
                                     prior=prior2)


def test_mle():
    ml_estimator = MLE(inference_model=candidate_model,
                       data=data,