def generateKSSampleEstimatedParameters(nrepeat, samplesize):
    """
    nrepeat : number of repetitions, i.e. the size of the returned sample of statistics
    samplesize : size of each sample drawn from the Uniform distribution
    """
    distfactory = ot.UniformFactory()
    refdist = ot.Uniform(0.0, 1.0)
    D = ot.Sample(nrepeat, 1)
    for i in range(nrepeat):
        sample = refdist.getSample(samplesize)
        # Fit the parameters on the sample itself, then compute the
        # Kolmogorov-Smirnov statistic against the fitted distribution.
        trialdist = distfactory.build(sample)
        D[i, 0] = computeKSStatistics(sample, trialdist)
    return D
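
# A minimal sketch of the computeKSStatistics helper assumed above (it is not
# shown in this snippet): the Kolmogorov-Smirnov statistic is the largest
# absolute gap between the empirical CDF of the sample and the CDF of the
# candidate distribution.
def computeKSStatistics(sample, distribution):
    sorted_sample = ot.Sample(sample).sort()
    n = sorted_sample.getSize()
    D = 0.0
    for i in range(n):
        F = distribution.computeCDF(sorted_sample[i, 0])
        D = max(D, abs(F - i / n), abs(F - (i + 1) / n))
    return D

# Hypothetical usage: build an empirical distribution of the statistic when the
# Uniform parameters are estimated from the data.
# D = generateKSSampleEstimatedParameters(1000, 100)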
Example #2
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
ot.RandomGenerator.SetSeed(0)
factory = ot.UniformFactory()
ref = factory.build()
dimension = ref.getDimension()
if dimension <= 2:
    sample = ref.getSample(50)
    distribution = factory.build(sample)
    if dimension == 1:
        distribution.setDescription(['$t$'])
        pdf_graph = distribution.drawPDF(256)
        cloud = ot.Cloud(sample, ot.Sample(sample.getSize(), 1))
        cloud.setColor('blue')
        cloud.setPointStyle('fcircle')
        pdf_graph.add(cloud)
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(111)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    else:
        sample = ref.getSample(500)
        distribution.setDescription(['$t_0$', '$t_1$'])
        pdf_graph = distribution.drawPDF([256]*2)
        cloud = ot.Cloud(sample)
        cloud.setColor('red')
        cloud.setPointStyle('fcircle')
        pdf_graph.add(cloud)
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        # The snippet is truncated here; the 2D branch presumably finishes like
        # the 1D one, rendering the graph into the matplotlib figure.
        pdf_axis = fig.add_subplot(111)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
Example #3
for i in range(continuousDistributionNumber):
    continuousSampleCollection[i] = continuousDistributionCollection[i].getSample(size)
    continuousSampleCollection[i].setName(continuousDistributionCollection[i].getName())
    sampleCollection[i] = continuousSampleCollection[i]
for i in range(discreteDistributionNumber):
    discreteSampleCollection[i] = discreteDistributionCollection[i].getSample(size)
    discreteSampleCollection[i].setName(discreteDistributionCollection[i].getName())
    sampleCollection[continuousDistributionNumber + i] = discreteSampleCollection[i]

factoryCollection = ot.DistributionFactoryCollection(3)
factoryCollection[0] = ot.UniformFactory()
factoryCollection[1] = ot.BetaFactory()
factoryCollection[2] = ot.NormalFactory()
aSample = ot.Uniform(-1.5, 2.5).getSample(size)
model, best_bic = ot.FittingTest.BestModelBIC(aSample, factoryCollection)
print("best model BIC=", repr(model))
model, best_result = ot.FittingTest.BestModelKolmogorov(
    aSample, factoryCollection)
print("best model Kolmogorov=", repr(model))

# BIC adequation
resultBIC = ot.SquareMatrix(distributionNumber)
for i in range(distributionNumber):
    for j in range(distributionNumber):
        value = ot.FittingTest.BIC(sampleCollection[i],
                                   distributionCollection[j], 0)
        # The snippet is truncated here; storing the criterion in the matrix
        # created above is the natural completion.
        resultBIC[i, j] = value
Example #4
coll.add(rm)
weights.add(-2.5)
coll.add(ot.Gamma(3.0, 4.0, -2.0))
weights.add(2.5)
distribution = ot.RandomMixture(coll, weights)
print("distribution=", repr(distribution))
print("distribution=", distribution)
mu = distribution.getMean()[0]
sigma = distribution.getStandardDeviation()[0]
for i in range(10):
    x = mu + (-3.0 + 6.0 * i / 9.0) * sigma
    print("pdf( %.6f )=%.6f" % (x, distribution.computePDF(x)))

# Tests of the projection mechanism
collFactories = [
    ot.UniformFactory(),
    ot.NormalFactory(),
    ot.TriangularFactory(),
    ot.ExponentialFactory(),
    ot.GammaFactory()
]
#, TrapezoidalFactory()
result, norms = distribution.project(collFactories)
print("projections=", result)
print("norms=", norms)
# ------------------------------ Multivariate tests ------------------------------#
# 2D RandomMixture
collection = [ot.Normal(0.0, 1.0)] * 3

weightMatrix = ot.Matrix(2, 3)
weightMatrix[0, 0] = 1.0
Example #5
rm = ot.RandomMixture(coll, weights)
coll.add(rm)
weights.add(-2.5)
coll.add(ot.Gamma(3.0, 4.0, -2.0))
weights.add(2.5)
distribution = ot.RandomMixture(coll, weights)
print("distribution=", repr(distribution))
print("distribution=", distribution)
mu = distribution.getMean()[0]
sigma = distribution.getStandardDeviation()[0]
for i in range(10):
    x = mu + (-3.0 + 6.0 * i / 9.0) * sigma
    print("pdf( %.6f )=%.6f" % (x, distribution.computePDF(x)))

# Tests of the projection mechanism
collFactories = [
    ot.UniformFactory(),
    ot.NormalFactory(),
    ot.TriangularFactory(),
    ot.ExponentialFactory(),
    ot.GammaFactory()
]
# , TrapezoidalFactory()
result, norms = distribution.project(collFactories)
print("projections=", result)
print("norms=", norms)
# ------------------------------ Multivariate tests ------------------------------#
# 2D RandomMixture
collection = [ot.Normal(0.0, 1.0)] * 3

weightMatrix = ot.Matrix(2, 3)
weightMatrix[0, 0] = 1.0
weightMatrix[0, 1] = -2.0
weightMatrix[0, 2] = 1.0
weightMatrix[1, 0] = 1.0
weightMatrix[1, 1] = 1.0
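
# The snippet is truncated here; a minimal sketch of how the weight matrix
# would typically be completed and used (the remaining coefficient is an
# illustrative value, not taken from the original source).
weightMatrix[1, 2] = 3.0
distribution2D = ot.RandomMixture(collection, weightMatrix)
print("2D distribution=", distribution2D)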
Example #6
    print("children(", nod, ") : ", ndag.getChildren(nod))

order = ndag.getTopologicalOrder()
marginals = [ot.Uniform(0.0, 1.0) for i in range(order.getSize())]
copulas = list()
for i in range(order.getSize()):
    d = 1 + ndag.getParents(i).getSize()
    print("i=", i, ", d=", d)
    if d == 1:
        copulas.append(ot.IndependentCopula(1))
    else:
        R = ot.CorrelationMatrix(d)
        # Use a distinct index so the outer loop variable i is not shadowed.
        for k in range(d):
            for j in range(k):
                R[k, j] = 0.5 / d
        copulas.append(ot.Student(5.0, [0.0] * d, [1.0] * d, R).getCopula())

cbn = otagrum.ContinuousBayesianNetwork(ndag, marginals, copulas)
size = 300
sample = cbn.getSample(size)
# ContinuousBayesianNetworkFactory
marginalsFactory = ot.UniformFactory()
copulasFactory = ot.BernsteinCopulaFactory()
threshold = 0.1
maxParents = 5
factory = otagrum.ContinuousBayesianNetworkFactory(marginalsFactory,
                                                   copulasFactory, ndag,
                                                   threshold, maxParents)
cbn = factory.build(sample)
print('cbn=', cbn)
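
# Hedged usage sketch (not in the original snippet): the learned network can be
# sampled like the reference one, e.g. to compare empirical means.
print("reference mean=", sample.computeMean())
print("learned mean=", cbn.getSample(size).computeMean())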
Example #7
# %%
dag = learner.learnDAG()

# %%
showDot(dag.toDot())

# %%
# Learning parameters
# Bernstein copulas are used to learn the local conditional copulas associated to each node

# %%
m_list = []    # estimated marginal distributions, one per node
lcc_list = []  # local conditional copulas, one per node
for i in range(train.getDimension()):
    m_list.append(ot.UniformFactory().build(train.getMarginal(i)))
    indices = [i] + [int(n) for n in ndag.getParents(i)]
    dim_lcc = len(indices)
    if dim_lcc == 1:
        bernsteinCopula = ot.IndependentCopula(1)
    elif dim_lcc > 1:
        K = otagrum.ContinuousTTest.GetK(len(train), dim_lcc)
        bernsteinCopula = ot.EmpiricalBernsteinCopula(
            train.getMarginal(indices), K, False)
    lcc_list.append(bernsteinCopula)

# %%
# We can now create the learned CBN

# %%
lcbn = otagrum.ContinuousBayesianNetwork(ndag, m_list, lcc_list)  # Learned CBN
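
# %%
# Hedged usage sketch (not part of the original example): the learned CBN can
# be sampled like any otagrum ContinuousBayesianNetwork, e.g. to compare the
# generated data with the training sample.
generated = lcbn.getSample(1000)
print(generated.computeMean())
print(train.computeMean())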