Exemplo n.º 1
0
def learning(sample, method, parameters):
    """Learn a Bayesian network from *sample* with the chosen algorithm.

    Parameters
    ----------
    sample : ot.Sample
        Data used for structure learning.
    method : str
        Either "cpc" (continuous PC, otagrum) or "elidan"
        (hill-climbing structure search).
    parameters : tuple
        For "cpc": ``(binNumber, alpha)``.
        For "elidan": ``(max_parents, n_restart_hc)``.

    Returns
    -------
    bn
        The learned Bayesian network.

    Raises
    ------
    ValueError
        If *method* is neither "cpc" nor "elidan".
    """
    if method == "cpc":
        binNumber, alpha = parameters
        learner = otagr.ContinuousPC(sample, binNumber, alpha)

        ndag = learner.learnDAG()

        # Estimate the local conditional copula attached to each node of
        # the learned DAG with an empirical Bernstein copula.
        TTest = otagr.ContinuousTTest(sample, alpha)
        jointDistributions = []
        for i in range(ndag.getSize()):
            d = 1 + ndag.getParents(i).getSize()
            if d == 1:
                # Root node: its one-dimensional "copula" is uniform on [0, 1].
                bernsteinCopula = ot.Uniform(0.0, 1.0)
            else:
                K = TTest.GetK(len(sample), d)
                indices = [i] + [int(n) for n in ndag.getParents(i)]
                bernsteinCopula = ot.EmpiricalBernsteinCopula(
                    sample.getMarginal(indices), K, False)
            jointDistributions.append(bernsteinCopula)

        bn = named_dag_to_bn(ndag)

    elif method == "elidan":
        max_parents, n_restart_hc = parameters
        copula, dag = hc.hill_climbing(sample, max_parents, n_restart_hc)[0:2]
        bn = dag_to_bn(dag, sample.getDescription())
    else:
        # Fail fast: the original code only printed a warning and then
        # crashed with UnboundLocalError on the return statement below.
        raise ValueError(
            "Wrong entry for method argument: expected 'cpc' or 'elidan', "
            "got {!r}".format(method))

    return bn
def KSB_learning(data):
    """Estimate a joint distribution for *data*.

    Marginals are estimated with histograms and the dependence structure
    with an empirical Bernstein copula whose bin number equals the sample
    size.  Timing information is printed as a side effect, and the
    marginals are plotted via ``plot_marginals``.
    """
    print("Build KSB coefficients distribution")
    n_points = data.getSize()
    n_vars = data.getDimension()
    t0 = time()
    # One histogram per marginal component.
    marginals = []
    for j in range(n_vars):
        marginals.append(ot.HistogramFactory().build(data.getMarginal(j)))
    plot_marginals("KSB_marginals", marginals)
    copula = ot.EmpiricalBernsteinCopula(data, n_points)
    distribution = ot.ComposedDistribution(marginals, copula)
    print("t=", time() - t0, "s")
    return distribution
Exemplo n.º 3
0
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
# NOTE(review): auto-generated gallery template.  The class-name tests on
# ot.FrankCopula() select a drawing recipe per distribution type; with
# FrankCopula only the final `else` branch can match here.
if ot.FrankCopula().__class__.__name__ == 'EmpiricalBernsteinCopula':
    sample = ot.Dirichlet([1.0, 2.0, 3.0]).getSample(100)
    copula = ot.EmpiricalBernsteinCopula(sample, 4)
elif ot.FrankCopula().__class__.__name__ == 'ExtremeValueCopula':
    copula = ot.ExtremeValueCopula(ot.SymbolicFunction("t", "t^3/2-t/2+1"))
elif ot.FrankCopula(
).__class__.__name__ == 'MaximumEntropyOrderStatisticsCopula':
    marginals = [ot.Beta(1.5, 3.2, 0.0, 1.0), ot.Beta(2.0, 4.3, 0.5, 1.2)]
    copula = ot.MaximumEntropyOrderStatisticsCopula(marginals)
elif ot.FrankCopula().__class__.__name__ == 'NormalCopula':
    R = ot.CorrelationMatrix(2)
    R[1, 0] = 0.8
    copula = ot.NormalCopula(R)
elif ot.FrankCopula().__class__.__name__ == 'SklarCopula':
    student = ot.Student(3.0, [1.0] * 2, [3.0] * 2, ot.CorrelationMatrix(2))
    copula = ot.SklarCopula(student)
else:
    copula = ot.FrankCopula()
# Force a bivariate copula so PDF/CDF iso-contours can be drawn below.
if copula.getDimension() == 1:
    copula = ot.FrankCopula(2)
copula.setDescription(['$u_1$', '$u_2$'])
pdf_graph = copula.drawPDF()
cdf_graph = copula.drawCDF()
# Side-by-side PDF (left) and CDF (right) panels.
fig = plt.figure(figsize=(10, 4))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
# NOTE(review): this View(...) call is truncated in this chunk.
View(pdf_graph,
     figure=fig,
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()
# Limit printed precision so the reference output is platform-stable.
ot.PlatformInfo.SetNumericalPrecision(5)

# Instantiate one distribution object: a Bernstein copula built from a
# 12-point bivariate normal sample with bin number 3.
dim = 2
copula = ot.EmpiricalBernsteinCopula(ot.Normal(2).getSample(12), 3)
print("Copula ", repr(copula))
print("Copula ", copula)
print("Mean ", repr(copula.getMean()))
print("Covariance ", repr(copula.getCovariance()))

# Is this copula an elliptical distribution?
print("Elliptical distribution= ", copula.isElliptical())

# Is this copula elliptical ?
print("Elliptical copula= ", copula.hasEllipticalCopula())

# Is this copula independent ?
print("Independent copula= ", copula.hasIndependentCopula())

# Test for realization of distribution
oneRealization = copula.getRealization()
print("oneRealization=", repr(oneRealization))

# Test for sampling
size = 10
Exemplo n.º 5
0
    g.add(c)
    g.setBoundingBox(ot.Interval(
        Y.getMin()-0.5*Y.computeRange(), Y.getMax()+0.5*Y.computeRange()))
    return g


# %%
# generate some multivariate data to estimate, with correlation
# NOTE(review): `m` (presumably `import math as m`), `viewer`, and the
# `draw` helper are defined earlier in the original script -- confirm.
f = ot.SymbolicFunction(["U", "xi1", "xi2"], [
                        "sin(U)/(1+cos(U)^2)+0.05*xi1", "sin(U)*cos(U)/(1+cos(U)^2)+0.05*xi2"])
U = ot.Uniform(-0.85*m.pi, 0.85*m.pi)
xi = ot.Normal(2)
# U and xi are independent blocks of the 3D input distribution.
X = ot.BlockIndependentDistribution([U, xi])
N = 200
Y = f(X.getSample(N))

# %%
# estimation by multivariate kernel smoothing
multi_ks = ot.KernelSmoothing().build(Y)
view = viewer.View(draw(multi_ks, Y))

# %%
# estimation by empirical beta copula
# (an EmpiricalBernsteinCopula whose bin number equals the sample size)
beta_copula = ot.EmpiricalBernsteinCopula(Y, len(Y))
marginals = [ot.KernelSmoothing().build(Y.getMarginal(j))
             for j in range(Y.getDimension())]
beta_dist = ot.ComposedDistribution(marginals, beta_copula)
view = viewer.View(draw(beta_dist, Y))

viewer.View.ShowAll()
Exemplo n.º 6
0
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
# NOTE(review): auto-generated gallery template.  With
# EmpiricalBernsteinCopula the first branch matches, so the remaining
# elif/else branches (and the dimension-1 fallback) appear unreachable.
if ot.EmpiricalBernsteinCopula(
).__class__.__name__ == 'EmpiricalBernsteinCopula':
    sample = ot.Dirichlet([1.0, 2.0, 3.0]).getSample(100)
    copula = ot.EmpiricalBernsteinCopula(sample, 4)
elif ot.EmpiricalBernsteinCopula().__class__.__name__ == 'ExtremeValueCopula':
    copula = ot.ExtremeValueCopula(ot.SymbolicFunction("t", "t^3/2-t/2+1"))
elif ot.EmpiricalBernsteinCopula(
).__class__.__name__ == 'MaximumEntropyOrderStatisticsCopula':
    marginals = [ot.Beta(1.5, 3.2, 0.0, 1.0), ot.Beta(2.0, 4.3, 0.5, 1.2)]
    copula = ot.MaximumEntropyOrderStatisticsCopula(marginals)
elif ot.EmpiricalBernsteinCopula().__class__.__name__ == 'NormalCopula':
    R = ot.CorrelationMatrix(2)
    R[1, 0] = 0.8
    copula = ot.NormalCopula(R)
elif ot.EmpiricalBernsteinCopula().__class__.__name__ == 'SklarCopula':
    student = ot.Student(3.0, [1.0] * 2, [3.0] * 2, ot.CorrelationMatrix(2))
    copula = ot.SklarCopula(student)
else:
    copula = ot.EmpiricalBernsteinCopula()
if copula.getDimension() == 1:
    copula = ot.EmpiricalBernsteinCopula(2)
copula.setDescription(['$u_1$', '$u_2$'])
pdf_graph = copula.drawPDF()
cdf_graph = copula.drawCDF()
# Side-by-side PDF (left) and CDF (right) panels.
fig = plt.figure(figsize=(10, 4))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
# NOTE(review): this View(...) call is truncated in this chunk.
View(pdf_graph,
Exemplo n.º 7
0
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Build a 2D Bernstein copula (bin number 4) from a Dirichlet sample
# and draw its PDF and CDF side by side.
mySample = ot.Dirichlet([1.0, 2.0, 3.0]).getSample(100)
myOrderStatCop = ot.EmpiricalBernsteinCopula(mySample, 4)
myOrderStatCop.setDescription(['$u_1$', '$u_2$'])
graphPDF = myOrderStatCop.drawPDF()
graphCDF = myOrderStatCop.drawCDF()


fig = plt.figure(figsize=(8, 4))
plt.suptitle("EmpiricalBernsteinCopula: pdf and cdf")
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
pdf_axis.set_xlim(auto=True)
cdf_axis.set_xlim(auto=True)

# Render each OpenTURNS graph into its matplotlib axis.
View(graphPDF, figure=fig, axes=[pdf_axis], add_legend=True)
View(graphCDF, figure=fig, axes=[cdf_axis], add_legend=True)
# Benchmark: time the construction of an EmpiricalBernsteinCopula over a
# grid of dimensions and sample sizes.
# NOTE(review): `dimensions`, `sizes`, `correlation`, `np`, `time` and
# `otagr` are defined earlier in the original script -- confirm.
T = []
for d in dimensions:
    print("Dimension: ", d)
    # Equicorrelated d x d correlation matrix with off-diagonal value
    # `correlation` and unit diagonal.
    cm = np.reshape([correlation] * d**2, (d, d))
    np.fill_diagonal(cm, 1)
    cm = ot.CorrelationMatrix(cm)
    normal_copula = ot.NormalCopula(cm)
    normal_sample = normal_copula.getSample(1000000)
    t = []
    for s in sizes:
        print("    Size: ", s)
        # Reuse a prefix of the big sample instead of resampling.
        sample = normal_sample[:s]
        K = otagr.ContinuousTTest_GetK(s, 2)

        start = time.time()
        bc = ot.EmpiricalBernsteinCopula(sample, K, False)
        end = time.time()

        t.append(end - start)
    T.append(t)
T = np.array(T)

# Timing vs sample size, one curve per dimension.
for i in range(len(dimensions)):
    plt.plot(sizes, T[i], label=str(dimensions[i]) + 'D')
plt.legend()
plt.show()

# Timing vs dimension, one curve per sample size (transposed view).
for i in range(len(sizes)):
    plt.plot(dimensions, T.T[i], label=(str(sizes[i])))
plt.legend()
plt.show()
Exemplo n.º 9
0
# %%
# Learning parameters
# Bernstein copulas are used to learn the local conditional copulas associated to each node
# NOTE(review): `train`, `ndag` and `otagrum` are defined earlier in the
# original script -- confirm.

# %%
m_list = []
lcc_list = []
for i in range(train.getDimension()):
    m_list.append(ot.UniformFactory().build(train.getMarginal(i)))
    # Local conditional copula over node i and its parents in the DAG.
    indices = [i] + [int(n) for n in ndag.getParents(i)]
    dim_lcc = len(indices)
    if dim_lcc == 1:
        # Root node: no parents, so the local copula is trivial.
        bernsteinCopula = ot.IndependentCopula(1)
    elif dim_lcc > 1:
        K = otagrum.ContinuousTTest.GetK(len(train), dim_lcc)
        bernsteinCopula = ot.EmpiricalBernsteinCopula(
            train.getMarginal(indices), K, False)
    lcc_list.append(bernsteinCopula)

# %%
# We can now create the learned CBN

# %%
lcbn = otagrum.ContinuousBayesianNetwork(ndag, m_list, lcc_list)  # Learned CBN

# %%
# And compare the mean loglikelihood between the true and the learned models


# %%
def compute_mean_LL(cbn, test):
    ll = 0