Example #1
def test_multivariate_aggregation():
    """Check different aggregation functions for multivariate aggregation, as well as a copula."""

    # For sum
    marginal_pdfs = [
        [1 / 3, 1 / 3, 1 / 3],
        [1 / 4, 1 / 4, 2 / 4],
        [1 / 6, 1 / 6, 2 / 3],
    ]
    marginal_cdfs = np.cumsum(marginal_pdfs, axis=1)
    cdf_p, cdf_v = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdfs, a=10, b=100)
    # Check for a non-decreasing cdf
    assert all(np.diff(cdf_v) >= 0) and all(np.diff(cdf_p) >= 0)
    # Check the range of aggregated outcomes
    assert cdf_v[0] == 30 and cdf_v[-1] == 300
    # Check the range of cumulative probabilities
    assert cdf_p[0] == 1 / 3 * 1 / 4 * 1 / 6 and cdf_p[-1] == 1

    # For mean
    cdf_p, cdf_v = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdfs, agg_function=np.mean, a=10, b=100)
    # Check for a non-decreasing cdf
    assert all(np.diff(cdf_v) >= 0) and all(np.diff(cdf_p) >= 0)
    # Check the range of aggregated outcomes
    assert cdf_v[0] == 10 and cdf_v[-1] == 100
    # Check the range of cumulative probabilities
    assert cdf_p[0] == 1 / 3 * 1 / 4 * 1 / 6 and cdf_p[-1] == 1

    # For a normal copula whose correlation matrix has a positive correlation
    # between the 1st and 2nd variables
    R = ot.CorrelationMatrix(len(marginal_cdfs))
    R[0, 1] = 0.25
    cdf_p, cdf_v = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdfs, copula=ot.NormalCopula(R), a=10, b=100)
    # Check for a non-decreasing cdf
    assert all(np.diff(cdf_v) >= 0) and all(np.diff(cdf_p) >= 0)
    # Check the range of aggregated outcomes
    assert cdf_v[0] == 30 and cdf_v[-1] == 300
    # Check the range of cumulative probabilities
    assert cdf_p[0] > 1 / 3 * 1 / 4 * 1 / 6 and cdf_p[-1] == 1

    # For a normal copula whose correlation matrix has a negative correlation
    # between the 1st and 2nd variables
    R = ot.CorrelationMatrix(len(marginal_cdfs))
    R[0, 1] = -0.25
    cdf_p, cdf_v = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdfs, copula=ot.NormalCopula(R), a=10, b=100)
    # Check for a non-decreasing cdf
    assert all(np.diff(cdf_v) >= 0) and all(np.diff(cdf_p) >= 0)
    # Check the range of aggregated outcomes
    assert cdf_v[0] == 30 and cdf_v[-1] == 300
    # Check the range of cumulative probabilities
    assert cdf_p[0] < 1 / 3 * 1 / 4 * 1 / 6 and cdf_p[-1] == 1
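Note that the independence checks above compare cdf_p[0] against 1/3 * 1/4 * 1/6: under an independent copula, the probability that every variable takes its lowest outcome is the product of the first marginal probabilities. A minimal sketch of that arithmetic:

# Probability of the lowest joint outcome under independence,
# i.e. the value asserted for cdf_p[0] above
print(1 / 3 * 1 / 4 * 1 / 6)  # 1/72, ~0.0139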
Example #2
def generateSampleWithConditionalIndependance3(size=1000):
    dim = 5
    # X0 ⊥ X4 | X1,X2,X3  <=>  r34 = (-r01*r13*r23*r24 + r01*r14*r23^2
    #   + r02*r13^2*r24 - r02*r13*r14*r23 - r03*r12*r13*r24 - r03*r12*r14*r23
    #   + 2*r04*r12*r13*r23 + r01*r12*r24 + r02*r12*r14 + r03*r13*r14
    #   + r03*r23*r24 - r04*r12^2 - r04*r13^2 - r04*r23^2 - r01*r14 - r02*r24
    #   + r04) / (r01*r12*r23 + r02*r12*r13 - r03*r12^2 - r01*r13 - r02*r23 + r03)
    R = ot.CorrelationMatrix(dim)
    R[0, 1] = 0.9
    R[0, 2] = 0.9
    R[0, 3] = 0.9
    R[0, 4] = 0.9
    R[1, 2] = 0.9
    R[1, 3] = 0.9
    R[1, 4] = 0.9
    R[2, 3] = 0.9
    R[2, 4] = 0.95
    R[3, 4] = (
        -R[0, 1] * R[1, 3] * R[2, 3] * R[2, 4]
        + R[0, 1] * R[1, 4] * R[2, 3] ** 2
        + R[0, 2] * R[1, 3] ** 2 * R[2, 4]
        - R[0, 2] * R[1, 3] * R[1, 4] * R[2, 3]
        - R[0, 3] * R[1, 2] * R[1, 3] * R[2, 4]
        - R[0, 3] * R[1, 2] * R[1, 4] * R[2, 3]
        + 2 * R[0, 4] * R[1, 2] * R[1, 3] * R[2, 3]
        + R[0, 1] * R[1, 2] * R[2, 4]
        + R[0, 2] * R[1, 2] * R[1, 4]
        + R[0, 3] * R[1, 3] * R[1, 4]
        + R[0, 3] * R[2, 3] * R[2, 4]
        - R[0, 4] * R[1, 2] ** 2
        - R[0, 4] * R[1, 3] ** 2
        - R[0, 4] * R[2, 3] ** 2
        - R[0, 1] * R[1, 4]
        - R[0, 2] * R[2, 4]
        + R[0, 4]
    ) / (
        R[0, 1] * R[1, 2] * R[2, 3]
        + R[0, 2] * R[1, 2] * R[1, 3]
        - R[0, 3] * R[1, 2] ** 2
        - R[0, 1] * R[1, 3]
        - R[0, 2] * R[2, 3]
        + R[0, 3]
    )
    copula = ot.NormalCopula(R)
    copula.setDescription(["X" + str(i) for i in range(dim)])
    return copula.getSample(size)  # .exportToCSVFile("conditional_independence_04_123.csv")
Example #3
def test_multivariate_aggregation_with_unmatched_bins_and_dependence(
        multivariate_test_cdfs):
    """Check multivariate aggregation where the outcomes of each variable are completely different,
    and the variables are correlated."""

    marginal_cdf_p, marginal_cdf_v = multivariate_test_cdfs
    dim = len(marginal_cdf_p)

    # Make a correlation matrix with positive correlation between each pair of adjacent variables (needs at least 2D)
    R = ot.CorrelationMatrix(dim)
    for d in range(1, dim):
        R[d - 1, d] = 0.25
    cdf_p, cdf_v = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdf_p,
        marginal_cdfs_v=marginal_cdf_v,
        copula=ot.NormalCopula(R))
    # Check for a non-decreasing cdf
    assert all(np.diff(cdf_v) >= 0) and all(np.diff(cdf_p) >= 0)
    # Check the range of aggregated outcomes
    assert cdf_v[0] >= 10 * dim and cdf_v[-1] <= 100 * dim
    # Check the range of cumulative probabilities
    assert cdf_p[0] >= 0 and (cdf_p[-1] < 1 or cdf_p[-1] == approx(1))

    cdf_p_2, cdf_v_2 = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdf_p,
        marginal_cdfs_v=marginal_cdf_v,
        copula=ot.NormalCopula(R),
        n_draws=1000,
    )
    assert len(cdf_p_2) == 1000
Example #4
def generateDataForSpecificInstance(size):
  R = ot.CorrelationMatrix(3)
  R[0, 1] = 0.5
  R[0, 2] = 0.45
  collection = [ot.FrankCopula(3.0), ot.NormalCopula(R), ot.ClaytonCopula(2.0)]
  copula = ot.ComposedCopula(collection)
  return copula.getSample(size)
Example #5
def define_distribution():
    """
    Define the distribution of the training example (beam).
    Return a ComposedDistribution object from openTURNS
    """
    sample_E = ot.Sample.ImportFromCSVFile("sample_E.csv")
    kernel_smoothing = ot.KernelSmoothing(ot.Normal())
    bandwidth = kernel_smoothing.computeSilvermanBandwidth(sample_E)
    E = kernel_smoothing.build(sample_E, bandwidth)
    E.setDescription(['Young modulus'])

    F = ot.LogNormal()
    F.setParameter(ot.LogNormalMuSigma()([30000, 9000, 15000]))
    F.setDescription(['Load'])

    L = ot.Uniform(250, 260)
    L.setDescription(['Length'])

    I = ot.Beta(2.5, 4, 310, 450)
    I.setDescription(['Inertia'])

    marginal_distributions = [F, E, L, I]
    SR_cor = ot.CorrelationMatrix(len(marginal_distributions))
    SR_cor[2, 3] = -0.2
    copula = ot.NormalCopula(ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(SR_cor))

    return ot.ComposedDistribution(marginal_distributions, copula)
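A possible usage sketch for define_distribution (hypothetical; it assumes a sample_E.csv file is present in the working directory, as required by ot.Sample.ImportFromCSVFile):

# Build the beam input distribution and draw a few correlated inputs [F, E, L, I]
beam_distribution = define_distribution()
print(beam_distribution.getSample(5))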
Example #6
def test_marginal_distributions_with_residual_probability():
    """Aggregate three time slots with CDFs that are not fully specified.
    That means each has a residual probability that their outcome is higher than the highest value given.
    """

    # Make sure incomplete cdf functions can still be transformed (a higher outcome with cp=1 can be assumed to exist)
    marginal_cdfs = [
        [1 / 3, 2 / 4, 3 / 4],
        [1 / 4, 4 / 6, 5 / 6],
        [1 / 6, 5 / 9, 8 / 9],
    ]
    cdf_p, _ = multivariate_marginal_to_univariate_joint_cdf(marginal_cdfs)
    assert all(np.diff(cdf_p) >= 0)  # Check for non-decreasing cdf
    # Check that the assumed outcome with cp=1 is not actually returned
    assert cdf_p[-1] < 1
    # The probability of the highest outcome for each of the three variables
    a = cdf_p[-1] - cdf_p[-2]
    # The probability for independent random variables
    b = 1 / 4 * 1 / 6 * 3 / 9
    assert a == approx(b)  # Check the expected outcome
    # Make a correlation matrix with negative correlation between the first and second variable
    R = ot.CorrelationMatrix(len(marginal_cdfs))
    R[0, 1] = -0.25
    cdf_p, _ = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdfs, copula=ot.NormalCopula(R))
    assert all(np.diff(cdf_p) >= 0)  # Check for non-decreasing cdf
    # The probability of the highest outcome for each of the three variables
    a = cdf_p[-1] - cdf_p[-2]
    # Check that the expected outcome is now lower
    # (if x1 is high, then x2 is less likely to be high)
    assert a < b
    # Make a correlation matrix with positive correlation between the first and second variable
    R = ot.CorrelationMatrix(len(marginal_cdfs))
    R[0, 1] = 0.25
    cdf_p, _ = multivariate_marginal_to_univariate_joint_cdf(
        marginal_cdfs, copula=ot.NormalCopula(R))
    assert all(np.diff(cdf_p) >= 0)  # Check for non-decreasing cdf
    # The probability of the highest outcome for each of the three variables
    a = cdf_p[-1] - cdf_p[-2]
    # Check that the expected outcome is now higher
    # (if x1 is high, then x2 is likely to be high, too)
    assert a > b
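The independent benchmark b above is the product of the last probability-mass bins of the three marginals, obtained by differencing consecutive CDF values. A minimal check of that arithmetic:

# Last pdf bin of each marginal: (3/4 - 2/4), (5/6 - 4/6) and (8/9 - 5/9)
print((3 / 4 - 2 / 4) * (5 / 6 - 4 / 6) * (8 / 9 - 5 / 9))  # 1/72, ~0.0139
Example #7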
def generate_gaussian_copulas(ndag, r=0.8):
    lcc = []
    for k in range(ndag.getSize()):
        d = 1 + ndag.getParents(k).getSize()
        R = ot.CorrelationMatrix(d)
        for i in range(d):
            for j in range(i):
                R[i, j] = r
        lcc.append(ot.Normal([0.0] * d, [1.0] * d, R).getCopula())
    return lcc
Example #8
def generateSampleWithConditionalIndependance1(size=1000):
    dim = 3
    # X0 ⊥ X2 | X1  <=>  r12 = r02/r01
    R = ot.CorrelationMatrix(dim)
    R[0, 1] = 0.95
    R[0, 2] = 0.9
    R[1, 2] = R[0, 2] / R[0, 1]
    copula = ot.NormalCopula(R)
    copula.setDescription(["X" + str(i) for i in range(dim)])
    return copula.getSample(size)  # .exportToCSVFile("conditional_independence_02_1.csv")
def generate_gaussian_data(ndag, size, r=0.8):
    order = ndag.getTopologicalOrder()
    copulas = []
    for k in range(order.getSize()):
        d = 1 + ndag.getParents(k).getSize()
        R = ot.CorrelationMatrix(d)
        for i in range(d):
            for j in range(i):
                R[i, j] = r
        copulas.append(ot.NormalCopula(R))
    cbn = otagr.ContinuousBayesianNetwork(ndag, [ot.Uniform(0., 1.)]*ndag.getSize(), copulas)
    sample = cbn.getSample(size)
    return sample
Example #10
def generate_student_data(ndag, size, r=0.8):
    order = ndag.getTopologicalOrder()
    jointDistributions = []
    for k in range(order.getSize()):
        d = 1 + ndag.getParents(k).getSize()
        R = ot.CorrelationMatrix(d)
        for i in range(d):
            for j in range(i):
                R[i, j] = r
        jointDistributions.append(
            ot.Student(5.0, [0.0] * d, [1.0] * d, R).getCopula())
    copula = otagr.ContinuousBayesianNetwork(ndag, jointDistributions)
    sample = copula.getSample(size)
    return sample
Example #11
    def _getRho(self):
        # Build the correlation matrix from the pairwise dot products of the alpha vectors
        if self._alpha is None:
            self.getAlpha()
            self.getAlpha()

        dim = len(self._alpha)
        self._rho = ot.CorrelationMatrix(dim)

        for i in range(dim):
            for j in range(dim):
                if i < j:
                    self._rho[i, j] = np.dot(self._alpha[i], self._alpha[j])

        return self._rho
Example #12
def generateSampleWithConditionalIndependance2(size=1000):
    dim = 4
    # X0 ⊥ X3 | X1,X2  <=>  r23 = (r03*(r12^2 - 1) + r13*(r01 - r02*r12)) / (r01*r12 - r02)
    R = ot.CorrelationMatrix(dim)
    R[0, 1] = 0.9
    R[0, 2] = 0.9
    R[0, 3] = 0.9
    R[1, 2] = 0.9
    R[1, 3] = 0.95
    R[2, 3] = (R[0, 3] * (R[1, 2]**2 - 1.0) + R[1, 3] *
               (R[0, 1] - R[0, 2] * R[1, 2])) / (R[0, 1] * R[1, 2] - R[0, 2])
    copula = ot.NormalCopula(R)
    copula.setDescription(["X" + str(i) for i in range(dim)])
    return copula.getSample(size)  # .exportToCSVFile("conditional_independence_03_12.csv")
Example #13
    def test_CrossCutDistribution2(self):
        # Create a Funky distribution
        corr = ot.CorrelationMatrix(2)
        corr[0, 1] = 0.2
        copula = ot.NormalCopula(corr)
        x1 = ot.Normal(-1.0, 1.0)
        x2 = ot.Normal(2.0, 1.0)
        x_funk = ot.ComposedDistribution([x1, x2], copula)
        # Create a Punk distribution
        x1 = ot.Normal(1.0, 1.0)
        x2 = ot.Normal(-2.0, 1.0)
        x_punk = ot.ComposedDistribution([x1, x2], copula)
        distribution = ot.Mixture([x_funk, x_punk], [0.5, 1.0])
        referencePoint = distribution.getMean()
        crossCut = otbenchmark.CrossCutDistribution(distribution)
        # Avoid failing on CircleCi
        # _tkinter.TclError: no display name and no $DISPLAY environment variable
        try:
            _ = crossCut.drawMarginalPDF()
            _ = crossCut.drawConditionalPDF(referencePoint)
        except Exception as e:
            print(e)
Example #14
    def __init__(self):
        self.dim = 4  # number of inputs
        # Young's modulus E
        self.E = ot.Beta(0.9, 3.5, 65.0e9, 75.0e9)  # in N/m^2
        self.E.setDescription("E")
        self.E.setName("Young modulus")

        # Load F
        self.F = ot.LogNormal()  # in N
        self.F.setParameter(ot.LogNormalMuSigma()([300.0, 30.0, 0.0]))
        self.F.setDescription("F")
        self.F.setName("Load")

        # Length L
        self.L = ot.Uniform(2.5, 2.6)  # in m
        self.L.setDescription("L")
        self.L.setName("Length")

        # Moment of inertia I
        self.I = ot.Beta(2.5, 4.0, 1.3e-7, 1.7e-7)  # in m^4
        self.I.setDescription("I")
        self.I.setName("Inertia")

        # physical model
        self.model = ot.SymbolicFunction(['E', 'F', 'L', 'I'],
                                         ['F*L^3/(3*E*I)'])

        # correlation matrix
        self.R = ot.CorrelationMatrix(self.dim)
        self.R[2, 3] = -0.2
        self.copula = ot.NormalCopula(
            ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(self.R))
        self.distribution = ot.ComposedDistribution(
            [self.E, self.F, self.L, self.I], self.copula)

        # special case of an independent copula
        self.independentDistribution = ot.ComposedDistribution(
            [self.E, self.F, self.L, self.I])
Example #15
def hill_climbing(D, max_parents=4, restart=1):
    N = D.getDimension()
    # Compute the estimate of the gaussian copula
    kendall_tau = D.computeKendallTau()
    #print(kendall_tau)
    pearson_r = ot.CorrelationMatrix(np.sin((np.pi / 2) * kendall_tau))

    # Create the Gaussian copula with parameter pearson_r;
    # if pearson_r isn't positive definite, a regularization is applied
    eps = 1e-6
    done = False
    while not done:
        try:
            gaussian_copula = ot.NormalCopula(pearson_r)
            done = True
        except Exception:
            print("Regularization")
            for i in range(pearson_r.getDimension()):
                for j in range(i):
                    pearson_r[i, j] /= 1 + eps

    # Initialization
    G = du.create_empty_dag(N)
    score = sc.bic_score(D, gaussian_copula, G)

    best_graph = G
    best_score = score

    for r in range(restart):
        if r != 0:
            G = du.create_random_dag(N, max_parents)
        G, score = one_hill_climbing(D, gaussian_copula, G, max_parents)
        if score > best_score:
            best_graph = G
            best_score = score

    return gaussian_copula, best_graph, best_score
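The estimate at the top of hill_climbing relies on the Gaussian-copula identity r = sin(pi/2 * tau) linking the Pearson correlation r to Kendall's tau. A small sanity check of that relation (a sketch, sampling from a copula with known correlation):

import numpy as np
import openturns as ot

R = ot.CorrelationMatrix(2)
R[0, 1] = 0.6
sample = ot.NormalCopula(R).getSample(5000)
tau = sample.computeKendallTau()[0, 1]
print(np.sin(np.pi / 2 * tau))  # should be close to 0.6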
"""

# %%
# In this example we are going to draw clouds of points from a data sample.

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create 2-d samples to visualize
N = 500
R = ot.CorrelationMatrix(2)
R[0, 1] = -0.7
# 2d N(1,1) with correlation
sample1 = ot.Normal([1.0] * 2, [1.0] * 2, R).getSample(N)
sample2 = ot.Normal(2).getSample(N)  # 2d N(0,1) independent

# %%
# Create cloud drawables
cloud1 = ot.Cloud(sample1, 'blue', 'fsquare', 'First Cloud')
cloud2 = ot.Cloud(sample2, 'red', 'fsquare', 'Second Cloud')

# Then, assemble them into a graph
myGraph2d = ot.Graph('2d clouds', 'x1', 'x2', True, 'topright')
myGraph2d.add(cloud1)
myGraph2d.add(cloud2)
view = viewer.View(myGraph2d)
Example #17
# %%
# First, we define a regular 2-d mesh
discretization = [10, 5]
mesher = ot.IntervalMesher(discretization)
lowerBound = [0.0, 0.0]
upperBound = [2.0, 1.0]
interval = ot.Interval(lowerBound, upperBound)
mesh = mesher.build(interval)
graph = mesh.draw()
graph.setTitle('Regular 2-d mesh')
view = viewer.View(graph)

# %%
# We now create a field from a mesh and some values
values = ot.Normal([0.0] * 2, [1.0] * 2,
                   ot.CorrelationMatrix(2)).getSample(len(mesh.getVertices()))
for i in range(len(values)):
    x = values[i]
    values[i] = 0.05 * x / x.norm()
field = ot.Field(mesh, values)

# %%
# We can export the `field` to a VTK file. It can be
# read later with an external program such as Paraview.
field.exportToVTKFile('field.vtk')

# %%
# Display figures
plt.show()
Example #18
for i in range(obsSize):
    for j in range(chainDim):
        p[i, j] = (-2 + 5. * i / 9.)**j
print('p=', p)

fullModel = ot.SymbolicFunction(['p1', 'p2', 'p3', 'x1', 'x2', 'x3'],
                                ['p1*x1+p2*x2+p3*x3', '1.0'])
linkFunction = ot.ParametricFunction(fullModel, range(chainDim),
                                     [0.0] * chainDim)

# instrumental distribution
instrumental = ot.Uniform(-1., 1.)

# prior distribution
sigma0 = [10.0] * chainDim
Q0 = ot.CorrelationMatrix(chainDim)  # precision matrix
Q0_inv = ot.CorrelationMatrix(chainDim)  # variance matrix
for i in range(chainDim):
    Q0_inv[i, i] = sigma0[i] * sigma0[i]
    Q0[i, i] = 1.0 / Q0_inv[i, i]
print('Q0=', Q0)

mu0 = [0.0] * chainDim
prior = ot.Normal(mu0, Q0_inv)  # x0 ~ N(mu0, sigma0)
print('x~', prior)

# start from the mean x0=(0.,0.,0.)
print('x0=', mu0)

# conditional distribution y~N(z, 1.0)
conditional = ot.Normal()
Example #19
# marginals piping
marginals_p =[distribution_D, distribution_D_cover,
              distribution_d70, distribution_h_exit,
              distribution_k,
              distribution_L, distribution_m_p]
distribution_p = ot.ComposedDistribution(marginals_p)

# create marginal distribution
marginals = [distribution_D, distribution_D_cover,
             distribution_d70, distribution_h_exit,
             distribution_i_ch, distribution_k,
             distribution_L, distribution_m_p, 
             distribution_m_u]

# create copula uplift
RS_u = ot.CorrelationMatrix(len(marginals_u))
R_u = ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(RS_u)
copula_u = ot.NormalCopula(R_u)

# create copula heave
RS_h = ot.CorrelationMatrix(len(marginals_h))
R_h = ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(RS_h)
copula_h = ot.NormalCopula(R_h)

# create copula piping
RS_p = ot.CorrelationMatrix(len(marginals_p))
R_p = ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(RS_p)
copula_p = ot.NormalCopula(R_p)

# create copula
RS = ot.CorrelationMatrix(len(marginals))
Example #20
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()

# Instantiate one distribution object
dimension = 3
meanPoint = ot.Point(dimension, 1.0)
meanPoint[0] = 0.5
meanPoint[1] = -0.5
sigma = ot.Point(dimension, 1.0)
sigma[0] = 2.0
sigma[1] = 3.0
R = ot.CorrelationMatrix(dimension)
for i in range(1, dimension):
    R[i, i - 1] = 0.5

# Create a collection of distribution
aCollection = ot.DistributionCollection()

aCollection.add(ot.Normal(meanPoint, sigma, R))
meanPoint += ot.Point(meanPoint.getDimension(), 1.0)
aCollection.add(ot.Normal(meanPoint, sigma, R))
meanPoint += ot.Point(meanPoint.getDimension(), 1.0)
aCollection.add(ot.Normal(meanPoint, sigma, R))

# Instantiate one distribution object
distribution = ot.Mixture(aCollection, ot.Point(aCollection.getSize(), 2.0))
print("Distribution ", repr(distribution))
print("Weights = ", repr(distribution.getWeights()))
Example #21
# **We assume that the random variables E, F, L and I are dependent and
# associated with a Gaussian copula whose correlation matrix is:**
#
# .. math::
#    \begin{pmatrix}
#    1 & 0 & 0 & 0 \\
#    0 & 1 & 0 & 0 \\
#    0 & 0 & 1 & -0.2 \\
#    0 & 0 & -0.2 & 1 \\
#    \end{pmatrix}

# %%
# We implement this correlation:

# Create the Spearman correlation matrix of the input random vector
RS = ot.CorrelationMatrix(4)
RS[2, 3] = -0.2

# Evaluate the correlation matrix of the Normal copula from RS
R = ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(RS)

# Create the Normal copula parametrized by R
mycopula = ot.NormalCopula(R)

# %%
# Finally, we create the composed input probability distribution.
inputDistribution = ot.ComposedDistribution([E, F, L, I], mycopula)
inputDistribution.setDescription(("E", "F", "L", "I"))
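
# %%
# As an aside, GetCorrelationFromSpearmanCorrelation applies the usual
# Normal-copula relation r = 2*sin(pi/6 * rho_S); a quick sanity check
# (a sketch) on the single nonzero off-diagonal entry:
import math
print(2 * math.sin(math.pi / 6 * -0.2))  # ~-0.2091, matching R[2, 3]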

# %%
# Create the event whose probability we want to estimate:
Example #22
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if (ot.ClaytonCopula().__class__.__name__ == 'SklarCopula'):
    myStudent = ot.Student(3.0, [1.0] * 2, [3.0] * 2, ot.CorrelationMatrix(2))
    copula = ot.SklarCopula(myStudent)
else:
    copula = ot.ClaytonCopula()
if copula.getDimension() == 1:
    copula = ot.ClaytonCopula(2)
copula.setDescription(['$u_1$', '$u_2$'])
pdf_graph = copula.drawPDF()
cdf_graph = copula.drawCDF()
fig = plt.figure(figsize=(10, 4))
plt.suptitle(str(copula))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
View(pdf_graph,
     figure=fig,
     axes=[pdf_axis],
     add_legend=False,
     square_axes=True)
View(cdf_graph,
     figure=fig,
     axes=[cdf_axis],
     add_legend=False,
     square_axes=True)
Example #23
test_model(myModel)

myDefautModel = ot.FractionalBrownianMotionModel(2.0, 3.0, 0.25)
print('myDefautModel = ', myDefautModel)
test_model(myDefautModel)

myModel = ot.SphericalModel([2.0] * inputDimension, [3.0], 4.5)
test_model(myModel)

myDefautModel = ot.DiracCovarianceModel()
print('myDefautModel = ', myDefautModel)
test_model(myDefautModel)

amplitude = [1.5 + 2.0 * k for k in range(2)]
dimension = 2
spatialCorrelation = ot.CorrelationMatrix(dimension)
for j in range(dimension):
    for i in range(j + 1, dimension):
        spatialCorrelation[i, j] = (i + 1.0) / dimension - (j + 1.0) / dimension
myModel = ot.DiracCovarianceModel(inputDimension, amplitude,
                                  spatialCorrelation)
test_model(myModel, x1=[0.5, 0.0], x2=[0.5, 0.0])

myDefautModel = ot.ProductCovarianceModel()
print('myDefautModel = ', myDefautModel)
test_model(myDefautModel)

cov1 = ot.AbsoluteExponential([2.0], [3.0])
cov2 = ot.SquaredExponential([2.0], [3.0])
myModel = ot.ProductCovarianceModel([cov1, cov2])
Example #24
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()

marginals = [ot.Normal(), ot.Uniform()]
R = ot.CorrelationMatrix(2, [1.0, 0.5, 0.5, 1.0])
copula = ot.NormalCopula(R)
factories = [
    ot.SoizeGhanemFactory(ot.ComposedDistribution(marginals)),
    ot.SoizeGhanemFactory(ot.ComposedDistribution(marginals, copula), False),
    ot.SoizeGhanemFactory(ot.ComposedDistribution(marginals, copula), True)
]
x = [0.5] * 2
kMax = 5
ot.ResourceMap.SetAsUnsignedInteger("IteratedQuadrature-MaximumSubIntervals",
                                    2048)
ot.ResourceMap.SetAsScalar("IteratedQuadrature-MaximumError", 1.0e-6)
for soize in factories:
    distribution = soize.getMeasure()
    print('SoizeGhanem=', soize)
    functions = list()
    for k in range(kMax):
        functions.append(soize.build(k))
        print('SoizeGhanem(', k, ')=', functions[k].getEvaluation())
        print('SoizeGhanem(', k, ')(', x, '=', functions[k](x))
    M = ot.SymmetricMatrix(kMax)
    for m in range(kMax):
        for n in range(m + 1):
Example #25
# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create data to visualize

# Create the model Y = x1^2 + x2
model = ot.SymbolicFunction(["x1", "x2"], ["x1^2+x2"])

# Create the input distribution and random vector X
myCorMat = ot.CorrelationMatrix(2)
myCorMat[0, 1] = -0.6
inputDist = ot.Normal([0., 0.], myCorMat)
inputDist.setDescription(['X1', 'X2'])

inputVector = ot.RandomVector(inputDist)

# Create the output random vector Y=model(X)
output = ot.CompositeRandomVector(model, inputVector)

# Generate the input sample
N = 500
X = inputVector.getSample(N)

# Evaluate the associated output sample
Y = model(X)
Example #26

def cleanPoint(inPoint):
    dim = inPoint.getDimension()
    for i in range(dim):
        if (fabs(inPoint[i]) < 1.e-10):
            inPoint[i] = 0.0
    return inPoint


ot.PlatformInfo.SetNumericalPrecision(5)
# Instantiate one distribution object
for dim in range(1, 5):
    meanPoint = [0.0] * dim
    sigma = [1.0 + i for i in range(dim)]
    R = ot.CorrelationMatrix(dim)
    for i in range(1, dim):
        R[i, i - 1] = 0.5

    distribution = ot.Normal(meanPoint, sigma, R)

    distribution.setName("A normal distribution")
    description = ["Marginal " + str(1 + i) for i in range(dim)]
    distribution.setDescription(description)

    print("Parameters collection=",
          repr(distribution.getParametersCollection()))
    for i in range(6):
        print("standard moment n=", i, " value=",
              distribution.getStandardMoment(i))
    print("Standard representative=", distribution.getStandardRepresentative())
Example #27
import sys

try:

    from openturns.viewer import View
    import openturns as ot

    # Curve
    graph = ot.Normal().drawCDF()
    # graph.draw('curve1.png')
    view = View(graph, pixelsize=(800, 600), plot_kw={'color': 'blue'})
    # view.save('curve1.png')
    view.show()

    # Contour
    graph = ot.Normal([1, 2], [3, 5], ot.CorrelationMatrix(2)).drawPDF()
    # graph.draw('curve2.png')
    view = View(graph)
    # view.save('curve2.png')
    view.show()

    # Histogram tests
    normal = ot.Normal(1)
    size = 100
    sample = normal.getSample(size)
    graph = ot.HistogramFactory().build(sample, 10).drawPDF()
    # graph.draw('curve3.png')
    view = View(graph)
    # view.save('curve3.png')
    view.show()
Example #28
from __future__ import print_function
import openturns as ot
import sys
import openturns.testing as ott
import math as m

ot.TESTPREAMBLE()

ot.ResourceMap.SetAsBool('Distribution-MinimumVolumeLevelSetBySampling', True)
ot.ResourceMap.SetAsUnsignedInteger(
    'Distribution-MinimumVolumeLevelSetSamplingSize', 500)

# 2-d test
dists = [
    ot.Normal([-1.0, 2.0], [1.0] * 2, ot.CorrelationMatrix(2)),
    ot.Normal([1.0, -2.0], [1.5] * 2, ot.CorrelationMatrix(2))
]
mixture = ot.Mixture(dists)

# 3-d test
R1 = ot.CovarianceMatrix(3)
R1[2, 1] = -0.25
R2 = ot.CovarianceMatrix(3)
R2[1, 0] = 0.5
R2[2, 1] = -0.3
R2[0, 0] = 1.3
print(R2)
dists = [ot.Normal([1.0, -2.0, 3.0], R1), ot.Normal([-1.0, 2.0, -2.0], R2)]
mixture = ot.Mixture(dists, [2.0 / 3.0, 1.0 / 3.0])
Example #29
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.Beta().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.Beta().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork([ot.Normal(2),ot.Dirichlet([0.5, 1.0, 1.5])], ot.BipartiteGraph([[0,1], [0,1]]))
elif ot.Beta().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.Beta()
dimension = distribution.getDimension()
if dimension == 1:
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()
    fig = plt.figure(figsize=(10, 5))
    plt.suptitle(str(distribution))
Example #30
# Using some reference values
# See https://en.wikipedia.org/wiki/Kernel_(statistics)#Kernel_functions_in_common_use
# First Normal dist with default ctor
distribution = ot.Normal()
ott.assert_almost_equal(distribution.getRoughness(),
                        0.5 / m.sqrt(m.pi))

# Dimension 2 (Fix https://github.com/openturns/openturns/issues/1485)
# Independent copula: product of integrals
distribution = ot.Normal(2)
ott.assert_almost_equal(distribution.getRoughness(),
                        compute_roughness_sampling(distribution))

# 2D Normal with scale & correlation
# This allows checking that Normal::getRoughness is well implemented
corr = ot.CorrelationMatrix(2)
corr[1, 0] = 0.3
distribution = ot.Normal([0, 0], [1, 2], corr)
ott.assert_almost_equal(distribution.getRoughness(),
                        compute_roughness_sampling(distribution))

distribution = ot.Epanechnikov()
ott.assert_almost_equal(distribution.getRoughness(), 3/5)

distribution = ot.Triangular()
ott.assert_almost_equal(distribution.getRoughness(), 2/3)

distribution = ot.Distribution(Quartic())
ott.assert_almost_equal(distribution.getRoughness(), 5/7)

# Testing Histogram ==> getSingularities