示例#1
0
def define_distribution():
    """Build the input distribution of the beam training example.

    Returns an openTURNS ComposedDistribution whose marginals are
    [F, E, L, I] linked by a Normal copula.
    """
    # Young's modulus: kernel-smoothed from a CSV sample (Silverman bandwidth).
    young_sample = ot.Sample.ImportFromCSVFile("sample_E.csv")
    smoother = ot.KernelSmoothing(ot.Normal())
    silverman_bw = smoother.computeSilvermanBandwidth(young_sample)
    young = smoother.build(young_sample, silverman_bw)
    young.setDescription(['Young modulus'])

    # Load: lognormal defined through its (mu, sigma, gamma) parametrization.
    load = ot.LogNormal()
    load.setParameter(ot.LogNormalMuSigma()([30000, 9000, 15000]))
    load.setDescription(['Load'])

    # Length: uniform on [250, 260].
    length = ot.Uniform(250, 260)
    length.setDescription(['Length'])

    # Moment of inertia: Beta distribution on [310, 450].
    inertia = ot.Beta(2.5, 4, 310, 450)
    inertia.setDescription(['Inertia'])

    marginals = [load, young, length, inertia]
    # Single non-zero Spearman correlation, between components 2 and 3 (L, I).
    spearman = ot.CorrelationMatrix(len(marginals))
    spearman[2, 3] = -0.2
    gaussian_copula = ot.NormalCopula(
        ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(spearman))

    return ot.ComposedDistribution(marginals, gaussian_copula)
    def __init__(self):
        """Set up the cantilever-beam use case: marginals, symbolic model,
        copula, and the composed input distributions."""
        self.dim = 4  # number of inputs
        # Young's modulus E
        self.E = ot.Beta(0.9, 3.5, 65.0e9, 75.0e9)  # in N/m^2
        self.E.setDescription("E")
        self.E.setName("Young modulus")

        # Load F
        self.F = ot.LogNormal()  # in N
        # parametrized by (mu, sigma, gamma) = (300, 30, 0)
        self.F.setParameter(ot.LogNormalMuSigma()([300.0, 30.0, 0.0]))
        self.F.setDescription("F")
        self.F.setName("Load")

        # Length L
        self.L = ot.Uniform(2.5, 2.6)  # in m
        self.L.setDescription("L")
        self.L.setName("Length")

        # Moment of inertia I
        self.I = ot.Beta(2.5, 4.0, 1.3e-7, 1.7e-7)  # in m^4
        self.I.setDescription("I")
        self.I.setName("Inertia")

        # physical model: beam deviation y = F*L^3 / (3*E*I)
        self.model = ot.SymbolicFunction(['E', 'F', 'L', 'I'],
                                         ['F*L^3/(3*E*I)'])

        # correlation matrix
        # Spearman correlation between components 2 and 3 (L and I)
        self.R = ot.CorrelationMatrix(self.dim)
        self.R[2, 3] = -0.2
        self.copula = ot.NormalCopula(
            ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(self.R))
        self.distribution = ot.ComposedDistribution(
            [self.E, self.F, self.L, self.I], self.copula)

        # special case of an independent copula
        self.independentDistribution = ot.ComposedDistribution(
            [self.E, self.F, self.L, self.I])
示例#3
0
# -*- coding: utf-8 -*-
# Copyright 2016 EDF. This software was developed with the collaboration of
# Phimeca Engineering (Sylvain Girard, [email protected]).
"""Run multiple FMU simulations in parallel."""

#§ Identifying the platform
import platform
key_platform = (platform.system(), platform.architecture()[0])
# Call to either 'platform.system' or 'platform.architecture' *after*
# importing pyfmi causes a segfault.
# Map (system, architecture) onto the FMU binary folder names.
dict_platform = {("Linux", "64bit"): "linux64", ("Windows", "64bit"): "win64"}

#§ Define the input distribution
import openturns as ot

# Cantilever-beam marginals: Young's modulus, load, length, moment of inertia.
E = ot.Beta(0.93, 3.2, 28000000.0, 48000000.0)
F = ot.LogNormalMuSigma(3.0e4, 9000.0, 15000.0).getDistribution()
L = ot.Uniform(250.0, 260.0)
I = ot.Beta(2.5, 4.0, 310.0, 450.0)

# Create the input probability distribution of dimension 4
inputDistribution = ot.ComposedDistribution([E, F, L, I])
# Give a description of each component of the input distribution
inputDistribution.setDescription(("E", "F", "L", "I"))
# Create the input random vector
inputRandomVector = ot.RandomVector(inputDistribution)

#§ FMU model
import otfmi
from pyfmi.fmi import FMUException
import sys
示例#4
0
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create a function R^n --> R^p
# For example R^4 --> R
myModel = ot.SymbolicFunction(['x1', 'x2', 'x3', 'x4'],
                              ['1+x1*x2 + 2*x3^2+x4^4'])

# Create a distribution of dimension n
# for example n=4 with independent components
distribution = ot.ComposedDistribution([
    ot.Normal(),
    ot.Uniform(),
    ot.Gamma(2.75, 1.0),
    ot.Beta(2.5, 1.0, -1.0, 2.0)
])

# %%
# Prepare the input/output samples
sampleSize = 250
X = distribution.getSample(sampleSize)
Y = myModel(X)
dimension = X.getDimension()

# %%
# build the orthogonal basis
# (one univariate polynomial family per input marginal)
coll = [
    ot.StandardDistributionPolynomialFactory(distribution.getMarginal(i))
    for i in range(dimension)
]
# The following parameter value leads to fast simulations.

# %%
maxDegree = 4

# %%
# For real tests, we suggest using the following parameter value:

# %%
# maxDegree = 7

# %%
# Let us define the parameters of the cantilever beam problem.

# %%
dist_E = ot.Beta(0.9, 2.2, 2.8e7, 4.8e7)
dist_E.setDescription(["E"])
F_para = ot.LogNormalMuSigma(3.0e4, 9.0e3, 15.0e3)  # in N
dist_F = ot.ParametrizedDistribution(F_para)
dist_F.setDescription(["F"])
dist_L = ot.Uniform(250.0, 260.0)  # in cm
dist_L.setDescription(["L"])
dist_I = ot.Beta(2.5, 1.5, 310.0, 450.0)  # in cm^4
dist_I.setDescription(["I"])

myDistribution = ot.ComposedDistribution([dist_E, dist_F, dist_L, dist_I])

dim_input = 4  # dimension of the input
dim_output = 1  # dimension of the output

示例#6
0
    print('parameters      =', repr(parameters))
    print('parameters (ref)=',
          repr(referenceDistribution[testCase].getParametersCollection()))
    print('parameter       =', repr(distribution[testCase].getParameter()))
    print('parameter desc  =',
          repr(distribution[testCase].getParameterDescription()))
    print('marginal 0      =', repr(distribution[testCase].getMarginal(0)))
    print('Standard representative=',
          referenceDistribution[testCase].getStandardRepresentative())
# Check simplification: each candidate distribution truncated to the matching
# interval should report an analytically simplified version.
candidates = [
    ot.Normal(1.0, 2.0),
    ot.Uniform(1.0, 2.0),
    ot.Exponential(1.0, 2.0),
    ot.TruncatedDistribution(ot.WeibullMin(), 1.5, 7.8),
    ot.Beta(1.5, 6.3, -1.0, 2.0)
]
intervals = [
    ot.Interval(-1.0, 4.0),
    ot.Interval(0.2, 2.4),
    ot.Interval(2.5, 65.0),
    ot.Interval(2.5, 6.0),
    ot.Interval(-2.5, 6.0)
]
# Pair each candidate with its interval directly instead of indexing
# with range(len(...)).
for candidate, interval in zip(candidates, intervals):
    d = ot.TruncatedDistribution(candidate, interval)
    print("d=", d, "simplified=", d.getSimplifiedVersion())

# Check that we can set the bounds independently
truncated = ot.TruncatedDistribution()
truncated.setDistribution(ot.Normal(20.0, 7.5))
示例#7
0
ot.Log.Show(ot.Log.NONE)

# %%
# We first create the data :
distribution = ot.Normal(2.0, 0.5)
sample1 = distribution.getSample(100)

# %%
# We draw the Henry line plot and expect a good fitting :
graph = ot.VisualTest.DrawHenryLine(sample1)
view = viewer.View(graph)

# %%
# For comparison's sake we draw the Henry line plot for a Beta distribution. The result is expected to be bad.
sample2 = ot.Beta(0.7, 0.9, 0.0, 2.0).getSample(100)
graph = ot.VisualTest.DrawHenryLine(sample2)
view = viewer.View(graph)

# %%
# Normality tests
# ---------------
#
# We use two tests to check whether a sample follows a normal distribution :
#
#  - the Anderson-Darling test
#  - the Cramer-Von Mises test
#

# %%
# We first generate two samples, one from a standard unit gaussian and another from a Gumbel
示例#8
0
    simulationResult = ot.ProbabilitySimulationResult(ot.ThresholdEvent(), 0.5,
                                                      0.01, 150, 4)
    myStudy.add('simulationResult', simulationResult)

    cNameList = [
        'LHS', 'DirectionalSampling', 'SimulationSensitivityAnalysis',
        'ProbabilitySimulationAlgorithm'
    ]
    for cName in cNameList:
        otClass = getattr(ot, cName)
        instance = otClass()
        print('--', cName, instance)
        myStudy.add(cName, instance)

    # Create a Beta distribution
    beta = ot.Beta(3.0, 2.0, -1.0, 4.0)
    myStudy.add('beta', beta)

    # Create an analytical Function
    input = ot.Description(3)
    input[0] = 'a'
    input[1] = 'b'
    input[2] = 'c'
    formulas = ot.Description(3)
    formulas[0] = 'a+b+c'
    formulas[1] = 'a-b*c'
    formulas[2] = '(a+2*b^2+3*c^3)/6'
    analytical = ot.SymbolicFunction(input, formulas)
    analytical.setName('analytical')
    analytical.setOutputDescription(['z1', 'z2', 'z3'])
    myStudy.add('analytical', analytical)
示例#9
0
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
# NOTE(review): generated gallery script — each branch tests the class name of
# ot.FrankCopula(), so only the final `else` branch is reachable here.
if ot.FrankCopula().__class__.__name__ == 'EmpiricalBernsteinCopula':
    sample = ot.Dirichlet([1.0, 2.0, 3.0]).getSample(100)
    copula = ot.EmpiricalBernsteinCopula(sample, 4)
elif ot.FrankCopula().__class__.__name__ == 'ExtremeValueCopula':
    copula = ot.ExtremeValueCopula(ot.SymbolicFunction("t", "t^3/2-t/2+1"))
elif ot.FrankCopula(
).__class__.__name__ == 'MaximumEntropyOrderStatisticsCopula':
    marginals = [ot.Beta(1.5, 3.2, 0.0, 1.0), ot.Beta(2.0, 4.3, 0.5, 1.2)]
    copula = ot.MaximumEntropyOrderStatisticsCopula(marginals)
elif ot.FrankCopula().__class__.__name__ == 'NormalCopula':
    R = ot.CorrelationMatrix(2)
    R[1, 0] = 0.8
    copula = ot.NormalCopula(R)
elif ot.FrankCopula().__class__.__name__ == 'SklarCopula':
    student = ot.Student(3.0, [1.0] * 2, [3.0] * 2, ot.CorrelationMatrix(2))
    copula = ot.SklarCopula(student)
else:
    copula = ot.FrankCopula()
# Force a bivariate copula so both panels below make sense.
if copula.getDimension() == 1:
    copula = ot.FrankCopula(2)
copula.setDescription(['$u_1$', '$u_2$'])
pdf_graph = copula.drawPDF()
cdf_graph = copula.drawCDF()
fig = plt.figure(figsize=(10, 4))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
View(pdf_graph,
     figure=fig,
# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create a function R^n --> R^p
# For example R^4 --> R
myModel = ot.SymbolicFunction(['x1', 'x2', 'x3', 'x4'], ['1+x1*x2 + 2*x3^2+x4^4'])

# Create a distribution of dimension n
# for example n=4 with independent components
distribution = ot.ComposedDistribution(
    [ot.Normal(), ot.Uniform(), ot.Gamma(2.75, 1.0), ot.Beta(2.5, 1.0, -1.0, 2.0)])

# %%
# Prepare the input/output samples
sampleSize = 250
X = distribution.getSample(sampleSize)
Y = myModel(X)
dimension = X.getDimension()

# %%
# build the orthogonal basis
# (one univariate polynomial family per marginal, linear enumeration)
coll = [ot.StandardDistributionPolynomialFactory(distribution.getMarginal(i)) for i in range(dimension)]
enumerateFunction = ot.LinearEnumerateFunction(dimension)
productBasis = ot.OrthogonalProductPolynomialFactory(coll, enumerateFunction)

# %%
示例#11
0
def Beta(r=2.0, t=4.0, a=-1.0, b=1.0):
    """
    Beta compatibility shim.

    Maps the legacy (r, t, a, b) arguments to the native parametrization,
    ot.Beta(alpha=r, beta=t - r, a, b), over the support [a, b].
    NOTE(review): the previous docstring said "InverseGamma", which does not
    match the returned distribution.
    """
    return ot.Beta(r, t - r, a, b)
示例#12
0
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot
from math import fabs
import openturns.testing as ott

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

# Collections used by the test: every distribution, plus separate
# continuous / discrete subsets.
continuousDistributionCollection = ot.DistributionCollection()
discreteDistributionCollection = ot.DistributionCollection()
distributionCollection = ot.DistributionCollection()

beta = ot.Beta(2.0, 1.0, 0.0, 1.0)
distributionCollection.add(beta)
continuousDistributionCollection.add(beta)

gamma = ot.Gamma(1.0, 2.0, 3.0)
distributionCollection.add(gamma)
continuousDistributionCollection.add(gamma)

gumbel = ot.Gumbel(1.0, 2.0)
distributionCollection.add(gumbel)
continuousDistributionCollection.add(gumbel)

lognormal = ot.LogNormal(1.0, 1.0, 2.0)
distributionCollection.add(lognormal)
continuousDistributionCollection.add(lognormal)

logistic = ot.Logistic(1.0, 1.0)
示例#13
0
# NOTE(review): E is defined in an earlier notebook cell not shown here.
E.setDescription(['Young modulus'])

# In[3]:

F = ot.LogNormal()
F.setParameter(ot.LogNormalMuSigma()([30000, 9000, 15000]))
F.setDescription(['Load'])

# In[4]:

L = ot.Uniform(250, 260)
L.setDescription(['Length'])

# In[5]:

I = ot.Beta(2.5, 4, 310, 450)
I.setDescription(['Inertia'])

# We now fix the order of the marginal distributions in the joint distribution. Order must match in the implementation of the physical model (to come).

# In[6]:

marginal_distributions = [F, E, L, I]

# Let us then define the dependence structure as a Normal copula with a single non-zero Spearman correlation between components 2 and 3 of the final random vector, that is $L$ and $I$.

# In[7]:

SR_cor = ot.CorrelationMatrix(len(marginal_distributions))
SR_cor[2, 3] = -0.2
copula = ot.NormalCopula(
示例#14
0
import openturns as ot
import numpy as np
from matplotlib import pyplot as plt
import sys
from openturns import viewer
from scipy.stats import beta, betaprime

# OpenTURNS Beta distribution on the support [-7.223, 52.559].
Beta = ot.Beta(5.957 + 1, 20.151 + 2 + 5.957, -7.223, 52.559)
Beta_PDF = Beta.drawPDF()
Beta_draw = Beta_PDF.getDrawable(0)
Beta_draw.setColor('blue')
viewer.View(Beta_draw)

# Beta2 =  ot.Beta(2.61339,7.55733,-2.50357,27.9764)
# Beta2_PDF = Beta2.drawPDF()
# Beta2_PDF.add(Beta_draw)
# viewer.View(Beta2_PDF)

x = np.linspace(-8, 60, 1002)

# Overlay the scipy.stats beta density for comparison.
# NOTE(review): the scipy shape parameter b = 20.151 + 1 does not match the
# OpenTURNS parameters above — confirm which parametrization is intended.
plt.figure(1)
plt.plot(x,
         beta.pdf(x, 5.957 + 1, 20.151 + 1, loc=-7.223, scale=59.782),
         c='black')
plt.show()
示例#15
0
# NOTE(review): `factory`, `sample` and `size` come from earlier lines not
# shown here; the buildAsChi calls below imply factory is an ot.ChiFactory.
estimatedDistribution = factory.build(sample)
print("distribution=", repr(distribution))
print("Estimated distribution=", repr(estimatedDistribution))
distribution = ot.Chi(1.0)
sample = distribution.getSample(size)
estimatedDistribution = factory.build(sample)
print("distribution=", repr(distribution))
print("Estimated distribution=", repr(estimatedDistribution))
distribution = ot.Chi(2.5)
sample = distribution.getSample(size)
estimatedDistribution = factory.build(sample)
print("distribution=", repr(distribution))
print("Estimated distribution=", repr(estimatedDistribution))
estimatedDistribution = factory.build()
print("Default distribution=", estimatedDistribution)
estimatedDistribution = factory.build(distribution.getParameter())
print("Distribution from parameters=", estimatedDistribution)
estimatedChi = factory.buildAsChi(sample)
print("Chi          =", distribution)
print("Estimated chi=", estimatedChi)
estimatedChi = factory.buildAsChi()
print("Default chi=", estimatedChi)
estimatedChi = factory.buildAsChi(distribution.getParameter())
print("Chi from parameters=", estimatedChi)

# overflow in boost::math::gamma_q_inv
# (regression test: fitting Chi on Beta data with a large support)
true_dist = ot.Beta(0.9, 2., 3e7, 5e7)
ot.RandomGenerator.SetSeed(0)
data = true_dist.getSample(229)
ot.ChiFactory().build(data)
示例#16
0
# %%
#
# In this example we are going to evaluate the min and max values of the output variable of interest from a sample and to evaluate the gradient of the limit state function defining the output variable of interest at a particular point.

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import math as m

ot.Log.Show(ot.Log.NONE)

# %%
# Create the marginal distributions of the parameters
# (cantilever beam: Young's modulus, load, length, moment of inertia)
dist_E = ot.Beta(0.93, 2.27, 2.8e7, 4.8e7)
dist_F = ot.LogNormalMuSigma(30000, 9000, 15000).getDistribution()
dist_L = ot.Uniform(250, 260)
dist_I = ot.Beta(2.5, 1.5, 3.1e2, 4.5e2)
marginals = [dist_E, dist_F, dist_L, dist_I]
distribution = ot.ComposedDistribution(marginals)

# %%
# Sample inputs
sampleX = distribution.getSample(100)

# %%
# Create the model (beam deviation y = F*L^3 / (3*E*I))
model = ot.SymbolicFunction(['E', 'F', 'L', 'I'], ['F*L^3/(3*E*I)'])

# %%
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.PlatformInfo.SetNumericalPrecision(3)
distribution = ot.Beta(2.3, 2.2, -1.0, 1.0)
print('distribution=', distribution)
sample = distribution.getSample(1000)
factory = ot.MaximumLikelihoodFactory(ot.Beta())
inf_distribution = factory.build(sample)
print('estimated distribution=', inf_distribution)

# set (a,b) out of (r, t, a, b)
# i.e. fix the support bounds (parameter indices 2 and 3) before fitting
factory.setKnownParameter([-1.0, 1.0], [2, 3])
inf_distribution = factory.build(sample)
print('estimated distribution with bounds=', inf_distribution)

# Exponential with a fixed gamma (parameter index 1)
factory = ot.MaximumLikelihoodFactory(ot.Exponential())
factory.setKnownParameter([0.1], [1])
print(factory.build())
print(factory.build([3, 0]))
import openturns as ot

ot.TESTPREAMBLE()
ot.PlatformInfo.SetNumericalPrecision(3)

size = 10000

# Fit a Gumbel by the method of moments.
distribution = ot.Gumbel(1.5, -0.5)
print('distribution=', distribution)
sample = distribution.getSample(size)
factory = ot.MethodOfMomentsFactory(ot.Gumbel())
inf_distribution = factory.build(sample)
print('estimated distribution=', inf_distribution)

# set (a,b) out of (r, t, a, b)
distribution = ot.Beta(2.3, 2.2, -1.0, 1.0)
print('distribution=', distribution)
sample = distribution.getSample(size)
factory = ot.MethodOfMomentsFactory(ot.Beta())
factory.setKnownParameter([-1.0, 1.0], [2, 3])
inf_distribution = factory.build(sample)
print('estimated distribution=', inf_distribution)

# with bounds
data = [
    0.6852, 0.9349, 0.5884, 1.727, 1.581, 0.3193, -0.5701, 1.623, 2.210,
    -0.3440, -0.1646
]
sample = ot.Sample([[x] for x in data])
size = sample.getSize()
xMin = sample.getMin()[0]
示例#19
0
# Wrap the FMU as an OpenTURNS function with 4 inputs (E, F, L, I) and one
# output y. NOTE(review): `path_fmu` is defined earlier, outside this excerpt.
model_fmu = otfmi.FMUFunction(path_fmu,
                              inputs_fmu=["E", "F", "L", "I"],
                              outputs_fmu="y")

# %%
# We test the function wrapping the deviation model on a point:
import openturns as ot
point = ot.Point([3e7, 2e4, 255, 350])
model_evaluation = model_fmu(point)
print("Running the FMU: deviation = {}".format(model_evaluation))

# %%
# We define probability laws on the 4 uncertain inputs:

E = ot.Beta(0.93, 3.2, 2.8e7, 4.8e7)
F = ot.LogNormal()
F.setParameter(ot.LogNormalMuSigma()([30.e3, 9e3, 15.e3]))
L = ot.Uniform(250.0, 260.0)
I = ot.Beta(2.5, 4.0, 310.0, 450.0)

# %%
# According to the laws of mechanics, when the length L increases, the moment
# of inertia I decreases.
# The variables L and I are thus negatively correlated.
#
# **We assume that the random variables E, F, L and I are dependent and
# associated with a Gaussian copula whose correlation matrix is:**
#
# .. math::
#    \begin{pmatrix}
#    f(x) = \frac{(x-a)^{\alpha - 1} (b - x)^{\beta - 1}}{(b - a)^{\alpha + \beta - 1} B(\alpha, \beta)}
#
#
# for any :math:`x\in[a,b]`, where :math:`B` is Euler's beta function.
#
# For any :math:`y,z>0`, the beta function is:
#
# .. math::
#    B(y,z) = \int_0^1 t^{y-1} (1-t)^{z-1} dt.
#

# %%
# The `Beta` class uses the native parametrization.

# %%
# Native parametrization: Beta(alpha, beta, a, b) on the support [a, b].
distribution = ot.Beta(2.5, 2.5, -1, 2)
graph = distribution.drawPDF()
view = viewer.View(graph)

# %%
# The `BetaMuSigma` class provides another parametrization, based on the expectation :math:`\mu` and the standard deviation  :math:`\sigma` of the random variable:
#
# .. math::
#    \mu = a + \frac{(b-a)\alpha}{\alpha+\beta}
#
#
# and
#
# .. math::
#    \sigma^2 = \left(\frac{b-a}{\alpha+\beta}\right)^2 \frac{\alpha\beta}{\alpha+\beta+1}.
#
示例#21
0
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# We define the symbolic function which evaluates the output Y depending on the inputs E, F, L and I.

# %%
# (beam deviation: Y = F*L^3 / (3*E*I))
model = ot.SymbolicFunction(["E", "F", "L", "I"], ["F*L^3/(3*E*I)"])

# %%
# Then we define the distribution of the input random vector.

# %%
# Young's modulus E
E = ot.Beta(0.9, 3.5, 2.5e7, 5.0e7)  # in N/m^2
E.setDescription("E")
# Load F
F = ot.LogNormal()  # in N
F.setParameter(ot.LogNormalMuSigma()([30.e3, 9e3, 15.e3]))
F.setDescription("F")
# Length L
L = ot.Uniform(250., 260.)  # in cm
L.setDescription("L")
# Moment of inertia I
I = ot.Beta(2.5, 4, 310, 450)  # in cm^4
I.setDescription("I")

# %%
# Finally, we define the dependency using a `NormalCopula`.
示例#22
0
# NOTE(review): `dim` and `meanPoint` are defined earlier, outside this excerpt.
sigma = [2.0, 3.0]
R = ot.CorrelationMatrix(dim)
for i in range(1, dim):
    R[i, i - 1] = 0.5

distribution = ot.Normal(meanPoint, sigma, R)
discretization = 100
kernel = ot.KernelSmoothing()
sample = distribution.getSample(discretization)
# Try several smoothing kernels and compare smoothed moments to exact ones.
kernels = ot.DistributionCollection(0)
kernels.add(ot.Normal())
kernels.add(ot.Epanechnikov())
kernels.add(ot.Uniform())
kernels.add(ot.Triangular())
kernels.add(ot.Logistic())
kernels.add(ot.Beta(2.0, 2.0, -1.0, 1.0))
kernels.add(ot.Beta(3.0, 3.0, -1.0, 1.0))
meanExact = distribution.getMean()
covarianceExact = distribution.getCovariance()
for i in range(kernels.getSize()):
    kernel = kernels[i]
    print("kernel=", kernel.getName())
    smoother = ot.KernelSmoothing(kernel)
    smoothed = smoother.build(sample)
    bw = smoother.getBandwidth()
    print("kernel bandwidth=[ %.6g" % bw[0], ",  %.6g" % bw[1], "]")
    meanSmoothed = smoothed.getMean()
    print("mean(smoothed)=[ %.6g" % meanSmoothed[0],
          ",  %.6g" % meanSmoothed[1], "] mean(exact)=[", meanExact[0], ", ",
          meanExact[1], "]")
    covarianceSmoothed = smoothed.getCovariance()
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

# Instantiate one distribution object
# (4-D maximum-entropy order-statistics distribution)
distribution = ot.MaximumEntropyOrderStatisticsDistribution([
    ot.Trapezoidal(-2.0, -1.1, -1.0, 1.0),
    ot.LogUniform(1.0, 1.2),
    ot.Triangular(3.0, 4.5, 5.0),
    ot.Beta(2.5, 3.5, 4.7, 5.2)
])

dim = distribution.getDimension()
print("Distribution ", distribution)

# Is this distribution elliptical ?
print("Elliptical = ", distribution.isElliptical())

# Test for realization of distribution
oneRealization = distribution.getRealization()
print("oneRealization=", repr(oneRealization))

# Test for sampling
size = 10000
oneSample = distribution.getSample(size)
print("oneSample first=", repr(oneSample[0]), " last=",
      repr(oneSample[size - 1]))
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# We define the symbolic function which evaluates the output Y depending on the inputs E, F, L and I.

# %%
# (beam deviation: Y = F*L^3 / (3*E*I))
model = ot.SymbolicFunction(["E", "F", "L", "I"], ["F*L^3/(3*E*I)"])

# %%
# Then we define the distribution of the input random vector.

# %%
# Young's modulus E
E = ot.Beta(0.9, 2.27, 2.5e7, 5.0e7) # in N/m^2
E.setDescription("E")
# Load F
F = ot.LogNormal() # in N
F.setParameter(ot.LogNormalMuSigma()([30.e3, 9e3, 15.e3]))
F.setDescription("F")
# Length L
L = ot.Uniform(250., 260.) # in cm
L.setDescription("L")
# Moment of inertia I
I = ot.Beta(2.5, 1.5, 310, 450) # in cm^4
I.setDescription("I")

# %%
# Finally, we define the dependency using a `NormalCopula`.
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Order-statistics copula built from two Beta marginals.
myDist = [ot.Beta(1.5, 3.2, 0.0, 1.0), ot.Beta(2.0, 4.3, 0.5, 1.2)]
myOrderStatCop = ot.MaximumEntropyOrderStatisticsCopula(myDist)
myOrderStatCop.setDescription(['$u_1$', '$u_2$'])
graphPDF = myOrderStatCop.drawPDF()
graphCDF = myOrderStatCop.drawCDF()

# One figure: PDF on the left panel, CDF on the right one.
fig = plt.figure(figsize=(8, 4))
plt.suptitle("Max Entropy Order Statistics Copula: pdf and cdf")
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
pdf_axis.set_xlim(auto=True)
cdf_axis.set_xlim(auto=True)

View(graphPDF, figure=fig, axes=[pdf_axis], add_legend=True)
View(graphCDF, figure=fig, axes=[cdf_axis], add_legend=True)
示例#26
0
    print('covariance (ref)=', repr(
        cleanCovariance(referenceDistribution[testCase].getCovariance())))
    parameters = distribution[testCase].getParametersCollection()
    print('parameters      =', repr(parameters))
    print('parameters (ref)=', repr(
        referenceDistribution[testCase].getParametersCollection()))
    print('parameter       =', repr(distribution[testCase].getParameter()))
    print('parameter desc  =',
          repr(distribution[testCase].getParameterDescription()))
    print('marginal 0      =', repr(distribution[testCase].getMarginal(0)))
    for i in range(6):
        print('standard moment n=', i, ' value=',
              referenceDistribution[testCase].getStandardMoment(i))
    print('Standard representative=', referenceDistribution[
          testCase].getStandardRepresentative())
# Check simplification: each candidate distribution truncated to the matching
# interval should report an analytically simplified version.
candidates = [ot.Normal(1.0, 2.0), ot.Uniform(1.0, 2.0), ot.Exponential(1.0, 2.0),
              ot.TruncatedDistribution(ot.Weibull(), 1.5, 7.8),
              ot.Beta(1.5, 7.8, -1.0, 2.0)]
intervals = [ot.Interval(-1.0, 4.0), ot.Interval(0.2, 2.4), ot.Interval(2.5, 65.0),
             ot.Interval(2.5, 6.0), ot.Interval(-2.5, 6.0)]
# Pair each candidate with its interval directly instead of indexing
# with range(len(...)).
for candidate, interval in zip(candidates, intervals):
    d = ot.TruncatedDistribution(candidate, interval)
    print("d=", d, "simplified=", d.getSimplifiedVersion())

# Check that we can set the bounds independently
truncated = ot.TruncatedDistribution()
truncated.setDistribution(ot.Normal(20.0, 7.5))
truncated.setBounds(ot.Interval([0], [80], [True], [False]))
print('after setbounds [email protected]=', truncated.computeQuantile(0.9))
示例#27
0
# %%
# In this example we are going to build a maximum entropy order statistics distribution, which yields ordered realizations:
#
# .. math::
#    X_1 \leq \dots \leq X_n
#

# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# create a collection of distributions (the ordered marginals)
coll = [ot.Beta(1.5, 1.7, 0.0, 1.0), ot.Beta(2.0, 2.3, 0.5, 1.2)]

# %%
# create the distribution
distribution = ot.MaximumEntropyOrderStatisticsDistribution(coll)
print(distribution)

# %%
# draw a sample (ordered!)
distribution.getSample(10)

# %%
# draw PDF
graph = distribution.drawPDF()
view = viewer.View(graph)
plt.show()
# NOTE(review): `sample` and `graph` come from earlier cells not shown here.
cloud = ot.Cloud(sample, "blue", "fsquare", "")  # Create the cloud
graph.add(cloud)  # Then, add it to the graph
view = viewer.View(graph)

# %%
# We see that the sample is quite different from the previous sample with independent copula.

# %%
# Draw several distributions in the same plot
# -------------------------------------------

# %%
# It is sometimes convenient to create a plot presenting the PDF and CDF on the same graphics. This is possible thanks to Matplotlib.

# %%
beta = ot.Beta(5, 7, 9, 10)
pdfbeta = beta.drawPDF()
cdfbeta = beta.drawCDF()
exponential = ot.Exponential(3)
pdfexp = exponential.drawPDF()
cdfexp = exponential.drawCDF()

# %%
import openturns.viewer as otv

# %% slideshow={"slide_type": "subslide"}
import pylab as plt
# 2x2 grid: each graph gets its own Matplotlib axis.
fig = plt.figure(figsize=(12, 4))
ax = fig.add_subplot(2, 2, 1)
_ = otv.View(pdfbeta, figure=fig, axes=[ax])
ax = fig.add_subplot(2, 2, 2)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
# NOTE(review): generated gallery script — each branch tests the class name of
# ot.Beta(), so only the final `else` branch is reachable here.
if ot.Beta().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.Beta().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork([ot.Normal(2),ot.Dirichlet([0.5, 1.0, 1.5])], ot.BipartiteGraph([[0,1], [0,1]]))
elif ot.Beta().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.Beta()
dimension = distribution.getDimension()
# 1-D: PDF and CDF side by side; 2-D: a single PDF plot.
if dimension == 1:
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()
    fig = plt.figure(figsize=(10, 5))
    plt.suptitle(str(distribution))
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Maximum-entropy order-statistics distribution from two Beta marginals.
myDist = [ot.Beta(1.5, 1.7, 0.0, 1.0), ot.Beta(2.0, 2.3, 0.5, 1.2)]
myOrderStatDist = ot.MaximumEntropyOrderStatisticsDistribution(myDist)
myOrderStatDist.setDescription(['$x_1$', '$x_2$'])
graphPDF = myOrderStatDist.drawPDF()
graphCDF = myOrderStatDist.drawCDF()

# One figure: PDF on the left panel, CDF on the right one.
fig = plt.figure(figsize=(8, 4))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
for axis in (pdf_axis, cdf_axis):
    axis.set_xlim(auto=True)

View(graphPDF, figure=fig, axes=[pdf_axis], add_legend=True)
View(graphCDF, figure=fig, axes=[cdf_axis], add_legend=True)
fig.suptitle("Max Entropy Order Statistics Distribution: pdf and cdf")