Example no. 1
def define_distribution():
    """
    Define the distribution of the training example (beam).
    Return a ComposedDistribution object from openTURNS
    """
    sample_E = ot.Sample.ImportFromCSVFile("sample_E.csv")
    kernel_smoothing = ot.KernelSmoothing(ot.Normal())
    bandwidth = kernel_smoothing.computeSilvermanBandwidth(sample_E)
    E = kernel_smoothing.build(sample_E, bandwidth)
    E.setDescription(['Young modulus'])

    F = ot.LogNormal()
    F.setParameter(ot.LogNormalMuSigma()([30000, 9000, 15000]))
    F.setDescription(['Load'])

    L = ot.Uniform(250, 260)
    L.setDescription(['Length'])

    I = ot.Beta(2.5, 4, 310, 450)
    I.setDescription(['Inertia'])

    marginal_distributions = [F, E, L, I]
    SR_cor = ot.CorrelationMatrix(len(marginal_distributions))
    SR_cor[2, 3] = -0.2
    copula = ot.NormalCopula(ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(SR_cor))

    return ot.ComposedDistribution(marginal_distributions, copula)
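A minimal usage sketch (assuming openturns is imported as ot and sample_E.csv is present in the working directory):

import openturns as ot

# Hedged usage sketch: build the joint input distribution and sample it.
distribution = define_distribution()
print(distribution.getDescription())   # ['Load', 'Young modulus', 'Length', 'Inertia']
sample = distribution.getSample(1000)  # 1000 joint realisations of (F, E, L, I)
print(sample.computeMean())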
Example no. 2
    def __init__(self):
        self.dim = 4  # number of inputs
        # Young's modulus E
        self.E = ot.Beta(0.9, 3.5, 65.0e9, 75.0e9)  # in N/m^2
        self.E.setDescription("E")
        self.E.setName("Young modulus")

        # Load F
        self.F = ot.LogNormal()  # in N
        self.F.setParameter(ot.LogNormalMuSigma()([300.0, 30.0, 0.0]))
        self.F.setDescription("F")
        self.F.setName("Load")

        # Length L
        self.L = ot.Uniform(2.5, 2.6)  # in m
        self.L.setDescription("L")
        self.L.setName("Length")

        # Moment of inertia I
        self.I = ot.Beta(2.5, 4.0, 1.3e-7, 1.7e-7)  # in m^4
        self.I.setDescription("I")
        self.I.setName("Inertia")

        # physical model
        self.model = ot.SymbolicFunction(['E', 'F', 'L', 'I'],
                                         ['F*L^3/(3*E*I)'])

        # correlation matrix
        self.R = ot.CorrelationMatrix(self.dim)
        self.R[2, 3] = -0.2
        self.copula = ot.NormalCopula(
            ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(self.R))
        self.distribution = ot.ComposedDistribution(
            [self.E, self.F, self.L, self.I], self.copula)

        # special case of an independent copula
        self.independentDistribution = ot.ComposedDistribution(
            [self.E, self.F, self.L, self.I])
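A hedged usage sketch; the snippet only shows the constructor, so the class name CantileverBeam below is hypothetical:

import openturns as ot

beam = CantileverBeam()  # hypothetical name for the class whose __init__ is shown above
inputVector = ot.RandomVector(beam.distribution)
outputVector = ot.CompositeRandomVector(beam.model, inputVector)
print(outputVector.getSample(5))  # 5 realisations of the deviation F*L^3/(3*E*I)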
Example no. 3
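The snippet below starts in the middle of a factory test, so factory, distribution and sample are assumed to exist. A plausible setup, with illustrative Burr parameters (the ones used by the original test are not shown), would be:

import openturns as ot

# Hedged setup for the partial test below: a Burr reference distribution,
# a sample drawn from it, and the factory used to re-estimate it.
distribution = ot.Burr(2.5, 1.5)
sample = distribution.getSample(10000)
factory = ot.BurrFactory()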
    estimatedDistribution = factory.build()
    print("Default distribution=", estimatedDistribution)
    estimatedDistribution = factory.build(distribution.getParameter())
    print("Distribution from parameters=", estimatedDistribution)
    estimatedBurr = factory.buildAsBurr(sample)
    print("Burr          =", distribution)
    print("Estimated burr=", estimatedBurr)
    estimatedBurr = factory.buildAsBurr()
    print("Default burr=", estimatedBurr)
    estimatedBurr = factory.buildAsBurr(distribution.getParameter())
    print("Burr from parameters=", estimatedBurr)

    try:
        estimatedBurr = factory.build(ot.Normal(1e-3, 1e-5).getSample(100))
        print('Estimated burr=', estimatedBurr)
    except:
        pass

    ot.RandomGenerator.SetSeed(0)
    try:
        estimatedBurr = factory.build(
            ot.UserDefined(ot.LogNormal(7.71,
                                        1.0056).getSample(500)).getSupport())
        #print('Estimated burr=', estimatedBurr)
    except:
        pass

except:
    import sys
    print("t_BurrFactory_std.py", sys.exc_info()[0], sys.exc_info()[1])
import openturns as ot
from openturns.viewer import View

ot.RandomGenerator.SetSeed(0)
distribution = ot.LogNormal(2.0, 1.0, 0.0)
sample = distribution.getSample(50)
graph = ot.VisualTest.DrawHenryLine(sample)
View(graph)
Example no. 5
def showInformation(bn):
    # signature inferred from the call below; mirrors showInference
    try:
        # fails outside notebook
        import pyAgrum.lib.notebook as gnb
        gnb.showInformation(bn)
    except ImportError:
        pass
def showInference(model, evs=None, size=None):
    try:
        # fails outside notebook
        import pyAgrum.lib.notebook as gnb
        gnb.showInference(model, evs=evs, size=size)
    except ImportError:
        pass

# **Probabilistic model**

# Marginal distributions
Torque = ot.LogNormal(0.0, 0.25)
Angle = ot.TruncatedNormal(0.0, 2.0, -8.0, 8.0)
Joint = ot.Uniform(1.8, 2.2)

# Dependence
rho = 0.5
TorqueAngleCopula = ot.NormalCopula(ot.CorrelationMatrix(2, [1.0, rho, rho, 1.0]))
copula = ot.ComposedCopula([TorqueAngleCopula, ot.IndependentCopula(1)])

# Joint distribution if needed
TorqueAngle = ot.ComposedDistribution([Torque, Angle], TorqueAngleCopula)
fullDistribution = ot.ComposedDistribution([Torque, Angle, Joint], copula)

# Leakage angle (rd)
angleMax = 5.0
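A quick check of the probabilistic model above, as a hedged sketch (an empirical estimate only, not the original study):

# Sample the joint model and estimate how often the angle exceeds angleMax.
sample = fullDistribution.getSample(10000)
n_exceed = sum(1 for x in sample if x[1] > angleMax)  # x[1] is the Angle component
print("P(Angle > angleMax) ~", n_exceed / 10000.0)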
Example no. 6
# %%
# We define the symbolic function which evaluates the output Y depending on the inputs E, F, L and I.

# %%
model = ot.SymbolicFunction(["E", "F", "L", "I"], ["F*L^3/(3*E*I)"])

# %%
# Then we define the distribution of the input random vector.

# %%
# Young's modulus E
E = ot.Beta(0.9, 3.5, 2.5e7, 5.0e7)  # in N/m^2
E.setDescription("E")
# Load F
F = ot.LogNormal()  # in N
F.setParameter(ot.LogNormalMuSigma()([30.e3, 9e3, 15.e3]))
F.setDescription("F")
# Length L
L = ot.Uniform(250., 260.)  # in cm
L.setDescription("L")
# Moment of inertia I
I = ot.Beta(2.5, 4, 310, 450)  # in cm^4
I.setDescription("I")

# %%
# Finally, we define the dependency using a `NormalCopula`.

# %%
R = ot.CorrelationMatrix(4)
R[2, 3] = -0.2  # negative correlation between L and I
myCopula = ot.NormalCopula(ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(R))
myDistribution = ot.ComposedDistribution([E, F, L, I], myCopula)
Example no. 7
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.LogNormal().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.LogNormal().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.LogNormal().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.LogNormal()
dimension = distribution.getDimension()
if dimension == 1:
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()
Example no. 8
    def __init__(
        self,
        threshold=0.0,
        mu1=120.0,
        sigma1=12.0,
        mu2=120.0,
        sigma2=12.0,
        mu3=120.0,
        sigma3=12.0,
        mu4=120.0,
        sigma4=12.0,
        mu5=50.0,
        sigma5=10.0,
        mu6=40.0,
        sigma6=8.0,
    ):
        """
        Creates a reliability problem RP8.

        The event is {g(X) < threshold} where
        X = (x1, x2, x3, x4, x5, x6)
        g(X) = x1 + 2 * x2 + 2 * x3 + x4 - 5 * x5 - 5 * x6
        We have:
                x1 ~ LogNormal(mu1, sigma1)
                x2 ~ LogNormal(mu2, sigma2)
                x3 ~ LogNormal(mu3, sigma3)
                x4 ~ LogNormal(mu4, sigma4)
                x5 ~ LogNormal(mu5, sigma5)
                x6 ~ LogNormal(mu6, sigma6)

        Parameters
        ----------
        threshold : float
            The threshold.
        mu1 : float
            The mean of the X1 LogNormal distribution.
        sigma1 : float
            The standard deviation of the X1 LogNormal distribution.
        mu2 : float
            The mean of the X2 LogNormal distribution.
        sigma2 : float
            The standard deviation of the X2 LogNormal distribution.
        mu3 : float
            The mean of the X3 LogNormal distribution.
        sigma3 : float
            The standard deviation of the X3 LogNormal distribution.
        mu4 : float
            The mean of the X4 LogNormal distribution.
        sigma4 : float
            The standard deviation of the X4 LogNormal distribution.
        mu5 : float
            The mean of the X5 LogNormal distribution.
        sigma5 : float
            The standard deviation of the X5 LogNormal distribution.
        mu6 : float
            The mean of the X6 LogNormal distribution.
        sigma6 : float
            The standard deviation of the X6 LogNormal distribution.
        """

        formula = "x1 + 2 * x2 + 2 * x3 + x4 - 5 * x5 - 5 * x6"

        print(formula)
        limitStateFunction = ot.SymbolicFunction(
            ["x1", "x2", "x3", "x4", "x5", "x6"], [formula]
        )
        # The mu/sigma arguments are the means and standard deviations of the
        # lognormal marginals, so the LogNormalMuSigma parametrization is used
        # (the native ot.LogNormal parameters are the log-scale ones).
        X1 = ot.LogNormalMuSigma(mu1, sigma1, 0.0).getDistribution()
        X1.setDescription(["X1"])
        X2 = ot.LogNormalMuSigma(mu2, sigma2, 0.0).getDistribution()
        X2.setDescription(["X2"])
        X3 = ot.LogNormalMuSigma(mu3, sigma3, 0.0).getDistribution()
        X3.setDescription(["X3"])
        X4 = ot.LogNormalMuSigma(mu4, sigma4, 0.0).getDistribution()
        X4.setDescription(["X4"])
        X5 = ot.LogNormalMuSigma(mu5, sigma5, 0.0).getDistribution()
        X5.setDescription(["X5"])
        X6 = ot.LogNormalMuSigma(mu6, sigma6, 0.0).getDistribution()
        X6.setDescription(["X6"])

        myDistribution = ot.ComposedDistribution([X1, X2, X3, X4, X5, X6])
        inputRandomVector = ot.RandomVector(myDistribution)
        outputRandomVector = ot.CompositeRandomVector(
            limitStateFunction, inputRandomVector
        )
        thresholdEvent = ot.ThresholdEvent(outputRandomVector, ot.Less(), threshold)

        name = "RP8"
        probability = 0.000784
        super(ReliabilityProblem8, self).__init__(name, thresholdEvent, probability)
        return None
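A hedged usage sketch: assuming the benchmark base class called above exposes the event through a getEvent() accessor (otbenchmark-style API), the failure probability can be estimated by Monte Carlo and compared with the reference value 0.000784.

import openturns as ot

problem = ReliabilityProblem8()  # class defined above
event = problem.getEvent()       # assumed accessor of the benchmark base class
ot.RandomGenerator.SetSeed(0)
algo = ot.ProbabilitySimulationAlgorithm(event, ot.MonteCarloExperiment())
algo.setMaximumOuterSampling(100000)
algo.setMaximumCoefficientOfVariation(0.05)
algo.run()
print("Pf ~", algo.getResult().getProbabilityEstimate())  # reference: 0.000784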
Example no. 9
        meanPoint[i] = (i + 1) * dim
    distribution = ot.Normal(meanPoint, sigma, R)
    size = 1000
    sample = distribution.getSample(size)
    graph = ot.Graph('Pairs', ' ', ' ', True, 'topright')
    labels = list(['x' + str(i) for i in range(dim)])
    myPairs = ot.Pairs(sample, 'Pairs example', labels, 'green', 'bullet')
    graph.add(myPairs)
    # graph.draw('curve9.png')
    view = View(graph)
    # view.save('curve9.png')
    view.show(block=False)

    # Convergence graph curve
    aCollection = []
    aCollection.append(ot.LogNormal(300., 30., 0., ot.LogNormal.MUSIGMA))
    aCollection.append(ot.Normal(75e3, 5e3))
    myDistribution = ot.ComposedDistribution(aCollection)
    vect = ot.RandomVector(myDistribution)
    LimitState = ot.NumericalMathFunction(('R', 'F'), ('G', ),
                                          ('R-F/(_pi*100.0)', ))
    G = ot.RandomVector(LimitState, vect)
    myEvent = ot.Event(G, ot.Less(), 0.0)
    myAlgo = ot.MonteCarlo(myEvent)
    myAlgo.setMaximumCoefficientOfVariation(0.05)
    myAlgo.setMaximumOuterSampling(int(1e5))
    myAlgo.run()
    graph = myAlgo.drawProbabilityConvergence()
    # graph.draw('curve10.png')
    view = View(graph)
    # view.save('curve10.png')
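Example no. 10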
print('formula_Y0=', model.getFormula('Y0'))
print('stochastic var=', model.getStochasticInputNames())
print('distribution=', model.getDistribution())
print('copula=', model.getCopula())
print('output_Y0=', model.getOutputByName('Y0'))
print('input_X0=', model.getInputByName('X0'))
print('inputs names=', model.getInputNames())
print('outputs names=', model.getOutputNames())
print('hasY0', model.hasOutputNamed('Y0'))
print('hasX0', model.hasInputNamed('X0'))

# set attribute values
# in
model.setInputs(inputs)
model.setOutputs(outputs)
model.setDistribution('X1', ot.LogNormal())
model.setFiniteDifferenceStep('X1', 1e-5)
R = ot.CorrelationMatrix(2)
R[0, 1] = 0.25
model.setCopula(['X0', 'X1'], ot.NormalCopula(R))
print('inputs=', model.getInputs())
print('stochastic var=', model.getStochasticInputNames())
print('distribution=', model.getDistribution())
print('copula=', model.getCopula())
# out
model.setFormulas(['sin(X0)+8*X1+0.5'])
print('outputs=', model.getOutputs())

# add variables
# in
X2 = persalys.Input('X2', 10)
Example no. 11
                   MauntzKucherenkoSensitivityAlgorithm, list_sampleSize,
                   ref_sampleSize, ref_nrepetitions)

    # Martinez
    runConvergence(model_ishigami, distribution_ishigami,
                   MartinezSensitivityAlgorithm, list_sampleSize,
                   ref_sampleSize, ref_nrepetitions)

    ################################################################################
    #################             POUTRE (beam)              #####################
    ################################################################################

    model_poutre = ot.SymbolicFunction(['L', 'b', 'h', 'E', 'F'],
                                       ['F * L^3 / (48 * E * b * h^3 / 12)'])
    model_poutre.setName("poutre")
    L = ot.LogNormal()
    L.setParameter(ot.LogNormalMuSigmaOverMu()([5., .02, 0.]))
    b = ot.LogNormal()
    b.setParameter(ot.LogNormalMuSigmaOverMu()([.2, .05, 0.]))
    h = ot.LogNormal()
    h.setParameter(ot.LogNormalMuSigmaOverMu()([.4, .05, 0.]))
    E = ot.LogNormal()
    E.setParameter(ot.LogNormalMuSigmaOverMu()([3e4, .12, 0.]))
    F = ot.LogNormal()
    F.setParameter(ot.LogNormalMuSigmaOverMu()([.1, .20, 0.]))
    distribution_poutre = ot.ComposedDistribution([L, b, h, E, F])

    # Saltelli
    runConvergence(model_poutre, distribution_poutre,
                   SaltelliSensitivityAlgorithm, list_sampleSize,
                   ref_sampleSize, ref_nrepetitions)
Example no. 12
#!/usr/bin/env python

# -*- coding: utf8 -*-
import openturns as ot

from analytical_functions import gfun_8
from simulation_methods import run_MonteCarlo

# Input distributions
# ~ dist_X1 = ot.Normal(0., 1.)
# ~ dist_X2 = ot.Normal(0., 1.)
# ~ myDistribution = ot.ComposedDistribution([dist_X1, dist_X2])

dist_X1 = ot.LogNormal(120, 12)
dist_X2 = ot.LogNormal(120, 12)
dist_X3 = ot.LogNormal(120, 12)
dist_X4 = ot.LogNormal(120, 12)
dist_X5 = ot.LogNormal(50, 10)
dist_X6 = ot.LogNormal(40, 8)
myDistribution = ot.ComposedDistribution(
    [dist_X1, dist_X2, dist_X3, dist_X4, dist_X5, dist_X6])
myRandomVector = ot.RandomVector(myDistribution)

# Function
# ~ myFunction = ot.PythonFunction(2, 1, gfun_22)
myFunction = ot.PythonFunction(6, 1, gfun_8)

myOutputVector = ot.CompositeRandomVector(myFunction, myRandomVector)

# Reliability event
event = ot.Event(myOutputVector, ot.LessOrEqual(), 0.0)
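Example no. 13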
import openturns as ot
import pandas as pd


VAR_CONF_TRAIN = {  # In the following comments, conversion_rate is denoted by CR
    "day_of_week": {
        "marg": ot.Uniform(-0.49, 6.49),  # Uniform between 0 and 6
        "corr": 0.01,  # Very weak influence on CR
        "bounds": None,
        "round": 0,
    },
    "price": {
        "marg": ot.LogNormal(5.0, 0.7, 8.0),  # mean is about 50
        "corr": -0.7,  # Negative correlation with CR
        "bounds": [1.0, 500.0],
        "round": 1,
    },
    "ratio_shipping": {
        "marg": ot.Normal(0.2, 0.07),  # mean is 0.3
        "corr": -0.05,  # Negative correlation with CR
        "bounds": [0.05, 0.4],
        "round": 4,
    },
    "shipping_time": {
        "marg": ot.Poisson(3.0),  # mean is 3 days
        "corr": -0.3,  # The longer, the less CR
        "bounds": [1, 14],
        "round": 4,
    },
    "nb_rating": {
        "marg": ot.Geometric(0.02),  # mean is 50
Example no. 14
import openturns as ot
from openturns.viewer import View

dist = ot.LogNormal()
dist.setDescription(['Z'])
sample = dist.getSample(100)
graph = dist.drawPDF()
histogram = ot.VisualTest.DrawHistogram(sample).getDrawable(0)
histogram.setColor('blue')
graph.add(histogram)
View(graph)
Example no. 15
print('atan(dist0):', result)
graph = result.drawPDF()

result = dist1.cosh()
print('cosh(dist1):', result)
graph = result.drawPDF()

result = dist1.sinh()
print('sinh(dist1):', result)
graph = result.drawPDF()

result = dist1.tanh()
print('tanh(dist1):', result)
graph = result.drawPDF()

distG1 = ot.LogNormal(1.0, 1.0, 1.0)
result = distG1.acosh()
print('acosh(distG1):', result)
graph = result.drawPDF()

result = dist1.asinh()
print('asinh(dist1):', result)
graph = result.drawPDF()

result = dist0.atanh()
print('atanh(dist0):', result)
graph = result.drawPDF()

result = dist1.exp()
print('exp(dist1):', result)
graph = result.drawPDF()
Example no. 16
#Lakach
# muLog = 7.43459;sigmaLog = 0.555439;gamma = 4977.04
# marginal1 =ot.LogNormal(muLog, sigmaLog, gamma)
# mu = 0.165352;beta = 0.0193547;
# marginal2 =ot.Logistic(mu, beta)
# theta = -4.2364
# copula = ot.FrankCopula(theta)

#Noruego
mu = 21929.5
sigma = 2251.15
marginal1 = ot.Normal(mu, sigma)
muLog = -1.49583
sigmaLog = 0.134007
gamma = 0.0770454
marginal2 = ot.LogNormal(muLog, sigmaLog, gamma)
theta = -7.83752
copula = ot.FrankCopula(theta)

#BF3
# beta1 = 2458.48;gamma1 = 28953.5
# marginal1= ot.Gumbel(beta1, gamma1)
# beta2 = 0.0489963;gamma2 = 0.156505
# marginal2= ot.Gumbel(beta2, gamma2)
# theta = -5.21511
# copula= ot.FrankCopula(theta)

#bivariate_distribution_data=cond.ot_kernel_copula_fit(Ip_Phit) #Nonparametric variant
bivariate_distribution_data = ot.ComposedDistribution(
    [marginal1, marginal2], copula)  #Parametric variant
marginal_data = [bivariate_distribution_data.getMarginal(i) for i in [0, 1]]
Example no. 17
model_fmu = otfmi.FMUFunction(path_fmu,
                              inputs_fmu=["E", "F", "L", "I"],
                              outputs_fmu="y")

# %%
# We test the function wrapping the deviation model on a point:
import openturns as ot
point = ot.Point([3e7, 2e4, 255, 350])
model_evaluation = model_fmu(point)
print("Running the FMU: deviation = {}".format(model_evaluation))

# %%
# We define probability laws on the 4 uncertain inputs:

E = ot.Beta(0.93, 3.2, 2.8e7, 4.8e7)
F = ot.LogNormal()
F.setParameter(ot.LogNormalMuSigma()([30.e3, 9e3, 15.e3]))
L = ot.Uniform(250.0, 260.0)
I = ot.Beta(2.5, 4.0, 310.0, 450.0)

# %%
# According to the laws of mechanics, when the length L increases, the moment
# of inertia I decreases.
# The variables L and I are thus negatively correlated.
#
# **We assume that the random variables E, F, L and I are dependent and
# associated with a Gaussian copula whose correlation matrix is:**
#
# .. math::
#    \begin{pmatrix}
#    1 & 0 & 0 & 0 \\
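The snippet breaks off inside the correlation matrix. A hedged completion, consistent with the other beam examples on this page (a Spearman correlation of -0.2 between L and I), would be:

# Gaussian copula with a negative correlation between L and I, then the joint distribution.
R = ot.CorrelationMatrix(4)
R[2, 3] = -0.2
copula = ot.NormalCopula(ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(R))
inputDistribution = ot.ComposedDistribution([E, F, L, I], copula)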
Example no. 18
q = case2.computeQuantile(0.95)[0]
print("case 2, q=%.6f" % q)
q = case2.computeQuantile(0.95, True)[0]
print("case 2, q comp=%.6f" % q)
# For ticket 953
atom1 = ot.TruncatedDistribution(ot.Uniform(0.0, 1.0), 0.0, 1.0)
atom2 = ot.Uniform(0.0, 2.0)
sum = atom1 + atom2
print("sum=", sum)
print("CDF=%.6g" % sum.computeCDF(2.0))
print("quantile=", sum.computeQuantile(0.2))
minS = 0.2
maxS = 10.0
muS = (log(minS) + log(maxS)) / 2.0
sigma = (log(maxS) - muS) / 3.0
atom1 = ot.TruncatedDistribution(ot.LogNormal(muS, sigma), minS, maxS)
atom2 = ot.Uniform(0.0, 2.0)
sum = atom1 + atom2
print("sum=", sum)
print("CDF=%.6g" % sum.computeCDF(2.0))
print("quantile=", sum.computeQuantile(0.2))
# For ticket 1129
dist = ot.RandomMixture([ot.Uniform()] * 200)
print("CDF(0)=%.5g" % dist.computeCDF([0]))

# check parameter accessors
dist = ot.Gumbel() + ot.Normal(0, 0.1)
print('before', dist)
p = [1849.41, -133.6, -133.6, 359.172]
dist.setParameter(p)
assert p == dist.getParameter(), "wrong parameters"
Example no. 19
from operator import itemgetter
import openturns.viewer as viewer
from matplotlib import pylab as plt

ot.Log.Show(ot.Log.NONE)

# %%
# borehole model
dimension = 8
input_names = ['rw', 'r', 'Tu', 'Hu', 'Tl', 'Hl', 'L', 'Kw']
model = ot.SymbolicFunction(
    input_names,
    ['(2*pi_*Tu*(Hu-Hl))/(ln(r/rw)*(1+(2*L*Tu)/(ln(r/rw)*rw^2*Kw)+Tu/Tl))'])
coll = [
    ot.Normal(0.1, 0.0161812),
    ot.LogNormal(7.71, 1.0056),
    ot.Uniform(63070.0, 115600.0),
    ot.Uniform(990.0, 1110.0),
    ot.Uniform(63.1, 116.0),
    ot.Uniform(700.0, 820.0),
    ot.Uniform(1120.0, 1680.0),
    ot.Uniform(9855.0, 12045.0)
]
distribution = ot.ComposedDistribution(coll)
distribution.setDescription(input_names)

# %%
# Freeze r, Tu, Tl from model to go faster
selection = [1, 2, 4]
complement = ot.Indices(selection).complement(dimension)
distribution = distribution.getMarginal(complement)
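The snippet stops before the model itself is reduced. A hedged sketch of that step, freezing r, Tu and Tl at their means with ot.ParametricFunction:

# Fix the selected inputs at nominal values so the remaining 5-input model
# matches the reduced distribution built above.
referencePoint = [coll[i].getMean()[0] for i in selection]  # means of r, Tu, Tl
model = ot.ParametricFunction(model, selection, referencePoint)
print(model.getInputDimension())  # 5 remaining inputs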
Example no. 20
discreteDistributionCollection = ot.DistributionCollection()
distributionCollection = ot.DistributionCollection()

beta = ot.Beta(2.0, 1.0, 0.0, 1.0)
distributionCollection.add(beta)
continuousDistributionCollection.add(beta)

gamma = ot.Gamma(1.0, 2.0, 3.0)
distributionCollection.add(gamma)
continuousDistributionCollection.add(gamma)

gumbel = ot.Gumbel(1.0, 2.0)
distributionCollection.add(gumbel)
continuousDistributionCollection.add(gumbel)

lognormal = ot.LogNormal(1.0, 1.0, 2.0)
distributionCollection.add(lognormal)
continuousDistributionCollection.add(lognormal)

logistic = ot.Logistic(1.0, 1.0)
distributionCollection.add(logistic)
continuousDistributionCollection.add(logistic)

normal = ot.Normal(1.0, 2.0)
distributionCollection.add(normal)
continuousDistributionCollection.add(normal)

truncatednormal = ot.TruncatedNormal(1.0, 1.0, 0.0, 3.0)
distributionCollection.add(truncatednormal)
continuousDistributionCollection.add(truncatednormal)
Example no. 21
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.LogNormal().__class__.__name__ == 'Bernoulli':
    distribution = ot.Bernoulli(0.7)
elif ot.LogNormal().__class__.__name__ == 'Binomial':
    distribution = ot.Binomial(5, 0.2)
elif ot.LogNormal().__class__.__name__ == 'ComposedDistribution':
    copula = ot.IndependentCopula(2)
    marginals = [ot.Uniform(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, copula)
elif ot.LogNormal().__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2),ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(coll, ot.BipartiteGraph([[0,1], [0,1]]))
elif ot.LogNormal().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.LogNormal().__class__.__name__ == 'KernelMixture':
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwidth = [1.0]
    distribution = ot.KernelMixture(kernel, bandwidth, sample)
elif ot.LogNormal().__class__.__name__ == 'MaximumDistribution':
    coll = [ot.Uniform(2.5, 3.5), ot.LogUniform(1.0, 1.2), ot.Triangular(2.0, 3.0, 4.0)]
    distribution = ot.MaximumDistribution(coll)
elif ot.LogNormal().__class__.__name__ == 'Multinomial':
    distribution = ot.Multinomial(5, [0.2])
elif ot.LogNormal().__class__.__name__ == 'RandomMixture':
    coll = [ot.Triangular(0.0, 1.0, 5.0), ot.Uniform(-2.0, 2.0)]
    weights = [0.8, 0.2]
    cst = 3.0
    distribution = ot.RandomMixture(coll, weights, cst)
Example no. 22
from __future__ import print_function
import openturns as ot
from openturns.viewer import View

# Generate some data
size = 250
sample = ot.LogNormal(0.0, 0.4).getSample(size)

# Estimate the distribution
parametric_estimate = ot.LogNormalFactory().build(sample)
nonparametric_estimate = ot.KernelSmoothing().build(sample)

# Draw a non parametric estimate and the parametric estimate
graph = parametric_estimate.drawPDF(0.0, 4.0)
graph.add(nonparametric_estimate.drawPDF(0.0, 4.0))
graph.add(ot.Cloud(sample, ot.Sample(size, 1)))
graph.setLegendPosition("topright")
graph.setXTitle(r"$x$")
graph.setYTitle(r"$p_X$")
graph.setTitle(r"Parametric vs nonparametric estimation")
graph.setColors(["red", "blue", "green"])
graph.setLegends(["parametric", "nonparametric", "data"])
view = View(graph, (800, 600))
view.save("../plot_distribution_fitting.png")
view.close()