    def __init__(self):
        """Flood model: 4 random inputs (Q, Ks, Zv, Zm) and overflow output.

        Builds the marginal distributions, the symbolic overflow model and
        the joint (independent) input distribution.  Assumes ``ot``
        (openturns) is imported at module level.
        """
        # Length of the river in meters
        self.L = 5000.0
        # Width of the river in meters
        self.B = 300.0
        self.dim = 4  # number of inputs
        # Q: yearly maximal flow rate, Gumbel truncated to positive values
        self.Q = ot.TruncatedDistribution(
            ot.Gumbel(558., 1013.), 0, ot.TruncatedDistribution.LOWER)
        self.Q.setDescription(["Q (m3/s)"])
        self.Q.setName("Q")
        # Ks: Strickler friction coefficient, Normal truncated to positive values
        self.Ks = ot.TruncatedDistribution(
            ot.Normal(30.0, 7.5), 0, ot.TruncatedDistribution.LOWER)
        self.Ks.setName("Ks")
        # Zv: downstream riverbed level (m)
        self.Zv = ot.Uniform(49.0, 51.0)
        self.Zv.setName("Zv")
        # Zm: upstream riverbed level (m)
        self.Zm = ot.Uniform(54.0, 56.0)
        # Zm.setDescription(["Zm (m)"])
        self.Zm.setName("Zm")
        # Overflow: Manning-Strickler water height + Zv minus the 58.5 m dyke level
        self.model = ot.SymbolicFunction(
            ['Q', 'Ks', 'Zv', 'Zm'],
            ['(Q/(Ks*300.*sqrt((Zm-Zv)/5000)))^(3.0/5.0)+Zv-58.5'])
        # Independent joint distribution of the four inputs
        self.distribution = ot.ComposedDistribution(
            [self.Q, self.Ks, self.Zv, self.Zm])
        self.distribution.setDescription(['Q', 'Ks', 'Zv', 'Zm'])
import sys

ot.TESTPREAMBLE()


def flooding(X):
    """Water height H of the river for X = (Q, K_s, Z_v, Z_m)."""
    L, B = 5.0e3, 300.0  # river length (m) and width (m)
    Q, K_s, Z_v, Z_m = X
    alpha = (Z_m - Z_v)/L  # riverbed slope
    # Manning-Strickler formula; `m` is assumed to be the math module,
    # imported earlier in the file — TODO confirm
    H = (Q/(K_s*B*m.sqrt(alpha)))**(3.0/5.0)
    return [H]


g = ot.PythonFunction(4, 1, flooding)
# NOTE(review): only a truncation side is passed here, no explicit bound
# value — verify this matches the intended TruncatedDistribution overload.
Q = ot.TruncatedDistribution(
    ot.Gumbel(558.0, 1013.0), ot.TruncatedDistribution.LOWER)
# Ks, Zv, Zm fixed at their "true" values via Dirac distributions
K_s = ot.Dirac(30.0)
Z_v = ot.Dirac(50.0)
Z_m = ot.Dirac(55.0)
inputRandomVector = ot.ComposedDistribution([Q, K_s, Z_v, Z_m])
nbobs = 100  # number of observations
inputSample = inputRandomVector.getSample(nbobs)
outputH = g(inputSample)
# Observed heights: model output perturbed by N(0, 0.1) noise
Hobs = outputH + ot.Normal(0.0, 0.1).getSample(nbobs)
Qobs = inputSample[:, 0]
# Prior guess for the calibrated parameters (Ks, Zv, Zm)
thetaPrior = [20, 49, 51]
model = ot.ParametricFunction(g, [1, 2, 3], thetaPrior)
# Observation-error covariance (std 0.5 on H)
errorCovariance = ot.CovarianceMatrix([[0.5**2]])
# Prior covariance of the parameters
sigma = ot.CovarianceMatrix(3)
sigma[0, 0] = 5.**2
sigma[1, 1] = 1.**2
#! /usr/bin/env python
from __future__ import print_function
import openturns as ot

# Validation of GumbelFactory: draw a large sample from a known Gumbel
# distribution and exercise every build* entry point of the factory.
ref_dist = ot.Gumbel(2.0, 2.5)
gumbel_sample = ref_dist.getSample(10000)
factory = ot.GumbelFactory()
print('Distribution =', repr(ref_dist))

# Generic estimation: distribution of the estimate and of its parameters.
estimation = factory.buildEstimator(gumbel_sample)
print('Estimated distribution =', repr(estimation.getDistribution()))
print('Parameter distribution =', estimation.getParameterDistribution())

# Factory defaults and construction from an explicit parameter point.
print('Default distribution =', factory.build())
print('Distribution from parameters =', factory.build(ref_dist.getParameter()))

# Same entry points, returning a typed Gumbel object.
print('Typed estimated distribution =', factory.buildAsGumbel(gumbel_sample))
print('Default typed distribution =', factory.buildAsGumbel())
print('Typed distribution from parameters=',
      factory.buildAsGumbel(ref_dist.getParameter()))

# Estimation expressed in the (a, b) parametrization.
estimation = factory.buildEstimator(gumbel_sample, ot.GumbelAB())
print('Estimated distribution (AB) =', repr(estimation.getDistribution()))
print('Parameter distribution (AB) =', estimation.getParameterDistribution())
# \begin{array}{|ll} # 0 & \mbox{for } y \geq b \mbox{ or } y \leq a\\ # \displaystyle \frac{1}{F_X(b) - F_X(a)}\, p_X(y) & \mbox{for } y\in[a,b] # \end{array} # # Is is also possible to truncate a multivariate distribution. # %% import openturns as ot import openturns.viewer as viewer from matplotlib import pylab as plt ot.Log.Show(ot.Log.NONE) # the original distribution distribution = ot.Gumbel(0.45, 0.6) graph = distribution.drawPDF() view = viewer.View(graph) # %% # truncate on the left truncated = ot.TruncatedDistribution(distribution, 0.2, ot.TruncatedDistribution.LOWER) graph = truncated.drawPDF() view = viewer.View(graph) # %% # truncate on the right truncated = ot.TruncatedDistribution(distribution, 1.5, ot.TruncatedDistribution.UPPER) graph = truncated.drawPDF()
# FrechetFactory validation fragment: `result`, `factory`, `distribution` and
# `sample` are defined earlier in the full file.
estimatedDistribution = result.getDistribution()
print('Estimated distribution =', repr(estimatedDistribution))
parameterDistribution = result.getParameterDistribution()
print('Parameter distribution =', parameterDistribution)
defaultDistribution = factory.build()
print('Default distribution =', defaultDistribution)
fromParameterDistribution = factory.build(distribution.getParameter())
print('Distribution from parameters =', fromParameterDistribution)
# Typed variants return a Frechet object instead of a generic Distribution.
typedEstimatedDistribution = factory.buildAsFrechet(sample)
print('Typed estimated distribution =', typedEstimatedDistribution)
defaultTypedDistribution = factory.buildAsFrechet()
print('Default typed distribution =', defaultTypedDistribution)
typedFromParameterDistribution = factory.buildAsFrechet(
    distribution.getParameter())
print('Typed distribution from parameters=', typedFromParameterDistribution)

# More involved test: the sample distribution does not fit the factory.
# The distributions used:
myFrechet = ot.Frechet(1.0, 1.0, 0.0)
myGumbel = ot.Gumbel(1.0, 3.0)

# We build our mixture sample of size 2*1000=2000.
mixtureSample = ot.Sample()
sampleFrechet = myFrechet.getSample(1000)
sampleGumbel = myGumbel.getSample(1000)
mixtureSample.add(sampleFrechet)
mixtureSample.add(sampleGumbel)

# Build on the mixture sample (factory must still return a valid Frechet).
typedEstimatedFromMixtureSample = factory.buildAsFrechet(mixtureSample)
print('Estimated dist from mixture sample=', typedEstimatedFromMixtureSample)
# %% # Create a multivariate model with `ComposedDistribution` # ------------------------------------------------------- # # In this paragraph we use :math:`~openturns.ComposedDistribution` class to # build multidimensional distribution described by its marginal distributions and optionally its dependence structure (a particular copula). # # %% # We first create the marginals of the distribution : # # - a Normal distribution ; # - a Gumbel distribution. # marginals = [ot.Normal(), ot.Gumbel()] # %% # We draw their PDF. We recall that the `drawPDF` command just generates the graph data. It is the viewer module that enables the actual display. graphNormalPDF = marginals[0].drawPDF() graphNormalPDF.setTitle("PDF of the first marginal") graphGumbelPDF = marginals[1].drawPDF() graphGumbelPDF.setTitle("PDF of the second marginal") view = otv.View(graphNormalPDF) view = otv.View(graphGumbelPDF) # %% # The CDF is also available with the `drawCDF` method. # %% # We then have the minimum required to create a bivariate distribution, assuming no dependency structure :
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function import openturns as ot import openturns.testing import persalys myStudy = persalys.Study('myStudy') # Model dist_Q = ot.TruncatedDistribution( ot.Gumbel(1. / 558., 1013.), 0, ot.TruncatedDistribution.LOWER) dist_Ks = ot.TruncatedDistribution( ot.Normal(30.0, 7.5), 0, ot.TruncatedDistribution.LOWER) dist_Zv = ot.Uniform(49.0, 51.0) dist_Zm = ot.Uniform(54.0, 56.0) Q = persalys.Input('Q', 1000., dist_Q, 'Débit maximal annuel (m3/s)') Ks = persalys.Input('Ks', 30., dist_Ks, 'Strickler (m^(1/3)/s)') Zv = persalys.Input('Zv', 50., dist_Zv, 'Côte de la rivière en aval (m)') Zm = persalys.Input('Zm', 55., dist_Zm, 'Côte de la rivière en amont (m)') S = persalys.Output('S', 'Surverse (m)') model = persalys.SymbolicPhysicalModel('myPhysicalModel', [Q, Ks, Zv, Zm], [ S], ['(Q/(Ks*300.*sqrt((Zm-Zv)/5000)))^(3.0/5.0)+Zv-55.5-3.']) myStudy.add(model) # limit state ## limitState = persalys.LimitState('limitState1', model, 'S', ot.Greater(), 0.) myStudy.add(limitState)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Generated gallery script: the 'Gumbel' token is substituted per class by the
# doc generator; each branch builds an example instance of the matching
# distribution.  The elif chain likely continues past this excerpt.
if ot.Gumbel().__class__.__name__ == 'Bernoulli':
    distribution = ot.Bernoulli(0.7)
elif ot.Gumbel().__class__.__name__ == 'Binomial':
    distribution = ot.Binomial(5, 0.2)
elif ot.Gumbel().__class__.__name__ == 'ComposedDistribution':
    copula = ot.IndependentCopula(2)
    marginals = [ot.Uniform(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, copula)
elif ot.Gumbel().__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(
        coll, ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.Gumbel().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.Gumbel().__class__.__name__ == 'KernelMixture':
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwith = [1.0]  # (sic) variable name kept as in the generator template
    distribution = ot.KernelMixture(kernel, bandwith, sample)
elif ot.Gumbel().__class__.__name__ == 'MaximumDistribution':
    coll = [ot.Uniform(2.5, 3.5), ot.LogUniform(1.0, 1.2),
            ot.Triangular(2.0, 3.0, 4.0)]
    distribution = ot.MaximumDistribution(coll)
elif ot.Gumbel().__class__.__name__ == 'Multinomial':
    distribution = ot.Multinomial(5, [0.2])
elif ot.Gumbel().__class__.__name__ == 'RandomMixture':
    coll = [ot.Triangular(0.0, 1.0, 5.0), ot.Uniform(-2.0, 2.0)]
    weights = [0.8, 0.2]
    cst = 3.0
    distribution = ot.RandomMixture(coll, weights, cst)
""" # %% # In this example we are going to perform a visual goodness-of-fit test for an 1-d distribution with the QQ plot. # %% from __future__ import print_function import openturns as ot import openturns.viewer as viewer from matplotlib import pylab as plt ot.Log.Show(ot.Log.NONE) # %% # Create data ot.RandomGenerator.SetSeed(0) distribution = ot.Gumbel(0.2, 0.5) sample = distribution.getSample(100) sample.setDescription(['Sample']) # %% # Fit a distribution distribution = ot.GumbelFactory().build(sample) # %% # Draw QQ plot graph = ot.VisualTest.DrawQQplot(sample, distribution) view = viewer.View(graph) # %% # Incorrect proposition graph = ot.VisualTest.DrawQQplot(sample, ot.WeibullMin())
#! /usr/bin/env python import openturns as ot distribution = ot.Gumbel(0.5, 2.5) size = 10000 sample = distribution.getSample(size) factory = ot.GumbelFactory() print('Distribution =', repr(distribution)) result = factory.buildEstimator(sample) estimatedDistribution = result.getDistribution() print('Estimated distribution =', repr(estimatedDistribution)) parameterDistribution = result.getParameterDistribution() print('Parameter distribution =', parameterDistribution) defaultDistribution = factory.build() print('Default distribution =', defaultDistribution) fromParameterDistribution = factory.build(distribution.getParameter()) print('Distribution from parameters =', fromParameterDistribution) typedEstimatedDistribution = factory.buildAsGumbel(sample) print('Typed estimated distribution =', typedEstimatedDistribution) defaultTypedDistribution = factory.buildAsGumbel() print('Default typed distribution =', defaultTypedDistribution) typedFromParameterDistribution = factory.buildAsGumbel( distribution.getParameter()) print('Typed distribution from parameters=', typedFromParameterDistribution) result = factory.buildEstimator(sample, ot.GumbelMuSigma()) estimatedDistribution = result.getDistribution() print('Estimated distribution (mu/sigma) =', repr(estimatedDistribution)) parameterDistribution = result.getParameterDistribution() print('Parameter distribution (mu/sigma) =', parameterDistribution)
# symboilc model ## formula_fake_var = 'x1' formula_y0 = 'cos(0.5*x1) + sin(x2)' formula_y1 = 'cos(0.5*x1) + sin(x2) + x3' symbolicModel = persalys.SymbolicPhysicalModel('symbolicModel', [x1, x2, x3], [fake_var, y0, fake_y0, y1], [ formula_fake_var, formula_y0, formula_y0, formula_y1]) myStudy.add(symbolicModel) # python model ## code = 'from math import cos, sin, sqrt\n\ndef _exec(x1, x2, x3):\n y0 = cos(0.5*x1) + sin(x2) + sqrt(x3)\n return y0\n' pythonModel = persalys.PythonPhysicalModel('pythonModel', [x1, x2, x3], [y0], code) myStudy.add(pythonModel) filename = 'data.csv' cDist = ot.ComposedDistribution([ot.Normal(), ot.Gumbel(), ot.Normal(), ot.Uniform()], ot.ComposedCopula([ot.IndependentCopula(2), ot.GumbelCopula()])) sample = cDist.getSample(200) sample.exportToCSVFile(filename, ' ') # Designs of Experiment ## # fixed design ## ot.RandomGenerator.SetSeed(0) fixedDesign = persalys.FixedDesignOfExperiment('fixedDesign', symbolicModel) inputSample = ot.LHSExperiment(ot.ComposedDistribution([ot.Uniform(0., 10.), ot.Uniform(0., 10.)]), 10).generate() inputSample.stack(ot.Sample(10, [0.5])) fixedDesign.setOriginalInputSample(inputSample) fixedDesign.run() myStudy.add(fixedDesign)
# NOTE(review): Constraint, Constraints, Cons1 and dic2..dic4 are defined
# earlier in the full file.
Cons2 = Constraint([12.55, 47.45], dic2)
Cons3 = Constraint([49, 51], dic3)
Cons4 = Constraint([54, 55], dic4)
constraints = Constraints([Cons1, Cons2, Cons3, Cons4])

# =============================================================================
# ========================== INITIAL DISTRIBUTION =============================
# =============================================================================
distribution = []
lower = constraints.Lower()
upper = constraints.Upper()

# Variable #10 Q
distribution.append(ot.Gumbel(0.00524, 626.14))
# The constructor parameters are immediately overridden with the (A, B)
# parametrization (a=1013, b=558).
distribution[0].setParameter(ot.GumbelAB()([1013, 558]))
distribution[0] = ot.TruncatedDistribution(distribution[0], float(lower[0]),
                                           float(upper[0]))
# Variable #22 Ks
distribution.append(ot.Normal(30, 7.5))
distribution[1] = ot.TruncatedDistribution(distribution[1], float(lower[1]),
                                           float(upper[1]))
# Variable #25 Zv
distribution.append(ot.Triangular(49, 50, 51))
# Variable #2 Zm
distribution.append(ot.Triangular(54, 54.5, 55))
# =============================================================================
# ================================= RUN =======================================
# =============================================================================
    def __init__(
        self,
        threshold=0.0,
        a=70.0,
        b=80.0,
        mu2=39.0,
        sigma2=0.1,
        beta=1342.0,
        gamma=272.9,
        mu4=400.0,
        sigma4=0.1,
        mu5=250000.0,
        sigma5=35000.0,
    ):
        """
        Creates a reliability problem RP14.

        The event is {g(X) < threshold} where

        X = (x1, x2, x3, x4, x5)

        g(X) = x1 - 32 / (pi * x2^3) * sqrt(x3^2 * x4^2 / 10 + x5^2)

        We have :
            x1 ~ Uniform(a, b)

            x2 ~ Normal(mu2, sigma2)

            x3 ~ Gumbel(beta, gamma)

            x4 ~ Normal(mu4, sigma4)

            x5 ~ Normal(mu5, sigma5).

        Parameters
        ----------
        threshold : float
            The threshold.
        a, b : parameters of Uniform distribution X1
        mu2 : float
            The mean of the X2 Normal distribution.
        sigma2 : float
            The standard deviation of the X2 Normal distribution.
        beta : float
            First parameter of the X3 Gumbel distribution.
        gamma : float
            Second parameter of the X3 Gumbel distribution.
        mu4 : float
            The mean of the X4 Normal distribution.
        sigma4 : float
            The standard deviation of the X4 Normal distribution.
        mu5 : float
            The mean of the X5 Normal distribution.
        sigma5 : float
            The standard deviation of the X5 Normal distribution.
        """
        formula = "x1 - 32 / (pi * x2^3) * sqrt(x3^2 * x4^2 / 10 + x5^2)"
        print(formula)
        limitStateFunction = ot.SymbolicFunction(
            ["x1", "x2", "x3", "x4", "x5"], [formula])
        X1 = ot.Uniform(a, b)
        X1.setDescription(["X1"])
        X2 = ot.Normal(mu2, sigma2)
        X2.setDescription(["X2"])
        X3 = ot.Gumbel(beta, gamma)
        X3.setDescription(["X3"])
        X4 = ot.Normal(mu4, sigma4)
        X4.setDescription(["X4"])
        X5 = ot.Normal(mu5, sigma5)
        X5.setDescription(["X5"])
        # Failure event: g(X) < threshold under independent inputs.
        myDistribution = ot.ComposedDistribution([X1, X2, X3, X4, X5])
        inputRandomVector = ot.RandomVector(myDistribution)
        outputRandomVector = ot.CompositeRandomVector(limitStateFunction,
                                                      inputRandomVector)
        thresholdEvent = ot.ThresholdEvent(outputRandomVector, ot.Less(),
                                           threshold)
        name = "RP14"
        # Reference failure probability of the benchmark problem.
        probability = 0.00752
        super(ReliabilityProblem14, self).__init__(name, thresholdEvent,
                                                   probability)
        return None
y1 = persalys.Output('y1') # model 1 ## formula_fake_var = 'x1+' formula_y0 = 'cos(0.5*x1) + sin(x2)' formula_y1 = 'cos(0.5*x1) + sin(x2) + x3' model1 = persalys.SymbolicPhysicalModel( 'model1', [x1, x2, x3], [fake_var, y0, fake_y0, y1], [formula_fake_var, formula_y0, formula_y0, formula_y1]) myStudy.add(model1) # model 3 ## filename = 'data.csv' cDist = ot.ComposedDistribution( [ot.Normal(), ot.Gumbel(), ot.Normal(), ot.Uniform()], ot.ComposedCopula([ot.IndependentCopula(2), ot.GumbelCopula()])) sample = cDist.getSample(20) sample.exportToCSVFile(filename, ' ') model3 = persalys.DataModel('model3', 'data.csv', [0, 2, 3], [1], ['x_0', 'x_2', 'x_3'], ['x_1']) myStudy.add(model3) # Design of Experiment ## probaDesign = persalys.ProbabilisticDesignOfExperiment('probaDesign', model1, 20, "MONTE_CARLO") probaDesign.run() myStudy.add(probaDesign)
#! /usr/bin/env python from __future__ import print_function import openturns as ot import math as m # ot.Log.Show(ot.Log.ALL) coll = [] # case 1: no transformation coll.append([ot.Normal(), ot.Normal()]) # case 2: same copula left = ot.ComposedDistribution([ot.Normal(), ot.Gumbel()], ot.IndependentCopula(2)) right = ot.ComposedDistribution([ot.Triangular()] * 2, ot.IndependentCopula(2)) coll.append([left, right]) # case 3: same standard space left = ot.ComposedDistribution([ot.Normal(), ot.Gumbel()], ot.IndependentCopula(2)) right = ot.ComposedDistribution([ot.Triangular()] * 2, ot.GumbelCopula()) coll.append([left, right]) # TODO case 4: different standard space for left, right in coll: transformation = ot.DistributionTransformation(left, right) print('left=', left) print('right=', right)
import math as m
import sys

ot.TESTPREAMBLE()


def flooding(X):
    """Water height H of the river for X = (Q, K_s, Z_v, Z_m)."""
    L, B = 5.0e3, 300.0  # river length (m) and width (m)
    Q, K_s, Z_v, Z_m = X
    alpha = (Z_m - Z_v) / L  # riverbed slope
    # Manning-Strickler formula
    H = (Q / (K_s * B * m.sqrt(alpha)))**(3.0 / 5.0)
    return [H]


g = ot.PythonFunction(4, 1, flooding)
# NOTE(review): only a truncation side is passed here, no explicit bound
# value — verify this matches the intended TruncatedDistribution overload.
Q = ot.TruncatedDistribution(ot.Gumbel(558.0, 1013.0),
                             ot.TruncatedDistribution.LOWER)
# Ks, Zv, Zm fixed at their "true" values via Dirac distributions
K_s = ot.Dirac(30.0)
Z_v = ot.Dirac(50.0)
Z_m = ot.Dirac(55.0)
inputRandomVector = ot.ComposedDistribution([Q, K_s, Z_v, Z_m])
nbobs = 100  # number of observations
inputSample = inputRandomVector.getSample(nbobs)
outputH = g(inputSample)
# Observed heights: model output perturbed by N(0, 0.1) noise
Hobs = outputH + ot.Normal(0.0, 0.1).getSample(nbobs)
Qobs = inputSample[:, 0]
# Prior guess for the calibrated parameters (Ks, Zv, Zm)
thetaPrior = [20, 49, 51]
model = ot.ParametricFunction(g, [1, 2, 3], thetaPrior)
errorCovariance = ot.CovarianceMatrix([[0.5**2]])
sigma = ot.CovarianceMatrix(3)
sigma[0, 0] = 5.**2
#! /usr/bin/env python import openturns as ot import openturns.testing import persalys import os myStudy = persalys.Study('myStudy') # Model filename = 'data1.csv' ot.RandomGenerator.SetSeed(0) sample = ot.Normal(3).getSample(300) sample.stack(ot.Gumbel().getSample(300)) sample.setDescription(['X0', 'X1', 'X2', 'X3']) sample.exportToCSVFile(filename, ',') columns = [0, 2, 3] model = persalys.DataModel('myDataModel', "data1.csv", columns) myStudy.add(model) print(model) # Inference analysis ## analysis = persalys.InferenceAnalysis('analysis', model) variables = ["X0", "X3"] analysis.setInterestVariables(variables) factories = [ot.NormalFactory(), ot.GumbelFactory()] analysis.setDistributionsFactories("X3", factories) analysis.setLevel(0.1) myStudy.add(analysis) print(analysis)
# 1. The function G def functionCrue(X): L = 5.0e3 B = 300.0 Q, K_s, Z_v, Z_m = X alpha = (Z_m - Z_v) / L H = (Q / (K_s * B * sqrt(alpha)))**(3.0 / 5.0) return [H] # Creation of the problem function f = ot.PythonFunction(4, 1, functionCrue) f.enableHistory() # 2. Random vector definition Q = ot.Gumbel(1. / 558., 1013.) print(Q) ''' Q = ot.Gumbel() Q.setParameter(ot.GumbelAB()([1013., 558.])) print(Q) ''' Q = ot.TruncatedDistribution(Q, 0, inf) unknownKs = 30.0 unknownZv = 50.0 unknownZm = 55.0 K_s = ot.Dirac(unknownKs) Z_v = ot.Dirac(unknownZv) Z_m = ot.Dirac(unknownZm) # 3. View the PDF
def flooding(X):
    """Overflow S of the river over the dyke for X = (Q, Ks, Zv, Zm)."""
    Hd = 3.0   # dyke height (m)
    Zb = 55.5  # bank level (m)
    L = 5.0e3  # river length (m)
    B = 300.0  # river width (m)
    Zd = Zb + Hd  # dyke crest level
    Q, Ks, Zv, Zm = X
    alpha = (Zm - Zv) / L  # riverbed slope
    H = (Q / (Ks * B * alpha**0.5))**0.6  # Manning-Strickler water height
    Zc = H + Zv  # flood level
    S = Zc - Zd  # overflow (positive => flooding)
    return [S]


myFunction = ot.PythonFunction(4, 1, flooding)
# Q and Ks truncated to positive values
Q = ot.Gumbel(558.0, 1013.0)
Q = ot.TruncatedDistribution(Q, 0.0, ot.SpecFunc.MaxScalar)
Ks = ot.Normal(30.0, 7.5)
Ks = ot.TruncatedDistribution(Ks, 0.0, ot.SpecFunc.MaxScalar)
Zv = ot.Uniform(49.0, 51.0)
Zm = ot.Uniform(54.0, 56.0)
inputX = ot.ComposedDistribution([Q, Ks, Zv, Zm])
inputX.setDescription(["Q", "Ks", "Zv", "Zm"])

# Sobol' sensitivity analysis (with second-order indices)
size = 5000
computeSO = True
inputDesign = ot.SobolIndicesExperiment(inputX, size, computeSO).generate()
outputDesign = myFunction(inputDesign)
sensitivityAnalysis = ot.MauntzKucherenkoSensitivityAlgorithm(
    inputDesign, outputDesign, size)
#! /usr/bin/env python from __future__ import print_function import openturns as ot import math as m # ot.Log.Show(ot.Log.ALL) coll = [] # case 1: no transformation coll.append([ot.Normal(), ot.Normal()]) # case 2: same copula left = ot.ComposedDistribution( [ot.Normal(), ot.Gumbel()], ot.IndependentCopula(2)) right = ot.ComposedDistribution([ot.Triangular()] * 2, ot.IndependentCopula(2)) coll.append([left, right]) # case 3: same standard space left = ot.ComposedDistribution( [ot.Normal(), ot.Gumbel()], ot.IndependentCopula(2)) right = ot.ComposedDistribution([ot.Triangular()] * 2, ot.GumbelCopula()) coll.append([left, right]) # TODO case 4: different standard space for left, right in coll: transformation = ot.DistributionTransformation(left, right) print('left=', left) print('right=', right)
#! /usr/bin/env python from __future__ import print_function import openturns as ot ot.TESTPREAMBLE() ot.PlatformInfo.SetNumericalPrecision(3) size = 10000 distribution = ot.Gumbel(1.5, -0.5) print('distribution=', distribution) sample = distribution.getSample(size) factory = ot.MethodOfMomentsFactory(ot.Gumbel()) inf_distribution = factory.build(sample) print('estimated distribution=', inf_distribution) # set (a,b) out of (r, t, a, b) distribution = ot.Beta(2.3, 2.2, -1.0, 1.0) print('distribution=', distribution) sample = distribution.getSample(size) factory = ot.MethodOfMomentsFactory(ot.Beta()) factory.setKnownParameter([-1.0, 1.0], [2, 3]) inf_distribution = factory.build(sample) print('estimated distribution=', inf_distribution) # with bounds data = [ 0.6852, 0.9349, 0.5884, 1.727, 1.581, 0.3193, -0.5701, 1.623, 2.210, -0.3440, -0.1646 ]
def flood_model(X): L = 5000. # m B = 300. # m Q = X[0] # m^3.s^-1 Ks = X[1] # m^1/3.s^-1 Zv = X[2] # m Zm = X[3] # m Hd = 0. # m Zb = 55.5 # m S = Zv + (Q / (Ks * B * m.sqrt((Zm - Zv) / L)))**(3. / 5) - (Hd + Zb) return [S] function = ot.PythonFunction(dim, 1, flood_model) Q_law = ot.TruncatedDistribution(ot.Gumbel(1.0 / 558.0, 1013.0), 0.0, ot.TruncatedDistribution.LOWER) # alpha=1/b, beta=a | you can use Gumbel(a, b, Gumbel.AB) starting from OT 1.2 Ks_law = ot.TruncatedDistribution(ot.Normal(30.0, 7.5), 0., ot.TruncatedDistribution.LOWER) Zv_law = ot.Triangular(49., 50., 51.) Zm_law = ot.Triangular(54., 55., 56.) coll = ot.DistributionCollection([Q_law, Ks_law, Zv_law, Zm_law]) distribution = ot.ComposedDistribution(coll) x = list(map(lambda dist: dist.computeQuantile(0.5)[0], coll)) fx = function(x) for k in [0.0, 2.0, 5.0, 8.][0:1]: randomVector = ot.RandomVector(distribution) composite = ot.RandomVector(function, randomVector)
L = 5000. # m B = 300. # m Q = X[0] # m^3.s^-1 Ks = X[1] # m^1/3.s^-1 Zv = X[2] # m Zm = X[3] # m Hd = 0. # m Zb = 55.5 # m S = Zv + (Q / (Ks * B * m.sqrt((Zm - Zv) / L)))**(3. / 5) - (Hd + Zb) return [S] function = ot.PythonFunction(dim, 1, flood_model) Q_law = ot.TruncatedDistribution( ot.Gumbel(1. / 558., 1013., ot.Gumbel.ALPHABETA), 0., ot.TruncatedDistribution.LOWER) # alpha=1/b, beta=a | you can use Gumbel(a, b, Gumbel.AB) starting from OT 1.2 Ks_law = ot.TruncatedDistribution(ot.Normal(30.0, 7.5), 0., ot.TruncatedDistribution.LOWER) Zv_law = ot.Triangular(49., 50., 51.) Zm_law = ot.Triangular(54., 55., 56.) coll = ot.DistributionCollection([Q_law, Ks_law, Zv_law, Zm_law]) distribution = ot.ComposedDistribution(coll) x = list(map(lambda dist: dist.computeQuantile(0.5)[0], coll)) fx = function(x) for k in [0.0, 2.0, 5.0, 8.][0:1]: randomVector = ot.RandomVector(distribution) composite = ot.RandomVector(function, randomVector)
# `case2` is defined earlier in the full file.
q = case2.computeQuantile(0.95, True)[0]
print("case 2, q comp=%.6f" % q)

# For ticket 953: sum of a truncated atom and a uniform atom.
atom1 = ot.TruncatedDistribution(ot.Uniform(0.0, 1.0), 0.0, 1.0)
atom2 = ot.Uniform(0.0, 2.0)
# Renamed from `sum` to avoid shadowing the builtin sum(); printed labels
# are unchanged.
mixture = atom1 + atom2
print("sum=", mixture)
print("CDF=%.6g" % mixture.computeCDF(2.0))
print("quantile=", mixture.computeQuantile(0.2))

# Same check with a truncated LogNormal atom.
minS = 0.2
maxS = 10.0
# `log` is assumed imported from math earlier in the file — confirm
muS = (log(minS) + log(maxS)) / 2.0
sigma = (log(maxS) - muS) / 3.0
atom1 = ot.TruncatedDistribution(ot.LogNormal(muS, sigma), minS, maxS)
atom2 = ot.Uniform(0.0, 2.0)
mixture = atom1 + atom2
print("sum=", mixture)
print("CDF=%.6g" % mixture.computeCDF(2.0))
print("quantile=", mixture.computeQuantile(0.2))

# For ticket 1129: CDF of a large RandomMixture at 0.
dist = ot.RandomMixture([ot.Uniform()] * 200)
print("CDF(0)=%.5g" % dist.computeCDF([0]))

# check parameter accessors: set/get must round-trip exactly
dist = ot.Gumbel() + ot.Normal(0, 0.1)
print('before', dist)
p = [1849.41, -133.6, -133.6, 359.172]
dist.setParameter(p)
assert p == dist.getParameter(), "wrong parameters"
print('after ', dist)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Generated gallery script: build an example distribution for the substituted
# class, then plot PDF (and CDF in 1-d) on a shared figure.
if (ot.Gumbel().__class__.__name__ == 'ComposedDistribution'):
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.Gumbel().__class__.__name__ == 'CumulativeDistributionNetwork'):
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
else:
    distribution = ot.Gumbel()
dimension = distribution.getDimension()
if dimension <= 2:
    if distribution.getDimension() == 1:
        # 1-d: PDF and CDF side by side
        distribution.setDescription(['$x$'])
        pdf_graph = distribution.drawPDF()
        cdf_graph = distribution.drawCDF()
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(121)
        cdf_axis = fig.add_subplot(122)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
        View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
    else:
        # 2-d: PDF contour only (branch continues past this excerpt)
        distribution.setDescription(['$x_1$', '$x_2$'])
        pdf_graph = distribution.drawPDF()
        fig = plt.figure(figsize=(10, 5))
    lag_number, var_model=model, lag_size=lag_size)
# (the line above closes a call that starts before this excerpt)

## Bivariate input distribution ##

# Lakach data set (kept for reference):
# muLog = 7.43459;sigmaLog = 0.555439;gamma = 4977.04
# marginal1 =ot.LogNormal(muLog, sigmaLog, gamma)
# mu = 0.165352;beta = 0.0193547;
# marginal2 =ot.Logistic(mu, beta)
# theta = -4.2364
# copula = ot.FrankCopula(theta)

# BF3 data set: two Gumbel marginals tied by a Frank copula
beta1 = 2458.48
gamma1 = 28953.5
marginal1 = ot.Gumbel(beta1, gamma1)
beta2 = 0.0489963
gamma2 = 0.156505
marginal2 = ot.Gumbel(beta2, gamma2)
theta = -5.21511
copula = ot.FrankCopula(theta)
bivariate_distribution_data = ot.ComposedDistribution([marginal1, marginal2],
                                                      copula)
marginal_data = [bivariate_distribution_data.getMarginal(i) for i in [0, 1]]
copula_data = bivariate_distribution_data.getCopula()

# Weights #
# `w0` is defined earlier in the full file
w1 = [1, 1]
w = [w0[0] * w1[0], w0[1] * w1[1]]

## Objective function ##
# %% # Normality tests # --------------- # # We use two tests to check whether a sample follows a normal distribution : # # - the Anderson-Darling test # - the Cramer-Von Mises test # # %% # We first generate two samples, one from a standard unit gaussian and another from a Gumbel # distribution with parameters :math:`\beta = 1` and :math:`\gamma = 0`. sample1 = ot.Normal().getSample(200) sample2 = ot.Gumbel().getSample(200) # %% # We test the normality of the sample. We can display the result of the test as a yes/no answer with # the `getBinaryQualityMeasure`. We can retrieve the p-value and the threshold with the `getPValue` # and `getThreshold` methods. # %% test_result = ot.NormalityTest.AndersonDarlingNormal(sample1) print('Component is normal?', test_result.getBinaryQualityMeasure(), 'p-value=%.6g' % test_result.getPValue(), 'threshold=%.6g' % test_result.getThreshold()) # %% test_result = ot.NormalityTest.AndersonDarlingNormal(sample2) print('Component is normal?', test_result.getBinaryQualityMeasure(),
ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

# Collections partitioned by continuity; each distribution below is added to
# both the global collection and the continuous sub-collection.
continuousDistributionCollection = ot.DistributionCollection()
discreteDistributionCollection = ot.DistributionCollection()
distributionCollection = ot.DistributionCollection()

beta = ot.Beta(2.0, 1.0, 0.0, 1.0)
distributionCollection.add(beta)
continuousDistributionCollection.add(beta)

gamma = ot.Gamma(1.0, 2.0, 3.0)
distributionCollection.add(gamma)
continuousDistributionCollection.add(gamma)

gumbel = ot.Gumbel(1.0, 2.0)
distributionCollection.add(gumbel)
continuousDistributionCollection.add(gumbel)

lognormal = ot.LogNormal(1.0, 1.0, 2.0)
distributionCollection.add(lognormal)
continuousDistributionCollection.add(lognormal)

logistic = ot.Logistic(1.0, 1.0)
distributionCollection.add(logistic)
continuousDistributionCollection.add(logistic)

normal = ot.Normal(1.0, 2.0)
distributionCollection.add(normal)
continuousDistributionCollection.add(normal)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Generated gallery script: build an example distribution for the substituted
# class, then plot PDF (and CDF in 1-d) on a shared figure.
if ot.Gumbel().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.Gumbel().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.Gumbel().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.Gumbel()
dimension = distribution.getDimension()
if dimension == 1:
    # 1-d: PDF and CDF side by side
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    # 2-d: PDF contour only (branch continues past this excerpt)
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()
#! /usr/bin/env python from __future__ import print_function import openturns as ot import math as m #ot.Log.Show(ot.Log.ALL) coll = [] # case 1: no transformation coll.append([ot.Normal(), ot.Normal()]) # case 2: same copula left = ot.ComposedDistribution([ot.Normal(), ot.Gumbel()], ot.IndependentCopula(2)) right = ot.ComposedDistribution([ot.Triangular()]*2, ot.IndependentCopula(2)) coll.append([left, right]) # case 3: same standard space left = ot.ComposedDistribution([ot.Normal(), ot.Gumbel()], ot.IndependentCopula(2)) right = ot.ComposedDistribution([ot.Triangular()]*2, ot.GumbelCopula()) coll.append([left, right]) #TODO case 4: different standard space for left, right in coll: transformation = ot.DistributionTransformation(left, right) print('left=', left) print('right=', right) print('transformation=', transformation) inverseTransformation = transformation.inverse()