Example No. 1
#!/usr/bin/env python
# coding: utf-8

from __future__ import print_function
import openturns as ot
import persalys

Study_0 = persalys.Study('Study_0')
persalys.Study.Add(Study_0)

# variables
dist_z0 = ot.Uniform(100, 150)
z0 = persalys.Input('z0', 100, dist_z0, '')
dist_v0 = ot.Normal(55, 10)
v0 = persalys.Input('v0', 55, dist_v0, '')
dist_m = ot.Normal(80, 8)
m = persalys.Input('m', 80, dist_m, '')
dist_c = ot.Uniform(0, 30)
c = persalys.Input('c', 16, dist_c, '')
z = persalys.Output('z', '')
z2 = persalys.Output('z2', 'fake output')
inputs = [z0, v0, m, c]
outputs = [z, z2]

# mesh model
meshModel = persalys.GridMeshModel(ot.Interval(0., 12.), [10])

# Python model
code = 'from math import exp\n\ndef _exec(z0,v0,m,c):\n    g = 9.81\n    zmin = 0.\n    tau = m / c\n    vinf = -m * g / c\n\n    # mesh nodes\n    t = getMesh().getVertices()\n\n    z = [max(z0 + vinf * t_i[0] + tau * (v0 - vinf) * (1 - exp(-t_i[0] / tau)), zmin) for t_i in t]\n    z2 = [ t_i[0] for t_i in t]\n\n    return z, z2'
PhysicalModel_1 = persalys.PythonFieldModel('PhysicalModel_1', meshModel,
                                            inputs, outputs, code)
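
# Assumed continuation (not in the original snippet): register the model with the study,
# as persalys-generated scripts usually do.
Study_0.add(PhysicalModel_1)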
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Set the random generator seed
ot.RandomGenerator.SetSeed(0)

# %%
# The standard normal
# -------------------
# The parameters of the standard normal distribution are estimated by the method of moments.
# Thus the asymptotic distribution of the parameters is normal and is estimated by bootstrap on the initial data.
#
distribution = ot.Normal(0.0, 1.0)
sample = distribution.getSample(50)
estimated = ot.NormalFactory().build(sample)

# %%
# We take a look at the estimated parameters:
print(estimated.getParameter())

# %%
# The `buildEstimator` method gives the asymptotic distribution of the parameters.
#
fittedRes = ot.NormalFactory().buildEstimator(sample)
paramDist = fittedRes.getParameterDistribution()

# %%
# We draw the 2D-PDF of the parameters
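# A minimal sketch of that plot (assumed continuation; the original snippet is truncated here):
graph = paramDist.drawPDF()
view = viewer.View(graph)
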
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.MaximumDistribution().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.MaximumDistribution().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork([ot.Normal(2),ot.Dirichlet([0.5, 1.0, 1.5])], ot.BipartiteGraph([[0,1], [0,1]]))
elif ot.MaximumDistribution().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.MaximumDistribution()
dimension = distribution.getDimension()
if dimension == 1:
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()
    fig = plt.figure(figsize=(10, 5))
    plt.suptitle(str(distribution))
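    # Assumed completion of the truncated 2-d branch: render the contour PDF in the figure.
    View(pdf_graph, figure=fig, add_legend=False)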
Example No. 4
import openturns as ot

# Assumed preamble for this truncated fragment: a regular 1-d mesh with 11 vertices
# and the enclosing Python field function class (both were cut off in the snippet).
mesh = ot.RegularGrid(0.0, 0.1, 11)


class FUNC(ot.OpenTURNSPythonFieldFunction):

    def __init__(self):
        super(FUNC, self).__init__(mesh, 2, mesh, 2)
        self.setInputDescription(['R', 'S'])
        self.setOutputDescription(['T', 'U'])

    def _exec(self, X):
        Xs = ot.Sample(X)
        Y = Xs * ([2.0] * Xs.getDimension())
        return Y

F = FUNC()
print('in_dim=' + str(F.getInputDimension())
       + ' out_dim=' + str(F.getOutputDimension())
       + ' spatial_dim=' + str(F.getInputMesh().getDimension()))


X = ot.Field(mesh, ot.Normal(2).getSample(11))
print(F(X.getValues()))

Xsample = ot.ProcessSample(5, X)
print(F(Xsample))


# Instance creation
myFunc = ot.FieldFunction(F)

# Copy constructor
newFunc = ot.FieldFunction(myFunc)

print(('myFunc input dimension= ' + str(myFunc.getInputDimension())))
print(('myFunc output dimension= ' + str(myFunc.getOutputDimension())))
Example No. 5
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

try:
    dim = 10
    R = ot.CorrelationMatrix(dim)
    for i in range(dim):
        for j in range(i):
            R[i, j] = (i + j + 1.0) / (2.0 * dim)
    mean = [2.0] * dim
    sigma = [3.0] * dim
    distribution = ot.Normal(mean, sigma, R)

    size = 100
    sample = distribution.getSample(size)
    sampleY = sample.getMarginal(0)

    sampleZ = ot.Sample(size, 1)
    for i in range(size):
        sampleZ[i, 0] = sampleY[i, 0] * sampleY[i, 0]

    discreteSample1 = ot.Poisson(0.1).getSample(size)
    discreteSample2 = ot.Geometric(0.4).getSample(size)

    # Chi-squared independence test: tests whether two samples (of possibly different sizes) are independent
    # Caution: discrete samples only
    # H0 = the samples are independent
# In this example we are going to estimate the parameters of a parametric distribution by generic numerical optimization of the likelihood.
#
# The likelihood of a sample :math:`(\vect{x}_1, \dots, \vect{x}_n)` according to a parametric density function :math:`p_{\vect{\theta}}` is:
#
# .. math::
#    \ell(\vect{x}_1, \dots, \vect{x}_n,\vect{\theta}) = \prod_{i=1}^n p_{\vect{\theta}}(\vect{x}_i)
#

# %%
import openturns as ot
import math as m
ot.Log.Show(ot.Log.NONE)

# %%
# Create data from a normal PDF with :math:`\mu=4`, :math:`\sigma=1.5`.
sample = ot.Normal(4.0, 1.5).getSample(200)

# %%
# Create the search interval of (:math:`\mu`, :math:`\sigma`): the constraint is :math:`\sigma>0`.
lowerBound = [-1.0, 1.0e-4]
upperBound = [-1.0, -1.0]
finiteLowerBound = [False, True]
finiteUpperBound = [False, False]
bounds = ot.Interval(lowerBound, upperBound, finiteLowerBound,
                     finiteUpperBound)

# %%
# Create the starting point of the search:
#
# - for :math:`\mu`: the first point,
# - for :math:`\sigma`: a value evaluated from the first two points.
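
# %%
# A hedged sketch of how the estimation could continue from here (assumed, not the
# original code): write the log-likelihood as an ot.PythonFunction of (mu, sigma),
# reusing `sample` and `bounds` defined above, and maximize it numerically.
def logLikelihood(theta):
    # log-likelihood of `sample` for theta = (mu, sigma)
    mu, sigma = theta
    if sigma <= 0.0:
        return [-1.0e10]  # penalize points outside the admissible domain
    return [sum(m.log(ot.Normal(mu, sigma).computePDF(x)) for x in sample)]


llFunction = ot.PythonFunction(2, 1, logLikelihood)
problem = ot.OptimizationProblem(llFunction)
problem.setMinimization(False)
problem.setBounds(bounds)
# starting point as described above: mu from the first point, sigma from the first two points
mu0 = sample[0, 0]
sigma0 = abs(sample[1, 0] - sample[0, 0]) / m.sqrt(2.0) + 1.0e-2  # guard against zero
solver = ot.Cobyla(problem)
solver.setStartingPoint([mu0, sigma0])
solver.run()
print(solver.getResult().getOptimalPoint())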
Example No. 7
# If we denote by :math:`\mathcal{L}_0` a scalar distribution and by :math:`f: \mathbb{R} \rightarrow \mathbb{R}` a mapping, then it is possible to create the push-forward distribution :math:`\mathcal{L}` defined by
#
# .. math::
#    \mathcal{L} = f(\mathcal{L}_0)
#

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create a 1-d distribution
antecedent = ot.Normal()

# %%
# Create a 1-d transformation
f = ot.SymbolicFunction(['x'], ['sin(x)+cos(x)'])

# %%
# Create the composite distribution
distribution = ot.CompositeDistribution(f, antecedent)
graph = distribution.drawPDF()
view = viewer.View(graph)

# %%
# Using the simplified construction
distribution = antecedent.exp()
graph = distribution.drawPDF()
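# Assumed completion: display the PDF of the transformed distribution, as before.
view = viewer.View(graph)
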
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Draw minimum volume level set in 1D
# -----------------------------------
#
# In this paragraph, we compute the minimum volume level set of a univariate distribution.

# %%
# With a Normal, minimum volume LevelSet
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

# %%
n = ot.Normal()

# %%
graph = n.drawPDF()
view = viewer.View(graph)

# %%
# We want to compute the minimum volume LevelSet which contains `alpha`=90% of the distribution. The `threshold` is the value of the PDF corresponding to the `alpha`-probability: the points contained in the LevelSet have a PDF value greater than or equal to this threshold.

# %%
alpha = 0.9
levelSet, threshold = n.computeMinimumVolumeLevelSetWithThreshold(alpha)
threshold

# %%
# The `LevelSet` has a `contains` method. Obviously, the point 0 is in the LevelSet.
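
# %%
# A minimal check of that statement (sketch; `contains` takes a point):
print(levelSet.contains([0.0]))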
Example No. 9
        print('no')

    # check that python can find the Viewer module
    # If it fails, check that matplotlib package is installed
    print('2: Viewer (matplotlib)'.ljust(width), end=' ')
    try:
        import openturns.viewer
        print('OK')
    except ImportError:
        print('no')

    # check that OpenTURNS can run R
    # It should produce a file named testDraw.png
    print('3: drawing (R)'.ljust(width), end=' ')
    try:
        graph = ot.Normal().drawPDF()
        fname = 'testDraw.png'
        graph.draw(fname)
        os.remove(fname)
        print('OK')
    except:
        print('no')

    # check XML support
    print('5: serialization (LibXML2)'.ljust(width), end=' ')
    try:
        storageManager = ot.XMLStorageManager('myFile.xml')
        print('OK')
    except:
        print('no')
# We create a Function
myFunc = ot.Function(['x1', 'x2', 'x3', 'x4'], ['y1', 'y2'],
                     ['(x1*x1+x2^3*x1)/(2*x3*x3+x4^4+1)',
                      'cos(x2*x2+x4)/(x1*x1+1+x3^4)'])

# We create a distribution
dim = myFunc.getInputDimension()
meanPoint = [0.2] * dim
sigma = [0.] * dim
for i in range(dim):
    sigma[i] = 0.1 * (i + 1)
R = ot.CorrelationMatrix(dim)
for i in range(1, dim):
    R[i, i - 1] = 0.25

distribution = ot.Normal(meanPoint, sigma, R)

# We create a distribution-based RandomVector
X = ot.RandomVector(distribution)

# We create a composite RandomVector Y from X and myFunction
Y = ot.RandomVector(myFunc, X)

# We create a Taylor expansion moments algorithm
algo = ot.TaylorExpansionMoments(Y)

# We test the attributes here
print('algo=', algo)

# We compute the various results provided by the Taylor expansion algorithm
print('First order mean=', repr(algo.getMeanFirstOrder()))
Example No. 11
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

mu = [2.0, 2.0]
sigma = [1.0, 1.0]
R = ot.CorrelationMatrix(2)
R[0, 1] = 0.8
myNormal1 = ot.Normal(mu, sigma, R)
R2 = ot.CorrelationMatrix(2)
R2[0, 1] = -0.8
mu2 = [3.0, 3.0]
myNormal2 = ot.Normal(mu2, sigma, R2)

myMixture = ot.Mixture([myNormal1, myNormal2], [0.3, 0.7])
graphPDF = myMixture.drawPDF()
graphPDF.setXTitle('$x_1$')
graphPDF.setYTitle('$x_2$')

graphCDF = myMixture.drawCDF()
graphCDF.setXTitle('$x_1$')
graphCDF.setYTitle('$x_2$')

fig = plt.figure(figsize=(8, 4))
plt.suptitle("Mixture: 0.3*Normal1 + 0.7*Normal2: pdf and cdf")
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
pdf_axis.set_xlim(auto=True)
cdf_axis.set_xlim(auto=True)

View(graphPDF, figure=fig, axes=[pdf_axis], add_legend=True)
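# Assumed completion: render the CDF panel next to the PDF one (mirrors the call above).
View(graphCDF, figure=fig, axes=[cdf_axis], add_legend=True)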
Example No. 12
# -*- coding: utf-8 -*-
"""
Faire une étude en tendance centrale sur la formule de 
la flèche d'un tube.
"""

import openturns as ot
from openturns.viewer import View

# 1. Define the function
inputsvars = ["F", "L", "a", "De", "di", "E"]
formula = ["-F*a^2*(L-a)^2/(3*E*L*pi_*(De^4-di^4)/32)"]
g = ot.SymbolicFunction(inputsvars, formula)

# 2. Define the probabilistic model
XF = ot.Normal(1, 0.1)
XL = ot.Normal(1.5, 0.01)
Xa = ot.Uniform(0.7, 1.2)
XD = ot.Triangular(0.75, 0.8, 0.85)
Xd = ot.Triangular(0.09, 0.1, 0.11)
XE = ot.Normal(200000, 2000)
XF.setDescription(["Load"])
XL.setDescription(["Length"])
Xa.setDescription(["Support position"])
XD.setDescription(["External diameter"])
Xd.setDescription(["Internal diameter"])
XE.setDescription(["Young's modulus"])

# 3. Create the joint distribution function,
#    the output and the event.
inputDistribution = ot.ComposedDistribution([XF, XL, Xa, XD, Xd, XE])
Example No. 13
#
# - the Spearman test: it tests whether there exists a monotonic relation between two scalar samples.
#   If the samples are :math:`\underline{x} = (x_i)_{1 \leq i \leq n}` and :math:`\underline{y} = (y_i)_{1 \leq i \leq n}`, the Spearman test evaluates the decision variable:
#
# .. math::
#      D = 1-\frac{6\sum_{i=1}^n (r_i - s_i)^2}{n(n^2-1)}
#
# where :math:`r_i = rank(x_i)` and  :math:`s_i = rank(y_i)`. :math:`D` is such that :math:`\sqrt{n-1}D` tends towards the standard normal distribution.
#

# %%
# The continuous case
# ^^^^^^^^^^^^^^^^^^^
#
# We create two different continuous samples:
sample1 = ot.Normal().getSample(100)
sample2 = ot.Normal().getSample(100)

# %%
# We first use the Pearson test and store the result:
resultPearson = ot.HypothesisTest.Pearson(sample1, sample2, 0.10)

# %%
# We can then display the result of the test as a yes/no answer with
# the `getBinaryQualityMeasure` method. We can retrieve the p-value and the threshold with the `getPValue`
# and `getThreshold` methods.
print('Samples are independent?', resultPearson.getBinaryQualityMeasure(),
      'p-value=%.6g' % resultPearson.getPValue(),
      'threshold=%.6g' % resultPearson.getThreshold())
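
# %%
# For the Spearman test described above, a minimal sketch on the same samples
# (assuming the same calling convention as for the Pearson test):
resultSpearman = ot.HypothesisTest.Spearman(sample1, sample2, 0.10)
print('Samples are independent (Spearman)?', resultSpearman.getBinaryQualityMeasure(),
      'p-value=%.6g' % resultSpearman.getPValue())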

Example No. 14
    print(ot.FunctionCollection(basis)[i](X))

proxy = ot.DesignProxy(X, phis)
full = range(basisSize)

design = ot.Matrix(proxy.computeDesign(full))
print('design=', design)

methods = [
    ot.SVDMethod(proxy, full),
    ot.CholeskyMethod(proxy, full),
    ot.QRMethod(proxy, full),
    ot.SparseMethod(ot.QRMethod(proxy, full))
]

y = ot.Normal([1.0] * sampleSize, [0.1] * sampleSize,
              ot.CorrelationMatrix(sampleSize)).getRealization()
yAt = design.transpose() * y

for method in methods:
    name = method.__class__.__name__
    print('-- method:', name)

    x = method.solve(y)
    print('solve:', x)

    residual = m.sqrt((design * x - y).normSquare()) / sampleSize
    print('residual: %.6g' % residual)

    try:
        x = method.solveNormal(yAt)
        print('solveNormal:', x)
model = ot.ParametricFunction(g, params, trueParameter)

Theta1 = ot.Dirac(trueParameter[0])
Theta2 = ot.Dirac(trueParameter[1])
Theta3 = ot.Dirac(trueParameter[2])

inputRandomVector = ot.ComposedDistribution([Theta1, Theta2, Theta3])

inputSample = inputRandomVector.getSample(size)
y = g(inputSample)

outputObservationNoiseSigma = 0.05
meanNoise = ot.Point(outputDimension)
covarianceNoise = ot.Point(outputDimension, outputObservationNoiseSigma)
R = ot.IdentityMatrix(outputDimension)
observationOutputNoise = ot.Normal(meanNoise, covarianceNoise, R)

# Add noise
sampleNoise = observationOutputNoise.getSample(size)
y += sampleNoise

candidate = [1.0] * 3
priorCovariance = ot.CovarianceMatrix(3)
for i in range(3):
    priorCovariance[i, i] = 3.0 + (1.0 + i) * (1.0 + i)
    for j in range(i):
        priorCovariance[i, j] = 1.0 / (1.0 + i + j)
errorCovariance = ot.CovarianceMatrix(outputDimension)
for i in range(outputDimension):
    errorCovariance[i, i] = 0.1 * (2.0 + (1.0 + i) * (1.0 + i))
    for j in range(i):
Example No. 16
from __future__ import print_function

import openturns as ot
import otrobopt
full_fun = ot.SymbolicFunction(['x', 'theta'], ['theta*x'])
param_fun = ot.ParametricFunction(full_fun, [1], [1.0])

# Normal distribution associated to parameter theta
thetaDist = ot.Normal(2.0, 0.1)

# Definition of "analytical" mean measure associated to the parametric function
mean_measure = otrobopt.MeanMeasure(param_fun, thetaDist)
variance_measure = otrobopt.VarianceMeasure(param_fun, thetaDist)
# Definition of "discretized" mean measure
N = 10
experiment = ot.LHSExperiment(thetaDist, N)
factory = otrobopt.MeasureFactory(experiment)
discretized_mean_measure_evaluation = factory.build(mean_measure)

coll = [mean_measure, variance_measure]
discretizedColl = factory.buildCollection(coll)
print(coll)

x = [1.0]
print(otrobopt.AggregatedMeasure(coll)(x))
# This is capital J: J(x,xi) = calJ(x+xi), the parametric objective function
JFull = ot.ComposedFunction(calJ, noise)
J = ot.ParametricFunction(JFull, [2, 3], [0.0] * 2)

# This is g, the parametric constraints
gFull = ot.ComposedFunction(calG, noise)
g = ot.ParametricFunction(gFull, [2, 3], [0.0] * 2)

bounds = ot.Interval([-3.0] * 2, [3.0] * 2)
solver = ot.NLopt('LD_SLSQP')
solver.setMaximumIterationNumber(100)

for sigma_xi in [0.1, 0.2, 0.3, 0.4, 0.5]:

    thetaDist = ot.Normal([0.0] * 2, [sigma_xi] * 2, ot.IdentityMatrix(2))
    robustnessMeasure = otrobopt.MeanMeasure(J, thetaDist)
    reliabilityMeasure = otrobopt.JointChanceMeasure(g, thetaDist, ot.Less(),
                                                     0.9)
    problem = otrobopt.RobustOptimizationProblem(robustnessMeasure,
                                                 reliabilityMeasure)
    problem.setBounds(bounds)

    algo = otrobopt.SequentialMonteCarloRobustAlgorithm(problem, solver)
    algo.setMaximumIterationNumber(11)
    algo.setMaximumAbsoluteError(1e-6)
    algo.setInitialSamplingSize(2)  # size of initial xsi discretization
    algo.setSamplingSizeIncrement(ot.PythonFunction(1, 1,
                                                    lambda x: 1.0 * x))  #
    algo.setInitialSearch(
        1000)  # number of multi-start tries, uniform law using bounds
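    # Assumed ending of the truncated loop body (sketch, not the original code): run the
    # robust algorithm and report the optimum found for this noise level.
    algo.run()
    result = algo.getResult()
    print('sigma_xi=', sigma_xi, 'x*=', result.getOptimalPoint())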
Example No. 18
## For both processes we will use exponential covariance models with the same base parameters.
### The 1D covariance model:
model_1D = ot.ExponentialModel([10], [1])

### The 2D covariance model:
model_2D = ot.ExponentialModel([1, 1], [1])

## Now let's finally build our two processes and the distribution.
### The 1D Gaussian process
process_1D = ot.GaussianProcess(model_1D, mesh_1D)

### The 2D Gaussian process
process_2D = ot.GaussianProcess(model_2D, mesh_2D)

### The normal distribution:
scalar_distribution = ot.Normal()

## Now that we have our processes and distributions, let's first evaluate the function
## without using any wrapper.
#### First get fields and samples from our processes and distributions
ot.RandomGenerator_SetSeed(888)
field_1D = process_1D.getRealization()
field_2D = process_2D.getRealization()
scalar_0 = [scalar_distribution.getRealization()]

print('For field 1D:\n', field_1D, '\n')
print('For field 2D:\n', field_2D, '\n')
print('For scalar :\n', scalar_0, '\n')
output_dummy_0 = dummyFunction2Wrap(field_2D, field_1D, scalar_0)

print('Output is:\n', output_dummy_0)
Example No. 19
# %%
# In this example we are going to perform a Kolmogorov or a Lilliefors goodness-of-fit test for a 1-d continuous distribution.

# %%
from __future__ import print_function
import openturns as ot

ot.Log.Show(ot.Log.NONE)

# %%
# Create the data.

# %%
distribution = ot.Normal()
sample = distribution.getSample(50)

# %%
# Case 1 : the distribution parameters are known.
# -----------------------------------------------
#
# In the case where the parameters of the distribution are known, we must use the `Kolmogorov` static method and the distribution to be tested.

# %%
result = ot.FittingTest.Kolmogorov(sample, distribution, 0.01)
print('Conclusion=', result.getBinaryQualityMeasure(), 'P-value=',
      result.getPValue())

# %%
# Test succeeded?
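
# %%
# A minimal way to read that yes/no answer from the result object above (sketch):
if result.getBinaryQualityMeasure():
    print('The sample is compatible with the tested distribution at level 0.01')
else:
    print('The tested distribution is rejected at level 0.01')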
    dim = inPoint.getDimension()
    for i in range(dim):
        if (m.fabs(inPoint[i]) < 1.e-10):
            inPoint[i] = 0.0
    return inPoint


# Instantiate one distribution object
referenceDistribution = [
    ot.TruncatedNormal(2.0, 1.5, 1.0, 4.0),
    ot.TruncatedNormal(2.0, 1.5, 1.0, 200.0),
    ot.TruncatedNormal(2.0, 1.5, -200.0, 4.0),
    ot.TruncatedNormal(2.0, 1.5, 1.0, 4.0)
]
distribution = [
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0, 4.0),
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0,
                             ot.TruncatedDistribution.LOWER),
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 4.0,
                             ot.TruncatedDistribution.UPPER),
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5),
                             ot.Interval([1.0], [4.0], [True], [True]))
]

# add a 2-d test
dimension = 2
size = 70
ref = ot.Normal([2.0] * dimension, ot.CovarianceMatrix(2))
sample = ref.getSample(size)
ks = ot.KernelSmoothing().build(sample)
truncatedKS = ot.TruncatedDistribution(
Example No. 21
dim = 2

# problem
model = ot.SymbolicFunction(['x', 'y'],
                    ['3*(1-x)^2*exp(-x^2-(y+1)^2)-10*(x/5-x^3-y^5)*exp(-x^2-y^2)-exp(-(x+1)^2-y^2)/3'])
bounds = ot.Interval([-3.0] * dim, [3.0] * dim)
problem = ot.OptimizationProblem(model)
problem.setBounds(bounds)

# solver
solver = ot.TNC(problem)

# run locally
solver.setStartingPoint([0.0] * dim)
algo = solver
algo.run()
result = algo.getResult()
print('-- local search x*=', result.getOptimalPoint(),
      'f(x*)=', result.getOptimalValue())

# multistart
distribution = ot.Normal([0.0] * dim, [2.0] * dim, ot.CorrelationMatrix(dim))
experiment = ot.LHSExperiment(distribution, 20)
startingPoints = experiment.generate()
algo = ot.MultiStart(solver, startingPoints)
print('-- algo=', algo)
algo.run()
result = algo.getResult()
print('-- multistart x*=', result.getOptimalPoint(),
      'f(x*)=', result.getOptimalValue())
Example No. 22
#!/usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()

# create a function
dim = 4
function = ot.SymbolicFunction(['E', 'F', 'L', 'I'], ['F*L^3/(3.*E*I)'])

# create a distribution
distribution = ot.Normal([50., 1.0, 10.0, 5.0], [1.0] * dim,
                         ot.IdentityMatrix(dim))
vect = ot.RandomVector(distribution)
composite = ot.CompositeRandomVector(function, vect)
event = ot.ThresholdEvent(composite, ot.Less(), -3.0)

# create an ADS algorithm
n = int(1e4)
algo = ot.AdaptiveDirectionalStratification(event)
algo.setMaximumOuterSampling(n)
algo.setGamma([0.6, 0.4])

algo.run()
result = algo.getResult()
print(result)

# ADS-2+
algo2 = algo
algo2.setPartialStratification(True)
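# Assumed continuation of the truncated ADS-2+ variant (sketch): run it the same way.
algo2.run()
print(algo2.getResult())
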
# %%
from __future__ import print_function
import openturns as ot
import math as m
ot.Log.Show(ot.Log.NONE)
ot.RandomGenerator.SetSeed(0)

# %%
# generate some multivariate data to estimate, with correlation
cop1 = ot.AliMikhailHaqCopula(0.6)
cop2 = ot.ClaytonCopula(2.5)
copula = ot.ComposedCopula([cop1, cop2])
marginals = [
    ot.Uniform(5.0, 6.0),
    ot.Arcsine(),
    ot.Normal(-40.0, 3.0),
    ot.Triangular(100.0, 150.0, 300.0)
]
distribution = ot.ComposedDistribution(marginals, copula)
sample = distribution.getSample(10000).getMarginal([0, 2, 3, 1])

# %%
# estimate marginals
dimension = sample.getDimension()
marginalFactories = []
for factory in ot.DistributionFactory.GetContinuousUniVariateFactories():
    if str(factory).startswith('Histogram'):
        # ~ non-parametric
        continue
    marginalFactories.append(factory)
estimated_marginals = [
Example No. 24
mesh = mesher.build(interval)
graph = mesh.draw()
graph.setTitle('Regular 2-d mesh')
view = viewer.View(graph)

# %%
# Create a field as a realization of a process
amplitude = [1.0]
scale = [0.2] * 2
myCovModel = ot.ExponentialModel(scale, amplitude)
myProcess = ot.GaussianProcess(myCovModel, mesh)
field = myProcess.getRealization()

# %%
# Create a field from a mesh and some values
values = ot.Normal([0.0] * 2, [1.0] * 2,
                   ot.CorrelationMatrix(2)).getSample(len(mesh.getVertices()))
for i in range(len(values)):
    x = values[i]
    values[i] = 0.05 * x / x.norm()
field = ot.Field(mesh, values)
graph = field.draw()
graph.setTitle('Field on 2-d mesh and 2-d values')
view = viewer.View(graph)

# %%
# Compute the input mean of the field
field.getInputMean()

# %%
# Draw the field without interpolation
graph = field.drawMarginal(0, False)
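# Assumed completion: display the non-interpolated marginal of the field.
view = viewer.View(graph)
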
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.Normal().__class__.__name__ == 'Bernoulli':
    distribution = ot.Bernoulli(0.7)
elif ot.Normal().__class__.__name__ == 'Binomial':
    distribution = ot.Binomial(5, 0.2)
elif ot.Normal().__class__.__name__ == 'ComposedDistribution':
    copula = ot.IndependentCopula(2)
    marginals = [ot.Uniform(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, copula)
elif ot.Normal().__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(
        coll, ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.Normal().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.Normal().__class__.__name__ == 'KernelMixture':
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwith = [1.0]
    distribution = ot.KernelMixture(kernel, bandwith, sample)
elif ot.Normal().__class__.__name__ == 'MaximumDistribution':
    coll = [
        ot.Uniform(2.5, 3.5),
        ot.LogUniform(1.0, 1.2),
        ot.Triangular(2.0, 3.0, 4.0)
    ]
    distribution = ot.MaximumDistribution(coll)
elif ot.Normal().__class__.__name__ == 'Multinomial':
    distribution = ot.Multinomial(5, [0.2])
Example No. 26
import openturns as ot
from openturns.viewer import View

ot.RandomGenerator.SetSeed(0)
distribution = ot.Normal(3.0, 3.0)
distribution2 = ot.Normal(2.0, 1.0)
sample = distribution.getSample(150)
graph = ot.VisualTest.DrawQQplot(sample, distribution2)
View(graph)
Example No. 27
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if (ot.Skellam().__class__.__name__ == 'ComposedDistribution'):
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.Skellam().__class__.__name__ == 'CumulativeDistributionNetwork'):
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
elif (ot.Skellam().__class__.__name__ == 'Histogram'):
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.Skellam()
dimension = distribution.getDimension()
if dimension <= 2:
    if distribution.getDimension() == 1:
        distribution.setDescription(['$x$'])
        pdf_graph = distribution.drawPDF()
        cdf_graph = distribution.drawCDF()
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(121)
        cdf_axis = fig.add_subplot(122)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
        View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
    else:
        distribution.setDescription(['$x_1$', '$x_2$'])
Example No. 28
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.InverseChiSquare().__class__.__name__ == 'Bernoulli':
    distribution = ot.Bernoulli(0.7)
elif ot.InverseChiSquare().__class__.__name__ == 'Binomial':
    distribution = ot.Binomial(5, 0.2)
elif ot.InverseChiSquare().__class__.__name__ == 'ComposedDistribution':
    copula = ot.IndependentCopula(2)
    marginals = [ot.Uniform(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, copula)
elif ot.InverseChiSquare(
).__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(
        coll, ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.InverseChiSquare().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.InverseChiSquare().__class__.__name__ == 'KernelMixture':
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwith = [1.0]
    distribution = ot.KernelMixture(kernel, bandwith, sample)
elif ot.InverseChiSquare().__class__.__name__ == 'MaximumDistribution':
    coll = [
        ot.Uniform(2.5, 3.5),
        ot.LogUniform(1.0, 1.2),
        ot.Triangular(2.0, 3.0, 4.0)
    ]
    distribution = ot.MaximumDistribution(coll)
elif ot.InverseChiSquare().__class__.__name__ == 'Multinomial':
Example No. 29
formulas = [
    'min(0.1 * (u1 - u2)^2.0 - (u1 + u2) / sqrt(2.0) + 3.0, 0.1 * (u1 - u2)^2.0 + (u1 + u2) / sqrt(2.0) + 3.0, u1 - u2 + 3.5 * sqrt(2.0), -u1 + u2 + 3.5 * sqrt(2.0))'
]
limitState = ot.SymbolicFunction(['u1', 'u2'], formulas)
dim = limitState.getInputDimension()

#
# Probabilistic model
#

mean = ot.Point(dim, 0.0)

sigma = ot.Point(dim, 1.0)

R = ot.IdentityMatrix(dim)
myDistribution = ot.Normal(mean, sigma, R)

#
# Limit state
#

vect = ot.RandomVector(myDistribution)

output = ot.CompositeRandomVector(limitState, vect)

myEvent = ot.ThresholdEvent(output, ot.Less(), 0.0)

#
# Computation
#
bs = 1
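# Assumed sketch of the truncated computation step (not the original code): a crude
# Monte Carlo estimate of the probability of myEvent, reusing the block size bs above.
experiment = ot.MonteCarloExperiment()
algoMC = ot.ProbabilitySimulationAlgorithm(myEvent, experiment)
algoMC.setBlockSize(bs)
algoMC.setMaximumOuterSampling(100000)
algoMC.setMaximumCoefficientOfVariation(0.05)
algoMC.run()
print('Pf =', algoMC.getResult().getProbabilityEstimate())
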
import openturns as ot
from openturns.viewer import View

N = 5
ot.RandomGenerator.SetSeed(0)
x = ot.Uniform(0.0, 8.0).getSample(N)
f = ot.SymbolicFunction(['x'], ['15*x+1'])
y = f(x) + ot.Normal(0.0, 20.0).getSample(N)
graph = f.draw(0.0, 8.0)
graph.setTitle('Non significant Pearson coefficient')
graph.setXTitle('u')
graph.setYTitle('v')
cloud = ot.Cloud(x, y)
cloud.setPointStyle('circle')
cloud.setColor('orange')
graph.add(cloud)
View(graph)