Example no. 1
import openturns as ot
import openturns.testing as ott

ot.TESTPREAMBLE()
ot.PlatformInfo.SetNumericalPrecision(5)

ot.RandomGenerator.SetSeed(0)


# This model is linear in (a, b, c) and identifiable.
# Derived from y = a + b * x + c * x^2 at x=[-1.0, -0.6, -0.2, 0.2, 0.6, 1.0]
g = ot.SymbolicFunction(
    ["a", "b", "c"],
    [
        "a +  -1.0  * b +  1.0  * c",
        "a +  -0.6  * b +  0.36  * c",
        "a +  -0.2  * b +  0.04  * c",
        "a +  0.2  * b +  0.04  * c",
        "a +  0.6  * b +  0.36  * c",
        "a +  1.0  * b +  1.0  * c",
    ],
)
inputDimension = g.getInputDimension()
outputDimension = g.getOutputDimension()

trueParameter = ot.Point([12.0, 7.0, -8])

parameterDimension = trueParameter.getDimension()

Theta1 = ot.Dirac(trueParameter[0])
Theta2 = ot.Dirac(trueParameter[1])
Theta3 = ot.Dirac(trueParameter[2])
Example no. 2
        # Default method: log-likelihood
        m = ot.BernsteinCopulaFactory.ComputeLogLikelihoodBinNumber(sample)
        print("Log-likelihood m=", m)
        est_copula = ot.BernsteinCopulaFactory().build(sample, m)
        max_error = compute_max_error(ref_copula, est_copula)
        print("Max. error=%.5f" % max_error)
        check_bernstein_copula(est_copula)
        # AMISE method
        m = ot.BernsteinCopulaFactory.ComputeAMISEBinNumber(sample)
        print("AMISE m=", m)
        est_copula = ot.BernsteinCopulaFactory().build(sample, m)
        max_error = compute_max_error(ref_copula, est_copula)
        print("Max. error=%.5f" % max_error)
        check_bernstein_copula(est_copula)
        # Penalized Csiszar divergence method
        f = ot.SymbolicFunction("t", "-log(t)")
        m = ot.BernsteinCopulaFactory.ComputePenalizedCsiszarDivergenceBinNumber(
            sample, f)
        print("Penalized Csiszar divergence m=", m)
        est_copula = ot.BernsteinCopulaFactory().build(sample, m)
        max_error = compute_max_error(ref_copula, est_copula)
        print("Max. error=%.5f" % max_error)
        check_bernstein_copula(est_copula)
        print("")

except:
    import sys
    print("t_BernsteinCopulaFactory_std.py",
          sys.exc_info()[0],
          sys.exc_info()[1])
Example no. 3
#! /usr/bin/env python

import openturns as ot
import openturns.testing as ott

ot.TESTPREAMBLE()
# ot.Log.Show(ot.Log.ALL)

dim = 2

# problem
model = ot.SymbolicFunction(['x', 'y'], [
    '3*(1-x)^2*exp(-x^2-(y+1)^2)-10*(x/5-x^3-y^5)*exp(-x^2-y^2)-exp(-(x+1)^2-y^2)/3'
])
bounds = ot.Interval([-3.0] * dim, [3.0] * dim)
problem = ot.OptimizationProblem(model)
problem.setBounds(bounds)

# solver
solver = ot.TNC(problem)

# run locally
solver.setStartingPoint([0.0] * dim)
algo = solver
algo.run()
result = algo.getResult()
local_optimal_point = [0.296446, 0.320196]
local_optimal_value = [-0.0649359]
ott.assert_almost_equal(result.getOptimalPoint(), local_optimal_point, 1e-5,
                        0.0)
ott.assert_almost_equal(result.getOptimalValue(), local_optimal_value, 1e-5,
                        0.0)
Example no. 4
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()

# First, build two functions from R^3->R
inVar = ['x1', 'x2', 'x3']
formula = ['x1^3 * sin(x2 + 2.5 * x3) - (x1 + x2)^2 / (1.0 + x3^2)']
functions = []
functions.append(ot.SymbolicFunction(inVar, formula))
formula = ['exp(-x1 * x2 + x3) / cos(1.0 + x2 * x3 - x1)']
functions.append(ot.SymbolicFunction(inVar, formula))
# Second, build the weights
coefficients = [0.3, 2.9]
# Third, build the function
myFunction = ot.LinearCombinationFunction(functions, coefficients)
inPoint = ot.Point([1.2, 2.3, 3.4])
print('myFunction=', myFunction)
print('Value at ', inPoint, '=', myFunction(inPoint))
print('Gradient at ', inPoint, '=', myFunction.gradient(inPoint))
print('Hessian at ', inPoint, '=', myFunction.hessian(inPoint))
Example no. 5
print("Morris experiment generated from grid = ", sample1)

print("Use Case #2 : generate trajectories from initial lhs design")
size = 20
# Generate an LHS design
dist = ot.ComposedDistribution(2 * [ot.Uniform(0, 1)])
experiment = ot.LHSExperiment(dist, size, True, False)
lhsDesign = experiment.generate()
print("Initial LHS design = ", lhsDesign)
# Generate designs
morris_experiment_lhs = otmorris.MorrisExperimentLHS(lhsDesign, r)
lhs_bound = morris_experiment_lhs.getBounds()
sample2 = morris_experiment_lhs.generate()
print("Morris experiment generated from LHS = ", sample2)

# Define model
model = ot.SymbolicFunction(["x", "y"], ["cos(x)*y + sin(y)*x + x*y -0.1"])

# Define Morris method with two designs
morrisEE1 = otmorris.Morris(sample1, model(sample1), grid_bound)
morrisEE2 = otmorris.Morris(sample2, model(sample2), lhs_bound)
print("Using level grid, E(|EE|)  = ",
      morrisEE1.getMeanAbsoluteElementaryEffects())
print("                  V(|EE|)^{1/2} = ",
      morrisEE1.getStandardDeviationElementaryEffects())

print("Using initial LHS, E(|EE|)  = ",
      morrisEE2.getMeanAbsoluteElementaryEffects())
print("                   V(|EE|)^{1/2} = ",
      morrisEE2.getStandardDeviationElementaryEffects())
Example no. 6
#!/usr/bin/env python

from __future__ import print_function
import openturns as ot
import openturns.testing as ott
import math as m


f = ot.SymbolicFunction(
    ['tau', 'alpha'], ['cos(4*tau)*cosh((tau-pi_)/alpha)/cosh(pi_/alpha)'])
alpha = 1.36
rho = ot.ParametricFunction(f, [1], [alpha])

cov = ot.StationaryFunctionalCovarianceModel([1.0], [1.0], rho)
print(cov)

tau = 0.1
c = cov([tau])[0, 0]
print("tau=", tau)
print("c=", c)
c_ref = m.cos(4*tau)*m.cosh((tau-m.pi)/alpha)/m.cosh(m.pi/alpha)
ott.assert_almost_equal(c, c_ref)

assert len(cov.getFullParameter()) == 3, "wrong full parameter"
assert len(cov.getFullParameterDescription()
           ) == 3, "wrong full parameter description"

print(cov.getFullParameter())
print(cov.getFullParameterDescription())

assert len(cov.getActiveParameter()) == 2, "wrong active parameter"
Example no. 7
sample = distribution.getSample(size)
sampleX = sample.getMarginal(range(1, dim))
sampleY = sample.getMarginal(0)

sampleZ = ot.Sample(size, 1)
for i in range(size):
    sampleZ[i, 0] = sampleY[i, 0]**2
print("LinearModelFisher pvalue=%1.2g" %
      ot.LinearModelTest.LinearModelFisher(sampleY, sampleZ).getPValue())
print("LinearModelResidualMean pvalue=%1.2g" %
      ot.LinearModelTest.LinearModelResidualMean(sampleY, sampleZ).getPValue())

# Durbin Watson
ot.RandomGenerator.SetSeed(5415)
eps = ot.Normal(0, 20)
f = ot.SymbolicFunction('x', '5+2*x+x^2-0.1*x^3')
N = 15
x = ot.Sample([[0], [1.42857], [2.85714], [4.28571], [5.71429], [7.14286],
               [8.57143], [10], [11.4286], [12.8571], [14.2857], [15.7143],
               [17.1429], [18.5714], [20]])
y = f(x) + eps.getSample(N)
linmodel = ot.LinearModelAlgorithm(x, y).getResult().getCoefficients()
dwTest = ot.LinearModelTest.LinearModelDurbinWatson(x, y)
print('Durbin Watson = ', dwTest)

selection = ot.Indices(5)
selection.fill()

selection2 = ot.Indices(1, 0)
sampleX0 = sampleX.getMarginal(0)
Example no. 8
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()

try:

    # Set Numerical precision to 4
    ot.PlatformInfo.SetNumericalPrecision(4)
    sampleSize = 40
    spatialDimension = 1

    # Create the function to estimate
    model = ot.SymbolicFunction(["x0"], ["x0"])

    X = ot.Sample(sampleSize, spatialDimension)
    for i in range(sampleSize):
        X[i, 0] = 3.0 + (8.0 * i) / sampleSize
    Y = model(X)

    # Add a small noise to data
    Y += ot.GaussianProcess(ot.AbsoluteExponential([0.1], [0.2]),
                            ot.Mesh(X)).getRealization().getValues()

    basis = ot.LinearBasisFactory(spatialDimension).build()
    # Case of a misspecified covariance model
    covarianceModel = ot.DiracCovarianceModel(spatialDimension)
    print("===================================================\n")
    algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis)
Example no. 9
#
# Let us note that the post FORM/SORM importance sampling method may be implemented thanks to the ImportanceSampling object, where the importance distribution is defined in the standard space: it then requires that the event initially defined in the physical space be transformed into the standard space.
#
# The controlled importance sampling technique is only accessible within the post analytical context.

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import math as m
ot.Log.Show(ot.Log.NONE)

# %%
# Create a model
model = ot.SymbolicFunction(['x1', 'x2'], ['x1^2+x2'])
R = ot.CorrelationMatrix(2)
R[0, 1] = -0.6
inputDist = ot.Normal([0., 0.], R)
inputDist.setDescription(['X1', 'X2'])
inputVector = ot.RandomVector(inputDist)

# Create the output random vector Y=model(X)
Y = ot.CompositeRandomVector(model, inputVector)

# Create the event Y > 4
threshold = 4.0
event = ot.ThresholdEvent(Y, ot.Greater(), threshold)

# %%
# Create a FORM algorithm
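# The original snippet stops here. What follows is a minimal sketch (not the
# original continuation) of the FORM step and of the post-analytical importance
# sampling it enables, assuming ot.Cobyla as the nearest-point solver.
optimAlgo = ot.Cobyla()
algoFORM = ot.FORM(optimAlgo, event, inputDist.getMean())
algoFORM.run()
formResult = algoFORM.getResult()

# Importance sampling centered at the design point, performed in the standard space
postIS = ot.PostAnalyticalImportanceSampling(formResult)
postIS.setMaximumOuterSampling(1000)
postIS.run()
print('Pf estimate =', postIS.getResult().getProbabilityEstimate())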
Example no. 10
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Create the mesher
mesher = ot.LevelSetMesher([50] * 2)

# Create a level set
function = ot.SymbolicFunction(['x0', 'x1'], ['10*(x0^3+x1)^2+x0^2'])
level = 0.5
levelSet = ot.LevelSet(function, level)

# Mesh the level set
mesh = mesher.build(levelSet, ot.Interval([-1.0] * 2, [1.0] * 2))

# Draw the first mesh
graph = mesh.draw()
graph.setXTitle('$x_0$')
graph.setYTitle('$x_1$')

fig = plt.figure(figsize=(10, 4))
plt.suptitle('Mesh of a level set')
graph_axis = fig.add_subplot(111)
graph_axis.set_xlim(auto=True)

View(graph, figure=fig, axes=[graph_axis], add_legend=True)
Example no. 11
import openturns as ot
from math import cos, sin

ot.TESTPREAMBLE()

ot.PlatformInfo.SetNumericalPrecision(3)
# Kriging use case
inputDimension = 2

# Learning data
levels = [8., 5.]
box = ot.Box(levels)
inputSample = box.generate()
# Scale each direction
inputSample *= 10

model = ot.SymbolicFunction(['x', 'y'], ['cos(0.5*x) + sin(y)'])
outputSample = model(inputSample)

# Validation data
sampleSize = 10
inputValidSample = ot.ComposedDistribution(
    2 * [ot.Uniform(0, 10.0)]).getSample(sampleSize)
outputValidSample = model(inputValidSample)

# 2) Definition of exponential model
# The parameters have been calibrated using TNC optimization
# and AbsoluteExponential models
covarianceModel = ot.SquaredExponential([7.63, 2.11], [7.38])

# 3) Basis definition
basis = ot.ConstantBasisFactory(inputDimension).build()
Example no. 12
# Second order model with parameters
myCovModel = ot.ExponentialModel(scale, amplitude)
print("myCovModel = ", myCovModel)

myProcess1 = ot.GaussianProcess(myCovModel, myTimeGrid)
print("myProcess1 = ", myProcess1)
print("is stationary? ", myProcess1.isStationary())
myProcess1.setSamplingMethod(ot.GaussianProcess.CHOLESKY)
print("mean over ", size, " realizations = ",
      myProcess1.getSample(size).computeMean())
myProcess1.setSamplingMethod(ot.GaussianProcess.GIBBS)
print("mean over ", size, " realizations = ",
      myProcess1.getSample(size).computeMean())

# With constant trend
trend = ot.TrendTransform(ot.SymbolicFunction("t", "4.0"), myTimeGrid)
myProcess2 = ot.GaussianProcess(trend, myCovModel, myTimeGrid)
myProcess2.setSamplingMethod(ot.GaussianProcess.GIBBS)
print("myProcess2 = ", myProcess2)
print("is stationary? ", myProcess2.isStationary())
print("mean over ", size, " realizations= ",
      myProcess2.getSample(size).computeMean())

# With varying trend
trend3 = ot.TrendTransform(ot.SymbolicFunction("t", "sin(t)"), myTimeGrid)
myProcess3 = ot.GaussianProcess(trend3, myCovModel, myTimeGrid)
print("myProcess3 = ", myProcess3)
print("is stationary? ", myProcess3.isStationary())
myProcess3.setSamplingMethod(ot.GaussianProcess.CHOLESKY)
print("mean over ", size, " realizations = ",
      myProcess3.getSample(size).computeMean())
Example no. 13
# List available algorithms
for algo in ot.Dlib.GetAlgorithmNames():
    print(algo)

# %%
# More details on dlib algorithms are available `here <http://dlib.net/optimization.html>`_ .

# %%
# Solving an unconstrained problem with conjugate gradient algorithm
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# The following example demonstrates the use of the dlib conjugate gradient algorithm to find the minimum of the `Rosenbrock function <https://en.wikipedia.org/wiki/Rosenbrock_function>`_. The optimal point can be computed analytically, and its value is [1.0, 1.0].

# %%
# Define the problem based on the Rosenbrock function
rosenbrock = ot.SymbolicFunction(['x1', 'x2'], ['(1-x1)^2+(x2-x1^2)^2'])
problem = ot.OptimizationProblem(rosenbrock)

# %%
# The optimization algorithm is instantiated from the problem to solve and the name of the algorithm.
algo = ot.Dlib(problem, 'cg')
print("Dlib algorithm, type ", algo.getAlgorithmName())
print("Maximum iteration number: ", algo.getMaximumIterationNumber())
print("Maximum evaluation number: ", algo.getMaximumEvaluationNumber())
print("Maximum absolute error: ", algo.getMaximumAbsoluteError())
print("Maximum relative error: ", algo.getMaximumRelativeError())
print("Maximum residual error: ", algo.getMaximumResidualError())
print("Maximum constraint error: ", algo.getMaximumConstraintError())

# %%
# When using conjugate gradient, BFGS/LBFGS, Newton, least squares or trust region methods, optimization proceeds until one of the stopping criteria shown above (error tolerances, maximum iteration and evaluation numbers) is met.
Example no. 14
# More complex functions can be created thanks to the `CompositeDistribution` class, but it requires an `f` function. In the following example, we create the distribution of a random variable equal to the exponential of a gaussian variable. Obviously, this is equivalent to the `LogNormal` distribution but this shows how such a distribution could be created.

# %%
# First, we create a distribution.

# %%
N = ot.Normal(0.0, 1.0)
N.setDescription(["Normal"])

# %%
# Secondly, we create a function.

# %%
f = ot.SymbolicFunction(['x'], ['exp(x)'])
f.setDescription(["X", "Exp(X)"])

# %%
# Finally, we create the distribution equal to the exponential of the gaussian random variable.

# %%
dist = ot.CompositeDistribution(f, N)

# %%
graph = dist.drawPDF()
graph.setTitle("Exponential of a gaussian random variable")
view = viewer.View(graph)

# %%
# In order to check the previous distribution, we compare it with the LogNormal distribution.
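
# %%
# The original example is cut here. A possible comparison (an illustrative
# sketch, assuming the LogNormal(muLog, sigmaLog, gamma) parametrization) is:
ref = ot.LogNormal(0.0, 1.0, 0.0)
graph = dist.drawPDF()
graph.add(ref.drawPDF())
graph.setLegends(["CompositeDistribution", "LogNormal"])
graph.setTitle("Exponential of a Gaussian vs LogNormal")
view = viewer.View(graph)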
Example no. 15
    graph.setLegends(['Posterior', 'Prior'])
    grid.setGraph(0, parameter_index, graph)
_ = View(grid)

# %%
# Define an improper prior
# --------------------------

# %%
# Now, define an improper prior:
#
# .. math::
#   \pi(\beta, \alpha) \propto \frac{1}{\beta}.
#

logpdf = ot.SymbolicFunction(['beta', 'alpha'], ['-log(beta)'])
support = ot.Interval([0] * 2, [1] * 2)
support.setFiniteUpperBound([False] * 2)

# %%
# Sample from the posterior distribution

sampler2 = ot.RandomWalkMetropolisHastings(logpdf, support, initialState,
                                           proposal)
sampler2.setLikelihood(conditional, x)
sample2 = sampler2.getSample(1000)
print("Acceptance rate: %s" % (sampler2.getAcceptanceRate()))

# %%
# Plot only the posterior marginals, since the improper prior cannot be drawn meaningfully.
#
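
# A minimal sketch (not the original code) of such marginal plots, using kernel
# smoothing on each component of the posterior sample:
grid2 = ot.GridLayout(1, 2)
for parameter_index in range(2):
    marginal = sample2.getMarginal(parameter_index)
    graph = ot.KernelSmoothing().build(marginal).drawPDF()
    graph.setLegends(['Posterior'])
    grid2.setGraph(0, parameter_index, graph)
_ = View(grid2)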
Example no. 16
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.AliMikhailHaqCopula().__class__.__name__ == 'EmpiricalBernsteinCopula':
    sample = ot.Dirichlet([1.0, 2.0, 3.0]).getSample(100)
    copula = ot.EmpiricalBernsteinCopula(sample, 4)
elif ot.AliMikhailHaqCopula().__class__.__name__ == 'ExtremeValueCopula':
    copula = ot.ExtremeValueCopula(ot.SymbolicFunction("t", "t^3/2-t/2+1"))
elif ot.AliMikhailHaqCopula().__class__.__name__ == 'MaximumEntropyOrderStatisticsCopula':
    marginals = [ot.Beta(1.5, 3.2, 0.0, 1.0),  ot.Beta(2.0, 4.3, 0.5, 1.2)]
    copula = ot.MaximumEntropyOrderStatisticsCopula(marginals)
elif ot.AliMikhailHaqCopula().__class__.__name__ == 'NormalCopula':
    R = ot.CorrelationMatrix(2)
    R[1, 0] = 0.8
    copula = ot.NormalCopula(R)
elif ot.AliMikhailHaqCopula().__class__.__name__ == 'SklarCopula':
    student = ot.Student(3.0, [1.0]*2, [3.0]*2, ot.CorrelationMatrix(2))
    copula =  ot.SklarCopula(student)
else:
    copula = ot.AliMikhailHaqCopula()
if copula.getDimension() == 1:
    copula = ot.AliMikhailHaqCopula(2)
copula.setDescription(['$u_1$', '$u_2$'])
pdf_graph = copula.drawPDF()
cdf_graph = copula.drawCDF()
fig = plt.figure(figsize=(10, 4))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False, square_axes=True)
View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False, square_axes=True)
title = str(copula)[:100].split('\n')[0]
Example no. 17
distX = ot.ComposedDistribution([distX1, distX2])

# %%
# We can draw the bidimensional PDF of the distribution `distX` over :math:`[0,10] \times [-10,10]`:
ot.ResourceMap.SetAsUnsignedInteger("Contour-DefaultLevelsNumber", 8)
graphPDF = distX.drawPDF([0, -10], [10, 10])
graphPDF.setTitle(r'2D-PDF of the input variables $(X_1, X_2)$')
graphPDF.setXTitle(r'$x_1$')
graphPDF.setYTitle(r'$x_2$')
graphPDF.setLegendPosition("bottomright")
view = otv.View(graphPDF)

# %%
# We consider the model :math:`f : (x_1, x_2) \mapsto x_1 x_2` which maps the random input vector :math:`X` to the output variable :math:`Y=f(X) \in \mathbb{R}`. We also draw the isolines of the model `f`.
#
f = ot.SymbolicFunction(['x1', 'x2'], ['x1 * x2'])
graphModel = f.draw([0.0, -10.0], [10.0, 10.0])
graphModel.setXTitle(r'$x_1$')
graphModel.setYTitle(r'$x_2$')
graphModel.setTitle(r'Isolines of the model : $Y = f(X)$')
view = otv.View(graphModel)

# %%
# We want to estimate the probability :math:`P_f` that the output variable exceeds a prescribed threshold :math:`s=10`: this is the failure event. This probability is simply expressed as an integral:
#
# .. math::
#
#    P_f = \int_{\mathcal{D}} f_{X_1,X_2}(x) \, dx
#
# where :math:`\mathcal{D} = \{ (x_1, x_2) \in [0,+\infty[ \times \mathbb{R} / x_1 x_2 \geq s \}` is the failure domain.
# In the general case the probability density function :math:`f_{X_1,X_2}` and the domain of integration :math:`\mathcal{D}` are difficult to handle.
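
# %%
# The example is truncated at this point. As an illustration only (not the
# original continuation), a crude Monte Carlo estimate of :math:`P_f` could be
# obtained as follows:
s = 10.0
outputRV = ot.CompositeRandomVector(f, ot.RandomVector(distX))
failureEvent = ot.ThresholdEvent(outputRV, ot.GreaterOrEqual(), s)
algoMC = ot.ProbabilitySimulationAlgorithm(failureEvent, ot.MonteCarloExperiment())
algoMC.setMaximumOuterSampling(100000)
algoMC.setMaximumCoefficientOfVariation(0.05)
algoMC.run()
print("Monte Carlo estimate of Pf:", algoMC.getResult().getProbabilityEstimate())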
Example no. 18
    ot.PlatformInfo.SetNumericalPrecision(3)

    print("================")
    print("Test using NLOpt")
    print("================")
    # Calibration of default optimizer
    ot.ResourceMap.SetAsScalar(
        'GeneralLinearModelAlgorithm-DefaultOptimizationLowerBound', 1.0e-5)
    ot.ResourceMap.SetAsScalar(
        'GeneralLinearModelAlgorithm-DefaultOptimizationUpperBound', 100)
    # Data & estimation
    inputDimension = 1
    X = ot.Normal().getSample(100)
    X = X.sortAccordingToAComponent(0)
    covarianceModel = ot.SquaredExponential([1.0], [1.0])
    model = ot.SymbolicFunction(["x"], ["x - 0.6 * cos(x/3)"])
    Y = model(X)
    basis = ot.QuadraticBasisFactory(inputDimension).build()
    algo = ot.GeneralLinearModelAlgorithm(X, Y, covarianceModel, basis, True)
    algo.setOptimizationAlgorithm(ot.NLopt('LN_NELDERMEAD'))
    algo.run()

    # perform an evaluation
    result = algo.getResult()
    metaModel = result.getMetaModel()
    conditionalCovariance = result.getCovarianceModel()
    residual = metaModel(X) - Y
    assert_almost_equal(residual.computeCenteredMoment(2), [1.06e-05], 1e-5,
                        1e-5)
    assert_almost_equal(conditionalCovariance.getParameter(),
                        [0.702138, 0.00137], 5e-3, 1e-3)
Example no. 19
ot.RandomGenerator.SetSeed(0)

# Definition of the marginals
X1 = ot.Uniform(-m.pi, m.pi)
X2 = ot.Uniform(-m.pi, m.pi)
X3 = ot.Uniform(-m.pi, m.pi)

# 3d distribution made with independent marginals
distX = ot.ComposedDistribution([X1, X2, X3])

# Get a sample of it
size = 100
X = distX.getSample(size)

# The Ishigami model
modelIshigami = ot.SymbolicFunction(
    ["X1", "X2", "X3"], ["sin(X1) + 5.0 * (sin(X2))^2 + 0.1 * X3^4 * sin(X1)"])

# Apply model: Y = m(X)
Y = modelIshigami(X)

# We define the covariance models for the HSIC indices.
# For the input, we consider a SquaredExponential covariance model.
covarianceModelCollection = ot.CovarianceModelCollection()

# Input sample
for i in range(3):
    Xi = X.getMarginal(i)
    Cov = ot.SquaredExponential(1)
    Cov.setScale(Xi.computeStandardDeviation())
    covarianceModelCollection.add(Cov)
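
# The snippet stops before the output covariance model and the HSIC estimator;
# a possible continuation (an illustrative sketch, not the original code) is:
covarianceModelY = ot.SquaredExponential(1)
covarianceModelY.setScale(Y.computeStandardDeviation())
covarianceModelCollection.add(covarianceModelY)

# Global HSIC indices with the unbiased U-statistic estimator
estimatorType = ot.HSICUStat()
globHSIC = ot.HSICEstimatorGlobalSensitivity(
    covarianceModelCollection, X, Y, estimatorType)
print("R2-HSIC indices:", globHSIC.getR2HSICIndices())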
"""
Create a composed function
==========================
"""
# %%
# In this example we are going to create a composed function :math:`f\circ g`
#

# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import math as m
ot.Log.Show(ot.Log.NONE)

# %%
# assume f, g functions
g = ot.SymbolicFunction(['x1', 'x2'], ['x1 + x2', '3 * x1 * x2'])
f = ot.SymbolicFunction(['x1', 'x2'], ['2 * x1 - x2'])

# %%
# create the composed function
function = ot.ComposedFunction(f, g)

# %%
# evaluate the function
x = [3.0, 4.0]
y = function(x)
print('x=', x, 'y=', y)
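
# %%
# Sanity check (added here, not in the original example): the composition can be
# evaluated by hand, g(3, 4) = [3 + 4, 3 * 3 * 4] = [7, 36] and then
# f(7, 36) = 2 * 7 - 36 = -22, which matches the printed value.
assert abs(y[0] - (-22.0)) < 1e-12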
Example no. 21
# %%
# Definition of the model
# -----------------------

# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# We define the symbolic function which evaluates the output Y depending on the inputs E, F, L and I.

# %%
model = ot.SymbolicFunction(["E", "F", "L", "I"], ["F*L^3/(3*E*I)"])

# %%
# Then we define the distribution of the input random vector. 

# %%
# Young's modulus E
E = ot.Beta(0.9, 2.27, 2.5e7, 5.0e7) # in N/m^2
E.setDescription("E")
# Load F
F = ot.LogNormal() # in N
F.setParameter(ot.LogNormalMuSigma()([30.e3, 9e3, 15.e3]))
F.setDescription("F")
# Length L
L = ot.Uniform(250., 260.) # in cm
L.setDescription("L")
Example no. 22
ot.Log.Show(ot.Log.NONE)

# %%
# Create the joint distribution of the parameters.

# %%
distribution_R = ot.LogNormalMuSigma(300.0, 30.0, 0.0).getDistribution()
distribution_F = ot.Normal(75e3, 5e3)
marginals = [distribution_R, distribution_F]
distribution = ot.ComposedDistribution(marginals)

# %%
# Create the model.

# %%
model = ot.SymbolicFunction(['R', 'F'], ['R-F/(pi_*100.0)'])

# %%
modelCallNumberBefore = model.getEvaluationCallsNumber()
modelGradientCallNumberBefore = model.getGradientCallsNumber()
modelHessianCallNumberBefore = model.getHessianCallsNumber()

# %%
# To have access to the input and output samples after the simulation, activate the History mechanism.

# %%
model = ot.MemoizeFunction(model)

# %%
# Remove all the values stored in the history mechanism.
# Caution: this is done regardless of the status of the History mechanism.
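
# %%
# The snippet stops here; a brief sketch (not the original continuation) of the
# MemoizeFunction history API:
model.clearHistory()
inputSample = distribution.getSample(5)
outputSample = model(inputSample)
print("input history:", model.getInputHistory())
print("output history:", model.getOutputHistory())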
Example no. 23
# %%
graph = ot.Graph("n=%d" % (n), "X1", "X2", True, '')
cloud = ot.Cloud(sample)
cloud.setColor("aquamarine1")
cloud.setPointStyle("fcircle")
graph.add(cloud)
view = viewer.View(graph)

# %%
# Configure the style of points and the thickness of a curve
# ----------------------------------------------------------
#
# Assume that we want to plot the sine curve from -2 to 2. The simplest way is to use the `draw` method of the function.

# %%
g = ot.SymbolicFunction("x", "sin(x)")

# %%
graph = g.draw(-2, 2)
view = viewer.View(graph)

# %%
# I would rather get a dashed curve: let us search for the available line styles.

# %%
ot.Drawable.GetValidLineStyles()

# %%
# In order to use the `Curve` class, it will be easier if we have a method to generate a `Sample` containing points regularly spaced in an interval.

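# %%
# The example is cut here; a small helper of that kind (an illustrative sketch,
# not necessarily the original one) and a dashed Curve could look like this:
def linearSample(xmin, xmax, npoints):
    """Return a Sample of npoints regularly spaced values in [xmin, xmax]."""
    step = (xmax - xmin) / (npoints - 1)
    return ot.RegularGrid(xmin, step, npoints).getVertices()


x = linearSample(-2.0, 2.0, 50)
y = g(x)
curve = ot.Curve(x, y)
curve.setLineStyle("dashed")
graph = ot.Graph("Sine curve", "x", "y", True, '')
graph.add(curve)
view = viewer.View(graph)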
Example no. 24
def progress(percent):
    sys.stderr.write('-- progress=' + str(percent) + '%\n')


def stop():
    sys.stderr.write('-- stop?\n')
    return False


n = 3
m = 10

x = [[0.5 + i] for i in range(m)]

model = ot.SymbolicFunction(['a', 'b', 'c', 'x'], ['a + b * exp(c * x)'])
p_ref = [2.8, 1.2, 0.5]  # a, b, c
modelx = ot.ParametricFunction(model, [0, 1, 2], p_ref)
y = modelx(x)


def residualFunction_py(p):
    modelx = ot.ParametricFunction(model, [0, 1, 2], p)
    return [modelx(x[i])[0] - y[i, 0] for i in range(m)]


residualFunction = ot.PythonFunction(n, m, residualFunction_py)

bounds = ot.Interval([0, 0, 0], [2.5, 8.0, 19])

for bound in [True, False]:
Example no. 25
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

eps = 0.2
# Instance creation
myFunc = ot.SymbolicFunction(
    ['x1', 'x2'], ['x1*sin(x2)', 'cos(x1+x2)', '(x2+1)*exp(x1-2*x2)'])
center = ot.Point(myFunc.getInputDimension())
for i in range(center.getDimension()):
    center[i] = 1.0 + i
myTaylor = ot.LinearTaylor(center, myFunc)
myTaylor.run()
responseSurface = ot.Function(myTaylor.getResponseSurface())
print("myTaylor=", repr(myTaylor))
print("responseSurface=", repr(responseSurface))
print("myFunc(", repr(center), ")=", repr(myFunc(center)))
print("responseSurface(", repr(center), ")=", repr(responseSurface(center)))
inPoint = ot.Point(center)
inPoint[0] += eps
inPoint[1] -= eps / 2
print("myFunc(", repr(inPoint), ")=", repr(myFunc(inPoint)))
print("responseSurface(", repr(inPoint), ")=", repr(responseSurface(inPoint)))
Example no. 26
elementaryFunctions.append('max')
elementaryFunctions.append('sum')
elementaryFunctions.append('avg')
elementaryFunctions.append('floor')
elementaryFunctions.append('ceil')
elementaryFunctions.append('trunc')
elementaryFunctions.append('round')

# Check the creation of the elementary functions
for func in elementaryFunctions:
    x = [0.4]
    # acosh is only defined for x >= 1
    if func == 'acosh':
        x[0] = 1.4

    f = ot.SymbolicFunction(['x'], ['2.0*' + func + '(x)'])
    print('f=', f)
    print('f(', x[0], ')=%.4e' % f(x)[0])
    try:
        df = f.gradient(x)[0, 0]
    except:
        pass
    else:
        f.setGradient(
            ot.CenteredFiniteDifferenceGradient(
                ot.ResourceMap.GetAsScalar(
                    'CenteredFiniteDifferenceGradient-DefaultEpsilon'),
                f.getEvaluation()))
        df2 = f.gradient(x)[0, 0]
        print('df(', x[0], ')=%.4e' % df, 'df (FD)=%.4e' % df2)
        if abs(df) > 1e-5:
Example no. 27
#
# In this section, we generate noisy observations from the sine function.

# %%
import openturns as ot
import pylab as pl
import openturns.viewer as otv

# %%
ot.RandomGenerator.SetSeed(0)

# %%
# We define the function that we are going to approximate.

# %%
g = ot.SymbolicFunction(["x"], ["sin(2*pi_*x)"])

# %%
graph = ot.Graph("Polynomial curve fitting", "x", "y", True, "topright")
# The "unknown" function
curve = g.draw(0, 1)
curve.setColors(["green"])
curve.setLegends(['"Unknown" function'])
graph.add(curve)
view = otv.View(graph)

# %%
# This seems a nice, smooth function to approximate with polynomials.


# %%
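# The example is truncated here. As a sketch of the announced step (noisy
# observations of the sine function), assuming a uniform design and additive
# Gaussian noise:
n_train = 100
x_train = ot.Uniform(0.0, 1.0).getSample(n_train)
y_train = g(x_train) + ot.Normal(0.0, 0.5).getSample(n_train)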
Example no. 28
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot
import math as m

ot.TESTPREAMBLE()

f = ot.SymbolicFunction(['t', 'y0', 'y1'], ['t - y0', 'y1 + t^2'])
phi = ot.VertexValueFunction(f)
solver = ot.RungeKutta(phi)
print('ODE solver=', solver)
initialState = [1.0, -1.0]
nt = 100
timeGrid = [(i**2.0) / (nt - 1.0)**2.0 for i in range(nt)]
print('time grid=', ot.Point(timeGrid))
result = solver.solve(initialState, timeGrid)
print('result=', result)
print('last value=', result[nt - 1])
t = timeGrid[nt - 1]
ref = ot.Point(2)
ref[0] = -1.0 + t + 2.0 * m.exp(-t)
ref[1] = -2.0 + -2.0 * t - t * t + m.exp(t)
print('ref. value=', ref)
grid = ot.RegularGrid(0.0, 0.01, nt)
result = solver.solve(initialState, grid)
print('result=', result)
print('last value=', result[nt - 1])
t = grid.getValue(nt - 1)
ref[0] = -1.0 + t + 2.0 * m.exp(-t)
ref[1] = -2.0 + -2.0 * t - t * t + m.exp(t)
Example no. 29
# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import math as m
ot.Log.Show(ot.Log.NONE)

# %%
# Create a mesh
N = 100
mesh = ot.RegularGrid(0.0, 1.0, N)

# %%
# Create the function that acts on the values of the mesh
h = ot.SymbolicFunction(['t', 'x1', 'x2'], ['t+x1^2+x2^2'])

# %%
# Create the field function
f = ot.VertexValueFunction(h, mesh)

# %%
# Evaluate f
inF = ot.Normal(2).getSample(N)
outF = f(inF)

# print input/output at first 10 mesh nodes
txy = mesh.getVertices()
txy.stack(inF)
txy.stack(outF)
txy[:10]
"""

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import os
import pickle
ot.Log.Show(ot.Log.NONE)

# %%
# create objects to save
distribution = ot.Normal(4.0, 3.0)
function = ot.SymbolicFunction(['x1', 'x2'], ['x1 + x2'])

# %%
# **With the pickle module**
#
# The objects are retrieved in the same order they are stored.

# %%
# save objects
with open('study.pkl', 'wb') as f:
    pickle.dump(distribution, f)
    pickle.dump(function, f)

# %%
# load saved objects
with open('study.pkl', 'rb') as f:
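    # The snippet ends inside this block; by symmetry with the save step (a
    # sketch, not the verbatim original), objects come back in the order they
    # were dumped:
    loaded_distribution = pickle.load(f)
    loaded_function = pickle.load(f)
print(loaded_distribution)
print(loaded_function)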