# %%
# Example #1
    return graph_modes, graph_ev


# %%
# Karhunen-Loeve decomposition of the input process
print("Compute the decomposition of the input process")
threshold = 0.0001
algo_X = ot.KarhunenLoeveP1Algorithm(mesh, process_X.getCovarianceModel(),
                                     threshold)
algo_X.run()
result_X = algo_X.getResult()
phi_X = result_X.getScaledModesAsProcessSample()
lambda_X = result_X.getEigenvalues()

graph_modes_X, graph_ev_X = drawKL(phi_X, lambda_X, mesh, "X")
view = viewer.View(graph_modes_X)

# %%
# Input database generation
print("Sample the input process")
size = 1000
sample_X = process_X.getSample(size)
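
# %%
# A possible follow-up (a sketch, assuming the decomposition `result_X`
# computed above): project the sampled input fields onto the
# Karhunen-Loeve modes to obtain the coefficients of each realization.
xi_X = result_X.project(sample_X)
print("Coefficients dimension:", xi_X.getDimension())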


# %%
# The field model: convolution over a 1-d mesh
class ConvolutionP1(ot.OpenTURNSPythonFieldFunction):
    def __init__(self, p, mesh):
        # 1 = input dimension, the dimension of the input field
        # 1 = output dimension, the dimension of the output field
        # 1 = mesh dimension
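        # The rest of the constructor is truncated in this excerpt; a hedged
        # sketch: forward the meshes/dimensions and store the kernel p.
        super(ConvolutionP1, self).__init__(mesh, 1, mesh, 1)
        self.p_ = ot.Point(p)

    def _exec(self, X):
        # Illustrative discrete convolution of the field values with the
        # kernel p (a sketch, not the original example's exact quadrature).
        values = [x[0] for x in X]
        result = ot.Sample(len(values), 1)
        for i in range(len(values)):
            acc = 0.0
            for j in range(len(self.p_)):
                if 0 <= i - j < len(values):
                    acc += self.p_[j] * values[i - j]
            result[i, 0] = acc
        return result
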
# %%
# The cantilever beam model: joint distribution of the input vector.
X = ot.ComposedDistribution([dist_E, dist_F, dist_L, dist_I])

g = ot.SymbolicFunction(["E", "F", "L", "I"], ["F * L^3 / (3 * E * I)"])
g.setOutputDescription(["Y (cm)"])

# To make the best use of the simulations, we equip
# the function with a history (memoize) mechanism.
g = ot.MemoizeFunction(g)


# Finally, we define the output random vector.

XRV = ot.RandomVector(X)
Y = ot.CompositeRandomVector(g, XRV)
Y.setDescription(["Y (cm)"])
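
# %%
# As a quick sanity check (a sketch, not part of the original script), the
# output random vector can be sampled directly:
print(Y.getSample(5).computeMean())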

# ## Linear regression with LinearLeastSquares

n = 1000
sampleX = X.getSample(n)
sampleY = g(sampleX)

myLeastSquares = ot.LinearLeastSquares(sampleX, sampleY)
myLeastSquares.run()
responseSurface = myLeastSquares.getMetaModel()

val = ot.MetaModelValidation(sampleX, sampleY, responseSurface)

graph = val.drawValidation()
view = otv.View(graph)
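
# %%
# A hedged sketch: quantify the fit with the predictivity factor (renamed
# `computeR2Score` in recent OpenTURNS versions).
Q2 = val.computePredictivityFactor()
print("Q2 =", Q2)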
# %%
# Example #3
distribution = ot.TruncatedNormal(mu, sigma, a, b)
sample = distribution.getSample(11)

# %%
# In order to see the distribution and the sample, we draw the PDF of the distribution and generate a cloud whose X coordinates are the sample values.

# %%
graph = distribution.drawPDF()
graph.setLegends(["TruncatedNormal"])
graph.setColors(["red"])
zeros = ot.Sample(sample.getSize(), 1)
cloud = ot.Cloud(sample, zeros)
cloud.setLegend("Sample")
graph.add(cloud)
graph.setLegendPosition("topleft")
view = viewer.View(graph)

# %%
# The following function computes the log-likelihood of a `TruncatedNormal` whose mean and standard deviation are given as input arguments. The lower and upper bounds of the distribution are computed as the minimum and maximum of the sample.

# %%
# Define the log-likelihood function
# ----------------------------------

# %%
# The following function evaluates the log-likelihood function given a point :math:`X=(\mu,\sigma)`. In order to evaluate the likelihood on the sample, we use a trick: we evaluate the `computeMean` method on the log-PDF sample, then multiply by the sample size. This is much faster than using a `for` loop.


# %%
def logLikelihood(X):
    '''
    Evaluate the log-likelihood of a TruncatedNormal whose mean and standard
    deviation are given by X; the bounds are computed from the sample.
    '''
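    # The body is truncated in this excerpt; a minimal sketch, assuming the
    # global `sample` defined above and a small margin added to the bounds.
    mu, sigma = X[0], X[1]
    a = sample.getMin()[0] - 1.0e-3
    b = sample.getMax()[0] + 1.0e-3
    samplelogpdf = ot.TruncatedNormal(mu, sigma, a, b).computeLogPDF(sample)
    return samplelogpdf.computeMean()[0] * sample.getSize()
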
    contour.setDrawLabels(True)
    # We have to copy the drawable because a Python list stores only pointers
    drawables.append(ot.Drawable(contour))

graphFineTune = ot.Graph("The exact Branin model", r"$x_1$", r"$x_2$", True,
                         '')
graphFineTune.setDrawables(drawables)  # Replace the drawables
graphFineTune.setLegendPosition("")  # Remove the legend
graphFineTune.setColors(palette)  # Add colors

# %%
# We also represent the three minima of the Branin function with orange diamonds:
sample1 = ot.Sample([bm.xexact1, bm.xexact2, bm.xexact3])
cloud1 = ot.Cloud(sample1, 'orange', 'diamond', 'First Cloud')
graphFineTune.add(cloud1)
view = otv.View(graphFineTune)

# %%
# The values of the exact model at these points are:
print(bm.objectiveFunction(sample1))

# %%
# The Branin function has a global minimum attained at three different points. We shall build a
# metamodel of this function that presents the same behaviour.

# %%
# Definition of the kriging
# -------------------------
#
# We use the :class:`~openturns.KrigingAlgorithm` class to perform the kriging analysis.
# We first generate a design of experiments with LHS and store the input training points in `xdata`.
# %%
# Example #5
# We consider a simple Monte-Carlo sampling as a design of experiments. We therefore generate an input sample using the `getSample` method of the distribution, then evaluate the output using the `model` function.

# %%
sampleSize_train = 10000
X_train = myDistribution.getSample(sampleSize_train)
Y_train = model(X_train)

# %%
# The following figure presents the distribution of the vertical deviations Y on the training sample. We observe that large deviations occur less often.

# %%
histo = ot.HistogramFactory().build(Y_train).drawPDF()
histo.setXTitle("Vertical deviation (cm)")
histo.setTitle("Distribution of the vertical deviation")
histo.setLegends([""])
view = viewer.View(histo)

# %%
# Create the metamodel
# --------------------

# %%
# We recall that the metamodel is written as:
#
# .. math::
#    f(x_1, \dots, x_d) = \sum_{i=1}^m \prod_{j=1}^d v_j^{(i)} (x_j), \qquad \forall x \in \mathbb{R}^d
#
# with:
#
# .. math::
#     v_j^{(i)} (x_j) = \sum_{k=1}^{n_j} \beta_{j,k}^{(i)} \phi_{j,k} (x_j)
    y_obs[i,0] = model(thetaTrue)[0] + noiseSample[i,0]
y_obs

# %%
# - Draw the model vs the observations.

# %%
functionalModel = ot.ParametricFunction(fullModel, [1, 2, 3], thetaTrue)
graphModel = functionalModel.getMarginal(0).draw(xmin, xmax)
observations = ot.Cloud(x_obs, y_obs)
observations.setColor("red")
graphModel.add(observations)
graphModel.setLegends(["Model","Observations"])
graphModel.setLegendPosition("topleft")
view = viewer.View(graphModel)

# %%
# - Define the distribution of observations :math:`\underline{y} | \underline{z}` conditional on model predictions
#
# Note that its parameter dimension is the one of :math:`\underline{z}`, so the model must be adjusted accordingly

# %%
conditional = ot.Normal()
conditional
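
# %%
# A quick check (a sketch, not in the original excerpt): the conditional
# distribution exposes its parameters (mean and standard deviation), which
# will be filled by the model predictions.
print(conditional.getParameterDescription(), conditional.getParameter())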

# %%
# - Define the mean :math:`\mu_\theta`, the covariance matrix :math:`\Sigma_\theta`, then the prior distribution :math:`\pi(\underline{\theta})` of the parameter :math:`\underline{\theta}`. 

# %%
thetaPriorMean = [-3.0, 4.0, 1.0]
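
# %%
# The original excerpt stops here; a minimal sketch of the remaining steps
# described above (the covariance magnitude is an assumption):
sigma0 = ot.CovarianceMatrix(3)
for i in range(3):
    sigma0[i, i] = 100.0
prior = ot.Normal(thetaPriorMean, sigma0)
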
    print("alpha = ", alpha)
    graph_cp = copy.copy(graph)
    miic_cbn = MIIC_learning(data, alpha)
    miic_sample = generate_CBN_sample("MIIC_{}".format(alpha), miic_cbn,
                                      sample_size)
    miic_pressure = generate_pressure("MIIC_{}".format(alpha), miic_sample)
    draw_pressure(graph_cp, miic_pressure)
    miic_q1 = miic_pressure.computeQuantile(0.01)
    graph_cp.setColors(["black", "red", "blue", "green", "orange"])
    graph_cp.setLegends([
        "Ref ({:.2e})".format(ref_q1[0]), "KS ({:.2e})".format(ks_q1[0]),
        "Bern ({:.2e})".format(ksb_q1[0]), "Gauss ({:.2e})".format(ksg_q1[0]),
        "MIIC_{} ({:.2e})".format(alpha, miic_q1[0])
    ])
    # graph_cp.setLegendFontSize(0.1)
    view = otv.View(graph_cp)
    view.save("new_stationary_pressure_pdf_MIIC_{}.png".format(alpha))
    view.close()

for alpha in [0.01, 0.05, 0.1]:
    print("alpha = ", alpha)
    graph_cp = copy.copy(graph)
    cpc_cbn = CPC_learning(data, 5, alpha)
    cpc_sample = generate_CBN_sample("CPC_{}".format(alpha), cpc_cbn,
                                     sample_size)
    cpc_pressure = generate_pressure("CPC_{}".format(alpha), cpc_sample)
    draw_pressure(graph_cp, cpc_pressure)
    cpc_q1 = cpc_pressure.computeQuantile(0.01)
    graph_cp.setColors(["black", "red", "blue", "green", "orange"])
    graph_cp.setLegends([
        "Ref ({:.2e})".format(ref_q1[0]), "KS ({:.2e})".format(ks_q1[0]),
# %%
# Example #8
Q99_MB1000 = 21.0245
Q99_MS1000 = 25.532802858033193
Q99_EB1000 = 19.5955
Q99_ES1000 = 19.992056136634449
Beta_1000 = ot.Beta(a_1000, b_1000, xmin_1000, xmax_1000)
Beta_1000_PDF = Beta_1000.drawPDF(xmin, xmax, 251)
Beta_1000_draw = Beta_1000_PDF.getDrawable(0)
Beta_PDF.add(Beta_1000_draw)


Beta_PDF.setColors(['red', 'green', 'blue', 'yellow', 'black', 'grey',
                    'purple', 'orange', 'cyan', 'darkred', 'darkblue',
                    'darkcyan'])
Beta_PDF.setLegends(['MC: 2000 realizations']
                    + ['PC metamodel: {} realizations'.format(n)
                       for n in [100, 150, 200, 300, 400, 500, 600,
                                 700, 800, 900, 1000]])
Beta_PDF.setTitle('Convergence of the Beta distribution')
Beta_PDF.setXTitle('Yf [%]')
Beta_PDF.setYTitle('Probability density')
viewer.View(Beta_PDF)

plt.figure(1)
plt.xlim( -10, 60)
plt.ylim( -0.01, 0.1)
plt.savefig('Loi_Beta.png')

nb = [ 100, 150, 200, 300, 400,  500, 600, 700, 800, 900, 1000]
y_alpha = [a_100, a_150, a_200, a_300, a_400, a_500, a_600, a_700, a_800, a_900, a_1000]
y_beta = [b_100, b_150, b_200, b_300, b_400, b_500, b_600, b_700, b_800, b_900, b_1000]
err = [0.0080976, 0.00537392, 0.00318212, 0.00161066, 0.00110808, 0.000693121, 0.000405553, 0.000343159, 0.000327746, 0.000262136, 0.000234597]

Q90_MB = [Q90_MB100, Q90_MB150, Q90_MB200, Q90_MB300, Q90_MB400, Q90_MB500, Q90_MB600, Q90_MB700, Q90_MB800, Q90_MB900, Q90_MB1000]
Q90_MS = [Q90_MS100, Q90_MS150, Q90_MS200, Q90_MS300, Q90_MS400, Q90_MS500, Q90_MS600, Q90_MS700, Q90_MS800, Q90_MS900, Q90_MS1000]
Q90_EB = [Q90_EB100, Q90_EB150, Q90_EB200, Q90_EB300, Q90_EB400, Q90_EB500, Q90_EB600, Q90_EB700, Q90_EB800, Q90_EB900, Q90_EB1000]
Q90_ES = [Q90_ES100, Q90_ES150, Q90_ES200, Q90_ES300, Q90_ES400, Q90_ES500, Q90_ES600, Q90_ES700, Q90_ES800, Q90_ES900, Q90_ES1000]
# %%
lowerbound = [-4.4] * dim
upperbound = [5.12] * dim
bounds = ot.Interval(lowerbound, upperbound)

# %%
# Plot the iso-values of the objective function
# ---------------------------------------------

# %%
graph = rastrigin.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Rastrigin function")
view = viewer.View(graph,
                   legend_kw={
                       'bbox_to_anchor': (1, 1),
                       'loc': "upper left"
                   })
view.getFigure().tight_layout()

# %%
# We see that the Rastrigin function has several local minima. However, there is only one global minimum at :math:`\vect{x}^\star=(0, 0)`.

# %%
# Create the problem and set the optimization algorithm
# -----------------------------------------------------

# %%
problem = ot.OptimizationProblem(rastrigin)
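
# %%
# The excerpt stops before the solver is chosen; a minimal sketch of a
# possible next step (the solver and starting point below are assumptions,
# not the original's):
problem.setBounds(bounds)
solver = ot.TNC(problem)
solver.setStartingPoint([1.0] * dim)
solver.run()
print(solver.getResult().getOptimalPoint())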

# %%
sample = ot.Sample(sample1)
sample.add(sample2)
sample.getSize()

# %%
# In order to see the result, we build a kernel smoothing approximation of the sample. To keep it simple, we use the default bandwidth selection rule.

# %%
factory = ot.KernelSmoothing()
fit = factory.build(sample)
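
# %%
# As a side check (a sketch), we can inspect the bandwidth selected by the
# default rule:
print(factory.getBandwidth())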

# %%
graph = fit.drawPDF()
view = otv.View(graph)

# %%
# We see that the distribution of the merged sample has two modes. However, these modes are not clearly distinct. To distinguish them, we could increase the sample size. However, it might be interesting to see if the bandwidth selection rule can be better chosen: this is the purpose of the next section.

# %%
# Simulation based on a mixture
# -----------------------------
#
# Since the distribution that we approximate is a mixture, it is more convenient to create it from the `Mixture` class. It takes as input arguments a list of distributions and a list of weights.

# %%
distribution = ot.Mixture([distribution1, distribution2], [w1, w2])

# %%
# Then we generate a sample from it.
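mixtureSample = distribution.getSample(1000)  # a sketch; the sample size is an assumption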
# %%
# We draw the trajectories:
graphTraj = sample.drawMarginal(0)
graphTraj.add(sample2.drawMarginal(0))
graphTraj.add(sample3.drawMarginal(0))
graphTraj.setXTitle(r"$x$")
graphTraj.setYTitle(r"$GP_{\nu}(x)$")
graphTraj.setTitle("Random realization from the covariance model")
graphTraj.setColors(["green", "orange", "blue"])
graphTraj.setLegends([r"$p = 0.25$", r"$p = 1$", r"$p = 2$"])

# %%
# We present each covariance model and the corresponding trajectory side by side.
fig = pl.figure(figsize=(12, 4))
ax_pdf = fig.add_subplot(1, 2, 1)
_ = otv.View(graphModel, figure=fig, axes=[ax_pdf])
ax_cdf = fig.add_subplot(1, 2, 2)
_ = otv.View(graphTraj, figure=fig, axes=[ax_cdf])
_ = fig.suptitle(
    r"Generalized Exponential Model: influence of the p parameter")

# %%
# The blue trajectory corresponding to the parameter :math:`p=2` is smooth, as expected, compared with
# the :math:`p=0.25` process, which is less regular.

# %%
# The exponential model (:math:`p=1`)
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# In the case of the exponential model (:math:`p=1`) we show the influence of the correlation length on
# the trajectories.
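
# %%
# The corresponding code is not part of this excerpt; a minimal sketch (the
# grid and scale values below are assumptions):
grid = ot.RegularGrid(0.0, 0.01, 500)
graphScales = ot.GaussianProcess(ot.AbsoluteExponential([0.1]),
                                 grid).getRealization().drawMarginal(0)
for scale in [1.0, 10.0]:
    process = ot.GaussianProcess(ot.AbsoluteExponential([scale]), grid)
    graphScales.add(process.getRealization().drawMarginal(0))
graphScales.setLegends([r"$\theta = 0.1$", r"$\theta = 1$", r"$\theta = 10$"])
view = otv.View(graphScales)
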
standardDeviation_sample = np.sqrt(sample_Y.computeCovariance()[0,0])

# for i in range(N-1):
	# print(format(str('{0:.3f}'.format(asample_Y[i]))))

#########################
### Beta distribution ###
#########################

fittedRes = ot.BetaFactory().buildEstimator(sample_YF)
Beta = fittedRes.getDistribution()
Beta_PDF = Beta.drawPDF(-10.0, 55.0, 251)
# setLegends expects a list of strings, one per drawable
Beta_PDF.setLegends(['PC metamodel: ' + str(N) + ' realizations'])
Beta_draw = Beta_PDF.getDrawable(0)
Beta_draw.setLegend('PC metamodel: ' + str(N) + ' realizations')
viewer.View(Beta_draw)

Mean_Beta = Beta.getMean()
Q90_Beta = Beta.computeQuantile(0.90)
Q99_Beta = Beta.computeQuantile(0.99)

print("")
print("Mean_Beta = ",Mean_Beta)
print("")
print("Q90_Beta = ",Q90_Beta)
print("")
print("Q99_Beta = ",Q99_Beta)
print("")
print("Paramètre de la loi Beta = ",Beta.getParameter())
print("")
# %%
# Example #13
gp = ot.GaussianProcess(kronecker, ot.RegularGrid(0.0, 0.1, 100))

# %%
# Sample and draw a realization of the Gaussian process.

# %%
ot.RandomGenerator.SetSeed(5)
realization = gp.getRealization()
graph = realization.drawMarginal(0)
graph.add(realization.drawMarginal(1))
graph.setYTitle("")
graph.setTitle("")
graph.setColors(ot.Drawable.BuildDefaultPalette(2))
graph.setLegends(["Y1", "Y2"])
graph.setLegendPosition("topleft")
_ = viewer.View(graph)

# %%
# Draw the trajectory on the complex plane.

# %%
graph = realization.draw()
graph.setXTitle("Real part")
graph.setYTitle("Imaginary part")
graph.setTitle("Trajectory on the complex plane")
diagonal = ot.Curve([-1.5, 1.5], [-1.5, 1.5])
diagonal.setLineStyle("dotdash")
diagonal.setColor("grey")
graph.add(diagonal)
_ = viewer.View(graph, square_axes=True)
# We check that the white noise is the one we previously defined:
myWhiteNoise = process.getWhiteNoise()
print(myWhiteNoise)

# %%
# We generate a possible time series realization:
ts = process.getRealization()
ts.setName('ARMA realization')

# %%
# We draw this time series by specifying the wanted marginal index (only 0 is available here).
graph0 = ts.drawMarginal(0)
graph0.setTitle("One ARMA realization")
graph0.setXTitle("t")
graph0.setYTitle(r"$X_t$")
view = viewer.View(graph0)

# %%
# Generate k time series
# k = 5
# myProcessSample = process.getSample(k)

# Then get the current state of the ARMA
#armaState = process.getState()
#print("armaState = ")
#print(armaState)

# %%
# We draw a sample of size 6: it contains six different time series.
size = 6
sample = process.getSample(size)
def C(s, t):
    return m.exp(-4.0 * abs(s - t) / (1 + (s * s + t * t)))


# %%
# Create the large covariance matrix
covariance = ot.CovarianceMatrix(mesh.getVerticesNumber())
for k in range(mesh.getVerticesNumber()):
    t = mesh.getVertices()[k]
    for l in range(k + 1):
        s = mesh.getVertices()[l]
        covariance[k, l] = C(s[0], t[0])

# %%
# Create the covariance model
covmodel = ot.UserDefinedCovarianceModel(mesh, covariance)


# %%
# Draw the covariance model
def f(x):
    return [covmodel([x[0]], [x[1]])[0, 0]]


func = ot.PythonFunction(2, 1, f)
func.setDescription(['$s$', '$t$', '$cov$'])
cov_graph = func.draw([-a] * 2, [a] * 2, [512] * 2)
cov_graph.setLegendPosition('')
view = viewer.View(cov_graph)
plt.show()
# %%
# Example #16
    initialization_script=abspath("initialization.mos"),
    start_time=0.0,
    final_time=5.0)

# %%
# ``total_pop`` and ``healing_rate`` values are defined in the initialization
# script, and remain constant over time. We can now set probability laws on the
# function input variable ``infection_rate`` to propagate its uncertainty
# through the model:

lawInfected = ot.Normal(0.01, 0.003)
inputSample = lawInfected.getSample(10)
outputProcessSample = function(inputSample)
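
# %%
# As a quick aggregate view (a sketch, not in the original excerpt), the
# mean trajectory of the resulting process sample can be computed directly:
meanField = outputProcessSample.computeMean()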

# %%
# Visualize the evolution of the ``infected`` count over time, depending on the
# ``infection_rate`` value:

import openturns.viewer as viewer
gridLayout = outputProcessSample.draw()
graph = gridLayout.getGraph(0, 0)
graph.setTitle("")
graph.setXTitle("FMU simulation time (s)")
graph.setYTitle("Number of infected")
graph.setLegends(["{:.4f}".format(line[0]) for line in inputSample])
view = viewer.View(graph,
                   legend_kw={
                       "title": "infection rate",
                       "loc": "upper left"
                   })
view.ShowAll()
# %%
# Example #17
lowerbound = [-4.0] * dim
upperbound = [4.0] * dim

# %%
# We know that the global minimum is at the center of the domain. It is stored in the `AckleyModel` data class.
xexact = am.x0

# %%
# The minimum value attained, `fexact`, is:
fexact = model(xexact)
fexact

# %%
graph = model.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Ackley function")
view = viewer.View(graph)

# %%
# We see that the Ackley function has many local minima. The global minimum, however, is unique and located at the center of the domain.

# %%
# Create the initial kriging
# ^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# Before using the EGO algorithm, we must create an initial kriging. In order to do this, we must create a design of experiments which fills the space. In this situation, the `LHSExperiment` is a good starting point (but other designs of experiments may fill the space better). We use a uniform distribution in order to create an LHS design with 50 points.

# %%
listUniformDistributions = [
    ot.Uniform(lowerbound[i], upperbound[i]) for i in range(dim)
]
distribution = ot.ComposedDistribution(listUniformDistributions)
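
# %%
# A sketch of the LHS design announced above (50 points, as stated; the rest
# of the original code is not part of this excerpt):
experiment = ot.LHSExperiment(distribution, 50)
inputSample = experiment.generate()
outputSample = model(inputSample)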
# %%
# Example #18
import matplotlib.pyplot as plt

plt.xlabel("FMU simulation time (s)")
plt.ylabel("Number of Infected")
plt.plot(meshSample, outputSample)
plt.show()

# %%
# Simulating the function on an input :py:class:`openturns.Sample` yields a set of
# fields called a :py:class:`openturns.ProcessSample`:

inputSample = ot.Sample([[0.007], [0.005], [0.003]])
outputProcessSample = function(inputSample)
print(outputProcessSample)

# %%
# Visualize the evolution of the ``infected`` count over time, depending on the
# ``infection_rate`` value:

import openturns.viewer as viewer

gridLayout = outputProcessSample.draw()
graph = gridLayout.getGraph(0, 0)
graph.setTitle("")
graph.setXTitle("FMU simulation time (s)")
graph.setYTitle("Number of infected")
graph.setLegends([str(line[0]) for line in inputSample])
view = viewer.View(graph, legend_kw={"title": "infection rate"})
view.ShowAll()
# %%
# We define three basic events :math:`E_1=\{(x_1,x_2)~:~x_1 < 0 \}`, :math:`E_2=\{(x_1,x_2)~:~x_2 > 0 \}` and :math:`E_3=\{(x_1,x_2)~:~x_1+x_2>0 \}`.
e1 = ot.ThresholdEvent(Y1, ot.Less(), 0.0)
e2 = ot.ThresholdEvent(Y2, ot.Greater(), 0.0)
e3 = ot.ThresholdEvent(Y3, ot.Greater(), 0.0)
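
# %%
# These basic events can be combined into system events; a minimal sketch
# (assuming the `IntersectionEvent`/`UnionEvent` classes of recent
# OpenTURNS versions):
e4 = ot.IntersectionEvent([e1, e2])
e5 = ot.UnionEvent([e1, e3])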

# %%
# The restriction of the domain :math:`E_1` to :math:`[-4,4] \times [-4, 4]` is the grey area.
myGraph = ot.Graph(r'Representation of the event $E_1$', r'$x_1$', r'$x_2$',
                   True, '')
data = [[-4, -4], [0, -4], [0, 4], [-4, 4]]
myPolygon = ot.Polygon(data)
myPolygon.setColor('grey')
myPolygon.setEdgeColor('black')
myGraph.add(myPolygon)
view = otv.View(myGraph)
axes = view.getAxes()
_ = axes[0].set_xlim(-4.0, 4.0)
_ = axes[0].set_ylim(-4.0, 4.0)

# %%
# The restriction of the domain :math:`E_2` to :math:`[-4,4] \times [-4, 4]` is the grey area.
myGraph = ot.Graph(r'Representation of the event $E_2$', r'$x_1$', r'$x_2$',
                   True, '')
data = [[-4, 0], [4, 0], [4, 4], [-4, 4]]
myPolygon = ot.Polygon(data)
myPolygon.setColor('grey')
myPolygon.setEdgeColor('black')
myGraph.add(myPolygon)
view = otv.View(myGraph)
axes = view.getAxes()
# %%
# Example #20
# %%
# The uniform distribution
# ------------------------
#
# Let us create a uniform random variable :math:`\mathcal{U}(2,5)`.

# %%
uniform = ot.Uniform(2, 5)

# %%
# The `drawPDF` method plots the probability density function.

# %%
graph = uniform.drawPDF()
view = viewer.View(graph)

# %%
# The `computePDF` method computes the probability density at a specific point.

# %%
uniform.computePDF(3.5)

# %%
# The `drawCDF` method plots the cumulative distribution function.

# %%
graph = uniform.drawCDF()
view = viewer.View(graph)
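
# %%
# Similarly (a quick sketch, not shown in the original excerpt), the
# `computeCDF` method evaluates the cumulative probability at a point.
uniform.computeCDF(3.5)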

# %%
# %%
# Example #21
from matplotlib import pylab as plt

# %%
# We consider a standard bivariate Gaussian random vector :math:`X = (X_1, X_2)`:
dim = 2
dist = ot.Normal(dim)

# %%
# We can draw the bidimensional PDF of the distribution `dist` over :math:`[-5,5] \times [-5,5]`:
ot.ResourceMap.SetAsUnsignedInteger("Contour-DefaultLevelsNumber", 8)
graphPDF = dist.drawPDF([-5, -5], [5, 5])
graphPDF.setTitle(r'2D-PDF of the input variables $(X_1, X_2)$')
graphPDF.setXTitle(r'$x_1$')
graphPDF.setYTitle(r'$x_2$')
graphPDF.setLegendPosition("bottomright")
view = otv.View(graphPDF)

# %%
# We then define a model :math:`f` which maps a 2D-vector :math:`X = (X_1, X_2)` to a
# scalar output :math:`Y = f(X)`.
f = ot.SymbolicFunction(['x0', 'x1'], ['5.0-x1-0.5*(x0-0.1)^2'])
graphModel = f.draw([-8.0, -8.0], [8.0, 8.0])
graphModel.setXTitle(r'$x_1$')
graphModel.setYTitle(r'$x_2$')
graphModel.setTitle(r'Isolines of the model: $Y = f(X)$')
view = otv.View(graphModel)

# %%
# We create random vectors for the input and output variables:
X = ot.RandomVector(dist)
Y = ot.CompositeRandomVector(f, X)
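
# %%
# A typical next step (a sketch; the threshold value is an assumption, not
# part of this excerpt): define a threshold event on the output.
event = ot.ThresholdEvent(Y, ot.Greater(), 0.0)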
# %%
# Example #22
outputData = f(inputData)

# %%
# The contour line associated with the 0.0 value for the first marginal.
mycontour0 = ot.Contour(xx, yy, outputData.getMarginal(0), ot.Point([0.0]), ot.Description(["0.0"]))
mycontour0.setColor("black")
mycontour0.setLineStyle("dashed")
graphModel0.add(mycontour0)

# %%
# The contour line associated with the 1.0 value for the first marginal.
mycontour1 = ot.Contour(xx, yy, outputData.getMarginal(0), ot.Point([1.0]), ot.Description(["1.0"]))
mycontour1.setColor("black")
mycontour1.setLineStyle("dashed")
graphModel0.add(mycontour1)
view = otv.View(graphModel0)

# %%
# The contour line associated with the 0.0 value for the second marginal.
mycontour2 = ot.Contour(xx, yy, outputData.getMarginal(1), ot.Point([0.0]), ot.Description(["0.0"]))
mycontour2.setColor("black")
mycontour2.setLineStyle("dashed")
graphModel1.add(mycontour2)

# %%
# The contour line associated with the 1.0 value for the second marginal.
mycontour3 = ot.Contour(xx, yy, outputData.getMarginal(1), ot.Point([1.0]), ot.Description(["1.0"]))
mycontour3.setColor("black")
mycontour3.setLineStyle("dashed")
graphModel1.add(mycontour3)
view = otv.View(graphModel1)
# %%
# Example #23
# Generate the input sample
N = 500
X = inputVector.getSample(N)

# Evaluate the associated output sample
Y = model(X)
Y.setDescription(["Y"])

print(Y.getMin(), Y.getMax(), Y.computeQuantilePerComponent(0.9))

# %%
# Example 1: value-based scale to describe the Y range
minValue = 3.35
maxValue = 20.0
quantileScale = False
graphCobweb = ot.VisualTest.DrawParallelCoordinates(X, Y, minValue, maxValue,
                                                    'red', quantileScale)
graphCobweb.setLegendPosition('bottomright')
view = viewer.View(graphCobweb)

# %%
# Example 2: rank-based scale to describe the Y range
minValue = 0.9
maxValue = 1.0
quantileScale = True
graphCobweb = ot.VisualTest.DrawParallelCoordinates(X, Y, minValue, maxValue,
                                                    'red', quantileScale)
graphCobweb.setLegendPosition('bottomright')
view = viewer.View(graphCobweb)
plt.show()
# %%
# Example #24
# %%
dist2 = ot.ComposedDistribution([ot.Normal()] * 2, ot.ClaytonCopula(0.2))
sample2 = dist2.getSample(N)
sample2.setName('sample2')

# %%
# We change the parameter used for the evaluation of :math:`E(W_i)` thanks to the `ResourceMap`:
ot.ResourceMap.SetAsUnsignedInteger(
    'VisualTest-KendallPlot-MonteCarloSize', 25)

# %%
# We can test a specific copula model for a given sample,
copula_test = ot.GumbelCopula(3)
graph = ot.VisualTest.DrawKendallPlot(sample1, copula_test)
view = viewer.View(graph)

# %%
# or test whether two samples have the same copula model
graph = ot.VisualTest.DrawKendallPlot(sample1, sample2)
view = viewer.View(graph)

# %%
# The first test gives a positive result, as the blue curve stays near the identity line; this is not the case for the second test.


# %%
# Graphical copula validation
# ---------------------------
#
# In this paragraph we visualize an estimated copula versus the data in the rank space.
#
# The simplest way to create a graph is to use the `draw` method. The `Normal` distribution, for example, provides a method to draw the density function of the Gaussian distribution.

# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
n = ot.Normal()
n

# %%
graph = n.drawPDF()
view = viewer.View(graph)

# %%
# To configure the look of the plot, we can first observe the type of object the `drawPDF` method returns: it is a `Graph`.

# %%
graph = n.drawPDF()
type(graph)

# %%
# The `Graph` class provides several methods to configure the legends, the title and the colors. Since a graph can contain several sub-graphs, the `setColors` method takes a list of colors as input argument: each item of the list corresponds to one sub-graph.

# %%
graph.setXTitle("N")
graph.setYTitle("PDF")
graph.setTitle(
    "PDF of the standard Gaussian")  # hypothetical title; the original argument is not shown
# %%
# Example #26
# We define a generalized extreme value distribution with parameters :math:`\mu = 0.0`, :math:`\sigma = 1.0` and :math:`\xi = 0.0`.
myDistribution = ot.GeneralizedExtremeValue(0.0, 1.0, 0.0)

# %%
# The `GeneralizedExtremeValue` class acts as a proxy class. We can get the actual distribution (Weibull, Frechet or Gumbel) with the `getActualDistribution` method:
print(myDistribution.getActualDistribution())

# %%
# For the chosen parameters it is a Gumbel distribution with parameters
# :math:`\beta=1.0` and :math:`\gamma = 0.0`.

# %%
# We draw its PDF and CDF:
graphPDF = myDistribution.drawPDF()
graphPDF.setTitle(r"PDF of the GEV with parameters $\mu = 0.0$, $\sigma = 1.0$ and $\xi = 0.0$ ")
view = otv.View(graphPDF)

graphCDF = myDistribution.drawCDF()
graphCDF.setTitle(r"CDF of the GEV with parameters $\mu = 0.0$, $\sigma = 1.0$ and $\xi = 0.0$ ")
view = otv.View(graphCDF)

# %%
# The Generalized Pareto Distribution (GPD)
# -----------------------------------------
#
# In this paragraph we turn to the definition of a :class:`~openturns.GeneralizedPareto` distribution.
# For instance we build a generalized Pareto distribution with parameters :math:`\sigma = 1.0`, :math:`\xi = 0.0` and :math:`u = 0.0`:
#
myGPD = ot.GeneralizedPareto(1.0, 0.0, 0.0)

# %%
# Design point in the standard U* space.

# %%
print(result.getStandardSpaceDesignPoint())

# %%
# Design point in the physical X space.

# %%
print(result.getPhysicalSpaceDesignPoint())

# %%
# Importance factors
graph = result.drawImportanceFactors()
view = viewer.View(graph)

# %%
marginalSensitivity, otherSensitivity = result.drawHasoferReliabilityIndexSensitivity()
marginalSensitivity.setLegends(["E", "F", "L", "I"])
marginalSensitivity.setLegendPosition('bottom')
view = viewer.View(marginalSensitivity)

# %%
marginalSensitivity, otherSensitivity = result.drawEventProbabilitySensitivity()
marginalSensitivity.setLegends(["E", "F", "L", "I"])
marginalSensitivity.setLegendPosition('bottom')
view = viewer.View(marginalSensitivity)
# %%
# Example #28
# %%
# The `getMetaModel` method returns the metamodel whose parameters have been estimated.

# %%
responseSurface = result.getMetaModel()

# %%
# Plot the output of our model depending on `x2` with `x1=0.5`.

# %%
xmin = -5.
xmax = 5.
x1value = 0.5
parametricModelGraph = ot.ParametricFunction(
    model, [0], [x1value]).draw(xmin, xmax)
parametricModelGraph.setColors(["red"])
graphMetamodel = ot.ParametricFunction(
    responseSurface, [0], [x1value]).draw(xmin, xmax)
graphMetamodel.setColors(["blue"])
parametricModelGraph.add(graphMetamodel)
parametricModelGraph.setLegends(["Model", "Meta-Model"])
parametricModelGraph.setLegendPosition("topright")
view = viewer.View(parametricModelGraph)
plt.show()

# %%
# We see that the metamodel is equal to the trend because it takes into account the fact that the mean of the Gaussian process is zero.
#
# The `GeneralLinearModelAlgorithm` class is the main building block of the `KrigingAlgorithm`. Most basic use cases therefore rely on the `KrigingAlgorithm` rather than on the `GeneralLinearModelAlgorithm`, because it allows conditioning the Gaussian process.
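
# %%
# For comparison, a minimal self-contained `KrigingAlgorithm` sketch (the
# data and covariance model below are illustrative assumptions, not taken
# from the example above):
xk = ot.Sample([[1.0], [3.0], [4.0], [6.0]])
yk = ot.Sample([[0.5], [1.5], [1.0], [2.0]])
basis = ot.ConstantBasisFactory(1).build()
covarianceModel = ot.SquaredExponential([1.0])
kriging = ot.KrigingAlgorithm(xk, yk, covarianceModel, basis)
kriging.run()
krigingMetamodel = kriging.getResult().getMetaModel()
print(krigingMetamodel([2.0]))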
"""
Create a geometric distribution
===============================
"""
# %%
# In this example we are going to create a geometric distribution with parameter :math:`p = 0.7`.

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

distribution = ot.Geometric(0.7)
print(distribution)

# %%
sample = distribution.getSample(10)
print(sample)

# %%
graph = distribution.drawCDF()
view = viewer.View(graph)
plt.show()
#

# %%
distX1 = ot.Exponential(1.0)
distX2 = ot.Normal()
distX = ot.ComposedDistribution([distX1, distX2])

# %%
# We can draw the bidimensional PDF of the distribution `distX` over :math:`[0,10] \times [-10,10]`:
ot.ResourceMap.SetAsUnsignedInteger("Contour-DefaultLevelsNumber", 8)
graphPDF = distX.drawPDF([0, -10], [10, 10])
graphPDF.setTitle(r'2D-PDF of the input variables $(X_1, X_2)$')
graphPDF.setXTitle(r'$x_1$')
graphPDF.setYTitle(r'$x_2$')
graphPDF.setLegendPosition("bottomright")
view = otv.View(graphPDF)

# %%
# We consider the model :math:`f : (x_1, x_2) \mapsto x_1 x_2` which maps the random input vector :math:`X` to the output variable :math:`Y=f(X) \in \mathbb{R}`. We also draw the isolines of the model `f`.
#
f = ot.SymbolicFunction(['x1', 'x2'], ['x1 * x2'])
graphModel = f.draw([0.0, -10.0], [10.0, 10.0])
graphModel.setXTitle(r'$x_1$')
graphModel.setYTitle(r'$x_2$')
graphModel.setTitle(r'Isolines of the model: $Y = f(X)$')
view = otv.View(graphModel)

# %%
# We want to estimate the probability :math:`P_f` that the output variable is greater than a prescribed threshold :math:`s=10`: this is the failure event. This probability is simply expressed as an integral:
#
# .. math::
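#
#    P_f = \int_{\{(x_1,x_2) \,:\, f(x_1,x_2) > s\}} f_{X_1,X_2}(x_1, x_2) \, \mathrm{d}x_1 \, \mathrm{d}x_2
#
# where :math:`f_{X_1,X_2}` is the joint PDF of the input random vector.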