import openturns as ot


def ot_drawcontour(ot_draw):
    """Return a new graph with one labeled, individually colored contour per level."""
    ot.ResourceMap_SetAsUnsignedInteger("Contour-DefaultLevelsNumber", 7)
    drawables = ot_draw.getDrawables()
    levels = []
    for i in range(len(drawables)):
        contours = drawables[i]
        levels.append(contours.getLevels()[0])
    ot.ResourceMap.SetAsUnsignedInteger('Drawable-DefaultPalettePhase',
                                        len(levels))  #Colors
    palette = ot.Drawable.BuildDefaultPalette(len(levels))  #Colors
    newcontour = ot_draw.getDrawable(0)
    drawables = list()
    for i in range(len(levels)):
        newcontour.setLevels([levels[i]])  # Inline the level values
        newcontour.setDrawLabels(True)
        newcontour.setLabels(["{:.3e}".format(levels[i])])
        # We have to copy the drawable because a Python list stores only pointers
        drawables.append(ot.Drawable(newcontour))
    graphFineTune = ot.Graph("", "", "", True, '')
    graphFineTune.setDrawables(drawables)
    graphFineTune.setColors(palette)  # Add colors
    return graphFineTune
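
# A minimal usage sketch (assumptions: `ot_draw` is a contour Graph with one
# `Contour` drawable per level, as produced by `drawPDF()` of a bivariate
# distribution in the OpenTURNS version this snippet targets; the standard
# bivariate normal below is only an illustration):
import openturns.viewer as otv

pdf_graph = ot.Normal(2).drawPDF()          # contour plot of a 2D PDF
labeled_graph = ot_drawcontour(pdf_graph)   # one labeled, colored drawable per level
view = otv.View(labeled_graph)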
Example #2
# %%
# The two distributions are very different, with a spiky posterior distribution. This shows that the calibration is very sensitive to the observations.

# %%
# Tuning the posterior distribution estimation
# --------------------------------------------
#
# The "GaussianNonLinearCalibration-BootstrapSize" key controls the posterior distribution estimation.
#
# * If "GaussianNonLinearCalibration-BootstrapSize" > 0 (by default it is equal to 100), then a bootstrap resample algorithm is used to see the dispersion of the MAP estimator. This allows to see the variability of the estimator with respect to the finite observation sample.
# * If "GaussianNonLinearCalibration-BootstrapSize" is zero, then the Gaussian linear calibration estimator is used (i.e. the `GaussianLinearCalibration` class) at the optimum. This is called the Laplace approximation. 
#
# We must configure the key before creating the object (changing it afterwards has no effect on the result).

# %%
ot.ResourceMap_SetAsUnsignedInteger("GaussianNonLinearCalibration-BootstrapSize",0) 

# %%
algo = ot.GaussianNonLinearCalibration(mycf, Qobs, Hobs, thetaPrior, sigma, errorCovariance)

# %%
algo.run()

# %%
calibrationResult = algo.getResult()
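
# %%
# Besides the graphs, the calibration result gives access to the maximum a posteriori
# estimate and to the posterior distribution of the parameters; a short sketch using
# the `getParameterMAP` and `getParameterPosterior` accessors:
thetaMAP = calibrationResult.getParameterMAP()
thetaPosterior = calibrationResult.getParameterPosterior()
print(thetaMAP)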

# %%
graph = calibrationResult.drawParameterDistributions()
view = otv.View(graph)  # render the OpenTURNS graph with matplotlib

plt.show()
# %%
Example #3
# We consider a bivariate random vector :math:`X = (X_1, X_2)` with the following independent marginals:
#
# - an exponential distribution with parameter :math:`\lambda=1`, :math:`X_1 \sim \mathcal{E}(1.0)`;
# - a standard unit Gaussian :math:`X_2 \sim \mathcal{N}(0,1)`.
#
# The support of the input vector is :math:`[0, +\infty[ \times \mathbb{R}`.
#

# %%
import openturns as ot
import openturns.viewer as otv

distX1 = ot.Exponential(1.0)
distX2 = ot.Normal()
distX = ot.ComposedDistribution([distX1, distX2])

# %%
# We can draw the bidimensional PDF of the distribution `distX` over :math:`[0,10] \times [-10,10]`:
ot.ResourceMap_SetAsUnsignedInteger("Contour-DefaultLevelsNumber", 8)
graphPDF = distX.drawPDF([0, -10], [10, 10])
graphPDF.setTitle(r'2D-PDF of the input variables $(X_1, X_2)$')
graphPDF.setXTitle(r'$x_1$')
graphPDF.setYTitle(r'$x_2$')
graphPDF.setLegendPosition("bottomright")
view = otv.View(graphPDF)

# %%
# We consider the model :math:`f : (x_1, x_2) \mapsto x_1 x_2` which maps the random input vector :math:`X` to the output variable :math:`Y=f(X) \in \mathbb{R}`. We also draw the isolines of the model `f`.
#
f = ot.SymbolicFunction(['x1', 'x2'], ['x1 * x2'])
graphModel = f.draw([0.0, -10.0], [10.0, 10.0])
graphModel.setXTitle(r'$x_1$')
graphModel.setYTitle(r'$x_2$')
graphModel.setTitle(r'Isolines of the model: $Y = f(X)$')