Example #1
# %%
import openturns as ot

# %%
# Create the model (x1,x2) --> (y) = (4.*x1+5.*x2)
model = ot.SymbolicFunction(['x1', 'x2'], ['4.*x1+5.*x2'])

# %%
# Create the input independent joint distribution
distribution = ot.Normal(2)
distribution.setDescription(['X1', 'X2'])

# %%
# Create the correlated input distribution
S = ot.CorrelationMatrix(2)
S[1, 0] = 0.3
R = ot.NormalCopula.GetCorrelationFromSpearmanCorrelation(S)
copula = ot.NormalCopula(R)
distribution_corr = ot.ComposedDistribution([ot.Normal()] * 2, copula)

# %%
# ANCOVA needs a functional decomposition of the model
enumerateFunction = ot.LinearEnumerateFunction(2)
productBasis = ot.OrthogonalProductPolynomialFactory([ot.HermiteFactory()]*2, enumerateFunction)
adaptiveStrategy = ot.FixedStrategy(productBasis, enumerateFunction.getStrataCumulatedCardinal(4))
samplingSize = 250
projectionStrategy = ot.LeastSquaresStrategy(ot.MonteCarloExperiment(samplingSize))
algo = ot.FunctionalChaosAlgorithm(model, distribution, adaptiveStrategy, projectionStrategy)
algo.run()
result = ot.FunctionalChaosResult(algo.getResult())

# %%
# Create the input sample taking the correlation into account
size = 2000
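# %%
# The excerpt stops here. A minimal sketch of the remaining ANCOVA step,
# following the OpenTURNS ANCOVA API:
sample = distribution_corr.getSample(size)
ancova = ot.ANCOVA(result, sample)
indices = ancova.getIndices()
uncorrelatedIndices = ancova.getUncorrelatedIndices()
print('ANCOVA indices:', indices)
print('ANCOVA indices (uncorrelated part):', uncorrelatedIndices)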
Example #2
import openturns as ot
import math as m
import openturns.viewer as viewer
from matplotlib import pylab as plt

ot.Log.Show(ot.Log.NONE)

# %%
# **LHS and space filling**

# %%
N = 100
# Considering independent Uniform distributions of dimension 3
# Bounds are (-1,1), (0,2) and (0, 0.5)
distribution = ot.ComposedDistribution(
    [ot.Uniform(-1.0, 1.0),
     ot.Uniform(0.0, 2.0),
     ot.Uniform(0.0, 0.5)])
# Random LHS
lhs = ot.LHSExperiment(distribution, N)
lhs.setAlwaysShuffle(True)  # randomized
design = lhs.generate()
# C2
c2 = ot.SpaceFillingC2().evaluate(design)
# PhiP with default p
phip = ot.SpaceFillingPhiP().evaluate(design)
# mindist
mindist = ot.SpaceFillingMinDist().evaluate(design)
# PhiP tends to 1/mindist as p -> infinity; p = 100 is used as an approximation
phip_inf = ot.SpaceFillingPhiP(100).evaluate(design)
print(c2, phip, mindist, phip_inf)
Example #3

import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if (ot.Burr().__class__.__name__ == 'ComposedDistribution'):
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.Burr().__class__.__name__ == 'CumulativeDistributionNetwork'):
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
else:
    distribution = ot.Burr()
dimension = distribution.getDimension()
if dimension <= 2:
    if distribution.getDimension() == 1:
        distribution.setDescription(['$x$'])
        pdf_graph = distribution.drawPDF()
        cdf_graph = distribution.drawCDF()
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(121)
        cdf_axis = fig.add_subplot(122)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
        View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
    else:
        distribution.setDescription(['$x_1$', '$x_2$'])
        pdf_graph = distribution.drawPDF()
        fig = plt.figure(figsize=(10, 5))
        # the excerpt breaks off here; minimal completion mirroring the 1D branch
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(111)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
Example #4
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()

# Test default constructor
print("experiment0=", repr(ot.GaussProductExperiment().generate()))

distribution = ot.ComposedDistribution(
    [ot.Exponential(), ot.Triangular(-1.0, -0.5, 1.0)])
marginalSizes = [3, 6]
# Test the constructor based on marginal degrees
print("experiment1=", ot.GaussProductExperiment(marginalSizes))
# Test the constructor based on distribution
print("experiment2=", ot.GaussProductExperiment(distribution))
# Test the constructor based on marginal degrees and distribution
experiment = ot.GaussProductExperiment(distribution, marginalSizes)
print("experiment = ", experiment)
sample, weights = experiment.generateWithWeights()
print("sample = ", repr(sample))
print("weights = ", repr(weights))
Example #5
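# The excerpt starts after the Rastrigin function was drawn. A minimal sketch
# of the assumed setup (`rastrigin`, `dim`, `lowerbound`, `upperbound` and
# `viewer` come from the surrounding example):
import openturns as ot
import openturns.viewer as viewer

dim = 2
rastrigin = ot.SymbolicFunction(
    ['x1', 'x2'],
    ['20 + x1^2 - 10 * cos(2 * pi_ * x1) + x2^2 - 10 * cos(2 * pi_ * x2)'])
lowerbound = [-5.12] * dim
upperbound = [5.12] * dim
graph = rastrigin.draw(lowerbound, upperbound, [100] * dim)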
graph.setTitle("Rastrigin function")
view = viewer.View(graph)

# %%
# We see that the Rastrigin function has several local minima. However, there is a single global minimum at :math:`\mathbf{x}^\star=(0, 0)`.

# %%
# Define the starting points
# --------------------------

# %%
# The starting points are computed from `Uniform` distributions in the input domain. We consider a set of 100 starting points.

# %%
U = ot.Uniform(-5.12, 5.12)
distribution = ot.ComposedDistribution([U] * dim)

# %%
size = 100
startingPoints = distribution.getSample(size)

# %%
graph = rastrigin.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Rastrigin function")
cloud = ot.Cloud(startingPoints)
cloud.setPointStyle("bullet")
cloud.setColor("black")
graph.add(cloud)
view = viewer.View(graph)

# %%
Example #6

# imports needed by this excerpt
from operator import itemgetter

import openturns as ot
# %%
# borehole model
dimension = 8
input_names = ['rw', 'r', 'Tu', 'Hu', 'Tl', 'Hl', 'L', 'Kw']
model = ot.SymbolicFunction(input_names,
                            ['(2*pi_*Tu*(Hu-Hl))/(ln(r/rw)*(1+(2*L*Tu)/(ln(r/rw)*rw^2*Kw)+Tu/Tl))'])
coll = [ot.Normal(0.1, 0.0161812),
         ot.LogNormal(7.71, 1.0056),
         ot.Uniform(63070.0, 115600.0),
         ot.Uniform(990.0, 1110.0),
         ot.Uniform(63.1, 116.0),
         ot.Uniform(700.0, 820.0),
         ot.Uniform(1120.0, 1680.0),
         ot.Uniform(9855.0, 12045.0)]
distribution = ot.ComposedDistribution(coll)
distribution.setDescription(input_names)

# %%
# Freeze r, Tu, Tl in the model to speed things up
selection = [1, 2, 4]
complement = ot.Indices(selection).complement(dimension)
# freeze the selected inputs at their means *before* reducing the distribution,
# so that the marginal indices still refer to the full distribution
model = ot.ParametricFunction(model, selection, distribution.getMarginal(selection).getMean())
distribution = distribution.getMarginal(complement)
input_names_copy = list(input_names)
input_names = itemgetter(*complement)(input_names)
dimension = len(complement)

# %%
# design of experiment
size = 1000
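# The excerpt stops here. A minimal sketch of the announced design of
# experiments, assuming a Sobol' sensitivity study was the goal:
sie = ot.SobolIndicesExperiment(distribution, size)
inputDesign = sie.generate()
outputDesign = model(inputDesign)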
Example #7
    def run(self):
        """
        Build the POD models.

        Notes
        -----
        This method builds the kriging model. First the censored data
        are filtered if needed. The Box Cox transformation is performed if it is
        enabled. Then the POD models are built: conditional samples are
        simulated for each defect size, then the distributions of the probability
        estimator (for MC simulation) are built. Finally, a sample of this
        distribution is used to compute the mean POD and the POD at the confidence
        level.
        """

        # run the kriging algorithm and get the result if not given
        if not self._userKriging:
            if self._verbose:
                print('Start optimizing covariance model parameters...')
            # build the kriging algorithm without optimizer
            algoKriging = self._buildKrigingAlgo(self._input, self._signals)
            # optimize the covariance model parameters and return the kriging
            # algorithm with the run launched
            if LooseVersion(ot.__version__) == '1.9':
                llDim = algoKriging.getReducedLogLikelihoodFunction(
                ).getInputDimension()
            else:
                llDim = algoKriging.getLogLikelihoodFunction(
                ).getInputDimension()
            lowerBound = [0.001] * llDim
            upperBound = [50] * llDim
            algoKriging = self._estimKrigingTheta(algoKriging, lowerBound,
                                                  upperBound,
                                                  self._initialStartSize)
            algoKriging.run()
            if self._verbose:
                print('Kriging optimizer completed')
            self._krigingResult = algoKriging.getResult()

        # compute the Q2
        self._Q2 = self._computeQ2(self._input, self._signals,
                                   self._krigingResult)
        if self._verbose:
            print('kriging validation Q2 (>0.9): {:0.4f}'.format(self._Q2))

        # set default uniform distribution with min and max of the given defect sizes
        if self._distribution is None:
            inputMin = self._input.getMin()
            inputMin[0] = np.min(self._defectSizes)
            inputMax = self._input.getMax()
            inputMax[0] = np.max(self._defectSizes)
            marginals = [
                ot.Uniform(inputMin[i], inputMax[i]) for i in range(self._dim)
            ]
            self._distribution = ot.ComposedDistribution(marginals)

        # compute the sample containing the POD values for all defect
        self._PODPerDefect = ot.NumericalSample(
            self._simulationSize * self._samplingSize, self._defectNumber)
        for i, defect in enumerate(self._defectSizes):
            self._PODPerDefect[:, i] = self._computePODSamplePerDefect(
                defect, self._detectionBoxCox, self._krigingResult,
                self._distribution, self._simulationSize, self._samplingSize)
            if self._verbose:
                updateProgress(i, self._defectNumber,
                               'Computing POD per defect')

        # compute the mean POD
        meanPOD = self._PODPerDefect.computeMean()
        # create the interpolate function of the POD model
        interpModel = interp1d(self._defectSizes,
                               np.array(meanPOD),
                               kind='linear')
        self._PODmodel = ot.PythonFunction(1, 1, interpModel)
Example #8
#!/usr/bin/env python
# coding:utf-8
"""Sample distribution.

Generate a sample using OpenTURNS.

"""
import openturns as ot
import numpy as np
import json

n_samples = 100
dists = [ot.Uniform(20., 40.), ot.Normal(2345., 400.)]

settings_path = './'

with open(settings_path + 'settings.json', 'r') as f:
    settings = json.load(f)

distribution = ot.ComposedDistribution(dists, ot.IndependentCopula(len(dists)))
experiment = ot.LHSExperiment(distribution, n_samples, True, True)
sample = np.array(experiment.generate()).tolist()

settings['space']['sampling'] = sample

with open(settings_path + 'settings.json', 'w') as f:
    json.dump(settings, f, indent=4)
Example #9

# excerpt from a persalys study script; the inputs x1, x2, x3, the outputs,
# their formulas and `myStudy` are defined earlier in the original file
import openturns as ot
import persalys

symbolicModel = persalys.SymbolicPhysicalModel(
    'symbolicModel', [x1, x2, x3], [fake_var, y0, fake_y0, y1],
    [formula_fake_var, formula_y0, formula_y0, formula_y1])

myStudy.add(symbolicModel)

# python model ##
code = 'from math import cos, sin, sqrt\n\ndef _exec(x1, x2, x3):\n    y0 = cos(0.5*x1) + sin(x2) + sqrt(x3)\n    return y0\n'
pythonModel = persalys.PythonPhysicalModel('pythonModel', [x1, x2, x3], [y0],
                                           code)
myStudy.add(pythonModel)

filename = 'data.csv'
cDist = ot.ComposedDistribution(
    [ot.Normal(), ot.Gumbel(),
     ot.Normal(), ot.Uniform()],
    ot.ComposedCopula([ot.IndependentCopula(2),
                       ot.GumbelCopula()]))
sample = cDist.getSample(200)
sample.exportToCSVFile(filename, ' ')

# Designs of Experiment ##

# fixed design ##
ot.RandomGenerator.SetSeed(0)
fixedDesign = persalys.FixedDesignOfExperiment('fixedDesign', symbolicModel)
inputSample = ot.LHSExperiment(
    ot.ComposedDistribution([ot.Uniform(0., 10.),
                             ot.Uniform(0., 10.)]), 10).generate()
inputSample.stack(ot.Sample(10, [0.5]))
fixedDesign.setOriginalInputSample(inputSample)
Example #10

# excerpt from a Study save/load script; `myStudy` and `randomGeneratorState`
# are defined earlier in the original file
    myStudy.add('randomGeneratorState', randomGeneratorState)

    # Create a GeneralLinearModelResult
    generalizedLinearModelResult = ot.GeneralLinearModelResult()
    generalizedLinearModelResult.setName('generalizedLinearModelResult')
    myStudy.add('generalizedLinearModelResult', generalizedLinearModelResult)

    # KDTree
    sample = ot.Normal(3).getSample(10)
    kDTree = ot.KDTree(sample)
    myStudy.add('kDTree', kDTree)

    # TensorApproximationAlgorithm/Result
    dim = 1
    model = ot.SymbolicFunction(['x'], ['x*sin(x)'])
    distribution = ot.ComposedDistribution([ot.Uniform()] * dim)
    factoryCollection = [ot.FourierSeriesFactory()] * dim
    functionFactory = ot.OrthogonalProductFunctionFactory(factoryCollection)
    size = 10
    X = distribution.getSample(size)
    Y = model(X)
    nk = [5] * dim
    rank = 1
    algo = ot.TensorApproximationAlgorithm(X, Y, distribution, functionFactory,
                                           nk, rank)
    algo.run()
    tensorResult = algo.getResult()
    myStudy.add('tensorResult', tensorResult)
    tensorIn = [0.4]
    tensorRef = tensorResult.getMetaModel()(tensorIn)
Example #11

#! /usr/bin/env python

import openturns as ot
import openturns.testing as ott
import math as m

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

# Definition of the marginals
X1 = ot.Uniform(-m.pi, m.pi)
X2 = ot.Uniform(-m.pi, m.pi)
X3 = ot.Uniform(-m.pi, m.pi)

# 3d distribution made with independent marginals
distX = ot.ComposedDistribution([X1, X2, X3])

# Get a sample of it
size = 100
X = distX.getSample(size)

# The Ishigami model
modelIshigami = ot.SymbolicFunction(
    ["X1", "X2", "X3"], ["sin(X1) + 5.0 * (sin(X2))^2 + 0.1 * X3^4 * sin(X1)"])

# Apply model: Y = m(X)
Y = modelIshigami(X)

# We define the covariance models for the HSIC indices.
# For the input, we consider a SquaredExponential covariance model.
covarianceModelCollection = ot.CovarianceModelCollection()
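# The excerpt stops here. A minimal sketch of the remaining HSIC steps,
# following the OpenTURNS HSIC API (scales set to the sample standard
# deviations, a common choice):
for i in range(3):
    inputCovariance = ot.SquaredExponential(1)
    inputCovariance.setScale(X.getMarginal(i).computeStandardDeviation())
    covarianceModelCollection.add(inputCovariance)
outputCovariance = ot.SquaredExponential(1)
outputCovariance.setScale(Y.computeStandardDeviation())
covarianceModelCollection.add(outputCovariance)
estimatorType = ot.HSICUStat()
hsicEstimator = ot.HSICEstimatorGlobalSensitivity(
    covarianceModelCollection, X, Y, estimatorType)
print('HSIC indices:', hsicEstimator.getHSICIndices())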
Example #12

# excerpt from a batman-based script; `Space`, `corners` and `dists_to_ot`
# come from the surrounding module
init_size = 10

indim = 4  # inputs dim
plabels = ['Ks1', 'Ks2', 'Ks3', 'Q']
space = Space(corners)

# Build the learning samples
dists = [
    'Uniform(15., 60.)', 'Uniform(15., 60.)', 'Uniform(15., 60.)',
    'BetaMuSigma(4031, 400, 1000, 6000).getDistribution()'
]
dists_ot = dists_to_ot(dists)

x_train = ot.LHSExperiment(
    ot.ComposedDistribution(dists_ot), init_size, True,
    True).generate()  # training sample for truncation error (1 sample)
x_train = [list(x_train[i]) for i in range(init_size)]
x_train = np.array(x_train)
x_l = []
for i in range(init_size):
    x_l.append({
        'friction_coefficients': [{
            "type": "zone",
            "index": 0,
            "value": x_train[i, 0]
        }, {
            "type": "zone",
            "index": 1,
            "value": x_train[i, 1]
        }, {
Example #13
#! /usr/bin/env python

import openturns as ot

observationsSize = 5
# Create a collection of distribution
conditionedDistribution = ot.Normal()
conditioningDistributionCollection = []
# First conditioning distribution: continuous/continuous
atoms = [ot.Uniform(0.0, 1.0), ot.Uniform(1.0, 2.0)]
conditioningDistributionCollection.append(ot.ComposedDistribution(atoms))
# Second conditioning distribution: discrete/continuous
atoms = [ot.Binomial(3, 0.5), ot.Uniform(1.0, 2.0)]
# conditioningDistributionCollection.append(ot.ComposedDistribution(atoms))
# Third conditioning distribution: dirac/continuous
atoms = [ot.Dirac(0.0), ot.Uniform(1.0, 2.0)]
conditioningDistributionCollection.append(ot.ComposedDistribution(atoms))

for conditioning in conditioningDistributionCollection:
    print("conditioning distribution=", conditioning)
    observationsDistribution = ot.Distribution(conditionedDistribution)
    observationsDistribution.setParameter(conditioning.getMean())
    observations = observationsDistribution.getSample(observationsSize)
    distribution = ot.PosteriorDistribution(
        ot.ConditionalDistribution(conditionedDistribution, conditioning),
        observations)
    dim = distribution.getDimension()
    print("Distribution ", distribution)
    print("Distribution ", distribution)
    print("range=", distribution.getRange())
    mean = distribution.getMean()
Example #14

# In this example we are going to build a conditional random vector
#
# .. math::
#    \underline{X}|\underline{\Theta}
#

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt

# %%
# create the random vector Theta (parameters of X)
gammaDist = ot.Uniform(1.0, 2.0)
alphaDist = ot.Uniform(0.0, 0.1)
thetaDist = ot.ComposedDistribution([gammaDist, alphaDist])
thetaRV = ot.RandomVector(thetaDist)

# %%
# create the XgivenTheta distribution
XgivenThetaDist = ot.Exponential()

# %%
# create the X distribution
XDist = ot.ConditionalRandomVector(XgivenThetaDist, thetaRV)

# %%
# draw a sample
XDist.getSample(5)
Example #15

# excerpt continuing the Rastrigin study; `rastrigin`, `lowerbound`,
# `upperbound`, `dim` and `viewer` are defined as in Example #5

# %%
# We see that the Rastrigin function has several local minima. However, there is a single global minimum at :math:`\vect{x}^\star=(0, 0)`.

# %%
# Create the problem and set the optimization algorithm
# -----------------------------------------------------

# %%
problem = ot.OptimizationProblem(rastrigin)

# %%
# We use the :class:`~openturns.Cobyla` algorithm and run it from multiple starting points selected by a :class:`~openturns.LowDiscrepancyExperiment`.

# %%
size = 64
distribution = ot.ComposedDistribution(
    [ot.Uniform(lowerbound[0], upperbound[0])] * dim)
experiment = ot.LowDiscrepancyExperiment(ot.SobolSequence(), distribution,
                                         size)
solver = ot.MultiStart(ot.Cobyla(problem), experiment.generate())

# %%
# Visualize the starting points of the optimization algorithm
# -----------------------------------------------------------

# %%
startingPoints = solver.getStartingSample()
graph = rastrigin.draw(lowerbound, upperbound, [100] * dim)
graph.setTitle("Rastrigin function")
cloud = ot.Cloud(startingPoints)
cloud.setPointStyle("bullet")
cloud.setColor("black")
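# The excerpt breaks off here; presumably the cloud is added to the graph and
# displayed, as in the sibling examples:
graph.add(cloud)
view = viewer.View(graph)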
Example #16

# excerpt; `marginals` (a Normal and a Gumbel, per the titles below) and `otv`
# (openturns.viewer) are defined earlier in the original script

# %%
# We draw their PDF. We recall that the `drawPDF` command just generates the graph data. It is the viewer module that enables the actual display.
graphNormalPDF = marginals[0].drawPDF()
graphNormalPDF.setTitle("PDF of the first marginal")
graphGumbelPDF = marginals[1].drawPDF()
graphGumbelPDF.setTitle("PDF of the second marginal")
view = otv.View(graphNormalPDF)
view = otv.View(graphGumbelPDF)

# %%
# The CDF is also available with the `drawCDF` method.

# %%
# We then have the minimum required to create a bivariate distribution, assuming no dependency structure:
distribution = ot.ComposedDistribution(marginals)

# %%
# We can draw the PDF (here in dimension 2) :
graph = distribution.drawPDF()
view = otv.View(graph)

# %%
# We also draw the CDF :
graph = distribution.drawCDF()
view = otv.View(graph)

# %%
# If dependence between the marginals is needed, we have to create a copula specifying the dependency structure, here a :class:`~openturns.NormalCopula`:
R = ot.CorrelationMatrix(2)
R[0, 1] = 0.3
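# A minimal sketch of the announced construction (continuing with `otv` as the
# viewer alias used above):
copula = ot.NormalCopula(R)
distribution = ot.ComposedDistribution(marginals, copula)
graph = distribution.drawPDF()
view = otv.View(graph)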
Example #17

import numpy as np
import openturns as ot

tmin = 0.0  # start time (assumed; the excerpt begins after its definition)
tmax = 12.  # Maximum time
gridsize = 100  # Number of time steps
mesh = ot.IntervalMesher([gridsize - 1]).build(ot.Interval(tmin, tmax))

# %%
vertices = mesh.getVertices()

# %%
# Creation of the input distribution.

# %%
distZ0 = ot.Uniform(100.0, 150.0)
distV0 = ot.Normal(55.0, 10.0)
distM = ot.Normal(80.0, 8.0)
distC = ot.Uniform(0.0, 30.0)
distribution = ot.ComposedDistribution([distZ0, distV0, distM, distC])

# %%
dimension = distribution.getDimension()
dimension

# %%
# Then we define the Python function which computes the altitude at each time value. In order to compute all altitudes with a vectorized evaluation, we first convert the vertices into a Numpy `array` and use the Numpy functions `exp` and `maximum`: this increases the evaluation performance of the script.


# %%
def AltiFunc(X):
    g = 9.81
    z0 = X[0]
    v0 = X[1]
    m = X[2]
    c = X[3]
    # completion of the truncated body, following the viscous free-fall model
    # z(t) = z0 + vinf*t + tau*(v0 - vinf)*(1 - exp(-t/tau)), with tau = m/c
    # and vinf = -m*g/c
    tau = m / c
    vinf = -m * g / c
    t = np.array(vertices)
    z = z0 + vinf * t + tau * (v0 - vinf) * (1.0 - np.exp(-t / tau))
    z = np.maximum(z, 0.0)
    return [[zeta[0]] for zeta in z]
Example #18
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
ot.RandomGenerator.SetSeed(0)

# Generate sample with the given plane
distribution = ot.ComposedDistribution([ot.Uniform(0, 1)] * 2)
size = 10
experiment = ot.LHSExperiment(distribution, size)

sample = experiment.generate()

# Create an empty graph
graph = ot.Graph("LHS experiment", "x1", "x2", True, "")

# Create the cloud
cloud = ot.Cloud(sample, "blue", "fsquare", "")

# Then, draw it
graph.add(cloud)

fig = plt.figure(figsize=(4, 4))
axis = fig.add_subplot(111)
axis.set_xlim(auto=True)
View(graph, figure=fig, axes=[axis], add_legend=False)
Example #19

# excerpt; `graph` and the `viewer` module come from earlier in the original
# script
view = viewer.View(graph)

# %%
# Draw a cloud
# ------------
#
# The `Cloud` class creates clouds of bidimensional points. To demonstrate it, let us create two Gaussian distributions in two dimensions.

# %%
# Create a Funky distribution
corr = ot.CorrelationMatrix(2)
corr[0, 1] = 0.2
copula = ot.NormalCopula(corr)
x1 = ot.Normal(-1., 1)
x2 = ot.Normal(2, 1)
x_funk = ot.ComposedDistribution([x1, x2], copula)

# %%
# Create a Punk distribution
x1 = ot.Normal(1., 1)
x2 = ot.Normal(-2, 1)
x_punk = ot.ComposedDistribution([x1, x2], copula)

# %%
# Let us mix these two distributions.

# %%
mixture = ot.Mixture([x_funk, x_punk], [0.5, 1.])

# %%
n = 500
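# The excerpt stops here. A minimal sketch of drawing a sample of the mixture
# as a cloud, mirroring the pattern used above:
sample = mixture.getSample(n)
graph = ot.Graph("A sample from the mixture", "x1", "x2", True, "")
cloud = ot.Cloud(sample, "blue", "fsquare", "")
graph.add(cloud)
view = viewer.View(graph)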
Example #20

import math as m

import openturns as ot


def flooding(X):
    L, B = 5.0e3, 300.0
    Q, K_s, Z_v, Z_m = X
    alpha = (Z_m - Z_v)/L
    H = (Q/(K_s*B*m.sqrt(alpha)))**(3.0/5.0)
    return [H]


g = ot.PythonFunction(4, 1, flooding)
Q = ot.TruncatedDistribution(
    ot.Gumbel(558.0, 1013.0), 0.0, ot.TruncatedDistribution.LOWER)
K_s = ot.Dirac(30.0)
Z_v = ot.Dirac(50.0)
Z_m = ot.Dirac(55.0)
inputRandomVector = ot.ComposedDistribution([Q, K_s, Z_v, Z_m])
nbobs = 100
inputSample = inputRandomVector.getSample(nbobs)
outputH = g(inputSample)
Hobs = outputH + ot.Normal(0.0, 0.1).getSample(nbobs)
Qobs = inputSample[:, 0]
thetaPrior = [20, 49, 51]
model = ot.ParametricFunction(g, [1, 2, 3], thetaPrior)
errorCovariance = ot.CovarianceMatrix([[0.5**2]])
sigma = ot.CovarianceMatrix(3)
sigma[0, 0] = 5.**2
sigma[1, 1] = 1.**2
sigma[2, 2] = 1.**2
algo = ot.GaussianNonLinearCalibration(
    model, Qobs, Hobs, thetaPrior, sigma, errorCovariance)
algo.run()
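# A minimal sketch of exploiting the calibration result (getParameterMAP is
# part of the CalibrationResult API):
calibrationResult = algo.getResult()
thetaMAP = calibrationResult.getParameterMAP()
print('theta MAP =', thetaMAP)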
Example #21
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

# Defining parameters
dimension = 3
bounds = ot.Interval(dimension)
size = 25

# Build standard LHS algorithm
distribution = ot.ComposedDistribution([ot.Uniform(0.0, 1.0)] * dimension)
lhs = ot.LHSExperiment(distribution, size)
lhs.setRandomShift(False)  # centered
lhs.setAlwaysShuffle(True)  # randomized

# print the object
print("lhs=", lhs)
print("Bounds of uniform distributions=", distribution.getRange())

# Generate design without optimization
design = lhs.generate()
print("design=", design)

# Defining space fillings
spaceFillingC2 = ot.SpaceFillingC2()
spaceFillingPhiP = ot.SpaceFillingPhiP(10)

# print the criteria on this design
print("PhiP=%f, C2=%f" % (ot.SpaceFillingPhiP().evaluate(design),
                          ot.SpaceFillingC2().evaluate(design)))
Example #22

import openturns as ot
from openturns.viewer import View

N = 1000
# create a sample X
dist = ot.Triangular(1.0, 5.0, 10.0)
# create a Y sample: Y = 0.5 + 3 * X + eps
eps = ot.Normal(0.0, 1.0)
sample = ot.ComposedDistribution([dist, eps]).getSample(N)
f = ot.SymbolicFunction(['x', 'eps'], ['0.5+3.0*x+eps'])
sampleY = f(sample)
sampleX = sample.getMarginal(0)
sampleX.setName('X')
# create a linear model
factory = ot.LinearModelFactory()
regressionModel = factory.build(sampleX, sampleY, 0.9)
graph = ot.VisualTest.DrawLinearModelResidual(sampleX, sampleY,
                                              regressionModel)
cloud = graph.getDrawable(0)
cloud.setPointStyle('times')
graph.setDrawable(cloud, 0)
graph.setTitle('')
# display the graph
View(graph)
Example #23

# excerpt from an EGO example on the Ackley function; `model`, `lowerbound`,
# `upperbound`, `dim` and `viewer` are defined earlier in the original script
view = viewer.View(graph)

# %%
# We see that the Ackley function has many local minima. The global minimum, however, is unique and located at the center of the domain.

# %%
# Create the initial kriging
# ^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# Before using the EGO algorithm, we must create an initial kriging. In order to do this, we must create a design of experiment which fills the space. In this situation, the `LHSExperiment` is a good place to start (but other designs of experiments may fill the space better). We use a uniform distribution in order to create a LHS design with 50 points.

# %%
listUniformDistributions = [
    ot.Uniform(lowerbound[i], upperbound[i]) for i in range(dim)
]
distribution = ot.ComposedDistribution(listUniformDistributions)
sampleSize = 50
experiment = ot.LHSExperiment(distribution, sampleSize)
inputSample = experiment.generate()
outputSample = model(inputSample)

# %%
graph = ot.Graph("Initial LHS design of experiment - n=%d" % (sampleSize),
                 "$x_0$", "$x_1$", True)
cloud = ot.Cloud(inputSample)
graph.add(cloud)
view = viewer.View(graph)

# %%
# We now create the kriging metamodel. We select the `SquaredExponential` covariance model with a constant basis (the `MaternModel` may perform better in this case). We use default settings (1.0) for the scale parameters of the covariance model, but configure the amplitude to 0.1, which better corresponds to the properties of the Ackley function.
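# The excerpt stops here. A minimal sketch of the kriging construction the
# paragraph describes (scale 1.0, amplitude 0.1, constant basis):
covarianceModel = ot.SquaredExponential([1.0] * dim, [0.1])
basis = ot.ConstantBasisFactory(dim).build()
kriging = ot.KrigingAlgorithm(inputSample, outputSample, covarianceModel, basis)
kriging.run()
krigingResult = kriging.getResult()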
Example #24

import openturns as ot
import openturns.viewer as viewer
# Set the random generator seed
ot.RandomGenerator.SetSeed(0)

# %%
# Defining the marginals
# ----------------------
#
# We consider two independent normal marginals:
X1 = ot.Normal(0.0, 1.0)
X2 = ot.Normal(0.0, 3.0)

# %%
# Independent marginals
# ---------------------

distX = ot.ComposedDistribution([X1, X2])
sample = distX.getSample(1000)

# %%
# Let's see the data

# %%
graph = ot.Graph('2D-Normal sample', 'x1', 'x2', True, '')
cloud = ot.Cloud(sample, 'blue', 'fsquare', 'My Cloud')
graph.add(cloud)
graph.setXTitle("$X_1$")
graph.setYTitle("$X_2$")
graph.setTitle("A sample from $X=(X_1, X_2)$")
view = viewer.View(graph)

# %%
Example #25
# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import math as m
ot.Log.Show(ot.Log.NONE)

# %%
# Create the marginal distributions of the parameters.
dist_E = ot.Beta(0.93, 2.27, 2.8e7, 4.8e7)
dist_F = ot.LogNormalMuSigma(30000, 9000, 15000).getDistribution()
dist_L = ot.Uniform(250, 260)
dist_I = ot.Beta(2.5, 1.5, 3.1e2, 4.5e2)
marginals = [dist_E, dist_F, dist_L, dist_I]
distribution = ot.ComposedDistribution(marginals)

# %%
# Sample the inputs.
sampleX = distribution.getSample(100)

# %%
# Create the model.
model = ot.SymbolicFunction(['E', 'F', 'L', 'I'], ['F*L^3/(3*E*I)'])

# %%
# Evaluate the outputs.
sampleY = model(sampleX)

# %%
# Get minimum and maximum values of both inputs and output variables.
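# The excerpt stops here. A minimal sketch of the step the comment announces:
minX = sampleX.getMin()
maxX = sampleX.getMax()
minY = sampleY.getMin()
maxY = sampleY.getMax()
print(minX, maxX, minY, maxY)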
Example #26

import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View


# The excerpt begins mid-function; the head below is a reconstruction, with
# L, B and Zd as assumed placeholder values.
def flooding(X):
    L, B = 5.0e3, 300.0  # assumed reach length and river width
    Zd = 58.0  # assumed dyke crest level (the real value is missing)
    Q, Ks, Zv, Zm = X
    alpha = (Zm - Zv) / L
    H = (Q / (Ks * B * alpha**0.5))**0.6
    Zc = H + Zv
    S = Zc - Zd
    return [S]


myFunction = ot.PythonFunction(4, 1, flooding)
Q = ot.Gumbel(558.0, 1013.0)
Q = ot.TruncatedDistribution(Q, 0.0, ot.SpecFunc.MaxScalar)
Ks = ot.Normal(30.0, 7.5)
Ks = ot.TruncatedDistribution(Ks, 0.0, ot.SpecFunc.MaxScalar)
Zv = ot.Uniform(49.0, 51.0)
Zm = ot.Uniform(54.0, 56.0)
inputX = ot.ComposedDistribution([Q, Ks, Zv, Zm])
inputX.setDescription(["Q", "Ks", "Zv", "Zm"])

size = 5000
computeSO = True
inputDesign = ot.SobolIndicesExperiment(inputX, size, computeSO).generate()
outputDesign = myFunction(inputDesign)
sensitivityAnalysis = ot.SaltelliSensitivityAlgorithm(inputDesign,
                                                      outputDesign, size)

graph = sensitivityAnalysis.draw()

fig = plt.figure(figsize=(8, 4))
axis = fig.add_subplot(111)
axis.set_xlim(auto=True)
View(graph, figure=fig, axes=[axis], add_legend=True)
Example #27

from __future__ import print_function
import openturns as ot
from openturns.usecases import ishigami_function

ot.TESTPREAMBLE()

input_dimension = 3
output_dimension = 1

formula = [
    'sin(pi_*X1)+7*sin(pi_*X2)*sin(pi_*X2)+0.1*((pi_*X3)*(pi_*X3)*(pi_*X3)*(pi_*X3))*sin(pi_*X1)'
]

model = ot.SymbolicFunction(['X1', 'X2', 'X3'], formula)

distribution = ot.ComposedDistribution([ot.Uniform(-1.0, 1.0)] *
                                       input_dimension)

# Size of simulation
size = 10000

# Test with the various implementation methods
methods = ["Saltelli", "Jansen", "MauntzKucherenko", "Martinez"]

# Test the different sampling options
samplings = ["MonteCarlo", "LHS", "QMC"]

# Generate input/output designs
computeSO = True
# Case 1 : Estimation of sensitivity using estimator and no bootstrap
for method in methods:
    for sampling in samplings:
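        # The excerpt stops mid-loop; a minimal sketch of a possible body:
        # build the sampling experiment requested by `sampling`
        if sampling == "MonteCarlo":
            experiment = ot.MonteCarloExperiment(distribution, size)
        elif sampling == "LHS":
            experiment = ot.LHSExperiment(distribution, size)
        else:  # "QMC"
            experiment = ot.LowDiscrepancyExperiment(
                ot.SobolSequence(), distribution, size)
        inputDesign = ot.SobolIndicesExperiment(experiment, computeSO).generate()
        outputDesign = model(inputDesign)
        # pick the estimator class by name, e.g. ot.SaltelliSensitivityAlgorithm
        estimator = getattr(ot, method + "SensitivityAlgorithm")(
            inputDesign, outputDesign, size)
        print(method, sampling, estimator.getFirstOrderIndices())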
Example #28

import openturns as ot
from openturns.viewer import View

# Sobol
d = ot.LowDiscrepancyExperiment(ot.SobolSequence(), ot.ComposedDistribution([ot.Uniform()]*3), 32)
s = d.generate()
s.setDescription(["X1", "X2", "X3"])
g = ot.Graph()
g.setTitle("Low discrepancy experiment")
g.setGridColor("black")
p = ot.Pairs(s)
g.add(p)
View(g)
Example #29

# %%
import numpy as np
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# We first define the input random vector of the function.

# %%
X0 = ot.Normal(0., 1.)
X1 = ot.Normal(0., 1.)
X2 = ot.Normal(0., 1.)
inputDistribution = ot.ComposedDistribution((X0, X1, X2))
inputRandomVector = ot.RandomVector(inputDistribution)


# %%
# The Python function
# -------------------
#
# Based on a Python function defined with the `def` keyword, the `PythonFunction` class creates a function.
#
# The simplest constructor of the `PythonFunction` class is:
#
# `PythonFunction ( nbInputs , nbOutputs , myPythonFunc )`
#
# where
#
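# The excerpt breaks off here. A small usage sketch of the constructor just
# described, with a hypothetical 3-input, 2-output simulator:
def mySimulator(x):
    y0 = x[0] + x[1] + x[2]
    y1 = x[0] - x[1] * x[2]
    return [y0, y1]


myfunction = ot.PythonFunction(3, 2, mySimulator)
print(myfunction([1.0, 2.0, 3.0]))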
Example #30

import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Copula fitting test using Kendall plot
# --------------------------------------
#
# We first perform a visual goodness-of-fit test for a copula with the Kendall plot test.

# %%
# We create two samples of size 500.
N = 500

# %%
dist1 = ot.ComposedDistribution([ot.Normal()] * 2, ot.GumbelCopula(3.0))
sample1 = dist1.getSample(N)
sample1.setName('sample1')

# %%
dist2 = ot.ComposedDistribution([ot.Normal()] * 2, ot.ClaytonCopula(0.2))
sample2 = dist2.getSample(N)
sample2.setName('sample2')

# %%
# We change the parameter for the evaluation of E(Wi) via the `ResourceMap`:
ot.ResourceMap.SetAsUnsignedInteger('VisualTest-KendallPlot-MonteCarloSize',
                                    25)

# %%
# We can test a specific copula model for a given sample:
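# A minimal sketch of such a test, using the Kendall plot visual test:
graph = ot.VisualTest.DrawKendallPlot(sample1, ot.GumbelCopula(3.0))
view = viewer.View(graph)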