Exemplo n.º 1
0
def test_two_inputs_one_output():
    """Kriging use case: 2-D input, scalar output, squared exponential kernel.

    Checks exact interpolation on the training design, null conditional
    (co)variance on the training points, and prediction accuracy on an
    independent validation sample.
    """
    inputDimension = 2

    # Training design: regular grid scaled to [0, 10]^2
    trainingInput = ot.Box([8, 5]).generate()
    trainingInput *= 10.0

    model = ot.SymbolicFunction(['x', 'y'], ['cos(0.5*x) + sin(y)'])
    trainingOutput = model(trainingInput)

    # Independent validation sample, uniform on the same domain
    sampleSize = 10
    validationInput = ot.ComposedDistribution(
        [ot.Uniform(0, 10.0)] * 2).getSample(sampleSize)
    validationOutput = model(validationInput)

    # Covariance model; parameters calibrated beforehand using TNC
    # optimization and AbsoluteExponential models
    covarianceModel = ot.SquaredExponential([5.33532, 2.61534], [1.61536])

    # Constant trend
    trend = ot.ConstantBasisFactory(inputDimension).build()

    # Fit the kriging metamodel
    algo = ot.KrigingAlgorithm(trainingInput, trainingOutput,
                               covarianceModel, trend)
    algo.run()
    result = algo.getResult()
    metaModel = result.getMetaModel()
    predicted = metaModel(validationInput)

    # The metamodel interpolates the training data
    ott.assert_almost_equal(trainingOutput, metaModel(trainingInput),
                            3.0e-5, 3.0e-5)

    # Conditional covariance vanishes on the training points
    covariance = result.getConditionalCovariance(trainingInput)
    ott.assert_almost_equal(covariance,
                            ot.SquareMatrix(len(trainingInput)), 7e-7, 7e-7)

    # Same check through the per-marginal covariance matrices
    marginalCov = result.getConditionalMarginalCovariance(trainingInput)
    variances = [m[0, 0] for m in marginalCov]
    ott.assert_almost_equal(variances, [0] * len(variances), 0.0, 1e-13)

    # ... and through the per-marginal variances
    variances = result.getConditionalMarginalVariance(trainingInput)
    ott.assert_almost_equal(variances, ot.Point(len(trainingInput)),
                            0.0, 1e-13)

    # Prediction accuracy on the validation sample
    ott.assert_almost_equal(validationOutput, predicted, 1.e-1, 1e-1)
def createMyBasicKriging(X, Y):
    '''
    Create a kriging metamodel from a pair of X and Y samples.

    We use a 3/2 Matérn covariance model and a constant trend.
    The trend dimension is taken from the input sample itself
    (the original relied on an undefined module-level ``dimension``
    global, which made the function fail outside its notebook).

    Parameters
    ----------
    X, Y : ot.Sample
        Input and output training samples.

    Returns
    -------
    ot.KrigingResult
        The fitted kriging result.
    '''
    inputDimension = X.getDimension()
    basis = ot.ConstantBasisFactory(inputDimension).build()
    # NOTE(review): the scale [1.0] assumes a 1-D input — confirm callers.
    covarianceModel = ot.MaternModel([1.0], 1.5)
    algo = ot.KrigingAlgorithm(X, Y, covarianceModel, basis)
    algo.run()
    krigResult = algo.getResult()
    return krigResult
Exemplo n.º 3
0
def fitKriging(covarianceModel):
    '''
    Fit a kriging metamodel on a fixed 3x3 grid of observations.

    Parameters
    ----------
    covarianceModel : ot.CovarianceModel
        Covariance kernel used by the kriging algorithm.

    Returns
    -------
    ot.KrigingResult
        The fitted kriging result.
    '''
    coordinates = ot.Sample([
        [1.0, 1.0], [5.0, 1.0], [9.0, 1.0],
        [1.0, 3.5], [5.0, 3.5], [9.0, 3.5],
        [1.0, 6.0], [5.0, 6.0], [9.0, 6.0],
    ])
    observations = ot.Sample([
        [25.0], [25.0], [10.0],
        [20.0], [25.0], [20.0],
        [15.0], [25.0], [25.0],
    ])
    trend = ot.ConstantBasisFactory(2).build()
    algorithm = ot.KrigingAlgorithm(coordinates, observations,
                                    covarianceModel, trend)
    algorithm.run()
    return algorithm.getResult()
def test_one_input_one_output():
    """Kriging with 1-D input and output.

    Checks interpolation of the learning data, residuals, and null
    conditional (co)variance on the learning points.

    Cleanup: removed the unused locals ``X2``, ``Y2``, ``covariancePoint``
    and ``theoricalVariance`` from the original.
    """
    sampleSize = 6
    dimension = 1

    f = ot.SymbolicFunction(['x0'], ['x0 * sin(x0)'])

    # Learning design: 1, 3, 5, 6, 7, 8
    X = ot.Sample(sampleSize, dimension)
    for i in range(sampleSize):
        X[i, 0] = 3.0 + i
    X[0, 0] = 1.0
    X[1, 0] = 3.0
    Y = f(X)

    # Constant trend + squared exponential kernel with a deliberately
    # small, fixed scale and pre-calibrated amplitude
    basis = ot.ConstantBasisFactory(dimension).build()
    covarianceModel = ot.SquaredExponential([1e-02], [4.50736])

    algo = ot.KrigingAlgorithm(X, Y, covarianceModel, basis)
    algo.run()

    # perform an evaluation
    result = algo.getResult()

    ott.assert_almost_equal(result.getMetaModel()(X), Y)
    ott.assert_almost_equal(result.getResiduals(), [1.32804e-07], 1e-3, 1e-3)
    ott.assert_almost_equal(result.getRelativeErrors(), [5.20873e-21])

    # Kriging variance is 0 on learning points
    covariance = result.getConditionalCovariance(X)
    ott.assert_almost_equal(covariance,
                            ot.Matrix(sampleSize, sampleSize),
                            8.95e-7, 8.95e-7)

    # Covariance per marginal & extract variance component
    coll = result.getConditionalMarginalCovariance(X)
    var = [mat[0, 0] for mat in coll]
    ott.assert_almost_equal(var, [0] * sampleSize, 1e-14, 1e-14)

    # Variance per marginal
    var = result.getConditionalMarginalVariance(X)
    ott.assert_almost_equal(var, ot.Point(sampleSize), 1e-14, 1e-14)
Exemplo n.º 5
0
    def set_mean(self, mean):
        '''
        Construct the trend (mean) basis for the kriging model.

        Parameters
        ----------
        mean : object
            Must expose a ``mean_type`` attribute, one of
            'Linear', 'Constant', 'Quadratic' or 'Zero'.

        Raises
        ------
        ValueError
            If ``mean.mean_type`` is not supported.  The original stored
            an error *string* in ``self.mean_function`` instead of a
            Basis, which would only fail later with a confusing error.
        '''
        if mean.mean_type == 'Linear':
            self.mean_function = ot.LinearBasisFactory(self.input_dim).build()
        elif mean.mean_type == 'Constant':
            self.mean_function = ot.ConstantBasisFactory(
                self.input_dim).build()
        elif mean.mean_type == 'Quadratic':
            self.mean_function = ot.QuadraticBasisFactory(
                self.input_dim).build()
        elif mean.mean_type == 'Zero':
            # Empty basis: no trend term
            self.mean_function = ot.Basis()
        else:
            raise ValueError(
                "This library does not support the specified mean "
                "function: %r" % (mean.mean_type,))
Exemplo n.º 6
0
# %%
# We rely on the `H-Matrix` approximation for accelerating the evaluation.
# We change the default parameters (compression, recompression) to higher
# values: the model is less accurate but very fast to build & evaluate.

# %%
# Switch the kriging linear algebra to the hierarchical-matrix backend and
# loosen the assembly/recompression tolerances accordingly.
ot.ResourceMap.SetAsString("KrigingAlgorithm-LinearAlgebra", "HMAT")
ot.ResourceMap.SetAsScalar("HMatrix-AssemblyEpsilon", 1e-5)
ot.ResourceMap.SetAsScalar("HMatrix-RecompressionEpsilon", 1e-4)

# %%
# Constant trend via `ConstantBasisFactory` plus an anisotropic squared
# exponential kernel: one amplitude coefficient and one scale coefficient
# per input variable.

# %%
# NOTE(review): `dim` is defined in an earlier notebook cell, outside this
# excerpt — confirm its value matches the training sample dimension.
basis = ot.ConstantBasisFactory(dim).build()
covarianceModel = ot.SquaredExponential(dim)

# %%
# Typically, the optimization algorithm is quite good at setting sensible
# optimization bounds. In this case, however, the range of the input domain
# is extreme.

# %%
# NOTE(review): `X_train` comes from an earlier cell — confirm.
print("Lower and upper bounds of X_train:")
print(X_train.getMin(), X_train.getMax())

# %%
# We need to manually define sensible optimization bounds.
# Note that since the amplitude parameter is computed analytically (this is
# possible when the output dimension is 1), we only need to set bounds on
# the scale parameter.

# %%
def test_one_input_one_output():
    """1-D kriging with optimization bounds taken from the learning sample.

    Checks interpolation, residuals, null conditional covariance on the
    learning points, a reference conditional covariance on validation
    points, and prediction accuracy.
    """
    sampleSize = 6
    dimension = 1

    f = ot.SymbolicFunction(['x0'], ['x0 * sin(x0)'])

    # Learning points: 1, 3, 5, 6, 7, 8 — validation points: 2, 4, 4.5, ...
    X = ot.Sample(sampleSize, dimension)
    X2 = ot.Sample(sampleSize, dimension)
    for k in range(sampleSize):
        X[k, 0] = 3.0 + k
        X2[k, 0] = 2.5 + k
    X[0, 0] = 1.0
    X[1, 0] = 3.0
    X2[0, 0] = 2.0
    X2[1, 0] = 4.0
    Y = f(X)
    Y2 = f(X2)

    # Constant trend + isotropic squared exponential kernel
    basis = ot.ConstantBasisFactory(dimension).build()
    covarianceModel = ot.SquaredExponential()

    algo = ot.KrigingAlgorithm(X, Y, covarianceModel, basis)

    # Restrict the scale optimization to the range of the learning sample,
    # then estimate the hyperparameters.
    algo.setOptimizationBounds(ot.Interval(X.getMin(), X.getMax()))
    algo.run()
    result = algo.getResult()

    # Exact interpolation of the learning data
    ott.assert_almost_equal(result.getMetaModel()(X), Y)
    ott.assert_almost_equal(result.getResiduals(), [1.32804e-07], 1e-3, 1e-3)
    ott.assert_almost_equal(result.getRelativeErrors(), [5.20873e-21])

    # Kriging variance is 0 on learning points
    nullMatrix = ot.Matrix(sampleSize, sampleSize)
    ott.assert_almost_equal(result.getConditionalCovariance(X),
                            nullMatrix, 0.0, 1e-13)

    # Kriging variance is non-null on validation points: compare against a
    # reference conditional covariance matrix.
    validCovariance = result.getConditionalCovariance(X2)
    values = ot.Matrix([
        [0.81942182, -0.35599947, -0.17488593, 0.04622401, -0.03143555,
         0.04054783],
        [-0.35599947, 0.20874735, 0.10943841, -0.03236419, 0.02397483,
         -0.03269184],
        [-0.17488593, 0.10943841, 0.05832917, -0.01779918, 0.01355719,
         -0.01891618],
        [0.04622401, -0.03236419, -0.01779918, 0.00578327, -0.00467674,
         0.00688697],
        [-0.03143555, 0.02397483, 0.01355719, -0.00467674, 0.0040267,
         -0.00631173],
        [0.04054783, -0.03269184, -0.01891618, 0.00688697, -0.00631173,
         0.01059488],
    ])
    ott.assert_almost_equal(validCovariance - values, nullMatrix, 0.0, 1e-7)

    # Covariance per marginal & extract the variance component
    marginalCov = result.getConditionalMarginalCovariance(X)
    var = [m[0, 0] for m in marginalCov]
    ott.assert_almost_equal(var, [0] * sampleSize, 1e-14, 1e-13)

    # Variance per marginal
    var = result.getConditionalMarginalVariance(X)
    ott.assert_almost_equal(var, ot.Sample(sampleSize, 1), 1e-14, 1e-13)

    # Prediction accuracy on the validation points
    ott.assert_almost_equal(Y2, result.getMetaModel()(X2), 0.3, 0.0)
Exemplo n.º 8
0
# %%
# Generate the training design of experiments.
# NOTE(review): `myDistribution` and `model` are defined in earlier
# notebook cells, outside this excerpt.
sampleSize_train = 10
X_train = myDistribution.getSample(sampleSize_train)
Y_train = model(X_train)

# %%
# Create the metamodel
# --------------------

# %%
# In order to create the Kriging metamodel, we first select a constant trend
# with the `ConstantBasisFactory` class, then a squared exponential
# covariance kernel. The `SquaredExponential` kernel is anisotropic: it has
# one amplitude coefficient and one scale coefficient per input variable.

# %%
# NOTE(review): `dimension` comes from an earlier cell — confirm.
basis = ot.ConstantBasisFactory(dimension).build()
covarianceModel = ot.SquaredExponential(dimension)

# %%
# Typically, the optimization algorithm is quite good at setting sensible
# optimization bounds. In this case, however, the range of the input domain
# is extreme.

# %%
print("Lower and upper bounds of X_train:")
print(X_train.getMin(), X_train.getMax())

# %%
# We need to manually define sensible optimization bounds.
# Note that since the amplitude parameter is computed analytically (this is
# possible when the output dimension is 1), we only need to set bounds on
# the scale parameter.

# %%
    # NOTE(review): fragment of a larger function — `levels` and
    # `spatialDimension` are defined earlier, outside this excerpt.
    box = ot.Box(levels)
    inputSample = box.generate()
    # Scale each direction to [0, 10]
    inputSample *= 10

    # Define the model (legacy three-argument ot.Function signature:
    # inputs, outputs, formulas)
    model = ot.Function(['x', 'y'], ['z'], ['cos(0.5*x) + sin(y)'])
    outputSample = model(inputSample)

    # 2) Squared exponential covariance model with fixed scales/amplitude
    covarianceModel = ot.SquaredExponential([1.988, 0.924], [3.153])

    # 3) Trend basis: one constant basis (single output)
    basisCollection = ot.BasisCollection(
        1,
        ot.ConstantBasisFactory(spatialDimension).build())

    # Kriging algorithm
    algo = ot.KrigingAlgorithm(inputSample, outputSample, covarianceModel,
                               basisCollection)
    algo.run()
    result = algo.getResult()

    # 2-D mesh: the square [1,2]x[0,1] split into 4 triangles around
    # its center point
    vertices = [[1.0, 0.0], [2.0, 0.0], [2.0, 1.0], [1.0, 1.0], [1.5, 0.5]]
    simplicies = [[0, 1, 4], [1, 2, 4], [2, 3, 4], [3, 0, 4]]

    mesh2D = ot.Mesh(vertices, simplicies)
    # Gaussian process conditioned by the kriging result, discretized
    # on the mesh
    process = ot.ConditionedGaussianProcess(result, mesh2D)

    # Get a realization of the conditioned process
    realization = process.getRealization()
Exemplo n.º 10
0
# NOTE(review): fragment — `inputSample` and `inputDimension` are defined
# earlier, outside this excerpt.
model = ot.SymbolicFunction(['x', 'y'], ['cos(0.5*x) + sin(y)'])
outputSample = model(inputSample)

# Validation data: uniform sample on [0, 10]^2
sampleSize = 10
inputValidSample = ot.ComposedDistribution(
    2 * [ot.Uniform(0, 10.0)]).getSample(sampleSize)
outputValidSample = model(inputValidSample)

# 2) Definition of exponential model
# The parameters have been calibrated using TNC optimization
# and AbsoluteExponential models
covarianceModel = ot.SquaredExponential([7.63, 2.11], [7.38])

# 3) Constant trend basis
basis = ot.ConstantBasisFactory(inputDimension).build()

# Kriging algorithm with fixed (pre-calibrated) hyper-parameters
algo = ot.KrigingAlgorithm(inputSample, outputSample, covarianceModel, basis)
algo.setOptimizeParameters(False)  # do not optimize hyper-parameters
algo.run()
result = algo.getResult()

# 2-D mesh: the square [1,2]x[0,1] split into 4 triangles around its center
vertices = [[1.0, 0.0], [2.0, 0.0], [2.0, 1.0], [1.0, 1.0], [1.5, 0.5]]
simplicies = [[0, 1, 4], [1, 2, 4], [2, 3, 4], [3, 0, 4]]

mesh2D = ot.Mesh(vertices, simplicies)
# Gaussian process conditioned by the kriging result, discretized on the mesh
process = ot.ConditionedGaussianProcess(result, mesh2D)

# Get a realization of the conditioned Gaussian process
realization = process.getRealization()
Exemplo n.º 11
0
     [4.35455857e+00, 1.23814619e-02, 1.01810539e+00, 1.10769534e+01]])

# Observed signals for the 25-point sample (legacy NumericalSample API)
signals = ot.NumericalSample(
    [[37.305445], [35.466919], [43.187991], [45.305165], [40.121222],
     [44.609524], [45.14552], [44.80595], [35.414039], [39.851778],
     [42.046049], [34.73469], [39.339349], [40.384559], [38.718623],
     [46.189709], [36.155737], [31.768369], [35.384313], [47.914584],
     [46.758537], [46.564428], [39.698493], [45.636588], [40.643948]])


# Select the first 10 points as the initial DOE
# NOTE(review): `inputSample` is defined earlier, outside this excerpt.
inputDOE = inputSample[:10]
outputDOE = signals[:10]

# Constant trend basis for the 4-D input space
# (the original built this twice; the duplicate line was removed)
basis = ot.ConstantBasisFactory(4).build()

# Build the covariance model according to the installed OpenTURNS version.
# NOTE(review): lexicographic string comparison of version numbers is
# fragile (e.g. '1.10' < '1.6'); kept as-is apart from the duplicate
# removal to preserve behavior on the versions this example targets.
if ot.__version__ == '1.6':
    covColl = ot.CovarianceModelCollection(4)
    scale = [5.03148, 13.9442, 20, 20]
    for i in range(4):
        c = ot.SquaredExponential(1, scale[i])
        c.setAmplitude([15.1697])
        covColl[i] = c
    covarianceModel = ot.ProductCovarianceModel(covColl)
elif ot.__version__ > '1.6':
    covarianceModel = ot.SquaredExponential([5.03148, 13.9442, 20, 20],
                                            [15.1697])

if ot.__version__ == '1.9':
    krigingModel = ot.KrigingAlgorithm(inputSample, signals, covarianceModel, basis)
Exemplo n.º 12
0
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()

dim = 2

# Evaluation point: [2.0, 3.0]
x = [2.0 + i for i in range(dim)]

print("x=", x)

# Exercise the constant and linear basis factories the same way:
# print the factory, build its basis, aggregate the basis into a single
# function and evaluate it at x.
for factory in [ot.ConstantBasisFactory(dim), ot.LinearBasisFactory(dim)]:
    print("factory=", factory)
    basis = factory.build()
    print("basis=", basis)

    f = ot.AggregatedFunction(basis)
    y = f(x)
    print("y=", y)
Exemplo n.º 13
0
# NOTE(review): these first two lines are the tail of a previous example —
# the `metaModel_0` they reference is defined outside this excerpt.
metaModel_0.run()
print(metaModel_0.getResult())

# deterministic/kriging: 4 inputs, 1 output, additive symbolic model
X0 = persalys.Input('X0', 0, '')
X1 = persalys.Input('X1', 0, '')
X2 = persalys.Input('X2', 0, '')
X3 = persalys.Input('X3', 0, '')
Y0 = persalys.Output('Y0', '')
inputs = [X0, X1, X2, X3]
outputs = [Y0]
formulas = ['X0+X1+X2+X3']
SymbolicModel_0 = persalys.SymbolicPhysicalModel('SymbolicModel_0', inputs, outputs, formulas)
# Grid design: X0 and X1 fixed at 0; X2 and X3 each take the values
# -0.1 and 0.1
values = [[0],
[0],
[-0.1, 0.1],
[-0.1, 0.1]]
design_0 = persalys.GridDesignOfExperiment('design_0', SymbolicModel_0, values)
design_0.setBlockSize(1)
interestVariables = ['Y0']
design_0.setInterestVariables(interestVariables)
design_0.run()
# Kriging metamodel on the design: constant trend, squared exponential
# kernel; validated analytically only (no test-sample or k-fold).
metaModel_0 = persalys.KrigingAnalysis('metaModel_0', design_0)
metaModel_0.setBasis(ot.ConstantBasisFactory(2).build())
metaModel_0.setCovarianceModel(ot.SquaredExponential(2))
metaModel_0.setAnalyticalValidation(True)
metaModel_0.setTestSampleValidation(False)
metaModel_0.setKFoldValidation(False)
metaModel_0.run()
print(metaModel_0.getResult())