# Example #1
# %%
# Check the relevance of the Karhunen-Loeve decomposition: the mean
# residual of the reconstruction over the test sample should be small.

klValidation = ot.KarhunenLoeveValidation(outputFMUTestSample, resultKL)
residualGraph = klValidation.computeResidualMean().draw()
ot.Show(residualGraph)

# %%
# As the epidemiological model considers a population size of 700, the residual
# mean error on the field is acceptable.

# %%
# Validate the Kriging metamodel, using the Karhunen-Loeve coefficients
# of the test sample as the reference output.

projector = ot.KarhunenLoeveProjection(resultKL)
testCoefficients = projector(outputFMUTestSample)

krigingValidation = ot.MetaModelValidation(
    inputTestSample, testCoefficients, metamodel)
predictivity = krigingValidation.computePredictivityFactor()[0]
print(predictivity)

# %%
# The predictivity factor is very close to 1, which is satisfying.
# Further statistical tests exist in
# `OpenTURNS <http://openturns.github.io/openturns/master/contents.html>`_ to
# assert the quality of the obtained metamodel.

# %%
# Build a sparse polynomial chaos metamodel of the Karhunen-Loeve
# coefficients: fixed basis truncated after the first `degree` strata of
# the enumerate function, LARS selection with corrected leave-one-out.
basisSize = enumerateFunction.getStrataCumulatedCardinal(degree)
adaptive = ot.FixedStrategy(basis, basisSize)
projection = ot.LeastSquaresStrategy(
    ot.LeastSquaresMetaModelSelectionFactory(ot.LARS(),
                                             ot.CorrectedLeaveOneOut()))
# Tighten the model-selection error threshold so more terms can be kept.
ot.ResourceMap.SetAsScalar("LeastSquaresMetaModelSelection-ErrorThreshold",
                           1.0e-7)
algo_chaos = ot.FunctionalChaosAlgorithm(sample_xi_X, sample_xi_Y,
                                         basis.getMeasure(), adaptive,
                                         projection)
algo_chaos.run()
result_chaos = algo_chaos.getResult()
meta_model = result_chaos.getMetaModel()
# Print the input/output dimension of every stage of the pipeline so the
# chained dimensions can be checked by eye.
print("myConvolution=", myConvolution.getInputDimension(), "->",
      myConvolution.getOutputDimension())
preprocessing = ot.KarhunenLoeveProjection(result_X)
print("preprocessing=", preprocessing.getInputDimension(), "->",
      preprocessing.getOutputDimension())
print("meta_model=", meta_model.getInputDimension(), "->",
      meta_model.getOutputDimension())
postprocessing = ot.KarhunenLoeveLifting(result_Y)
print("postprocessing=", postprocessing.getInputDimension(), "->",
      postprocessing.getOutputDimension())
# Full field-to-field metamodel: project the input field onto the input
# KL basis, apply the chaos metamodel, lift the result back to a field.
meta_model_field = ot.FieldToFieldConnection(
    postprocessing, ot.FieldToPointConnection(meta_model, preprocessing))

# %%
# Metamodel validation on a fresh sample of iMax input realizations.
iMax = 10
sample_X_validation = process_X.getSample(iMax)
sample_Y_validation = myConvolution(sample_X_validation)
# Example #3
    def __init__(self, composedKLResultsAndDistributions):
        '''Initializes the aggregation.

        Parameters
        ----------
        composedKLResultsAndDistributions : list
            list of ordered ot.Distribution and ot.KarhunenLoeveResult objects
        '''
        self.__KLResultsAndDistributions__ = atLeastList(composedKLResultsAndDistributions) # KLRL: Karhunen-Loeve result list
        assert len(self.__KLResultsAndDistributions__)>0
        self.__field_distribution_count__ = len(self.__KLResultsAndDistributions__)
        self.__name__ = 'Unnamed'
        # Per-element lifting (coefficients -> field/physical space) and
        # projecting (field/physical space -> coefficients) callables.
        self.__KL_lifting__ = []
        self.__KL_projecting__ = []

        # Status flags describing the composition
        self.__isProcess__ = [False]*self.__field_distribution_count__
        self.__has_distributions__ = False
        self.__unified_dimension__ = False
        self.__unified_mesh__ = False
        self.__isAggregated__ = False
        self.__means__ = [.0]*self.__field_distribution_count__
        self.__liftWithMean__ = False

        # Checking the nature of each element of the input list
        for i in range(self.__field_distribution_count__):
            # If element is a Karhunen-Loeve decomposition
            if isinstance(self.__KLResultsAndDistributions__[i], ot.KarhunenLoeveResult):
                # Initializing lifting and projecting objects.
                self.__KL_lifting__.append(ot.KarhunenLoeveLifting(self.__KLResultsAndDistributions__[i]))
                self.__KL_projecting__.append(ot.KarhunenLoeveProjection(self.__KLResultsAndDistributions__[i]))
                self.__isProcess__[i] = True

            # If element is a distribution
            elif isinstance(self.__KLResultsAndDistributions__[i], (ot.Distribution, ot.DistributionImplementation)):
                self.__has_distributions__ = True
                # Non-centered distributions are recentered in place; the
                # stripped mean is stored in __means__ so it can be re-added
                # later when the "liftWithMean" flag is set.
                if self.__KLResultsAndDistributions__[i].getMean()[0] != 0 :
                    print('The mean value of distribution {} at index {} of type {} is not 0.'.format(str('"'+self.__KLResultsAndDistributions__[i].getName()+'"'), str(i), self.__KLResultsAndDistributions__[i].getClassName()))
                    name_distr = self.__KLResultsAndDistributions__[i].getName()
                    self.__means__[i] = self.__KLResultsAndDistributions__[i].getMean()[0]
                    self.__KLResultsAndDistributions__[i] -= self.__means__[i]
                    self.__KLResultsAndDistributions__[i].setName(name_distr)
                    print('Distribution recentered and mean added to list of means')
                    print('Set the "liftWithMean" flag to true if you want to include the mean.')
                # The inverse iso-probabilistic transformation is analogous to lifting
                self.__KL_lifting__.append(self.__KLResultsAndDistributions__[i].getInverseIsoProbabilisticTransformation())
                # The iso-probabilistic transformation is analogous to projecting
                self.__KL_projecting__.append(self.__KLResultsAndDistributions__[i].getIsoProbabilisticTransformation())

        # Mesh/dimension homogeneity is only checked when every element is a
        # KL result — a plain distribution has no mesh or covariance model.
        if not self.__has_distributions__ :
            self.__unified_mesh__ = all_same([self.__KLResultsAndDistributions__[i].getMesh() for i in range(self.__field_distribution_count__)])
            self.__unified_dimension__ = (   all_same([self.__KLResultsAndDistributions__[i].getCovarianceModel().getOutputDimension() for i in range(self.__field_distribution_count__)])\
                                         and all_same([self.__KLResultsAndDistributions__[i].getCovarianceModel().getInputDimension() for i in range(self.__field_distribution_count__)]))

        # If only one object is passed it has to be a decomposed aggregated process
        if self.__field_distribution_count__ == 1 :
            if hasattr(self.__KLResultsAndDistributions__[0], 'getCovarianceModel') and hasattr(self.__KLResultsAndDistributions__[0], 'getMesh'):
                # Aggregated processes use multivariate covariance functions, so
                # the covariance output dimension exceeds the mesh dimension.
                self.__isAggregated__ = self.__KLResultsAndDistributions__[0].getCovarianceModel().getOutputDimension() > self.__KLResultsAndDistributions__[0].getMesh().getDimension()
                print('Process seems to be aggregated. ')  # NOTE(review): printed even when the aggregation check above is False — confirm intent
            else :
                print('There is no point in passing only one process that is not aggregated')
                raise TypeError

        # Largest threshold among all decompositions (1e-3 assumed for plain distributions)
        self.threshold = max([self.__KLResultsAndDistributions__[i].getThreshold() if hasattr(self.__KLResultsAndDistributions__[i], 'getThreshold') else 1e-3 for i in range(self.__field_distribution_count__)])
        # Cache per-element metadata used by the rest of the class
        self.__process_distribution_description__ = [self.__KLResultsAndDistributions__[i].getName() for i in range(self.__field_distribution_count__)]
        self._checkSubNames()
        # Number of KL modes per element (a scalar distribution counts as one mode)
        self.__mode_count__ = [self.__KLResultsAndDistributions__[i].getEigenValues().getSize() if hasattr(self.__KLResultsAndDistributions__[i], 'getEigenValues') else 1 for i in range(self.__field_distribution_count__)]
        self.__mode_description__ = self._getModeDescription()
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()

# Build a KarhunenLoeveResult over a 1-D interval mesh with an
# absolute-exponential covariance (threshold 0.0 keeps every mode).
interval_mesh = ot.IntervalMesher([9]).build(ot.Interval(-1.0, 1.0))
abs_exp_cov = ot.AbsoluteExponential([1.0])
kl_algo = ot.KarhunenLoeveP1Algorithm(interval_mesh, abs_exp_cov, 0.0)
kl_algo.run()
kl_result = kl_algo.getResult()
kl_projection = ot.KarhunenLoeveProjection(kl_result)
# Identity field function, composed with the projection to build a
# FieldToPointConnection (FieldFunction followed by a FieldToPointFunction).
identity_field = ot.ValueFunction(ot.SymbolicFunction("x", "x"), interval_mesh)
myFunc = ot.FieldToPointConnection(kl_projection, identity_field)

print("myFunc=", myFunc)
# Input/output descriptions of the connection
print("myFunc input description=", myFunc.getInputDescription())
print("myFunc output description=", myFunc.getOutputDescription())
# Input/output dimensions of the connection
print("myFunc input dimension=", myFunc.getInputDimension())
print("myFunc output dimension=", myFunc.getOutputDimension())
# Evaluate the connection on the mean field of the KL modes
mean_field = kl_result.getModesAsProcessSample().computeMean()
print("field=", mean_field)
print("myFunc(field)=", myFunc(mean_field.getValues()))
print("called ", myFunc.getCallsNumber(), " times")
# Example #5
# Draw one realization and one 10-realization sample from the process,
# resetting the seed so both draws start from the same generator state.
ot.RandomGenerator_SetSeed(11111)
field1D = process.getRealization()  # reference Field

ot.RandomGenerator_SetSeed(11111)
sample1D = process.getSample(10)  # reference ProcessSample

# Karhunen-Loeve decomposition of the process over the mesh
algorithm = ot.KarhunenLoeveP1Algorithm(mesh, model0, 1e-3)
algorithm.run()
results = algorithm.getResult()  # the KarhunenLoeveResult everything below uses

# Project the field and the sample onto the eigenmode basis, then lift
# the coefficients back so they can be compared with the originals.
lifter = ot.KarhunenLoeveLifting(results)
projecter = ot.KarhunenLoeveProjection(results)

coeffField1D = projecter(field1D)
coeffSample1D = projecter(
    sample1D
)  # coefficients of the sample; done internally by our class but needed for comparison

fieldVals = lifter(coeffField1D)
sample_lifted = lifter(coeffSample1D)
field_lifted = ot.Field(lifter.getOutputMesh(), fieldVals)

# Centered normal variable (mean 0, standard deviation 5)
N05 = ot.Normal(0, 5)

# Non-centered normal variable (mean 5, standard deviation 5)
N55 = ot.Normal(5, 5)