def plotKrigingPredictions(krigingMetamodel):
    '''Plot the predictions of a Kriging metamodel.'''
    # Create the mesh of the box [0., 1000.] * [0., 700.]
    myInterval = ot.Interval([0., 0.], [1000., 700.])
    # Define the number of intervals in each direction of the box
    nx = 20
    ny = 20
    myIndices = [nx - 1, ny - 1]
    myMesher = ot.IntervalMesher(myIndices)
    myMeshBox = myMesher.build(myInterval)
    # Predict
    vertices = myMeshBox.getVertices()
    predictions = krigingMetamodel(vertices)
    # Format for plot
    X = np.array(vertices[:, 0]).reshape((ny, nx))
    Y = np.array(vertices[:, 1]).reshape((ny, nx))
    predictions_array = np.array(predictions).reshape((ny, nx))
    # Plot
    plt.figure()
    plt.pcolormesh(X, Y, predictions_array, shading='auto')
    plt.colorbar()
    plt.show()
    return
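# Hedged usage sketch for plotKrigingPredictions: fit a Kriging metamodel on
# a synthetic 2-d dataset over the same box. The model, sample size and
# covariance parameters below are illustrative assumptions, not part of the
# original snippet; numpy (np) and matplotlib (plt) imports are assumed above.
import openturns as ot

model = ot.SymbolicFunction(["x", "y"], ["sin(x / 100.0) + cos(y / 100.0)"])
inputDistribution = ot.ComposedDistribution(
    [ot.Uniform(0.0, 1000.0), ot.Uniform(0.0, 700.0)])
X = inputDistribution.getSample(50)
Y = model(X)
basis = ot.ConstantBasisFactory(2).build()
covarianceModel = ot.SquaredExponential([100.0, 100.0], [1.0])
algo = ot.KrigingAlgorithm(X, Y, covarianceModel, basis)
algo.run()
plotKrigingPredictions(algo.getResult().getMetaModel())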
def test_NonZeroMean(self):
    # Create the KL result
    numberOfVertices = 10
    interval = ot.Interval(-1.0, 1.0)
    mesh = ot.IntervalMesher([numberOfVertices - 1]).build(interval)
    covariance = ot.SquaredExponential()
    zeroProcess = ot.GaussianProcess(covariance, mesh)
    # Define a trend function
    f = ot.SymbolicFunction(["t"], ["30 * t"])
    fTrend = ot.TrendTransform(f, mesh)
    # Add it to the process
    process = ot.CompositeProcess(fTrend, zeroProcess)
    # Sample
    sampleSize = 100
    processSample = process.getSample(sampleSize)
    threshold = 0.0
    algo = ot.KarhunenLoeveSVDAlgorithm(processSample, threshold)
    algo.run()
    klresult = algo.getResult()
    # Create the KL reduction
    meanField = processSample.computeMean()
    klreduce = ot.KarhunenLoeveReduction(klresult)
    # Generate a trajectory and reduce it
    field = process.getRealization()
    values = field.getValues()
    reducedValues = klreduce(values)
    # With a zero threshold all modes are kept, so the reduction should
    # reproduce the trajectory almost exactly.
    ott.assert_almost_equal(values, reducedValues)
def _buildMesh(self, grid_shape):
    """Build an openturns mesh in the unit cube, based on a comprehensive
    list of grid coordinates as returned by the _getGridShape method.

    Arguments
    ---------
    grid_shape : list
        comprehensive list of all the grid coordinates, in the unit cube.

    Returns
    -------
    mesh : ot.Mesh
        openturns Mesh object
    """
    dimension = len(grid_shape)
    n_intervals = [int(grid_shape[i][2]) for i in range(dimension)]
    low_bounds = [grid_shape[i][0] for i in range(dimension)]
    lengths = [grid_shape[i][1] for i in range(dimension)]
    high_bounds = [low_bounds[i] + lengths[i] for i in range(dimension)]
    mesherObj = ot.IntervalMesher(n_intervals)
    grid_interval = ot.Interval(low_bounds, high_bounds)
    mesh = mesherObj.build(grid_interval)
    mesh.setName(str(dimension) + 'D_Grid')
    return mesh
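# Hedged usage sketch for _buildMesh: judging from how the method unpacks its
# argument, each grid_shape entry appears to be
# (lower_bound, length, n_intervals) for one dimension. The values below are
# illustrative only, and the call site depends on the owning class.
grid_shape = [(0.0, 1.0, 10), (0.0, 1.0, 20)]  # hypothetical 2-d grid
# mesh = obj._buildMesh(grid_shape)           # called on the owning object
# print(mesh.getVerticesNumber())             # (10 + 1) * (20 + 1) = 231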
def test_KarhunenLoeveValidationMultidimensional(self):
    # Create the KL result
    numberOfVertices = 20
    interval = ot.Interval(-1.0, 1.0)
    mesh = ot.IntervalMesher([numberOfVertices - 1]).build(interval)
    outputDimension = 2
    univariateCovariance = ot.SquaredExponential()
    covarianceCollection = [univariateCovariance] * outputDimension
    multivariateCovariance = ot.TensorizedCovarianceModel(
        covarianceCollection)
    process = ot.GaussianProcess(multivariateCovariance, mesh)
    sampleSize = 10
    processSample = process.getSample(sampleSize)
    threshold = 1.0e-7
    algo = ot.KarhunenLoeveSVDAlgorithm(processSample, threshold)
    algo.run()
    klresult = algo.getResult()
    # Create the validation
    validation = ot.KarhunenLoeveValidation(processSample, klresult)
    # Check residuals
    residualProcessSample = validation.computeResidual()
    assert (type(residualProcessSample) is ot.ProcessSample)
    # Check standard deviation
    residualSigmaField = validation.computeResidualStandardDeviation()
    zeroSample = ot.Sample(numberOfVertices, outputDimension)
    ott.assert_almost_equal(residualSigmaField, zeroSample)
    # Check graph
    graph = validation.drawValidation()
    if False:
        from openturns.viewer import View
        View(graph).save('validation2.png')
def __init__(self):
    self.dim = 4  # number of inputs
    self.outputDimension = 1  # dimension of the output
    self.tmin = 0.0  # Minimum time
    self.tmax = 12.0  # Maximum time
    self.gridsize = 100  # Number of time steps
    self.mesh = ot.IntervalMesher([self.gridsize - 1]).build(
        ot.Interval(self.tmin, self.tmax))
    self.vertices = self.mesh.getVertices()
    # Marginals
    self.distZ0 = ot.Uniform(100.0, 150.0)
    self.distV0 = ot.Normal(55.0, 10.0)
    self.distM = ot.Normal(80.0, 8.0)
    self.distC = ot.Uniform(0.0, 30.0)
    # Joint distribution
    self.distribution = ot.ComposedDistribution(
        [self.distZ0, self.distV0, self.distM, self.distC])
    # Exact solution
    self.alti = ot.PythonPointToFieldFunction(
        self.dim, self.mesh, self.outputDimension, AltiFunc)
def test_KarhunenLoeveValidation(self):
    # Create the KL result
    numberOfVertices = 20
    interval = ot.Interval(-1.0, 1.0)
    mesh = ot.IntervalMesher([numberOfVertices - 1]).build(interval)
    covariance = ot.SquaredExponential()
    process = ot.GaussianProcess(covariance, mesh)
    sampleSize = 100
    processSample = process.getSample(sampleSize)
    threshold = 1.0e-7
    algo = ot.KarhunenLoeveSVDAlgorithm(processSample, threshold)
    algo.run()
    klresult = algo.getResult()
    # Create validation
    validation = ot.KarhunenLoeveValidation(processSample, klresult)
    # Check residuals
    residualProcessSample = validation.computeResidual()
    assert (type(residualProcessSample) is ot.ProcessSample)
    # Check standard deviation
    residualSigmaField = validation.computeResidualStandardDeviation()
    exact = ot.Sample(numberOfVertices, 1)
    # ott.assert_almost_equal(residualSigmaField, exact)
    # Check mean
    residualMean = validation.computeResidualMean()
    exact = ot.Sample(numberOfVertices, 1)
    # ott.assert_almost_equal(residualMean, exact)
    # Check graph
    graph0 = validation.drawValidation()
    graph1 = residualProcessSample.drawMarginal(0)
    graph2 = residualMean.drawMarginal(0)
    graph3 = residualSigmaField.drawMarginal(0)
    graph4 = validation.drawObservationWeight(0)
    graph5 = validation.drawObservationQuality()
    if 0:
        from openturns.viewer import View
        View(graph0).save('validation1.png')
        View(graph1).save('validation1-residual.png')
        View(graph2).save('validation1-residual-mean.png')
        View(graph3).save('validation1-residual-stddev.png')
        View(graph4).save('validation1-indiv-weight.png')
        View(graph5).save('validation1-indiv-quality.png')
def test_ZeroMean(self):
    # Create the KL result
    numberOfVertices = 10
    interval = ot.Interval(-1.0, 1.0)
    mesh = ot.IntervalMesher([numberOfVertices - 1]).build(interval)
    covariance = ot.SquaredExponential()
    process = ot.GaussianProcess(covariance, mesh)
    sampleSize = 10
    processSample = process.getSample(sampleSize)
    threshold = 0.0
    algo = ot.KarhunenLoeveSVDAlgorithm(processSample, threshold)
    algo.run()
    klresult = algo.getResult()
    # Create the KL reduction
    meanField = processSample.computeMean()
    klreduce = ot.KarhunenLoeveReduction(klresult)
    # Generate a trajectory and reduce it
    field = process.getRealization()
    values = field.getValues()
    reducedValues = klreduce(values)
    ott.assert_almost_equal(values, reducedValues)
def test_trend(self):
    N = 100
    M = 1000
    P = 10
    mean = ot.SymbolicFunction("x", "sign(x)")
    cov = ot.SquaredExponential([1.0], [0.1])
    mesh = ot.IntervalMesher([N]).build(ot.Interval(-2.0, 2.0))
    process = ot.GaussianProcess(ot.TrendTransform(mean, mesh), cov, mesh)
    sample = process.getSample(M)
    algo = ot.KarhunenLoeveSVDAlgorithm(sample, 1e-6)
    algo.run()
    result = algo.getResult()
    trend = ot.TrendTransform(
        ot.P1LagrangeEvaluation(sample.computeMean()), mesh)
    sample2 = process.getSample(P)
    sample2.setName('reduction of sign(x) w/o trend')
    reduced1 = ot.KarhunenLoeveReduction(result)(sample2)
    reduced2 = ot.KarhunenLoeveReduction(result, trend)(sample2)
    g = sample2.drawMarginal(0)
    g.setColors(["red"])
    g1 = reduced1.drawMarginal(0)
    g1.setColors(["blue"])
    drs = g1.getDrawables()
    for i, d in enumerate(drs):
        d.setLineStyle("dashed")
        drs[i] = d
    g1.setDrawables(drs)
    g.add(g1)
    g2 = reduced2.drawMarginal(0)
    g2.setColors(["green"])
    drs = g2.getDrawables()
    for i, d in enumerate(drs):
        d.setLineStyle("dotted")
        drs[i] = d
    g2.setDrawables(drs)
    g.add(g2)
    if 0:
        from openturns.viewer import View
        View(g).save('reduction.png')
def readProcessSample(fname):
    """
    Return a ProcessSample from a text file.
    Assume the mesh is regular on [0, 1].
    """
    # Dataset
    data = np.loadtxt(fname)
    # Create the mesh
    n_nodes = data.shape[1]
    mesher = ot.IntervalMesher([n_nodes - 1])
    interval = ot.Interval([0.0], [1.0])
    mesh = mesher.build(interval)
    # Create the ProcessSample from the data
    n_fields = data.shape[0]
    dim_fields = 1
    processSample = ot.ProcessSample(mesh, n_fields, dim_fields)
    for i in range(n_fields):
        trajectory = ot.Sample([[x] for x in data[i, :]])
        processSample[i] = ot.Field(mesh, trajectory)
    return processSample
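# Hedged usage sketch for readProcessSample, with a synthetic text file (the
# file name and data are illustrative): each row of the file is one trajectory
# sampled on the regular [0, 1] grid.
import numpy as np

data = np.random.default_rng(0).normal(size=(5, 11))  # 5 fields, 11 nodes
np.savetxt("sample.txt", data)
processSample = readProcessSample("sample.txt")
print(processSample.getSize(), processSample.getMesh().getVerticesNumber())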
def dummyFunction2Wrap(field_10x10, field_100x1, scalar_0):
    # Dummy function doing some operation on the two fields and a scalar,
    # and returning a field
    outDim = 1
    NElem = [10]
    mesher = ot.IntervalMesher(NElem)
    lowerBound = [0]
    upperBound = [10]
    interval = ot.Interval(lowerBound, upperBound)
    mesh = mesher.build(interval)
    outField = ot.Field(mesh, [[0]] * mesh.getVerticesNumber())
    for i in range(10):
        for j in range(10):
            if field_10x10[i][0] * field_100x1[i + j][0] > scalar_0[0][0]:
                outField.setValueAtIndex(i, [
                    field_10x10[i][0] * field_100x1[(i + 1) * (j + 1) - 1][0]
                    - scalar_0[0][0]
                ])
            else:
                outField.setValueAtIndex(
                    i, [(field_10x10[j][0] - scalar_0[0][0]) /
                        field_100x1[(i + 1) * (j + 1) - 1][0]])
    return outField
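# Hedged usage sketch for dummyFunction2Wrap, with synthetic inputs shaped the
# way the wrapper indexes them: two samples standing in for the field values
# and a 1x1 sample for the scalar. All values are illustrative.
f10 = ot.Sample([[float(i)] for i in range(10)])
f100 = ot.Sample([[1.0 + 0.01 * i] for i in range(100)])
s0 = ot.Sample([[0.5]])
outField = dummyFunction2Wrap(f10, f100, s0)
print(outField.getValues())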
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Create a bivariate normal process
myMesh = ot.IntervalMesher([39, 39]).build(ot.Interval([0.0] * 2, [1.0] * 2))
myCov = ot.GeneralizedExponential(2, 0.1, 1.3)
# TemporalNormalProcess is the legacy name; recent OpenTURNS versions call
# this class GaussianProcess.
myProcess = ot.TemporalNormalProcess(myCov, myMesh)
myField = myProcess.getRealization()
graph = myField.drawMarginal(0, False)
fig = plt.figure(figsize=(8, 4))
plt.suptitle("A field")
axis = fig.add_subplot(111)
axis.set_xlim(auto=True)
View(graph, figure=fig, axes=[axis], add_legend=True)
    else:
        y = 2.0 * m.sin(7.0 * xx)
    return y


XX_input = ot.Sample([[0.1, 0], [0.32, 0], [0.6, 0], [0.9, 0],
                      [0.07, 1], [0.1, 1], [0.4, 1], [0.5, 1], [0.85, 1]])
y_output = ot.Sample(len(XX_input), 1)
for i in range(len(XX_input)):
    y_output[i, 0] = fun_mixte(XX_input[i])


def C(s, t):
    return m.exp(-4.0 * abs(s - t) / (1 + (s * s + t * t)))


N = 32
a = 4.0
myMesh = ot.IntervalMesher([N]).build(ot.Interval(-a, a))
myCovariance = ot.CovarianceMatrix(myMesh.getVerticesNumber())
for k in range(myMesh.getVerticesNumber()):
    t = myMesh.getVertices()[k]
    for l in range(k + 1):
        s = myMesh.getVertices()[l]
        myCovariance[k, l] = C(s[0], t[0])
covModel_discrete = ot.UserDefinedCovarianceModel(myMesh, myCovariance)
f_ = ot.SymbolicFunction(
    ["tau", "theta", "sigma"],
    ["(tau!=0) * exp(-1/theta) * sigma * sigma + (tau==0) * exp(0) * sigma * sigma"])
rho = ot.ParametricFunction(f_, [1, 2], [0.2, 0.3])
covModel_discrete = ot.StationaryFunctionalCovarianceModel([1.0], [1.0], rho)
covModel_continuous = ot.SquaredExponential([1.0], [1.0])
""" Aggregate processes =================== """ # %% # In this example we are going to concatenate several processes that share the same mesh. # %% import openturns as ot import openturns.viewer as viewer from matplotlib import pylab as plt ot.Log.Show(ot.Log.NONE) # %% # Create processes to aggregate myMesher = ot.IntervalMesher([100, 10]) lowerbound = [0.0, 0.0] upperBound = [2.0, 4.0] myInterval = ot.Interval(lowerbound, upperBound) myMesh = myMesher.build(myInterval) myProcess1 = ot.WhiteNoise(ot.Normal(), myMesh) myProcess2 = ot.WhiteNoise(ot.Triangular(), myMesh) # %% # Draw values of a realization of the 2nd process marginal = ot.HistogramFactory().build(myProcess1.getRealization().getValues()) graph = marginal.drawPDF() view = viewer.View(graph) # %% # Create an aggregated process
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()

# A 1D->1D field
mesh = ot.IntervalMesher([10]).build(ot.Interval(-2.0, 2.0))
function = ot.SymbolicFunction("x", "x")
field = ot.Field(mesh, function(mesh.getVertices()))
graph = field.draw()
graph = field.drawMarginal(0, False)
graph = field.drawMarginal(0, True)

# A 2D->1D field
mesh = ot.IntervalMesher([10] * 2).build(ot.Interval([-2.0] * 2, [2.0] * 2))
function = ot.SymbolicFunction(["x0", "x1"], ["x0 - x1"])
field = ot.Field(mesh, function(mesh.getVertices()))
graph = field.draw()
graph = field.drawMarginal(0, False)
graph = field.drawMarginal(0, True)

# A 2D->2D field
function = ot.SymbolicFunction(["x0", "x1"], ["x0", "x1"])
field = ot.Field(mesh, function(mesh.getVertices()))
graph = field.draw()
def test_model(myModel, test_partial_grad=True, x1=None, x2=None):
    inputDimension = myModel.getInputDimension()
    dimension = myModel.getOutputDimension()

    if x1 is None and x2 is None:
        x1 = ot.Point(inputDimension)
        x2 = ot.Point(inputDimension)
        for j in range(inputDimension):
            x1[j] = -1.0 - j
            x2[j] = 3.0 + 2.0 * j
    else:
        x1 = ot.Point(x1)
        x2 = ot.Point(x2)

    if myModel.isStationary():
        ott.assert_almost_equal(
            myModel(x1 - x2), myModel(x1, x2), 1e-14, 1e-14)
        ott.assert_almost_equal(
            myModel(x2 - x1), myModel(x1, x2), 1e-14, 1e-14)

    eps = 1e-3

    mesh = ot.IntervalMesher([7] * inputDimension).build(
        ot.Interval([-10] * inputDimension, [10] * inputDimension))

    C = myModel.discretize(mesh)
    if dimension == 1:
        # Check that discretize & computeAsScalar provide the
        # same values
        vertices = mesh.getVertices()
        for j in range(len(vertices)):
            for i in range(j, len(vertices)):
                ott.assert_almost_equal(
                    C[i, j],
                    myModel.computeAsScalar(vertices[i], vertices[j]),
                    1e-14, 1e-14)
    else:
        # Check that discretize & operator() provide the same values
        vertices = mesh.getVertices()
        localMatrix = ot.SquareMatrix(dimension)
        for j in range(len(vertices)):
            for i in range(j, len(vertices)):
                for localJ in range(dimension):
                    for localI in range(dimension):
                        localMatrix[localI, localJ] = C[
                            i * dimension + localI, j * dimension + localJ]
                ott.assert_almost_equal(
                    localMatrix, myModel(vertices[i], vertices[j]),
                    1e-14, 1e-14)

    # Now we suppose that discretize is ok
    # we look at crossCovariance of (vertices, vertices) which should return
    # the same values
    C.getImplementation().symmetrize()
    crossCov = myModel.computeCrossCovariance(vertices, vertices)
    ott.assert_almost_equal(
        crossCov, C, 1e-14, 1e-14,
        "in " + myModel.getClassName() + "::computeCrossCovariance")

    # Now crossCovariance(sample, sample) is ok
    # Let us validate crossCovariance(Sample, point) with 1st column(s) of
    # previous calculations
    crossCovSamplePoint = myModel.computeCrossCovariance(vertices, vertices[0])
    crossCovCol = crossCov.reshape(crossCov.getNbRows(), dimension)
    ott.assert_almost_equal(
        crossCovSamplePoint, crossCovCol, 1e-14, 1e-14,
        "in " + myModel.getClassName() + "::computeCrossCovarianceSamplePoint")

    if test_partial_grad:
        grad = myModel.partialGradient(x1, x2)

        if (dimension == 1):
            gradfd = ot.Matrix(inputDimension, 1)
            for j in range(inputDimension):
                x1_g = ot.Point(x1)
                x1_d = ot.Point(x1)
                x1_g[j] = x1_d[j] + eps
                x1_d[j] = x1_d[j] - eps
                gradfd[j, 0] = (myModel.computeAsScalar(x1_g, x2) -
                                myModel.computeAsScalar(x1_d, x2)) / (2 * eps)
        else:
            gradfd = ot.Matrix(inputDimension, dimension * dimension)
            covarianceX1X2 = myModel(x1, x2)
            centralValue = ot.Point(covarianceX1X2.getImplementation())
            # Loop over the shifted points
            for i in range(inputDimension):
                currentPoint = ot.Point(x1)
                currentPoint[i] += eps
                localCovariance = myModel(currentPoint, x2)
                currentValue = ot.Point(localCovariance.getImplementation())
                for j in range(currentValue.getSize()):
                    gradfd[i, j] = (currentValue[j] - centralValue[j]) / eps

        ott.assert_almost_equal(grad, gradfd, 1e-5, 1e-5,
                                "in " + myModel.getClassName() + " grad")
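# Hedged usage sketch for test_model (assuming the imports it relies on,
# openturns as ot and openturns.testing as ott): exercise the checker on a
# standard one-dimensional covariance model with illustrative parameters.
myModel = ot.SquaredExponential([2.0], [1.5])
test_model(myModel)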
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()

for diamond in [False, True]:
    mesher1D = ot.IntervalMesher([5])
    print("mesher1D=", mesher1D)
    mesh1D = mesher1D.build(ot.Interval(-1.0, 2.0), diamond)
    print("mesh1D=", mesh1D)

    mesher2D = ot.IntervalMesher([5, 5])
    print("mesher2D=", mesher2D)
    mesh2D = mesher2D.build(ot.Interval([-1.0, -1.0], [2.0, 2.0]), diamond)
    print("mesh2D=", mesh2D)

    mesher3D = ot.IntervalMesher([5] * 3)
    print("mesher3D=", mesher3D)
    try:
        mesh3D = mesher3D.build(ot.Interval(3), diamond)
        print("mesh3D=", mesh3D)
    except RuntimeError:
        print('notyetimpl')
#
# In this example we are going to assess a Karhunen-Loeve decomposition
#

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create a Gaussian process
numberOfVertices = 20
interval = ot.Interval(-1.0, 1.0)
mesh = ot.IntervalMesher([numberOfVertices - 1]).build(interval)
covariance = ot.SquaredExponential()
process = ot.GaussianProcess(covariance, mesh)

# %%
# Decompose it using KL-SVD
sampleSize = 100
processSample = process.getSample(sampleSize)
threshold = 1.0e-7
algo = ot.KarhunenLoeveSVDAlgorithm(processSample, threshold)
algo.run()
klresult = algo.getResult()

# %%
# Instantiate the validation service
validation = ot.KarhunenLoeveValidation(processSample, klresult)
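# %%
# Hedged next step: draw the validation graph, as done in the
# KarhunenLoeveValidation tests shown earlier in this collection.
graph = validation.drawValidation()
view = viewer.View(graph)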
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()

# Create a KarhunenLoeveResult
mesh = ot.IntervalMesher([9]).build(ot.Interval(-1.0, 1.0))
cov1D = ot.AbsoluteExponential([1.0])
algo = ot.KarhunenLoeveP1Algorithm(mesh, cov1D, 0.0)
algo.run()
result = algo.getResult()
projection = ot.KarhunenLoeveProjection(result)
# Construction based on a FieldFunction followed by a FieldToPointFunction
fieldFunction = ot.ValueFunction(ot.SymbolicFunction("x", "x"), mesh)
# Create an instance
myFunc = ot.FieldToPointConnection(projection, fieldFunction)

print("myFunc=", myFunc)
# Get the input and output description
print("myFunc input description=", myFunc.getInputDescription())
print("myFunc output description=", myFunc.getOutputDescription())
# Get the input and output dimension
print("myFunc input dimension=", myFunc.getInputDimension())
print("myFunc output dimension=", myFunc.getOutputDimension())
# Connection on a field
field = result.getModesAsProcessSample().computeMean()
print("field=", field)
print("myFunc(field)=", myFunc(field.getValues()))
print("called ", myFunc.getCallsNumber(), " times")
def get_fem_vertices(min_vertices, max_vertices, n_elements):
    interval = ot.Interval([min_vertices], [max_vertices])
    mesher = ot.IntervalMesher([n_elements])
    fem_vertices = mesher.build(interval)
    return fem_vertices
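# Hedged usage sketch: despite its name, the function returns the mesh itself.
# A 1-d mesh on [0, 1] with 10 elements has 11 vertices.
mesh = get_fem_vertices(0.0, 1.0, 10)
print(mesh.getVerticesNumber())  # 11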
print("Nearest index(", points, ")=", tree.query(points)) print("P1 gram=\n", mesh3D.computeP1Gram()) rotation = ot.SquareMatrix(3) rotation[0, 0] = m.cos(m.pi / 3.0) rotation[0, 1] = m.sin(m.pi / 3.0) rotation[1, 0] = -m.sin(m.pi / 3.0) rotation[1, 1] = m.cos(m.pi / 3.0) rotation[2, 2] = 1.0 # isregular bug time_grid = ot.RegularGrid(0.0, 0.2, 40963) mesh = ot.Mesh(time_grid) print(mesh.isRegular()) # numerical limit testcase m1 = ot.IntervalMesher([1] * 2).build(ot.Interval([0.0] * 2, [1.0] * 2)) simplex = 0 point = [0.8, 0.2] found, coordinates = m1.checkPointInSimplexWithCoordinates(point, simplex) assert found, "not inside" # Fix https://github.com/openturns/openturns/issues/1547 # We force the checking try: vertices = [[2.1], [2.8], [3.5], [4.2], [4.9], [5.6], [6.3], [7.0]] simplices = [[3, 4], [4, 5], [5, 6], [6, 7], [7, 8], [8, 9], [9, 10]] mesh = ot.Mesh(vertices, simplices, True) weights = mesh.computeWeights() except Exception: print('ok')
# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import numpy as np
ot.Log.Show(ot.Log.NONE)

# %%
# We first define the time grid associated with the model.

# %%
tmin = 0.0      # Minimum time
tmax = 12.0     # Maximum time
gridsize = 100  # Number of time steps
mesh = ot.IntervalMesher([gridsize - 1]).build(ot.Interval(tmin, tmax))

# %%
vertices = mesh.getVertices()

# %%
# Creation of the input distribution.

# %%
distZ0 = ot.Uniform(100.0, 150.0)
distV0 = ot.Normal(55.0, 10.0)
distM = ot.Normal(80.0, 8.0)
distC = ot.Uniform(0.0, 30.0)
distribution = ot.ComposedDistribution([distZ0, distV0, distM, distC])

# %%
class TestHMatrixRealAssemblyFunction(object):
    # The constructor is not part of the original snippet; it is
    # reconstructed from the attributes used in __call__ and from the
    # call site below.
    def __init__(self, vertices, scaling):
        self.vertices = vertices
        self.scaling = scaling

    def __call__(self, i, j):
        pt1 = self.vertices[i]
        pt2 = self.vertices[j]
        difference = pt1 - pt2
        val = m.exp(-difference.norm() / self.scaling)
        return val


ot.ResourceMap.SetAsBool('HMatrix-ForceSequential', True)
ot.ResourceMap.SetAsUnsignedInteger('HMatrix-MaxLeafSize', 10)

ot.PlatformInfo.SetNumericalPrecision(3)

n = 2
indices = [n, n]
intervalMesher = ot.IntervalMesher(indices)
interval = ot.Interval([0.0] * 2, [1.0] * 2)
mesh2D = intervalMesher.build(interval)
vertices = mesh2D.getVertices()

factory = ot.HMatrixFactory()
parameters = ot.HMatrixParameters()
parameters.setAssemblyEpsilon(1.e-6)
parameters.setRecompressionEpsilon(1.e-6)
# HMatrix must be symmetric in order to perform Cholesky decomposition
hmat = factory.build(vertices, 1, True, parameters)
simpleAssembly = TestHMatrixRealAssemblyFunction(vertices, 0.1)
hmat.assembleReal(simpleAssembly, 'L')

hmatRef = ot.HMatrix(hmat)
# We note :math:`(\underline{t}_0, \dots, \underline{t}_{N-1})` the vertices of :math:`\mathcal{M}` and :math:`(\underline{x}_0, \dots, \underline{x}_{N-1})` the associated values in :math:`\mathbb{R}^d`.
#
# A field is stored in the *Field* object that stores the mesh and the values at each vertex of the mesh.
# It can be built from a mesh and values or as a realization of a stochastic process.

# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
import math as m
ot.Log.Show(ot.Log.NONE)

# %%
# First, we define a regular 2-d mesh
discretization = [10, 5]
mesher = ot.IntervalMesher(discretization)
lowerBound = [0.0, 0.0]
upperBound = [2.0, 1.0]
interval = ot.Interval(lowerBound, upperBound)
mesh = mesher.build(interval)
graph = mesh.draw()
graph.setTitle('Regular 2-d mesh')
view = viewer.View(graph)

# %%
# We now create a field from a mesh and some values
values = ot.Normal([0.0] * 2, [1.0] * 2,
                   ot.CorrelationMatrix(2)).getSample(len(mesh.getVertices()))
for i in range(len(values)):
    x = values[i]
    values[i] = 0.05 * x / x.norm()
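# %%
# Hedged completion of the step above: wrap the mesh and the normalized
# values into a Field and draw it.
field = ot.Field(mesh, values)
graph = field.draw()
graph.setTitle('Field on a regular 2-d mesh')
view = viewer.View(graph)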
    return [dY0, dY1]


f = ot.PythonFunction(3, 2, flow)
phi = ot.ParametricFunction(f, [2], [0.0])
solver = ot.RungeKutta(phi)
initialState = [2.0, 2.0]
nt = 47
dt = 0.1
timeGrid = ot.RegularGrid(0.0, dt, nt)
result = solver.solve(initialState, timeGrid)
xMin = result.getMin()
xMax = result.getMax()
delta = 0.2 * (xMax - xMin)
mesh = ot.IntervalMesher([12] * 2).build(
    ot.Interval(xMin - delta, xMax + delta))
field = ot.Field(mesh, phi(mesh.getVertices()))
ot.ResourceMap.SetAsScalar("Field-ArrowScaling", 0.1)
graph = field.draw()
cloud = ot.Cloud(mesh.getVertices())
cloud.setColor("black")
graph.add(cloud)
curve = ot.Curve(result)
curve.setColor("red")
curve.setLineWidth(2)
graph.add(curve)
fig = plt.figure()
ax = fig.add_subplot(111)
View(graph, figure=fig)
plt.suptitle("Lotka-Volterra ODE system")
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
from math import sqrt

mesh = ot.IntervalMesher([128]).build(ot.Interval(-1.0, 1.0))
threshold = 0.001
model = ot.AbsoluteExponential([1.0])
algo = ot.KarhunenLoeveP1Algorithm(mesh, model, threshold)
algo.run()
ev = algo.getResult().getEigenvalues()
modes = algo.getResult().getScaledModesAsProcessSample()
g = modes.drawMarginal(0)
g.setXTitle("$t$")
# A raw string avoids invalid escape sequences in the LaTeX label
g.setYTitle(r"$\sqrt{\lambda_n}\phi_n$")
g.setTitle("P1 approx. of KL expansion for $C(s,t)=e^{-|s-t|}$")
fig = plt.figure(figsize=(6, 4))
axis = fig.add_subplot(111)
axis.set_xlim(auto=True)
View(g, figure=fig, axes=[axis], add_legend=False)
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()

dim = 2
interval = ot.Interval([0.0] * dim, [10.0] * dim)
mesh = ot.IntervalMesher([30] * dim).build(interval)
f = ot.SymbolicFunction(["x", "y"], ["x + 0.5*sin(y)", "y-0.1*x*sin(x)"])
mesh.setVertices(f(mesh.getVertices()))

simplices = mesh.getSimplices()
nrSimplices = len(simplices)
naive = ot.NaiveEnclosingSimplex(mesh.getVertices(), simplices)
print("naive=", naive)

ot.RandomGenerator.SetSeed(0)
test = ot.ComposedDistribution([ot.Uniform(-1.0, 11.0)] * dim).getSample(100)

for i, vertex in enumerate(test):
    index = naive.query(vertex)
    if index >= nrSimplices:
        print(i, "is outside")
    else:
        found, coordinates = mesh.checkPointInSimplexWithCoordinates(
            vertex, index)
        if not found:
            print("Wrong simplex found for", vertex, "(index=", index,
                  simplices[index], "barycentric coordinates=", coordinates)
point = ot.Point([123.456, 125.43, 3975.4567])
point2 = ot.Point(3, 789.123)
point3 = ot.Point(3, 1673.456)
point4 = ot.Point(3, 789.654123)
sample = ot.Sample(1, point)
sample.add(point2)
sample.add(point3)
sample.add(point4)
sample.add(point2)
sample.add(point4)
sample.add(point3)
print(sample)
study.add('sample', sample)

mesh = ot.IntervalMesher([50] * 3).build(ot.Interval(3))
study.add('mesh', mesh)

study.save()

study2 = ot.Study()
study2.setStorageManager(ot.XMLH5StorageManager(fileName))
study2.load()

sample2 = ot.Sample()
study2.fillObject('sample', sample2)
print(sample2)
assert sample == sample2, "wrong sample"

mesh2 = ot.Mesh()
study2.fillObject('mesh', mesh2)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Create a process X: R^2 --> R^2

# Define a bi-dimensional mesh as a box
myIndices = ot.Indices([40, 20])
myMesher = ot.IntervalMesher(myIndices)
lowerBound = [0.0, 0.0]
upperBound = [2.0, 1.0]
myInterval = ot.Interval(lowerBound, upperBound)
myMesh = myMesher.build(myInterval)

# Define a scalar temporal Gaussian process on the mesh;
# this process is stationary
# myXproc R^2 --> R
amplitude = [1.0]
scale = [0.2, 0.2]
myCovModel = ot.ExponentialModel(scale, amplitude)
myXproc = ot.GaussianProcess(myCovModel, myMesh)

# Transform myXproc to make its variance depend on the vertex (s, t)
# and to get a positive process thanks to the spatial function g
# myXtProcess R --> R
g = ot.SymbolicFunction(['x1'], ['exp(x1)'])
myDynTransform = ot.ValueFunction(g, 2)
myXtProcess = ot.CompositeProcess(myDynTransform, myXproc)

myField = myXtProcess.getRealization()
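# Hedged follow-up: draw the realization of the transformed process, in the
# same way as the field-drawing snippets above.
graph = myField.drawMarginal(0, False)
fig = plt.figure(figsize=(8, 4))
axis = fig.add_subplot(111)
View(graph, figure=fig, axes=[axis])
plt.show()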
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

try:
    # Note: KarhunenLoeveP1Factory and NumericalPoint are legacy names from
    # older OpenTURNS versions (replaced by KarhunenLoeveP1Algorithm and
    # Point, respectively).
    mesh = ot.IntervalMesher(ot.Indices(1, 9)).build(ot.Interval(-1.0, 1.0))
    factory = ot.KarhunenLoeveP1Factory(mesh, 0.0)
    eigenValues = ot.NumericalPoint()
    KLModes = factory.buildAsProcessSample(ot.AbsoluteExponential([1.0]),
                                           eigenValues)
    print("KL modes=", KLModes)
    print("KL eigenvalues=", eigenValues)
    cov1D = ot.AbsoluteExponential([1.0])
    KLFunctions = factory.build(cov1D, eigenValues)
    print("KL functions=", KLFunctions)
    print("KL eigenvalues=", eigenValues)
    R = ot.CorrelationMatrix(2, [1.0, 0.5, 0.5, 1.0])
    scale = [1.0]
    amplitude = [1.0, 2.0]
    cov2D = ot.ExponentialModel(scale, amplitude, R)
    KLFunctions = factory.build(cov2D, eigenValues)
    print("KL functions=", KLFunctions)
    print("KL eigenvalues=", eigenValues)
except Exception:
    import sys
    print("t_KarhunenLoeveP1Factory_std.py", sys.exc_info()[0],
          sys.exc_info()[1])