def test_NonZeroMean(self):
    """Check that a Karhunen-Loeve reduction reproduces a trajectory of a
    process with a non-zero mean (a linear trend added to a zero-mean
    Gaussian process).

    Fix: the original computed ``meanField = processSample.computeMean()``
    and never used it; the dead local is removed.
    """
    # 1-D regular mesh on [-1, 1] with 10 vertices.
    numberOfVertices = 10
    interval = ot.Interval(-1.0, 1.0)
    mesh = ot.IntervalMesher([numberOfVertices - 1]).build(interval)
    # Zero-mean Gaussian process with a squared-exponential covariance.
    covariance = ot.SquaredExponential()
    zeroProcess = ot.GaussianProcess(covariance, mesh)
    # Define a trend function t -> 30 * t ...
    f = ot.SymbolicFunction(["t"], ["30 * t"])
    fTrend = ot.TrendTransform(f, mesh)
    # ... and add it to the process.
    process = ot.CompositeProcess(fTrend, zeroProcess)
    # Sample the process and build the KL decomposition by SVD.
    sampleSize = 100
    processSample = process.getSample(sampleSize)
    threshold = 0.0  # keep every eigenmode
    algo = ot.KarhunenLoeveSVDAlgorithm(processSample, threshold)
    algo.run()
    klresult = algo.getResult()
    # Create the KL reduction (project onto the modes, then lift back).
    klreduce = ot.KarhunenLoeveReduction(klresult)
    # Generate a fresh trajectory and check the reduction reproduces it.
    field = process.getRealization()
    values = field.getValues()
    reducedValues = klreduce(values)
    ott.assert_almost_equal(values, reducedValues)
def test_trend(self):
    """Compare the KL reduction of a process with a discontinuous mean,
    with and without supplying the empirical trend to the reduction."""
    # N: mesh resolution, M: learning sample size, P: validation sample size
    N = 100
    M = 1000
    P = 10
    mean = ot.SymbolicFunction("x", "sign(x)")
    cov = ot.SquaredExponential([1.0], [0.1])
    mesh = ot.IntervalMesher([N]).build(ot.Interval(-2.0, 2.0))
    process = ot.GaussianProcess(ot.TrendTransform(mean, mesh), cov, mesh)
    sample = process.getSample(M)
    # Karhunen-Loeve decomposition of the learning sample (SVD, 1e-6 cutoff)
    algo = ot.KarhunenLoeveSVDAlgorithm(sample, 1e-6)
    algo.run()
    result = algo.getResult()
    # Empirical mean, P1-interpolated on the mesh, used as the trend
    trend = ot.TrendTransform(
        ot.P1LagrangeEvaluation(sample.computeMean()), mesh)
    sample2 = process.getSample(P)
    sample2.setName('reduction of sign(x) w/o trend')
    # Reduce the validation sample without (reduced1) and with (reduced2)
    # the empirical trend
    reduced1 = ot.KarhunenLoeveReduction(result)(sample2)
    reduced2 = ot.KarhunenLoeveReduction(result, trend)(sample2)
    # Overlay: original (red), trend-free reduction (blue, dashed),
    # trend-aware reduction (green, dotted)
    g = sample2.drawMarginal(0)
    g.setColors(["red"])
    g1 = reduced1.drawMarginal(0)
    g1.setColors(["blue"])
    drs = g1.getDrawables()
    # getDrawables() appears to return copies: each restyled drawable is
    # written back into the collection by index
    for i, d in enumerate(drs):
        d.setLineStyle("dashed")
        drs[i] = d
    g1.setDrawables(drs)
    g.add(g1)
    g2 = reduced2.drawMarginal(0)
    g2.setColors(["green"])
    drs = g2.getDrawables()
    for i, d in enumerate(drs):
        d.setLineStyle("dotted")
        drs[i] = d
    g2.setDrawables(drs)
    g.add(g2)
    # Disabled by default: save the comparison plot for manual inspection
    if 0:
        from openturns.viewer import View
        View(g).save('reduction.png')
# NOTE(review): lowerBound and myMesher are defined earlier in the file,
# outside this excerpt — confirm against the full script.
upperBound = [2., 1.]
myInterval = ot.Interval(lowerBound, upperBound)
myMesh = myMesher.build(myInterval)

# Define a scalar temporal normal process on the mesh
# this process is stationary
amplitude = [1.0]
scale = [0.01]*2
myCovModel = ot.ExponentialModel(scale, amplitude)
myXProcess = ot.GaussianProcess(myCovModel, myMesh)

# Create a trend function
# fTrend : R^2 --> R
#          (t,s) --> 1+2t+2s
fTrend = ot.SymbolicFunction(['t', 's'], ['1+2*t+2*s'])
fTemp = ot.TrendTransform(fTrend, myMesh)

# Add the trend to the initial process
myYProcess = ot.CompositeProcess(fTemp, myXProcess)

# Get a field from myYtProcess
myYField = myYProcess.getRealization()

# %%
# CASE 1 : we estimate the trend from the field

# Define the regression strategy using the LAR method
myBasisSequenceFactory = ot.LARS()

# Define the fitting algorithm using the
# Corrected Leave One Out or KFold algorithms
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Create a process on a regular grid
myGrid = ot.RegularGrid(0.0, 0.1, 100)
amplitude = [5.0]
scale = [0.2]
myCovModel = ot.ExponentialModel(scale, amplitude)
myXProcess = ot.GaussianProcess(myCovModel, myGrid)

# Create a trend: t -> 1 + 2*t + t^2, discretized on the grid
fTrend = ot.SymbolicFunction(["t"], ["1+2*t+t^2"])
fTemp = ot.TrendTransform(fTrend, myGrid)

# Add the trend to the process and get a field
myYProcess = ot.CompositeProcess(fTemp, myXProcess)
myYField = myYProcess.getRealization()

# Create a TrendFactory: LARS basis-sequence selection with KFold fitting,
# over the polynomial basis {1, t, t^2}
myBasisSequenceFactory = ot.LARS()
myFittingAlgorithm = ot.KFold()
func1 = ot.SymbolicFunction(["t"], ["1"])
func2 = ot.SymbolicFunction(["t"], ["t"])
func3 = ot.SymbolicFunction(["t"], ["t^2"])
myBasis = ot.Basis([func1, func2, func3])
myTrendFactory = ot.TrendFactory(myBasisSequenceFactory, myFittingAlgorithm)
# Estimate the trend
# Second order model with parameters myCovModel = ot.ExponentialModel(scale, amplitude) print("myCovModel = ", myCovModel) myProcess1 = ot.GaussianProcess(myCovModel, myTimeGrid) print("myProcess1 = ", myProcess1) print("is stationary? ", myProcess1.isStationary()) myProcess1.setSamplingMethod(ot.GaussianProcess.CHOLESKY) print("mean over ", size, " realizations = ", myProcess1.getSample(size).computeMean()) myProcess1.setSamplingMethod(ot.GaussianProcess.GIBBS) print("mean over ", size, " realizations = ", myProcess1.getSample(size).computeMean()) # With constant trend trend = ot.TrendTransform(ot.SymbolicFunction("t", "4.0"), myTimeGrid) myProcess2 = ot.GaussianProcess(trend, myCovModel, myTimeGrid) myProcess2.setSamplingMethod(ot.GaussianProcess.GIBBS) print("myProcess2 = ", myProcess2) print("is stationary? ", myProcess2.isStationary()) print("mean over ", size, " realizations= ", myProcess2.getSample(size).computeMean()) # With varying trend trend3 = ot.TrendTransform(ot.SymbolicFunction("t", "sin(t)"), myTimeGrid) myProcess3 = ot.GaussianProcess(trend3, myCovModel, myTimeGrid) print("myProcess3 = ", myProcess3) print("is stationary? ", myProcess3.isStationary()) myProcess3.setSamplingMethod(ot.GaussianProcess.CHOLESKY) print("mean over ", size, " realizations = ", myProcess3.getSample(size).computeMean())
# NOTE(review): viewer and plt are imported earlier in the file, outside
# this excerpt — confirm against the full script.
grid = ot.RegularGrid(0.0, 0.1, 10)
amplitude = [5.0]
scale = [0.2]
covModel = ot.ExponentialModel(scale, amplitude)
X = ot.GaussianProcess(covModel, grid)

# %%
# Draw a sample of the base (zero-mean) process
sample = X.getSample(6)
sample.setName('X')
graph = sample.drawMarginal(0)
view = viewer.View(graph)

# %%
# Define a trend function t -> 30*t
f = ot.SymbolicFunction(['t'], ['30*t'])
fTrend = ot.TrendTransform(f, grid)

# %%
# Add it to the process
Y = ot.CompositeProcess(fTrend, X)
Y.setName('Y')

# %%
# Draw a sample of the trended process
sample = Y.getSample(6)
sample.setName('Y')
graph = sample.drawMarginal(0)
view = viewer.View(graph)
plt.show()
# NOTE(review): plotCovarianceModel, myTimeGrid and viewer are defined
# earlier in the file, outside this excerpt — confirm against the full
# script.
amplitude = [3.5]
scale = [0.5]
myModel = ot.SquaredExponential(scale, amplitude)
graph = plotCovarianceModel(myModel, myTimeGrid, 10)
graph.setTitle("amplitude=%.3f, scale=%.3f" % (amplitude[0], scale[0]))
view = viewer.View(graph)

# %%
# Define the trend
# ----------------
#
# The trend is a deterministic function. With the `GaussianProcess` class,
# the associated process is the sum of a trend and a gaussian process with
# zero mean.

# %%
# Trend function x -> 2*x, discretized on the time grid
f = ot.SymbolicFunction(['x'], ['2*x'])
fTrend = ot.TrendTransform(f, myTimeGrid)

# %%
# Gaussian process with the trend and a wider correlation length
amplitude = [3.5]
scale = [1.5]
myModel = ot.SquaredExponential(scale, amplitude)
process = ot.GaussianProcess(fTrend, myModel, myTimeGrid)
process

# %%
# Draw a few trajectories of the trended process
nbTrajectories = 10
sample = process.getSample(nbTrajectories)
graph = sample.drawMarginal(0)
graph.setTitle("amplitude=%.3f, scale=%.3f" % (amplitude[0], scale[0]))
view = viewer.View(graph)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Create a process on a regular grid
myGrid = ot.RegularGrid(0.0, 0.1, 100)
amplitude = [5.0]
scale = [0.2]
myCovModel = ot.ExponentialModel(scale, amplitude)
myXProcess = ot.GaussianProcess(myCovModel, myGrid)

# Create a trend: t -> 1 + 2*t + t^2, discretized on the grid
fTrend = ot.SymbolicFunction(["t"], ["1+2*t+t^2"])
# FIX: TrendTransform needs the mesh the trend is discretized on; the
# mesh argument was missing here, while the otherwise-identical script
# elsewhere in this file passes myGrid.
fTemp = ot.TrendTransform(fTrend, myGrid)

# Add the trend to the process and get a field
myYProcess = ot.CompositeProcess(fTemp, myXProcess)
myYField = myYProcess.getRealization()

# Create a TrendFactory: LARS basis-sequence selection with KFold fitting,
# over the polynomial basis {1, t, t^2}
myBasisSequenceFactory = ot.LARS()
myFittingAlgorithm = ot.KFold()
func1 = ot.SymbolicFunction(["t"], ["1"])
func2 = ot.SymbolicFunction(["t"], ["t"])
func3 = ot.SymbolicFunction(["t"], ["t^2"])
myBasis = ot.Basis([func1, func2, func3])
myTrendFactory = ot.TrendFactory(myBasisSequenceFactory, myFittingAlgorithm)
# Estimate the trend
import openturns as ot
from openturns.viewer import View

# N: mesh resolution, M: learning sample size, P: validation sample size
N = 100
M = 1000
P = 10
mean = ot.SymbolicFunction("x", "sign(x)")
cov = ot.SquaredExponential([1.0], [0.1])
mesh = ot.IntervalMesher([N]).build(ot.Interval(-2.0, 2.0))
process = ot.GaussianProcess(ot.TrendTransform(mean, mesh), cov, mesh)
sample = process.getSample(M)
# Karhunen-Loeve decomposition of the learning sample (SVD, 1e-6 cutoff)
algo = ot.KarhunenLoeveSVDAlgorithm(sample, 1e-6)
algo.run()
result = algo.getResult()
# Empirical mean, P1-interpolated on the mesh, used as the trend
trend = ot.TrendTransform(ot.P1LagrangeEvaluation(sample.computeMean()), mesh)
sample2 = process.getSample(P)
sample2.setName('reduction of sign(x) w/o trend')
# Reduce the validation sample without (reduced1) and with (reduced2)
# the empirical trend
reduced1 = ot.KarhunenLoeveReduction(result)(sample2)
reduced2 = ot.KarhunenLoeveReduction(result, trend)(sample2)
# Overlay: original (red), trend-free reduction (blue, dashed)
g = sample2.drawMarginal(0)
g.setColors(["red"])
g1 = reduced1.drawMarginal(0)
g1.setColors(["blue"])
drs = g1.getDrawables()
# getDrawables() appears to return copies: each restyled drawable is
# written back into the collection by index
for i, d in enumerate(drs):
    d.setLineStyle("dashed")
    drs[i] = d
g1.setDrawables(drs)
g.add(g1)
# Trend-aware reduction, drawn in green (styling continues past this
# excerpt)
g2 = reduced2.drawMarginal(0)
g2.setColors(["green"])
# NOTE(review): this excerpt begins inside the body of a function `flow`
# whose header (and the line defining Y0, presumably Y0 = X[0]) lies
# outside the visible chunk — confirm against the full file.
    Y1 = X[1]
    # Coupled quadratic right-hand side of the 2-D ODE system
    dY0 = 0.5 * Y0 * (2.0 - Y1)
    dY1 = 0.5 * Y1 * (Y0 - 1.0)
    return [dY0, dY1]


# Wrap the Python flow as an OpenTURNS function: R^2 -> R^2
phi_func = ot.PythonFunction(2, 2, flow)

# Create the mesh
# NOTE(review): N and interval are defined earlier in the file, outside
# this excerpt.
discretization = [N] * 2
mesh = ot.IntervalMesher(discretization).build(interval)

# Covariance model: two independent squared-exponential components
covariance = ot.TensorizedCovarianceModel(
    [ot.SquaredExponential([0.2] * 2, [0.3])] * 2)
# NOTE(review): TrendTransform is called here without a mesh argument,
# unlike the other TrendTransform uses in this file — verify this matches
# the installed OpenTURNS API.
process = ot.GaussianProcess(ot.TrendTransform(phi_func), covariance, mesh)
field = process.getRealization()
# Interpolate the realized field as a function via P1 Lagrange
f = ot.Function(ot.P1LagrangeEvaluation(field))
# Tune the field drawing (level count and arrow geometry)
ot.ResourceMap.SetAsUnsignedInteger("Field-LevelNumber", 64)
ot.ResourceMap.SetAsScalar("Field-ArrowRatio", 0.01)
ot.ResourceMap.SetAsScalar("Field-ArrowScaling", 0.03)
graph = field.draw()
print("f=", f.getInputDimension(), "->", f.getOutputDimension())
# Integrate the perturbed flow with Runge-Kutta from (0.5, 1.0)
phi = ot.ValueFunction(f)
solver = ot.RungeKutta(phi)
initialState = [0.5, 1.0]
timeGrid = ot.RegularGrid(0.0, 0.1, 10000)
result = solver.solve(initialState, timeGrid)
print(result)
curve = ot.Curve(result)
curve.setColor("red")