def _exec(self, Lambda):
    """Evaluate the Box-Cox profile log-likelihood at ``Lambda``.

    The sample ``self.Y_i_`` is Box-Cox transformed, a linear model in
    ``self.a_i_`` is fitted by least squares, and the (bias-corrected)
    residual variance feeds the log-likelihood.  Returns a one-element
    list, as expected by an OpenTURNS PythonFunction wrapper.
    """
    # Transform the observations with the candidate Box-Cox parameter.
    transformed = ot.BoxCoxTransform(Lambda)(self.Y_i_)
    # Fit transformed ~ intercept + slope * a_i by linear least squares.
    fit = ot.LinearLeastSquares(self.a_i_, transformed)
    fit.run()
    intercept = fit.getConstant()[0]
    slope = fit.getLinear()[0, 0]
    # Unbiased residual variance: 2 estimated parameters -> N - 2 dof.
    residual = transformed - (self.a_i_ * [slope] + [intercept])
    sigma2 = (self.N_ - 1.0) / (self.N_ - 2.0) * residual.computeVariance()[0]
    # Profile log-likelihood (additive constants dropped); sumLogY_i is the
    # Jacobian term of the Box-Cox transformation.
    return [-0.5 * self.N_ * m.log(sigma2) + (Lambda[0] - 1) * self.sumLogY_i]
def myPolynomialDataFitting(total_degree, x_train, y_train):
    """Compute the polynomial curve fitting with given total degree.

    This is for learning purposes only: please consider a serious
    metamodel for real applications, e.g. polynomial chaos or kriging.

    Returns the fitted response surface and the monomial basis function.
    """
    # Monomials x^1 .. x^total_degree (no constant term in the basis).
    monomials = ["x^%d" % k for k in range(1, total_degree + 1)]
    basis = ot.SymbolicFunction(["x"], monomials)
    # Evaluate the basis on the training inputs to build the design matrix,
    # then solve the least-squares problem.
    design = basis(x_train)
    fitter = ot.LinearLeastSquares(design, y_train)
    fitter.run()
    return fitter.getMetaModel(), basis
def computeBreuschPaganTest(x, residuals):
    """Return the p-value of the studentized Breusch-Pagan test.

    Regresses the centered squared residuals on ``x`` and compares the
    resulting statistic to a chi-square distribution.
    """
    sample_size = x.getSize()
    # Auxiliary regression has 2 parameters -> 2 - 1 = 1 degree of freedom.
    dof = 1
    squared = np.array(residuals) ** 2
    sigma2 = squared.sum() / sample_size
    # Studentized Breusch-Pagan: center the squared residuals ...
    w = squared - sigma2
    # ... and regress them linearly on x.
    auxiliary = ot.LinearLeastSquares(x, w)
    auxiliary.run()
    fitted = np.array(auxiliary.getMetaModel()(x))
    # Breusch-Pagan statistic.
    bp = sample_size * (fitted ** 2).sum() / (w ** 2).sum()
    # p-value: complementary cdf of the central chi-square distribution.
    return 1 - ot.DistFunc.pNonCentralChiSquare(dof, 0, bp)
# Fill the remaining design points: the outer corner (0.5, 0.5) plus the
# four corners of the inner square at +/-0.25.
for slot, coords in enumerate(
    [[0.5, 0.5], [-0.25, -0.25], [-0.25, 0.25], [0.25, -0.25], [0.25, 0.25]],
    start=4,
):
    point[0] = coords[0]
    point[1] = coords[1]
    data[slot] = point

# First variant: build the least-squares approximation directly from the
# input sample and the function.
myLeastSquares = ot.LinearLeastSquares(data, myFunc)
myLeastSquares.run()
responseSurface = ot.Function(myLeastSquares.getMetaModel())
print("myLeastSquares=", repr(myLeastSquares))
print("responseSurface=", repr(responseSurface))
inPoint = ot.Point(myFunc.getInputDimension(), 0.1)
print("myFunc(", repr(inPoint), ")=", repr(myFunc(inPoint)))
print("responseSurface(", repr(inPoint), ")=", repr(responseSurface(inPoint)))

# Second variant: build it from the input sample and the output sample.
dataOut = myFunc(data)
myLeastSquares = ot.LinearLeastSquares(data, dataOut)
myLeastSquares.run()
responseSurface = ot.Function(myLeastSquares.getMetaModel())
print("myLeastSquares=", repr(myLeastSquares))
print("responseSurface=", repr(responseSurface))
inPoint = ot.Point(myFunc.getInputDimension(), 0.1)
print("myFunc(", repr(inPoint), ")=", repr(myFunc(inPoint)))
]
polynomialCollection

# %%
# Given the list of strings, we create a symbolic function which computes the
# values of the monomials.

# %%
basis = ot.SymbolicFunction(["x"], polynomialCollection)
basis

# %%
# Evaluate the basis on the training inputs: this is the design matrix of the
# least-squares problem.
designMatrix = basis(x_train)
designMatrix

# %%
myLeastSquares = ot.LinearLeastSquares(designMatrix, y_train)
myLeastSquares.run()

# %%
responseSurface = myLeastSquares.getMetaModel()

# %%
# The couple (`x_test`,`y_test`) is the test set: it is used to assess the
# quality of the polynomial model with points that were not used for training.

# %%
n_test = 50
x_test = linearSample(0, 1, n_test)
# NOTE(review): y_test here holds the metamodel's own predictions on x_test,
# not independent reference values — confirm this is the intent before using
# it to "assess quality".
y_test = responseSurface(basis(x_test))

# %%
graph = ot.Graph("Polynomial curve fitting", "x", "y", True, "topright")
# Input random vector of the cantilever-beam model (E, F, L, I).
X = ot.ComposedDistribution([dist_E, dist_F, dist_L, dist_I])
# Beam deflection formula; output expressed in centimeters.
g = ot.SymbolicFunction(["E", "F", "L", "I"], ["F* L^3 / (3 * E * I)"])
g.setOutputDescription(["Y (cm)"])
# To make the best use of the simulations, we equip the function with a
# history (memoization) mechanism.
g = ot.MemoizeFunction(g)
# Finally, we define the output random vector.
XRV = ot.RandomVector(X)
Y = ot.CompositeRandomVector(g, XRV)
Y.setDescription(["Y (cm)"])
# ## Linear regression with LinearLeastSquares
n = 1000
sampleX = X.getSample(n)
sampleY = g(sampleX)
# Fit a first-order (linear) response surface by least squares.
myLeastSquares = ot.LinearLeastSquares(sampleX, sampleY)
myLeastSquares.run()
responseSurface = myLeastSquares.getMetaModel()
# Validate the metamodel against the same sample and plot the result.
# NOTE(review): validating on the training sample itself is optimistic —
# confirm a separate validation sample is not intended here.
val = ot.MetaModelValidation(sampleX, sampleY, responseSurface)
graph = val.drawValidation()
view = otv.View(graph)
from matplotlib import pylab as plt

# Silence OpenTURNS log output for a clean example run.
ot.Log.Show(ot.Log.NONE)

# Prepare an input sample: corners of two nested squares at +/-0.5 and +/-0.25.
x = [[0.5, 0.5], [-0.5, -0.5], [-0.5, 0.5], [0.5, -0.5]]
x += [[0.25, 0.25], [-0.25, -0.25], [-0.25, 0.25], [0.25, -0.25]]

# %%
# Compute the output sample from the input sample and a function.
formulas = ['cos(x1 + x2)', '(x2 + 1) * exp(x1 - 2 * x2)']
model = ot.SymbolicFunction(['x1', 'x2'], formulas)
y = model(x)

# %%
# Create a linear least squares model.
algo = ot.LinearLeastSquares(x, y)
algo.run()

# %%
# get the linear term
algo.getLinear()

# %%
# get the constant term
algo.getConstant()

# %%
# get the metamodel (the fitted affine approximation of the model)
responseSurface = algo.getMetaModel()

# %%