def fit(self, X, y, **fit_params):
    """Fit a tensor approximation metamodel to the training data.

    Builds one orthogonal univariate polynomial family per input marginal,
    assembles them into a product basis, and runs
    ``ot.TensorApproximationAlgorithm``.

    Parameters
    ----------
    X : array-like, shape = (n_samples, n_features)
        Training data; must expose ``.shape``.
    y : array-like, shape = (n_samples,) or (n_samples, 1)
        Target values; reshaped to a column before fitting.

    Returns
    -------
    self : returns an instance of self.
    """
    n_features = X.shape[1]
    # Infer the input distribution from the data when none was supplied.
    if self.distribution is None:
        self.distribution = BuildDistribution(X)
    # One orthogonal polynomial family per marginal of the distribution.
    univariate_families = []
    for j in range(n_features):
        marginal = self.distribution.getMarginal(j)
        polynomial_factory = ot.StandardDistributionPolynomialFactory(marginal)
        univariate_families.append(
            ot.OrthogonalUniVariateFunctionFamily(
                ot.OrthogonalUniVariatePolynomialFunctionFactory(
                    polynomial_factory)))
    basis_factory = ot.OrthogonalProductFunctionFactory(univariate_families)
    algo = ot.TensorApproximationAlgorithm(
        X, y.reshape(-1, 1), self.distribution, basis_factory,
        [self.nk] * n_features, self.max_rank)
    algo.run()
    self._result = algo.getResult()
    return self
def fit(self, X, y, **fit_params):
    """Fit Tensor regression model.

    Parameters
    ----------
    X : array-like, shape = (n_samples, n_features)
        Training data.
    y : array-like, shape = (n_samples, [n_output_dims])
        Target values.

    Returns
    -------
    self : returns an instance of self.

    Raises
    ------
    ValueError
        If X is empty, or if X or y is not 2-dimensional.
    """
    if len(X) == 0:
        raise ValueError(
            "Can not perform a tensor approximation with empty sample")
    # check data type is accurate
    if (len(np.shape(X)) != 2):
        raise ValueError("X has incorrect shape.")
    # BUG FIX: was `len(X[1])`, which indexes the *second sample* and raises
    # IndexError when X has a single row; the feature count is shape[1].
    input_dimension = np.shape(X)[1]
    if (len(np.shape(y)) != 2):
        raise ValueError("y has incorrect shape.")
    # Infer the input distribution from the data when none was supplied.
    if self.distribution is None:
        self.distribution = BuildDistribution(X)
    # One orthogonal polynomial family per marginal of the distribution.
    factoryCollection = [
        ot.OrthogonalUniVariateFunctionFamily(
            ot.OrthogonalUniVariatePolynomialFunctionFactory(
                ot.StandardDistributionPolynomialFactory(
                    self.distribution.getMarginal(i))))
        for i in range(input_dimension)
    ]
    functionFactory = ot.OrthogonalProductFunctionFactory(
        factoryCollection)
    algo = ot.TensorApproximationAlgorithm(X, y,
                                           self.distribution,
                                           functionFactory,
                                           [self.nk] * input_dimension,
                                           self.max_rank)
    algo.run()
    self.result_ = algo.getResult()
    return self
# NOTE(review): this chunk starts mid-script — `basis` and `dim` are defined
# upstream of the visible region.
print('basis=', basis)
x = [0.5] * dim
# Evaluate the first ten basis functions at x, addressed by flat index.
for i in range(10):
    f = basis.build(i)
    print('i=', i, 'f(X)=', f(x))
# Using multi-indices
enum = basis.getEnumerateFunction()
for i in range(10):
    indices = enum(i)
    f = basis.build(indices)
    print('indices=', indices, 'f(X)=', f(x))
# Other factories
factoryCollection = [
    ot.OrthogonalUniVariatePolynomialFunctionFactory(ot.LaguerreFactory(2.5)),
    ot.HaarWaveletFactory(),
    ot.FourierSeriesFactory()
]
dim = len(factoryCollection)
basisFactory = ot.OrthogonalProductFunctionFactory(factoryCollection)
basis = ot.OrthogonalBasis(basisFactory)
print('basis=', basis)
x = [0.5] * dim
for i in range(10):
    f = basis.build(i)
    print('i=', i, 'f(X)=', f(x))
# Using multi-indices
enum = basis.getEnumerateFunction()
# NOTE(review): chunk is truncated here — the loop body continues beyond
# this view (presumably mirroring the multi-index loop above).
for i in range(10):
# # with: # # .. math:: # v_j^{(i)} (x_j) = \sum_{k=1}^{n_j} \beta_{j,k}^{(i)} \phi_{j,k} (x_j) # # We should define : # # - The family of univariate functions :math:`\phi_j`. We choose the orthogonal basis with respect to the marginal distribution measures. # - The maximal rank :math:`m`. Here value is set to 5 # - The marginal degrees :math:`n_j`. Here we set the degrees to [4, 15, 3, 2] # # %% factoryCollection = [ ot.OrthogonalUniVariatePolynomialFunctionFactory( ot.StandardDistributionPolynomialFactory(_)) for _ in [E, F, L, I] ] functionFactory = ot.OrthogonalProductFunctionFactory(factoryCollection) nk = [4, 15, 3, 2] maxRank = 1 # %% # Finally we might launch the algorithm: # %% algo = ot.TensorApproximationAlgorithm(X_train, Y_train, myDistribution, functionFactory, nk, maxRank) algo.run() result = algo.getResult() metamodel = result.getMetaModel()
# dim = 8 # model = ot.SymbolicFunction(['rw', 'r', 'Tu', 'Hu', 'Tl', 'Hl', 'L', 'Kw'], # ['(2*pi_*Tu*(Hu-Hl))/(ln(r/rw)*(1+(2*L*Tu)/(ln(r/rw)*rw^2*Kw)+Tu/Tl))']) # coll = [ot.Normal(0.1, 0.0161812), # ot.LogNormal(7.71, 1.0056), # ot.Uniform(63070.0, 115600.0), # ot.Uniform(990.0, 1110.0), # ot.Uniform(63.1, 116.0), # ot.Uniform(700.0, 820.0), # ot.Uniform(1120.0, 1680.0), # ot.Uniform(9855.0, 12045.0)] distribution = ot.ComposedDistribution(coll) factoryCollection = [ ot.OrthogonalUniVariateFunctionFamily( ot.OrthogonalUniVariatePolynomialFunctionFactory( ot.StandardDistributionPolynomialFactory(dist))) for dist in coll ] functionFactory = ot.OrthogonalProductFunctionFactory(factoryCollection) size = 1000 X = distribution.getSample(size) Y = model(X) # ot.ResourceMap.Set('TensorApproximationAlgorithm-Method', 'RankM') # n-d nk = [10] * dim maxRank = 5 algo = ot.TensorApproximationAlgorithm(X, Y, distribution, functionFactory, nk, maxRank)
# Tensor-approximation metamodel of y(x) = x*sin(x) on [0, 10].
import openturns as ot
from openturns.viewer import View

dim = 1
f = ot.SymbolicFunction(['x'], ['x*sin(x)'])
uniform = ot.Uniform(0.0, 10.0)
distribution = ot.ComposedDistribution([uniform] * dim)
# Same orthogonal polynomial family replicated for each input dimension.
factoryCollection = [
    ot.OrthogonalUniVariateFunctionFamily(
        ot.OrthogonalUniVariatePolynomialFunctionFactory(
            ot.StandardDistributionPolynomialFactory(uniform)))
] * dim
functionFactory = ot.OrthogonalProductFunctionFactory(factoryCollection)
# NOTE(review): `size` is never used below and the design actually has
# 8 points, not 10 — confirm which was intended.
size = 10
sampleX = [[1.0], [2.0], [3.0], [4.0], [5.0], [6.0], [7.0], [8.0]]
sampleY = f(sampleX)
nk = [5] * dim
maxRank = 1
algo = ot.TensorApproximationAlgorithm(sampleX, sampleY, distribution,
                                       functionFactory, nk, maxRank)
algo.run()
result = algo.getResult()
metamodel = result.getMetaModel()
# Overlay the true model, the metamodel and the training sample.
graph = f.draw(0.0, 10.0)
graph.add(metamodel.draw(0.0, 10.0))
graph.add(ot.Cloud(sampleX, sampleY))
graph.setColors(['blue', 'red', 'black'])
graph.setLegends(['model', 'meta model', 'sample'])
graph.setLegendPosition('topleft')
graph.setTitle('y(x)=x*sin(x)')
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

# Build an orthogonal univariate *function* factory from the Legendre
# polynomial factory and print the first ten basis functions together with
# their value, gradient and hessian at a sample point.
polynomialFactory = ot.LegendreFactory()
factory = ot.OrthogonalUniVariatePolynomialFunctionFactory(polynomialFactory)
print(factory)

x = 0.4
for order in range(10):
    f = factory.build(order)
    value = ot.Point([f(x)])
    gradient = ot.Point([f.gradient(x)])
    hessian = ot.Point([f.hessian(x)])
    print('order=', order, f, 'X=', ot.Point([x]),
          'f(X)=', value,
          'df(X)=', gradient,
          'd2f(X)=', hessian)