Example #1
    def tensorize(self, fun):
        '''
        Tensorize a provided function defined on self.X.support().

        Parameters
        ----------
        fun : function or tensap.Function
            The function to be tensorized.

        Raises
        ------
        ValueError
            If the provided argument is neither a tensap.Function nor a
            function.

        Returns
        -------
        tensap.TensorizedFunction
            The tensorized function.

        '''
        if not isinstance(fun, tensap.Function) and \
                not hasattr(fun, '__call__'):
            raise ValueError('The argument must be a tensap.Function or ' +
                             'function.')

        if not isinstance(fun, tensap.Function) and hasattr(fun, '__call__'):
            fun = tensap.UserDefinedFunction(fun, self.dim)

        f = tensap.UserDefinedFunction(lambda z: fun(self.inverse_map(z)),
                                       (self.d + 1) * self.dim)
        return tensap.TensorizedFunction(f, self)
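A minimal usage sketch (not part of the original listing) of how tensorize might be called, mirroring the Tensorizer pattern that appears in a later example; the base, resolution and the square-root test function are illustrative assumptions.

import numpy as np
import tensap

# Tensorizer with base 2, resolution 8, in dimension 1 (illustrative values)
T = tensap.Tensorizer(2, 8, 1)
FUN = tensap.UserDefinedFunction(lambda x: np.sqrt(x), 1)
FUN.evaluation_at_multiple_points = True

TENSORIZED_FUN = T.tensorize(FUN)  # a tensap.TensorizedFunction
TENSORIZED_FUN.fun.evaluation_at_multiple_points = True
X_TEST = T.X.random(5)
print(TENSORIZED_FUN(X_TEST))  # evaluates the function through its tensorized form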
Example #2
    def partial_evaluation(self, not_alpha, x_not_alpha):
        '''
        Return the partial evaluation of a function
        f(x) = f(x_alpha, x_not_alpha), that is, the function
        f_alpha(.) = f(., x_not_alpha) obtained by fixing the variables with
        indices not_alpha to the values x_not_alpha.

        Parameters
        ----------
        not_alpha : list or numpy.ndarray
            The indices of the fixed variables.
        x_not_alpha : numpy.ndarray
            The fixed values of the variables with indices not_alpha.

        Raises
        ------
        ValueError
            If the Function has an empty attribute dim.

        Returns
        -------
        f_alpha : tensap.UserDefinedFunction
            The partial evaluation of the Function.

        '''
        if self.dim is None or np.size(self.dim) == 0:
            raise ValueError('The Function has an empty attribute dim.')

        alpha = np.setdiff1d(range(self.dim), not_alpha)
        ind = [
            np.nonzero(np.concatenate((alpha, not_alpha)) == x)[0][0]
            for x in range(self.dim)
        ]

        def fun(x_alpha):
            grid = tensap.FullTensorGrid([x_alpha, x_not_alpha])
            return self.eval(grid.array()[:, ind])

        f_alpha = tensap.UserDefinedFunction(fun, alpha.size)
        f_alpha.store = self.store
        f_alpha.evaluation_at_multiple_points = \
            self.evaluation_at_multiple_points
        f_alpha.measure = self.measure.marginal(alpha)

        return f_alpha
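A hedged sketch (not part of the original listing) of how partial_evaluation might be used: a trivariate function has its last variable fixed, yielding a bivariate function. The example function, the attached measure and the array shapes are illustrative assumptions.

import numpy as np
import tensap

F = tensap.UserDefinedFunction('x0 + x1*x2', 3)
F.evaluation_at_multiple_points = True
# Attach a measure so that the marginal used internally can be formed
F.measure = tensap.RandomVector(tensap.UniformRandomVariable(-1, 1), 3)

# Fix x2 = 0.5: F_ALPHA(x0, x1) = F(x0, x1, 0.5)
F_ALPHA = F.partial_evaluation([2], np.array([0.5]))
print(F_ALPHA(np.random.rand(4, 2)))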
Example #3
import sys
import numpy as np
import matplotlib.pyplot as plt
sys.path.insert(0, './../../../')
import tensap

# %% Interpolation on a polynomial space using Chebyshev Points
def FUN(x):
    # Function to approximate
    return np.cos(10*x)


FUN = tensap.UserDefinedFunction(FUN, 1)
FUN.evaluation_at_multiple_points = True
X = tensap.UniformRandomVariable(-1, 1)

# Interpolation basis and points
P = 30
H = tensap.PolynomialFunctionalBasis(X.orthonormal_polynomials(), range(P+1))
X_CP = tensap.chebyshev_points(H.cardinal(), [-1, 1])

# Interpolation of the function
F = H.interpolate(FUN, X_CP)

# Display and error
print('Interpolation on a polynomial space using Chebyshev Points')
plt.figure()
X_PLOT = np.linspace(-1, 1, 100)
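# Hedged completion (the original example is truncated here): plot the
# function and its interpolation, then estimate the relative L2 error on a
# random test sample, assuming F can be evaluated like a tensap Function.
plt.plot(X_PLOT, np.ravel(FUN(X_PLOT)), label='Function')
plt.plot(X_PLOT, np.ravel(F(X_PLOT)), label='Interpolation')
plt.legend()
plt.show()

X_TEST = X.random(1000)
ERR_L2 = np.linalg.norm(np.ravel(FUN(X_TEST)) - np.ravel(F(X_TEST))) \
    / np.linalg.norm(np.ravel(FUN(X_TEST)))
print('Relative L2 test error = %2.5e' % ERR_L2)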
Example #4

import sys
import numpy as np
sys.path.insert(0, './../../../')
import tensap

# %% Definitions
D = 3
P = 10

F = tensap.UserDefinedFunction('1+x0+(2+x0)/(2+x1)+0.04*(x2-x2**3)', D)
F.evaluation_at_multiple_points = True
V = tensap.UniformRandomVariable(-1, 1)
X = tensap.RandomVector(V, D)
BASIS = tensap.PolynomialFunctionalBasis(tensap.LegendrePolynomials(),
                                         range(P + 1))
BASES = tensap.FunctionalBases.duplicate(BASIS, D)

# %% Sparse tensor product functional basis
IND = tensap.MultiIndices.with_bounded_norm(D, 1, P)
H = tensap.SparseTensorProductFunctionalBasis(BASES, IND)

# %% Interpolation on a magic grid
G, _, _ = H.magic_points(X.random(1000))
IF = H.interpolate(F, G)
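# Hedged addition (not in the original snippet): estimate the relative L2
# error of the magic-point interpolation IF on a random test sample, assuming
# IF can be evaluated like a tensap Function.
X_TEST = X.random(1000)
ERR_L2 = np.linalg.norm(np.ravel(F(X_TEST)) - np.ravel(IF(X_TEST))) \
    / np.linalg.norm(np.ravel(F(X_TEST)))
print('Relative L2 test error = %2.5e' % ERR_L2)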
Example #5
# %% Identification of a function f(x) on (0, 1) with a function g(i_1, ..., i_d, y)
# x is identified with (i_1, ..., i_d, y) through a Tensorizer
# First interpolate, then truncate

L = 10  # Resolution
B = 3  # Scaling factor

T = tensap.Tensorizer(B, L, 1)


def FUN(x):
    return np.sqrt(x)


FUN = tensap.UserDefinedFunction(FUN, 1)
FUN.evaluation_at_multiple_points = True

TENSORIZED_FUN = T.tensorize(FUN)
TENSORIZED_FUN.fun.evaluation_at_multiple_points = True

# Interpolation of the function in the tensor product feature space
DEGREE = 3
BASES = T.tensorized_function_functional_bases(DEGREE)
H = tensap.FullTensorProductFunctionalBasis(BASES)
FUN_INTERP, _ = H.tensor_product_interpolation(TENSORIZED_FUN)
TENSORIZED_FUN_INTERP = tensap.TensorizedFunction(FUN_INTERP, T)
X_TEST = T.X.random(100)
F_X_TEST = TENSORIZED_FUN_INTERP(X_TEST)
Y_TEST = FUN(X_TEST)
ERR_L2 = np.linalg.norm(Y_TEST - F_X_TEST) / np.linalg.norm(Y_TEST)
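# Hedged addition (not in the original snippet): report the error computed above.
print('Relative L2 test error = %2.5e' % ERR_L2)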
Example #6
import numpy as np
import tensap

# %% Identification of a multivariate function f(x1, ..., xd) on (0,1)^d with a function
# g(i_1^1, ..., i_1^d, ..., i_L^1, ..., i_L^d, y1, ..., yd)
# x1, ..., xd are identified with (i_1^1, ..., i_L^1, y1), ..., (i_1^d, ..., i_L^d, yd)
# through a Tensorizer

DIM = 4
L = 13  # Resolution
B = 2  # Scaling factor

T = tensap.Tensorizer(B, L, DIM)
T.ordering_type = 2
FUN = tensap.UserDefinedFunction('1/(1+x0+2*x1+3*x2+4*x3)', DIM)
FUN.evaluation_at_multiple_points = True
TENSORIZED_FUN = T.tensorize(FUN)

# Interpolation of the function in the tensor product feature space
DEGREE = 2
BASES = T.tensorized_function_functional_bases(DEGREE)

# %% Learning with PCA based algorithm
FPCA = tensap.FunctionalTensorPrincipalComponentAnalysis()
FPCA.pca_sampling_factor = 20
FPCA.bases = BASES
FPCA.display = False
TREE = tensap.DimensionTree.balanced(TENSORIZED_FUN.fun.dim)
FPCA.tol = 1e-8
FPCA.max_rank = np.inf
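# Hedged completion (the original snippet stops after the solver setup): run
# the tree-based PCA approximation and check its accuracy. The call
# tree_based_approximation(fun, tree) is assumed from the tensap API, applied
# to the underlying function TENSORIZED_FUN.fun on which TREE was built.
F_PCA, OUTPUT_PCA = FPCA.tree_based_approximation(TENSORIZED_FUN.fun, TREE)
TENSORIZED_F_PCA = tensap.TensorizedFunction(F_PCA, T)
X_TEST = T.X.random(100)
ERR_PCA = np.linalg.norm(np.ravel(FUN(X_TEST)) - np.ravel(TENSORIZED_F_PCA(X_TEST))) \
    / np.linalg.norm(np.ravel(FUN(X_TEST)))
print('Relative L2 test error (PCA) = %2.5e' % ERR_PCA)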
Example #7

import sys
import numpy as np
sys.path.insert(0, './../../../')
import tensap

# %% Approximation of a function F(X)
DIM = 6
FUN = tensap.UserDefinedFunction('x0 + x0*x1 + x0*x2**2 + x3**3 + x4 + x5',
                                 DIM)
FUN.evaluation_at_multiple_points = True

DEGREE = 3
X = tensap.RandomVector(tensap.NormalRandomVariable(0, 1), DIM)
P = tensap.PolynomialFunctionalBasis(tensap.HermitePolynomials(),
                                     range(DEGREE+1))
BASES = tensap.FunctionalBases.duplicate(P, DIM)
GRIDS = X.random(1000)

G = tensap.FullTensorGrid([np.reshape(GRIDS[:, i], [-1, 1]) for
                           i in range(DIM)])
H = tensap.FullTensorProductFunctionalBasis(BASES)

F, OUTPUT = H.tensor_product_interpolation(FUN, G)
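# Hedged addition (not in the original snippet): estimate the interpolation
# error on a random test sample, assuming the result F can be evaluated like
# a tensap Function.
X_TEST = X.random(1000)
ERR_L2 = np.linalg.norm(np.ravel(FUN(X_TEST)) - np.ravel(F(X_TEST))) \
    / np.linalg.norm(np.ravel(FUN(X_TEST)))
print('Relative L2 test error = %2.5e' % ERR_L2)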
Example #8
'''
Tutorial on multivariate functions, Tensor Grids, Projection.

'''

import matplotlib.pyplot as plt
import numpy as np
import tensap

# %% Scalar-valued UserDefinedFunction
d = 3
fun = tensap.UserDefinedFunction('x0+x1+x2**4', d)
fun.evaluation_at_multiple_points = True
x = np.random.rand(4, d)
print('fun.eval(x) = \n%s\n' % fun.eval(x))
print('fun(x) =      \n%s\n' % fun(x))

# %% Vector-valued UserDefinedFunction
f = tensap.UserDefinedFunction('[x0, 100*x1]', d, 2)
f.evaluation_at_multiple_points = False
print('With f.evaluation_at_multiple_points == False, f(x) = \n%s\n' % f(x))
f.evaluation_at_multiple_points = True
print('With f.evaluation_at_multiple_points == True, f(x) =  \n%s\n' % f(x))

f = tensap.UserDefinedFunction('[x1,100*x2]', d, [1, 2])
f.evaluation_at_multiple_points = False
print('With f.evaluation_at_multiple_points == False, f(x) = \n%s\n' % f(x))
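# Hedged addition mirroring the block above: the same vector-valued function
# evaluated with evaluation_at_multiple_points enabled.
f.evaluation_at_multiple_points = True
print('With f.evaluation_at_multiple_points == True, f(x) =  \n%s\n' % f(x))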