Example #1
    def shift(self, bias, scaling):
        '''
        Shift the normal random variable using the provided bias and scaling
        factor.

        The mean mu of the random variable becomes mu + bias, and its standard
        deviation sigma becomes sigma * scaling.

        Parameters
        ----------
        bias : float
            The bias.
        scaling : float
            The scaling factor.

        Returns
        -------
        RV : tensap.NormalRandomVariable
            The shifted normal random variable.

        '''
        shifted_rv = tensap.NormalRandomVariable(self.mu, self.sigma)
        shifted_rv.mu += bias
        shifted_rv.sigma *= scaling
        return shifted_rv
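A minimal usage sketch (not taken from the tensap documentation, relying only on the mu and sigma attributes shown above): shifting a standard normal variable with bias 2 and scaling 3 yields a normal variable with mean 2 and standard deviation 3.

import tensap

X = tensap.NormalRandomVariable(0, 1)
Y = X.shift(2, 3)
print(Y.mu, Y.sigma)  # expected: mean 2 and standard deviation 3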
Example #2
    @staticmethod
    def get_standard_random_variable():
        '''
        Return the standard normal random variable with mean 0 and standard
        deviation 1.

        Returns
        -------
        tensap.NormalRandomVariable
            The standard normal random variable.

        '''
        return tensap.NormalRandomVariable()
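A short usage sketch, assuming (as the missing self argument suggests) that the method is exposed as a static method of tensap.NormalRandomVariable; the call is equivalent to the default constructor.

import tensap

X = tensap.NormalRandomVariable.get_standard_random_variable()
print(X.mu, X.sigma)  # expected: 0 and 1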
Example #3
    def __init__(self, n=50):
        '''
        Constructor for the class HermitePolynomials.

        Parameters
        ----------
        n : int, optional
            The highest degree for which a polynomial can be computed with the
            stored recurrence coefficients. The default is 50.

        Returns
        -------
        None.

        '''
        self.measure = tensap.NormalRandomVariable(0, 1)

        self._recurrence_coefficients, self._orthogonal_polynomials_norms = \
            self._recurrence(n)
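A brief usage sketch (illustrative, using only calls that appear in the later examples): build the Hermite family and wrap it in a polynomial basis containing the degrees 0 to 5.

import tensap

H = tensap.HermitePolynomials(50)
BASIS = tensap.PolynomialFunctionalBasis(H, range(6))  # degrees 0 to 5
print(H.measure.mu, H.measure.sigma)  # underlying measure: N(0, 1)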
plt.plot(X_PLOT, FUN(X_PLOT), X_PLOT, F(X_PLOT))
plt.legend(('True function', 'Interpolation'))
plt.show()
N = 100
ERR_L2, ERR_L_INF = F.test_error(FUN, N, X)
print('Mean squared error = %2.5e' % ERR_L2)

# %% Projection on polynomial space through quadrature
def FUN(x):
    # Function to approximate
    return x**2/2


FUN = tensap.UserDefinedFunction(FUN, 1)
FUN.evaluation_at_multiple_points = True
X = tensap.NormalRandomVariable()

# Integration rule
I = X.gauss_integration_rule(5)

# Approximation basis
P = 3
H = tensap.PolynomialFunctionalBasis(X.orthonormal_polynomials(), range(P+1))

# Computation of the projection
F = H.projection(FUN, I)

# Displays and error
print('\nProjection on polynomial space through quadrature')
N = 100
ERR_L2, ERR_L_INF = F.test_error(FUN, N, X)
print('Mean squared error = %2.5e' % ERR_L2)
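# %% Illustrative cross-check with plain numpy (a sketch, not part of the
# tutorial): the projection coefficient on the i-th orthonormal Hermite
# polynomial is E[f(X) h_i(X)], approximated by a 5-point Gauss-Hermite rule.
from math import factorial
import numpy as np
from numpy.polynomial import hermite_e as He

X_Q, W_Q = He.hermegauss(5)       # quadrature rule for the weight exp(-x**2/2)
W_Q = W_Q / np.sqrt(2 * np.pi)    # normalize to the standard normal measure
F_Q = X_Q**2 / 2                  # values of the approximated function
for i in range(P + 1):
    H_I = He.hermeval(X_Q, [0] * i + [1]) / np.sqrt(factorial(i))
    print('Coefficient %i: %2.5e' % (i, np.sum(W_Q * F_Q * H_I)))
# Expected: 1/2 on h_0, 1/sqrt(2) on h_2, and (numerically) zero elsewhere.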
'''
Tutorial on learning in canonical tensor format.

'''

import tensap

# %% Function to approximate
CHOICE = 2
if CHOICE == 1:
    ORDER = 5
    X = tensap.RandomVector(tensap.NormalRandomVariable(), ORDER)

    def fun(x):
        return 1 / (10 + x[:, 0] + 0.5 * x[:, 1])**2
elif CHOICE == 2:
    fun, X = tensap.multivariate_functions_benchmark('borehole')
    ORDER = X.size

# %% Approximation basis
DEGREE = 8
BASES = [
    tensap.PolynomialFunctionalBasis(X_I.orthonormal_polynomials(),
                                     range(DEGREE + 1))
    for X_I in X.random_variables
]
BASES = tensap.FunctionalBases(BASES)
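# %% Illustrative continuation (a sketch, not part of the original tutorial):
# draw a training sample from the input random vector and evaluate the
# function on it, as would be done before fitting a low-rank approximation.
N_TRAIN = 1000
X_TRAIN = X.random(N_TRAIN)
Y_TRAIN = fun(X_TRAIN)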
Example #6
'''

import sys
import numpy as np
sys.path.insert(0, './../../../')
import tensap

# %% Approximation of a function F(X)
DIM = 6
FUN = tensap.UserDefinedFunction('x0 + x0*x1 + x0*x2**2 + x3**3 + x4 + x5',
                                 DIM)
FUN.evaluation_at_multiple_points = True

DEGREE = 3
X = tensap.RandomVector(tensap.NormalRandomVariable(0, 1), DIM)
P = tensap.PolynomialFunctionalBasis(tensap.HermitePolynomials(),
                                     range(DEGREE+1))
BASES = tensap.FunctionalBases.duplicate(P, DIM)
GRIDS = X.random(1000)

G = tensap.FullTensorGrid([np.reshape(GRIDS[:, i], [-1, 1])
                           for i in range(DIM)])
H = tensap.FullTensorProductFunctionalBasis(BASES)

F, OUTPUT = H.tensor_product_interpolation(FUN, G)

X_TEST = X.random(1000)
F_X_TEST = F(X_TEST)
Y_TEST = FUN(X_TEST)
ERR = np.linalg.norm(Y_TEST - F_X_TEST) / np.linalg.norm(Y_TEST)
print('Test error = %2.5e' % ERR)
Example #7
    @staticmethod
    def _recurrence(measure, n):
        '''
        Compute the coefficients of the three-term recurrence used to construct
        the empirical polynomials, and the norms of the polynomials.

        The three-term recurrence reads
        p_{n+1}(x) = (x - a_n) p_n(x) - b_n p_{n-1}(x),
        where a_n and b_n are the recurrence coefficients.

        Parameters
        ----------
        measure : tensap.EmpiricalRandomVariable
            The empirical random variable associated with the orthonormal
            polynomials.
        n : int
            The highest degree for which a polynomial can be computed with the
            returned recurrence coefficients.

        Returns
        -------
        recurr : numpy.ndarray
            The recurrence coefficients.
        norms : numpy.ndarray
            The norms of the polynomials.

        '''
        def is_orth(pnp1, pn, pnm1, weights):
            '''
            Determine whether the polynomial p_{n+1} is orthogonal to the
            polynomials p_n and p_{n-1} with respect to the discrete measure
            defined by the integration weights.

            Parameters
            ----------
            pnp1 : numpy.ndarray
                The evaluations of p_{n+1} at the integration points.
            pn : numpy.ndarray
                The evaluations of p_n at the integration points.
            pnm1 : numpy.ndarray
                The evaluations of p_{n-1} at the integration points.
            weights : numpy.ndarray
                The weights used for the numerical integration.

            Returns
            -------
            bool
                Boolean equal to True if the polynomial pnp1 is orthogonal to
                the polynomials pn and pnm1, False otherwise.

            '''
            tol = 1e-4  # Tolerance for the inner product to be considered as 0

            d1 = np.abs(np.sum(np.matmul(pnp1 * pn, weights)))
            d2 = np.abs(np.sum(np.matmul(pnp1 * pnm1, weights)))

            return d1 < tol and d2 < tol

        def peval(i, a, b, x):
            '''
            Evaluate the polynomial of degree i defined by the coefficients a
            and b at points x.

            Parameters
            ----------
            i : int
                The degree of the polynomial.
            a : numpy.ndarray
                The first recurrence coefficients.
            b : numpy.ndarray
                The second recurrence coefficients.
            x : numpy.ndarray
                The points of evaluation.

            Returns
            -------
            p_n_loc : numpy.ndarray
                The evaluations of the polynomial at points x.

            '''

            if i < 0:
                p_n_loc = np.zeros(x.shape)
            elif i == 0:
                p_n_loc = np.ones(x.shape)
            else:
                p_n_loc_m2 = 1
                p_n_loc_m1 = x - a[0]
                p_n_loc = np.array(p_n_loc_m1)
                for N in np.arange(2, i + 1):
                    p_n_loc = (x - a[N - 1]) * p_n_loc_m1 \
                        - b[N - 1] * p_n_loc_m2
                    p_n_loc_m2 = np.array(p_n_loc_m1)
                    p_n_loc_m1 = np.array(p_n_loc)
            return p_n_loc

        if n is None:
            check = True
            n = 10
        else:
            check = False

        norms = np.zeros(n + 2)
        a = np.zeros(n + 2)
        b = np.zeros(n + 2)

        G = tensap.NormalRandomVariable().gauss_integration_rule(
            int(np.ceil((2 * n + 3) / 2)))
        xi = measure.sample
        weights = G.weights / xi.size

        x_ij = measure.bandwidth * np.tile(np.reshape(G.points, [1, -1]),
                                           (xi.size, 1)) + \
            np.tile(np.reshape(xi, [-1, 1]), (1, G.points.size))

        i = 0
        cond = True
        norms[0] = 1
        a[0] = np.sum(np.matmul(x_ij, weights)) / xi.size
        b[0] = 0

        while cond and i <= n:
            i += 1

            p_n_m1 = np.reshape(peval(i - 1, a, b, x_ij), x_ij.shape)
            p_n = np.reshape(peval(i, a, b, x_ij), x_ij.shape)

            norms[i] = np.sum(np.matmul(p_n**2, weights))
            a[i] = np.sum(np.matmul(p_n * x_ij * p_n, weights)) / norms[i]
            b[i] = norms[i] / norms[i - 1]

            p_n_p1 = (x_ij - a[i]) * p_n - b[i] * p_n_m1

            if check:
                # Orthogonality condition, only if the number of polynomials
                # is not specified by the user
                cond = is_orth(p_n_p1, p_n, p_n_m1, weights)

        if not cond and i - 2 != n:
            raise ValueError('Maximum degree: %i (%i requested)' % (i - 2, n))

        return np.vstack((a[:i], b[:i])), np.sqrt(norms[:i])
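The three-term recurrence documented above can be checked by hand for the standard normal measure, for which a_n = 0 and b_n = n generate the monic probabilists' Hermite polynomials. A minimal numpy sketch (illustrative, independent of tensap):

import numpy as np
from numpy.polynomial import hermite_e as He

N_MAX = 5
X_Q, W_Q = He.hermegauss(N_MAX + 2)   # exact up to degree 2 * (N_MAX + 2) - 1
W_Q = W_Q / np.sqrt(2 * np.pi)        # weights of the standard normal measure

A = np.zeros(N_MAX)                   # a_n = 0 (the measure is symmetric)
B = np.arange(N_MAX, dtype=float)     # b_n = n
P = [np.ones_like(X_Q), X_Q - A[0]]
for k in range(1, N_MAX):
    P.append((X_Q - A[k]) * P[k] - B[k] * P[k - 1])

GRAM = np.array([[np.sum(W_Q * P_I * P_J) for P_J in P] for P_I in P])
print(np.round(GRAM, 8))              # diagonal ~ [0!, 1!, ..., 5!], rest ~ 0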
    Raises
    ------
    NotImplementedError
        If the requested function is not implemented.

    Returns
    -------
    function
        The requested function.
    tensap.RandomVector
        The input random variables.

    '''
    if case == 'borehole':
        X = np.empty(8, dtype=object)
        X[0] = tensap.NormalRandomVariable(0.1, 0.0161812)
        X[1] = tensap.NormalRandomVariable(0, 1)
        X[2] = tensap.UniformRandomVariable(63070, 115600)
        X[3] = tensap.UniformRandomVariable(990, 1110)
        X[4] = tensap.UniformRandomVariable(63.1, 116)
        X[5] = tensap.UniformRandomVariable(700, 820)
        X[6] = tensap.UniformRandomVariable(1120, 1680)
        X[7] = tensap.UniformRandomVariable(9855, 12045)
        X = tensap.RandomVector(X)

        def fun(x):
            return 2 * np.pi * x[:, 2] * (x[:, 3] - x[:, 5]) / \
                (np.log(np.exp(7.71 + 1.0056*x[:, 1]) / x[:, 0]) *
                 (1 + 2 * x[:, 6] * x[:, 2] /
                  np.log(np.exp(7.71 + 1.0056 * x[:, 1]) / x[:, 0]) /
                  x[:, 0]**2 / x[:, 7] + x[:, 2] / x[:, 4]))
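A minimal usage sketch for the 'borehole' case (illustrative, relying only on the calls shown in the examples above): the benchmark returns the function together with its 8-dimensional input random vector.

import tensap

FUN_B, X_B = tensap.multivariate_functions_benchmark('borehole')
X_SAMPLE = X_B.random(5)     # 5 realizations of the 8 input variables
print(FUN_B(X_SAMPLE))       # borehole response at each realization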