Example 1
    def fit(self, X, y, **fit_params):
        input_dimension = X.shape[1]
        if self.distribution is None:
            # infer the input distribution from the data
            self.distribution = ot.MetaModelAlgorithm.BuildDistribution(X)
        if self.enumerate == 'linear':
            enumerateFunction = ot.LinearEnumerateFunction(input_dimension)
        elif self.enumerate == 'hyperbolic':
            enumerateFunction = ot.HyperbolicAnisotropicEnumerateFunction(input_dimension, self.q)
        else:
            raise ValueError('enumerate should be "linear" or "hyperbolic"')
        polynomials = [ot.StandardDistributionPolynomialFactory(self.distribution.getMarginal(i)) for i in range(input_dimension)]
        productBasis = ot.OrthogonalProductPolynomialFactory(polynomials, enumerateFunction)
        # keep every basis term up to the requested total degree
        adaptiveStrategy = ot.FixedStrategy(
            productBasis, enumerateFunction.getStrataCumulatedCardinal(self.degree))
        if self.sparse:
            # sparse expansion: LARS selection validated by corrected leave-one-out
            projectionStrategy = ot.LeastSquaresStrategy(
                ot.LeastSquaresMetaModelSelectionFactory(ot.LARS(), ot.CorrectedLeaveOneOut()))
        else:
            projectionStrategy = ot.LeastSquaresStrategy(X, y.reshape(-1, 1))
        algo = ot.FunctionalChaosAlgorithm(
            X, y.reshape(-1, 1), self.distribution, adaptiveStrategy, projectionStrategy)
        algo.run()
        self._result = algo.getResult()
        output_dimension = self._result.getMetaModel().getOutputDimension()

        # sensitivity
        si = ot.FunctionalChaosSobolIndices(self._result)
        if output_dimension == 1:
            self.feature_importances_ = [si.getSobolIndex(i) for i in range(input_dimension)]
        else:
            # one independent row per output (a repeated list literal would alias the rows)
            self.feature_importances_ = [[0.0] * input_dimension for _ in range(output_dimension)]
            for k in range(output_dimension):
                for i in range(input_dimension):
                    self.feature_importances_[k][i] = si.getSobolIndex(i, k)
        self.feature_importances_ = np.array(self.feature_importances_)
        return self
Example 2
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()

print(
    'Default q :',
    ot.ResourceMap.GetAsScalar(
        'HyperbolicAnisotropicEnumerateFunction-DefaultQ'), '\n')

# check weight constructor
f = ot.HyperbolicAnisotropicEnumerateFunction((1., 2., 3., 4., 5.), 0.75)

# check inverse when the cache is empty
print("inverse([0,0,0,0,0])=", f.inverse([0, 0, 0, 0, 0]))
# fill the cache
for i in range(4):
    res = f(i)
# check inverse when the cache is filled
print("inverse([2,0,0,0,0])=", f.inverse([2, 0, 0, 0, 0]))

# verify consistency with LinearEnumerateFunction
size = 10
stratas = 5
for dimension in range(1, 4):
    f = ot.HyperbolicAnisotropicEnumerateFunction(dimension, 1.0)
    g = ot.LinearEnumerateFunction(dimension)
    print("First", size, "values for dimension", dimension)
    for index in range(size):
        if not f(index) == g(index):
            # with q=1.0 the hyperbolic rule should coincide with the linear one
            print("mismatch at index", index, ":", f(index), "!=", g(index))
Example 3
algo = ot.SimulatedAnnealingLHS(initialDesign, distribution,
                                temperatureProfile, space_filling)
# Retrieve optimal design
input_database = algo.generate()

result = algo.getResult()

print('initial design computed')

# Response of the model
print('sampling size = ', N)
output_database = ishigami_model(input_database)

# Learning input/output
# Usual chaos meta model
enumerate_function = ot.HyperbolicAnisotropicEnumerateFunction(dimension)
orthogonal_basis = ot.OrthogonalProductPolynomialFactory(
    dimension * [ot.LegendreFactory()], enumerate_function)
basis_size = 100
# Initial chaos algorithm
adaptive_strategy = ot.FixedStrategy(orthogonal_basis, basis_size)
# ProjectionStrategy ==> Sparse
fitting_algorithm = ot.KFold()
approximation_algorithm = ot.LeastSquaresMetaModelSelectionFactory(
    ot.LARS(), fitting_algorithm)
projection_strategy = ot.LeastSquaresStrategy(input_database, output_database,
                                              approximation_algorithm)
print('Surrogate model...')
distribution_ishigami = ot.ComposedDistribution(dimension *
                                                [ot.Uniform(-pi, pi)])
algo_pc = ot.FunctionalChaosAlgorithm(input_database, output_database,
                                      distribution_ishigami, adaptive_strategy,
                                      projection_strategy)
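
# The example is cut off here; a plausible continuation (a sketch, not taken from the
# original script) would run the chaos algorithm and retrieve the surrogate model:
algo_pc.run()
result_pc = algo_pc.getResult()
metamodel_pc = result_pc.getMetaModel()
print('surrogate model built, selected basis terms:', result_pc.getIndices().getSize())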
Example 4
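# This example starts mid-script; a plausible preamble, inferred from the usage below
# rather than copied from the original source, would be:
import openturns as ot
import matplotlib.pyplot as plt

nrows = 3  # one subplot row per value of q used below
# (plt.show() would typically follow the plotting loop to display the figure)
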
ncols = 4

# coordinates of grid
grid = ot.Box([5, 5], ot.Interval([0.0] * 2, [6.0] * 2))
sample = grid.generate()
grid_x = sample.getMarginal(0)
grid_y = sample.getMarginal(1)

#plt.rc('text', usetex=True)

q_values = [1.0, 0.75, 0.5]
fig = plt.figure()
index = 1
for i in range(nrows):
    q = q_values[i]
    enumerate = ot.HyperbolicAnisotropicEnumerateFunction(2, q)
    for j in range(ncols):
        ax = fig.add_subplot(nrows, ncols, index, aspect=1.0)
        ax.plot(grid_x, grid_y, 'xr')
        strataIndex = j + 3
        strata_x, strata_y = [], []
        strataCardinal = enumerate.getStrataCumulatedCardinal(strataIndex)
        for ii in range(strataCardinal):
            x = enumerate(ii)
            strata_x.append(x[0])
            strata_y.append(x[1])
        ax.plot(strata_x, strata_y, 'ob')
        ax.set_yticks([])
        #ax.set_title('$||x||_{'+str(q)+'} \leq '+str(strataIndex)+'$')
        ax.set_title('||x||_' + str(q) + ' <= ' + str(strataIndex))
        index += 1
Example 5
# %%
# Create the enumeration function.

# %%
# The first possibility is to use the `LinearEnumerateFunction`.

# %%
enumerateFunction = ot.LinearEnumerateFunction(inputDimension)
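
# %%
# As an illustration, print the first multi-indices of this rule: the linear
# enumeration lists them by increasing total degree.

# %%
for i in range(10):
    print(i, enumerateFunction(i))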

# %%
# Another possibility is to use the `HyperbolicAnisotropicEnumerateFunction`, which gives less weight to interactions.

# %%
q = 0.4
enumerateFunction_1 = ot.HyperbolicAnisotropicEnumerateFunction(
    inputDimension, q)
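
# %%
# As an illustration of the weaker weight given to interactions, print the first
# multi-indices of this rule: with q=0.4, multi-indices involving several variables
# at once appear at later ranks than with the linear enumeration.

# %%
for i in range(10):
    print(i, enumerateFunction_1(i))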

# %%
# Create the multivariate orthonormal basis, which is the Cartesian product of the univariate bases.

# %%
multivariateBasis = ot.OrthogonalProductPolynomialFactory(
    polyColl, enumerateFunction)
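
# %%
# As a quick check, an individual member of this basis can be built from its rank,
# e.g. the polynomial attached to the multi-index of rank 5.

# %%
multivariateBasis.build(5)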

# %%
# Ask how many polynomials have total degree equal to k=5.

# %%
k = 5
enumerateFunction.getStrataCardinal(k)
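
# %%
# The cumulated variant counts all polynomials of total degree at most k; this is the
# basis size passed to `FixedStrategy` in the chaos examples above.

# %%
enumerateFunction.getStrataCumulatedCardinal(k)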
Example 6
    def fit(self, X, y, **fit_params):
        """Fit PC regression model.

        Parameters
        ----------
        X : array-like, shape = (n_samples, n_features)
            Training data.
        y : array-like, shape = (n_samples, [n_output_dims])
            Target values.

        Returns
        -------
        self : returns an instance of self.

        """
        if len(X) == 0:
            raise ValueError(
                "Can not perform chaos expansion with empty sample")
        # check data type is accurate
        if (len(np.shape(X)) != 2):
            raise ValueError("X has incorrect shape.")
        input_dimension = np.shape(X)[1]
        if (len(np.shape(y)) != 2):
            raise ValueError("y has incorrect shape.")
        if self.distribution is None:
            self.distribution = ot.MetaModelAlgorithm.BuildDistribution(X)
        if self.enumeratef == 'linear':
            enumerateFunction = ot.LinearEnumerateFunction(input_dimension)
        elif self.enumeratef == 'hyperbolic':
            enumerateFunction = ot.HyperbolicAnisotropicEnumerateFunction(
                input_dimension, self.q)
        else:
            raise ValueError('enumeratef should be "linear" or "hyperbolic"')
        polynomials = [
            ot.StandardDistributionPolynomialFactory(
                self.distribution.getMarginal(i))
            for i in range(input_dimension)
        ]
        productBasis = ot.OrthogonalProductPolynomialFactory(
            polynomials, enumerateFunction)
        adaptiveStrategy = ot.FixedStrategy(
            productBasis,
            enumerateFunction.getStrataCumulatedCardinal(self.degree))
        if self.sparse:
            # Filter according to the sparse_fitting_algorithm key
            if self.sparse_fitting_algorithm == "cloo":
                fitting_algorithm = ot.CorrectedLeaveOneOut()
            else:
                fitting_algorithm = ot.KFold()
            # Define the corresponding projection strategy
            projectionStrategy = ot.LeastSquaresStrategy(
                ot.LeastSquaresMetaModelSelectionFactory(
                    ot.LARS(), fitting_algorithm))
        else:
            projectionStrategy = ot.LeastSquaresStrategy(X, y)
        algo = ot.FunctionalChaosAlgorithm(X, y, self.distribution,
                                           adaptiveStrategy,
                                           projectionStrategy)
        algo.run()
        self.result_ = algo.getResult()
        output_dimension = self.result_.getMetaModel().getOutputDimension()

        # sensitivity
        si = ot.FunctionalChaosSobolIndices(self.result_)
        if output_dimension == 1:
            self.feature_importances_ = [
                si.getSobolIndex(i) for i in range(input_dimension)
            ]
        else:
            # one independent row per output (a repeated list literal would alias the rows)
            self.feature_importances_ = [
                [0.0] * input_dimension for _ in range(output_dimension)
            ]
            for k in range(output_dimension):
                for i in range(input_dimension):
                    self.feature_importances_[k][i] = si.getSobolIndex(i, k)
        self.feature_importances_ = np.array(self.feature_importances_)
        return self
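
A minimal usage sketch of the estimator this fit method belongs to; the class name FunctionalChaosEstimator and its constructor arguments are assumptions inferred from the attributes used in fit, not taken from an actual package:

import numpy as np

# Hypothetical estimator exposing the fit method above; class name and constructor
# signature are assumed for illustration only.
est = FunctionalChaosEstimator(degree=3, enumeratef='hyperbolic', q=0.6,
                               sparse=True, sparse_fitting_algorithm='cloo')

# Toy data: 100 samples of 3 inputs and one output (both 2-d, as required by fit).
rng = np.random.RandomState(0)
X = rng.uniform(-1.0, 1.0, size=(100, 3))
y = (X[:, 0] + 2.0 * X[:, 1] * X[:, 2]).reshape(-1, 1)

est.fit(X, y)
print(est.feature_importances_)  # first-order Sobol index of each input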