Example #1
# assumed imports for this snippet; seq_lj_1d, admissible_neighbors,
# transform_multi_index_set and get_design_matrix are project-specific helpers
import numpy as np
import openturns as ot
import scipy.linalg as scl

def dali_pce(func,
             N,
             jpdf_cp,
             jpdf_ot,
             tol=1e-12,
             max_fcalls=1000,
             verbose=True,
             interp_dict={}):

    if not interp_dict:  # if dictionary is empty --> cold-start
        idx_act = []  # M_activated x N
        idx_adm = []  # M_admissible x N
        fevals_act = []  # M_activated x 1
        fevals_adm = []  # M_admissible x 1
        coeff_act = []  # M_activated x 1
        coeff_adm = []  #  M_admissible x 1

        # start with 0 multi-index
        knot0 = []
        for n in range(N):
            # get knots per dimension based on maximum index
            kk, ww = seq_lj_1d(order=0, dist=jpdf_cp[n])
            knot0.append(kk[0])
        feval = func(knot0)

        # update activated sets
        idx_act.append([0] * N)
        coeff_act.append(feval)
        fevals_act.append(feval)

        # local error indicators
        local_error_indicators = np.abs(coeff_act)

        # get the OT distribution type of each random variable
        dist_types = []
        for i in range(N):
            dist_type = jpdf_ot.getMarginal(i).getName()
            dist_types.append(dist_type)

        # create orthogonal univariate bases
        poly_collection = ot.PolynomialFamilyCollection(N)
        for i in range(N):
            if dist_types[i] == 'Uniform':
                poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
                    ot.LegendreFactory())
            elif dist_types[i] == 'Normal':
                poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
                    ot.HermiteFactory())
            elif dist_types[i] == 'Beta':
                poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
                    ot.JacobiFactory())
            elif dist_types[i] == 'Gamma':
                poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
                    ot.LaguerreFactory())
            else:
                pdf = jpdf_ot.getDistributionCollection()[i]
                algo = ot.AdaptiveStieltjesAlgorithm(pdf)
                poly_collection[i] = ot.StandardDistributionPolynomialFactory(
                    algo)

        # create multivariate basis
        mv_basis = ot.OrthogonalProductPolynomialFactory(
            poly_collection, ot.EnumerateFunction(N))
        # get enumerate function (multi-index handling)
        enum_func = mv_basis.getEnumerateFunction()

    else:
        idx_act = interp_dict['idx_act']
        idx_adm = interp_dict['idx_adm']
        coeff_act = interp_dict['coeff_act']
        coeff_adm = interp_dict['coeff_adm']
        fevals_act = interp_dict['fevals_act']
        fevals_adm = interp_dict['fevals_adm']
        mv_basis = interp_dict['mv_basis']
        enum_func = interp_dict['enum_func']
        # local error indicators
        local_error_indicators = np.abs(coeff_adm)

    # compute global error indicator
    global_error_indicator = local_error_indicators.sum()  # max or sum

    # fcalls / M approx. terms up to now
    fcalls = len(idx_act) + len(idx_adm)  # fcalls = M --> approx. terms

    # maximum index per dimension
    max_idx_per_dim = np.max(idx_act + idx_adm, axis=0)

    # univariate knots and polynomials per dimension
    knots_per_dim = {}
    for n in range(N):
        kk, ww = seq_lj_1d(order=max_idx_per_dim[n], dist=jpdf_cp[n])
        knots_per_dim[n] = kk

    # start iterations
    while global_error_indicator > tol and fcalls < max_fcalls:
        if verbose:
            print(fcalls)
            print(global_error_indicator)

        # the index added last to the activated set is the one to be refined
        last_act_idx = idx_act[-1][:]
        # compute the knot corresponding to the lastly added index
        last_knot = [
            knots_per_dim[n][i] for n, i in zip(range(N), last_act_idx)
        ]
        # get admissible neighbors of the lastly added index
        adm_neighbors = admissible_neighbors(last_act_idx, idx_act)

        for an in adm_neighbors:
            # update admissible index set
            idx_adm.append(an)
            # find which parameter/direction n (n=1,2,...,N) gets refined
            n_ref = np.argmin(
                [idx1 == idx2 for idx1, idx2 in zip(an, last_act_idx)])
            # sequence of 1d Leja nodes/weights for the given refinement
            knots_n, weights_n = seq_lj_1d(an[n_ref], jpdf_cp[int(n_ref)])

            # update max_idx_per_dim, knots_per_dim, if necessary
            if an[n_ref] > max_idx_per_dim[n_ref]:
                max_idx_per_dim[n_ref] = an[n_ref]
                knots_per_dim[n_ref] = knots_n

            # find new_knot and compute function on new_knot
            new_knot = last_knot[:]
            new_knot[n_ref] = knots_n[-1]
            feval = func(new_knot)
            fevals_adm.append(feval)
            fcalls += 1  # update function calls

        # create PCE basis
        idx_system = idx_act + idx_adm
        idx_system_single = transform_multi_index_set(idx_system, enum_func)
        system_basis = mv_basis.getSubBasis(idx_system_single)
        # get corresponding evaluations
        fevals_system = fevals_act + fevals_adm
        # multi-dimensional knots
        M = len(idx_system)  # number of expansion terms / equations
        knots_md = [[knots_per_dim[n][idx_system[m][n]] for m in range(M)]
                    for n in range(N)]
        knots_md = np.array(knots_md).T
        # design matrix
        D = get_design_matrix(system_basis, knots_md)
        # solve system of equations
        Q, R = scl.qr(D, mode='economic')
        c = Q.T.dot(fevals_system)
        coeff_system = scl.solve_triangular(R, c)

        # find the multi-index with the largest contribution, add it to idx_act
        # and delete it from idx_adm
        coeff_act = coeff_system[:len(idx_act)].tolist()
        coeff_adm = coeff_system[-len(idx_adm):].tolist()
        help_idx = np.argmax(np.abs(coeff_adm))
        idx_add = idx_adm.pop(help_idx)
        pce_coeff_add = coeff_adm.pop(help_idx)
        fevals_add = fevals_adm.pop(help_idx)
        idx_act.append(idx_add)
        coeff_act.append(pce_coeff_add)
        fevals_act.append(fevals_add)
        # re-compute coefficients of admissible multi-indices

        # local error indicators
        local_error_indicators = np.abs(coeff_adm)
        # compute global error indicator
        global_error_indicator = local_error_indicators.sum()  # max or sum

    # store expansion data in dictionary
    interp_dict = {}
    interp_dict['idx_act'] = idx_act
    interp_dict['idx_adm'] = idx_adm
    interp_dict['coeff_act'] = coeff_act
    interp_dict['coeff_adm'] = coeff_adm
    interp_dict['fevals_act'] = fevals_act
    interp_dict['fevals_adm'] = fevals_adm
    interp_dict['enum_func'] = enum_func
    interp_dict['mv_basis'] = mv_basis
    return interp_dict
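
# A minimal usage sketch (assumed setup, not part of the original snippet):
# the model and both joint distributions below are hypothetical; jpdf_cp is a
# chaospy joint distribution (consumed per dimension by seq_lj_1d) and
# jpdf_ot is the matching OpenTURNS joint distribution.
import chaospy as cp

N = 2
jpdf_cp = cp.J(cp.Uniform(-1, 1), cp.Uniform(-1, 1))
jpdf_ot = ot.ComposedDistribution([ot.Uniform(-1.0, 1.0)] * N)

def model(x):
    # black-box model evaluated at a single knot [x_1, ..., x_N]
    return x[0] ** 2 + 0.5 * x[1]

expansion = dali_pce(model, N, jpdf_cp, jpdf_ot, tol=1e-10, max_fcalls=200)
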
Example #2
def drawFamily(factory, degreeMax=5):
    # assumed setup: default palette, empty graph, plotting bounds and resolution
    colorList = ot.Drawable.BuildDefaultPalette(degreeMax)
    graphJacobi = ot.Graph("Orthogonal polynomials", "x", "y", True, "topright")
    xMinJacobi, xMaxJacobi, pointNumber = -1.0, 1.0, 101
    for i in range(degreeMax):
        graphJacobi_temp = factory.build(i).draw(xMinJacobi, xMaxJacobi,
                                                 pointNumber)
        graphJacobi_temp_draw = graphJacobi_temp.getDrawable(0)
        graphJacobi_temp_draw.setLegend("degree " + str(i))
        graphJacobi_temp_draw.setColor(colorList[i])
        graphJacobi.add(graphJacobi_temp_draw)
    return graphJacobi


# %%
# Draw the first five members of the Jacobi family.

# %%
# Create the Jacobi polynomial family using the default Jacobi.ANALYSIS
# parameter set.
alpha = 0.5
beta = 1.5
jacobiFamily = ot.JacobiFactory(alpha, beta)
graph = drawFamily(jacobiFamily)
view = viewer.View(graph)

# %%
laguerreFamily = ot.LaguerreFactory(2.75, 1)
graph = drawFamily(laguerreFamily)
view = viewer.View(graph)

# %%
graph = drawFamily(ot.HermiteFactory())
view = viewer.View(graph)
plt.show()
Example #3
dist_types = []
for i in range(N):
    dist_type = jpdf_ot.getMarginal(i).getName()
    dist_types.append(dist_type)

# create orthogonal univariate bases
poly_collection = ot.PolynomialFamilyCollection(N)
for i in range(N):
    if dist_types[i] == 'Uniform':
        poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
            ot.LegendreFactory())
    elif dist_types[i] == 'Normal':
        poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
            ot.HermiteFactory())
    elif dist_types[i] == 'Beta':
        poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
            ot.JacobiFactory())
    elif dist_types[i] == 'Gamma':
        poly_collection[i] = ot.OrthogonalUniVariatePolynomialFamily(
            ot.LaguerreFactory())
    else:
        pdf = jpdf_ot.getDistributionCollection()[i]
        algo = ot.AdaptiveStieltjesAlgorithm(pdf)
        poly_collection[i] = ot.StandardDistributionPolynomialFactory(algo)

# create multivariate basis
mv_basis = ot.OrthogonalProductPolynomialFactory(poly_collection,
                                                 ot.EnumerateFunction(N))
# get enumerate function (multi-index handling)
enum_func = mv_basis.getEnumerateFunction()

max_fcalls = np.linspace(100, 1000, 10).tolist()
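
# The enumerate function maps a linear basis index to its multi-index and back;
# a small illustrative sketch (the 2-dimensional case here is an assumption):
demo_enum = ot.LinearEnumerateFunction(2)
multi_index = demo_enum(4)                     # multi-index of the 5th basis term
linear_index = demo_enum.inverse(multi_index)  # recovers the linear index 4
print(multi_index, linear_index)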
Example #4
# We could also use the automatic selection of the polynomial which corresponds to the distribution: this is done with the `StandardDistributionPolynomialFactory` class.

# %%
for i in range(inputDimension):
    marginal = distribution.getMarginal(i)
    polyColl[i] = ot.StandardDistributionPolynomialFactory(marginal)

# %%
# In our case, we specify the polynomial factories explicitly.

# %%
polyColl[0] = ot.HermiteFactory()
polyColl[1] = ot.LegendreFactory()
polyColl[2] = ot.LaguerreFactory(2.75)
# Parameter for the Jacobi factory: 'Probability' encoded with 1
polyColl[3] = ot.JacobiFactory(2.5, 3.5, 1)

# %%
# Create the enumeration function.

# %%
# The first possibility is to use the `LinearEnumerateFunction`.

# %%
enumerateFunction = ot.LinearEnumerateFunction(inputDimension)

# %%
# Another possibility is to use the `HyperbolicAnisotropicEnumerateFunction`, which gives less weight to interactions.

# %%
q = 0.4
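
# %%
# With this quasi-norm, the hyperbolic enumerate function can then be built
# (a sketch; it assumes the same `inputDimension` as above).

# %%
enumerateFunction = ot.HyperbolicAnisotropicEnumerateFunction(inputDimension, q)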
Example #5
import openturns as ot
import numpy as np
from matplotlib import pyplot as plt
n_functions = 8
function_factory = ot.JacobiFactory()
if function_factory.getClassName() == 'KrawtchoukFactory':
    function_factory = ot.JacobiFactory(n_functions, .5)
functions = [function_factory.build(i) for i in range(n_functions)]
measure = function_factory.getMeasure()
if hasattr(measure, 'getA') and hasattr(measure, 'getB'):
    x_min = measure.getA()
    x_max = measure.getB()
else:
    x_min = measure.computeQuantile(1e-3)[0]
    x_max = measure.computeQuantile(1. - 1e-3)[0]
n_points = 200
meshed_support = np.linspace(x_min, x_max, n_points)
fig = plt.figure()
ax = fig.add_subplot(111)
for i in range(n_functions):
    plt.plot(meshed_support, [functions[i](x) for x in meshed_support],
             lw=1.5,
             label=r'$\phi_{' + str(i) + '}(x)$')
plt.xlabel('$x$')
plt.ylabel(r'$\phi_i(x)$')
plt.xlim(x_min, x_max)
plt.grid()
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width, box.height * 0.9])
plt.legend(loc='upper center', bbox_to_anchor=(.5, 1.25), ncol=4)