def _extractPDFforMomentEstimation(self, U, T):
    dists = U.getDistributions()
    vol = 1.
    # check if importance sampling has been used for some parameters
    for i, trans in enumerate(T.getTransformations()):
        # if so, replace the corresponding marginal by a uniform distribution
        if isinstance(trans, InverseCDFTransformation):
            dists[i] = Uniform(0, 1)
        else:
            vol *= trans.vol()
    return vol, J(dists)
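A hedged sketch of how the returned pair can enter a Monte Carlo moment estimate, assuming T is linear with Jacobian volume vol so that E[f] = vol * integral of f(T(u)) * pdf(T(u)) over the unit cube. The model f is a placeholder, and getSize/unitToProbabilistic are assumed from the transformation API; this mirrors the structure of the estimation strategies, not their actual implementation.

import numpy as np

# Sketch only: Monte Carlo moment estimate with the extracted density.
# `f` is a placeholder model; `T` maps the unit cube to the parameter domain.
vol, pdf = self._extractPDFforMomentEstimation(U, T)
us = np.random.rand(10000, T.getSize())  # uniform samples in [0, 1]^d
xs = [T.unitToProbabilistic(u) for u in us]
mean = vol * np.mean([f(x) * pdf.pdf(x) for x in xs])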
Example #2
    def testChangeBandwidths(self):
        # dimension of domain
        d = 1

        dist = J([Normal(0.5, 0.1, 0, 1)] * d)

        # estimate a kernel density
        samples = DataMatrix(dist.rvs(1000))
        kde = KernelDensityEstimator(samples)

        bandwidths = DataVector(d)
        kde.getBandwidths(bandwidths)

        # candidate bandwidths: log-spaced values from about 2e-3 to 2.2
        hs = np.logspace(9e-4, 5e-1, 10, True, 10) - 1

        #         fig = plt.figure()
        #         x = np.linspace(0, 1, 1000)
        #         for h in hs:
        #             bandwidths[0] = h
        #             kde.setBandwidths(bandwidths)
        #             y = [kde.pdf(DataVector([xi])) for xi in x]
        #             plt.plot(x, y, label="h=%g" % h)
        #
        #         plt.legend()
        #         fig.show()

        fig = plt.figure()
        sample = DataVector(kde.getDim())
        skipElements = IndexVector(1)
        yhs = np.ndarray(len(hs))
        for k, h in enumerate(hs):
            bandwidths[0] = h
            kde.setBandwidths(bandwidths)
            x = np.ndarray(kde.getNsamples())
            values = np.ndarray(kde.getNsamples())
            # leave-one-out: evaluate the density at each sample while
            # excluding that sample from the estimator
            for i in range(kde.getNsamples()):
                skipElements[0] = i
                kde.getSample(i, sample)
                values[i] = -np.log(kde.evalSubset(sample, skipElements))
                x[i] = sample[0]
            # mean negative log-likelihood for this bandwidth
            yhs[k] = np.mean(values)

            # sort x values
            ixs = np.argsort(x)
            plt.plot(x[ixs], values[ixs], label="h=%g" % h)
        plt.legend()
        fig.show()

        fig = plt.figure()
        plt.plot(hs, yhs)
        fig.show()

        plt.show()
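Since yhs stores the mean leave-one-out negative log-likelihood per bandwidth, a natural follow-up (a sketch, not part of the test) is to select the minimizer:

        # pick the bandwidth with the smallest mean negative log-likelihood
        best = np.argmin(yhs)
        bandwidths[0] = hs[best]
        kde.setBandwidths(bandwidths)
        print("selected bandwidth h = %g" % hs[best])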
Example #3
def example8(dist_type="uniform"):
    operation = pysgpp.CombigridOperation.createExpClenshawCurtisPolynomialInterpolation(
        d, func)

    config = pysgpp.OrthogonalPolynomialBasis1DConfiguration()

    if dist_type == "beta":
        config.polyParameters.type_ = pysgpp.OrthogonalPolynomialBasisType_JACOBI
        config.polyParameters.alpha_ = 5
        config.polyParameters.beta_ = 4

        U = J(
            [Beta(config.polyParameters.alpha_, config.polyParameters.beta_)] *
            d)
    else:
        config.polyParameters.type_ = pysgpp.OrthogonalPolynomialBasisType_LEGENDRE
        U = J([Uniform(0, 1)] * d)

    basisFunction = pysgpp.OrthogonalPolynomialBasis1D(config)
    basisFunctions = pysgpp.OrthogonalPolynomialBasis1DVector(d, basisFunction)

    q = 3
    operation.getLevelManager().addRegularLevels(q)
    print("Total function evaluations: %i" % operation.numGridPoints())
    ## compute mean and variance of the interpolant via a PCE surrogate

    surrogateConfig = pysgpp.CombigridSurrogateModelConfiguration()
    surrogateConfig.type = pysgpp.CombigridSurrogateModelsType_POLYNOMIAL_CHAOS_EXPANSION
    surrogateConfig.loadFromCombigridOperation(operation)
    surrogateConfig.basisFunction = basisFunction
    pce = pysgpp.createCombigridSurrogateModel(surrogateConfig)

    n = 10000
    # Monte Carlo reference values for the surrogate moments
    values = [g(pysgpp.DataVector(xi)) for xi in U.rvs(n)]
    print("E(u)   = %g ~ %g" % (np.mean(values), pce.mean()))
    print("Var(u) = %g ~ %g" % (np.var(values), pce.variance()))
Example #4
def _extractPDFforMomentEstimation(self, U, T):
    dists = []
    jointTrans = []
    vol = 1.
    # check if importance sampling has been used for some parameters
    for i, trans in enumerate(T.getTransformations()):
        # if so, replace each dimension of the Rosenblatt block by a
        # uniform distribution with the identity as transformation
        if isinstance(trans, RosenblattTransformation):
            for _ in range(trans.getSize()):
                dists.append(Uniform(0, 1))
                jointTrans.append(LinearTransformation(0.0, 1.0))
        else:
            vol *= trans.vol()
            dists.append(U.getDistributions()[i])
            jointTrans.append(trans)
    return vol, J(dists), jointTrans
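The returned jointTrans list can be wrapped back into a JointTransformation (construction pattern borrowed from the test in Example #6); a hedged sketch:

# Sketch: rebuild a joint transformation from the per-dimension list
vol, pdf, transList = self._extractPDFforMomentEstimation(U, T)
trans = JointTransformation()
for t in transList:
    trans.add(t)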
Example #5
    def __extractDiscretePDFforMomentEstimation(self, U, T):
        dists = U.getDistributions()
        vol = 1.
        err = 0.
        # check if importance sampling has been used for some parameters
        for i, trans in enumerate(T.getTransformations()):
            # if so, discretize a uniform density instead of the marginal
            if isinstance(trans, InverseCDFTransformation):
                grid, alpha, erri = Uniform(0, 1).discretize(level=2)
            else:
                vol *= trans.vol()
                grid, alpha, erri = dists[i].discretize(level=10)

            # replace the marginal by its sparse grid density estimate
            dists[i] = SGDEdist.fromSGFunction(grid, alpha)
            err += erri
        return vol, J(dists), err
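For intuition, the same discretization can be run on a single marginal; discretize and SGDEdist.fromSGFunction are used exactly as above, the evaluation point is arbitrary, and the pdf call signature is assumed to match the other Dist classes (sketch):

        # Sketch: discretize one marginal and evaluate the SGDE density
        grid, alpha, err = Uniform(0, 1).discretize(level=2)
        sgde = SGDEdist.fromSGFunction(grid, alpha)
        print("pdf(0.5) = %g, discretization error = %g" % (sgde.pdf([0.5]), err))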
Example #6
    def testMarginalEstimationStrategy(self):
        xlim = np.array([[-1, 1], [-1, 1]])
        trans = JointTransformation()
        dists = []
        for idim in range(xlim.shape[0]):
            trans.add(LinearTransformation(xlim[idim, 0], xlim[idim, 1]))
            dists.append(Uniform(xlim[idim, 0], xlim[idim, 1]))
        dist = J(dists)

        # product of the per-dimension parabolas (1 - xi^2)
        def f(x):
            return np.prod([(1 + xi) * (1 - xi) for xi in x])

        # closed form used for the exact mean: q = (F(1) - F(-1))^2 = 4/9
        def F(x):
            return 1. - x**3 / 3.

        grid, alpha_vec = interpolate(f,
                                      1,
                                      2,
                                      gridType=GridType_Poly,
                                      deg=2,
                                      trans=trans)
        alpha = alpha_vec.array()

        q = (F(1) - F(-1))**2
        q1 = doQuadrature(grid, alpha)
        q2 = AnalyticEstimationStrategy().mean(grid, alpha, dist,
                                               trans)["value"]

        self.assertTrue(abs(q - q1) < 1e-10)
        self.assertTrue(abs(q - q2) < 1e-10)

        ngrid, nalpha, _ = MarginalAnalyticEstimationStrategy().mean(
            grid, alpha, dist, trans, [[0]])

        self.assertTrue(abs(nalpha[0] - 2. / 3.) < 1e-10)

        plotSG3d(grid, alpha)
        plt.figure()
        plotSG1d(ngrid, nalpha)
        plt.show()
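For reference, the exact value the assertions compare against can be reproduced with a quick numpy integral (sketch):

        # E[f] over U([-1, 1]^2): (integral of (1 - x^2) / 2)^2 = (2/3)^2 = 4/9
        x = np.linspace(-1, 1, 10001)
        m1 = np.trapz((1 - x**2) / 2., x)  # ~ 2/3 per dimension
        print(m1**2)                       # ~ 0.4444 = 4/9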
Example #7
                        help="minimum level of regular grids")
    parser.add_argument('--marginalType',
                        default="beta",
                        type=str,
                        help="marginals")
    args = parser.parse_args()

    if args.marginalType == "uniform":
        marginal = Uniform(0, 1)
    elif args.marginalType == "beta":
        marginal = Beta(5, 10)
    else:
        marginal = Normal(0.5, 0.1, 0, 1)

    # plot pdf
    dist = J([marginal] * numDims)
    fig = plt.figure()
    plotDensity2d(dist)
    savefig(fig, "/tmp/%s" % (args.marginalType, ))
    plt.close(fig)

    w = pysgpp.singleFunc(marginal.pdf)

    grids = pysgpp.AbstractPointHierarchyVector()
    grids.push_back(pysgpp.CombiHierarchies.linearLeja(w))
    grids.push_back(pysgpp.CombiHierarchies.linearLeja(w))

    evaluators = pysgpp.FloatScalarAbstractLinearEvaluatorVector()
    evaluators.push_back(pysgpp.CombiEvaluators.polynomialInterpolation())
    evaluators.push_back(pysgpp.CombiEvaluators.polynomialInterpolation())
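The snippet stops before the hierarchies and evaluators are combined; one plausible continuation following the usual combigrid construction pattern (the level manager choice and func are assumptions):

    # Sketch: combine grids and evaluators into a combigrid operation
    levelManager = pysgpp.WeightedRatioLevelManager()
    operation = pysgpp.CombigridOperation(grids, evaluators, levelManager, func)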
Example #8
def getIndependentJointDistribution(self):
    """
    Creates a multivariate distribution whose marginal distributions
    are given by the uncertain parameter definitions
    """
    return J(self.getDistributions())
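A hedged usage sketch, assuming params is a parameter set exposing this method:

# Sketch: draw samples from the independent joint distribution
U = params.getIndependentJointDistribution()
samples = U.rvs(100)       # 100 joint samples
print(U.pdf(samples[0]))   # density at the first sample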
Example #9
# Copyright (C) 2008-today The SG++ project
# This file is part of the SG++ project. For conditions of distribution and
# use, please see the copyright notice provided with SG++ or at
# sgpp.sparsegrids.org

import numpy as np
import matplotlib.pyplot as plt

from pysgpp import DataVector, Grid, createOperationHierarchisation, createOperationEval
from pysgpp.extensions.datadriven.uq.operations import hierarchize
from pysgpp.extensions.datadriven.uq.plot import plotFunction3d, plotSG3d
from pysgpp.extensions.datadriven.uq.dists import Normal, J
from pysgpp.extensions.datadriven.uq.operations.sparse_grid import evalSGFunction

U = J([Normal.by_alpha(0.5, 0.05, 0.001),
       Normal.by_alpha(0.5, 0.05, 0.001)])

grid = Grid.createPolyGrid(2, 2)
grid.getGenerator().regular(3)
gs = grid.getStorage()

# evaluate the joint density at the grid points, then hierarchize
nodalValues = np.ndarray(gs.getSize())
p = DataVector(2)
for i in range(gs.getSize()):
    gs.getCoordinates(gs.getPoint(i), p)
    nodalValues[i] = U.pdf(p.array())

alpha = hierarchize(grid, nodalValues)


fig, _, _ = plotFunction3d(U.pdf)
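Since plotSG3d is already imported, the hierarchized interpolant can be drawn next to the analytic density for a visual check (sketch):

# Sketch: compare the sparse grid interpolant with the analytic pdf
plotSG3d(grid, alpha)
plt.show()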
Example #10
candidateSearchAlgorithm = MakePositiveCandidateSearchAlgorithm_IntersectionsJoin
# candidateSearchAlgorithm = MakePositiveCandidateSearchAlgorithm_HybridFullIntersections
# interpolationAlgorithm = MakePositiveInterpolationAlgorithm_InterpolateBoundaries1d
interpolationAlgorithm = MakePositiveInterpolationAlgorithm_SetToZero
plot = True
verbose = True
code = "c++"

gridConfig.dim_ = numDims
gridConfig.level_ = level
gridConfig.maxDegree_ = level + 1

# parameters for the multivariate normal alternative (commented out below)
mu = np.ones(numDims) * 0.5
cov = np.diag(np.ones(numDims) * 0.1 / 10.)

dist = J([Normal(0.5, 1. / 16., 0, 1)] * numDims)
# dist = MultivariateNormal(mu, cov, 0, 1)  # problems in 3d/l2
# dist = J([Beta(5, 4, 0, 1)] * numDims)  # problems in 5d/l3
# dist = J([Lognormal(0.2, 0.7, 0, 1)] * numDims)  # problems in 5d/l3

trainSamples = dist.rvs(1000)
testSamples = dist.rvs(1000)

# plot analytic density
if numDims == 2 and plot:
    fig = plt.figure()
    plotDensity2d(dist)
    # baseline check: KL divergence of the analytic density with itself (~ 0)
    plt.title("analytic, kldivergence = %g" %
              dist.klDivergence(dist, testSamples))
    fig.show()
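The same klDivergence call can compare the analytic density against an estimated one; as a numpy-only illustration of the quantity it measures, here is a Monte Carlo estimate of KL(p || q) from samples of p (approx_pdf is a hypothetical stand-in for an estimated density):

# Sketch: Monte Carlo KL divergence KL(p || q) from samples of p;
# approx_pdf is a hypothetical estimated density
def kl_estimate(p_pdf, q_pdf, samples):
    ps = np.array([p_pdf(x) for x in samples])
    qs = np.array([q_pdf(x) for x in samples])
    return np.mean(np.log(ps / qs))

print(kl_estimate(dist.pdf, approx_pdf, testSamples))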