Esempio n. 1
0
    def _computePValuesAsymptotic(self):
        """Fill ``self.PValuesAsymptotic`` with one asymptotic p-value per input marginal.

        The null distribution of each HSIC index is approximated by a Gamma
        distribution whose shape/scale are matched to the first two moments
        of HSIC under the independence hypothesis.
        """
        weights = self._computeWeightMatrix(self.Y)

        self.PValuesAsymptotic = ot.Point()

        # Centering matrix H = I - (1/n) * ones(n, n)
        centering = np.eye(self.n) - 1 / self.n * np.ones((self.n, self.n))
        gram_y = np.array(self.CovY.discretize(self.Y))

        # Off-diagonal copy of the output Gram matrix
        gram_y_offdiag = copy.deepcopy(gram_y)
        np.fill_diagonal(gram_y_offdiag, 0.)

        # Mean of the off-diagonal entries of the output Gram matrix
        mean_ky = 1 / self.n / (self.n - 1) * np.sum(gram_y_offdiag)
        centered_y = centering @ gram_y @ centering

        for marginal in range(self.d):
            hsic_obs = self._computeHSICIndex(self.X[:, marginal], self.Y,
                                              self.CovX[marginal], self.CovY,
                                              weights)

            gram_x = np.array(
                self.CovX[marginal].discretize(self.X[:, marginal]))

            gram_x_offdiag = copy.deepcopy(gram_x)
            np.fill_diagonal(gram_x_offdiag, 0.)

            mean_kx = 1 / self.n / (self.n - 1) * np.sum(gram_x_offdiag)

            centered_x = centering @ gram_x @ centering
            # Squared element-wise product of the centered Gram matrices,
            # diagonal excluded — feeds the variance estimate below.
            prod = np.multiply(centered_x, centered_y)
            prod = prod**2
            np.fill_diagonal(prod, 0.)

            # First two moments of HSIC under independence
            mean_h0 = 1 / self.n * (1 + mean_kx * mean_ky - mean_kx - mean_ky)
            var_h0 = (2 * (self.n - 4) * (self.n - 5) / self.n /
                      (self.n - 1) / (self.n - 2) / (self.n - 3) * np.ones(
                          (1, self.n)) @ prod @ np.ones(
                              (self.n, 1)) / self.n / (self.n - 1))
            var_h0 = var_h0[0, 0]

            # Moment-matched Gamma parameters
            shape = mean_h0**2 / var_h0
            scale = self.n * var_h0 / mean_h0

            gamma_dist = ot.Gamma(shape, 1 / scale)
            p = self.HSICstat._computePValue(gamma_dist, self.n, hsic_obs,
                                             mean_h0)

            self.PValuesAsymptotic.add(p)
        return 0
Esempio n. 2
0
import openturns.viewer as viewer
from matplotlib import pylab as plt
# NOTE(review): `ot` is used below but imported above this excerpt.
ot.Log.Show(ot.Log.NONE)

# %%
# Create a function R^n --> R^p
# For example R^4 --> R
myModel = ot.SymbolicFunction(['x1', 'x2', 'x3', 'x4'],
                              ['1+x1*x2 + 2*x3^2+x4^4'])

# Create a distribution of dimension n
# for example n=4 with independent components
distribution = ot.ComposedDistribution([
    ot.Normal(),
    ot.Uniform(),
    ot.Gamma(2.75, 1.0),
    ot.Beta(2.5, 1.0, -1.0, 2.0)
])

# %%
# Prepare the input/output samples
sampleSize = 250
X = distribution.getSample(sampleSize)
Y = myModel(X)
dimension = X.getDimension()

# %%
# build the orthogonal basis: one polynomial family per input marginal
coll = [
    ot.StandardDistributionPolynomialFactory(distribution.getMarginal(i))
    for i in range(dimension)
Esempio n. 3
0
import openturns as ot
from openturns.viewer import View

# Compare a fixed-bandwidth kernel-smoothing estimate against the
# Gamma(6, 1) distribution the sample was drawn from.
reference = ot.Gamma(6.0, 1.0)
data = reference.getSample(100)
smoother = ot.KernelSmoothing()
estimate = smoother.build(data, [0.9])

plot = estimate.drawPDF()
plot.add(reference.drawPDF())
plot.setColors(ot.Drawable.BuildDefaultPalette(2))
plot.setLegends(['KS dist', 'Gamma'])
View(plot)
# View(plot, figure_kw={'figsize': (8, 4)})
Esempio n. 4
0
    # Histogram tests: fit a 10-bin histogram to a normal sample and plot it
    normal = ot.Normal(1)
    size = 100
    sample = normal.getSample(size)
    graph = ot.HistogramFactory().build(sample, 10).drawPDF()
    # graph.draw('curve3.png')
    view = View(graph)
    # view.save('curve3.png')
    view.show()

    # QQPlot tests: compare a normal sample against a Gamma sample
    size = 100
    normal = ot.Normal(1)
    sample = normal.getSample(size)
    sample2 = ot.Gamma(3.0, 4.0, 0.0).getSample(size)
    graph = ot.VisualTest.DrawQQplot(sample, sample2, 100)
    # graph.draw('curve4.png')
    view = View(graph)
    # view.save('curve4.png')
    view.ShowAll(block=True)

    # Clouds tests: correlated 2-d normal samples
    dimension = 2
    R = ot.CorrelationMatrix(dimension)
    R[0, 1] = 0.8
    distribution = ot.Normal(ot.Point(dimension, 3.0),
                             ot.Point(dimension, 2.0), R)
    size = 100
    sample1 = ot.Normal([3.0] * dimension, [2.0] * dimension,
                        R).getSample(size)
Esempio n. 5
0
# Observed durations with a boolean flag per observation
# (presumably failure vs. censoring indicators — confirm against the model).
Tobs = np.array([4380, 1791, 1611, 1291, 6132, 5694, 5296, 4818, 4818, 4380])
fail = np.array([True] * 4 + [False] * 6)
x = ot.Sample(np.vstack((Tobs, fail)).T)

# %%
# Define a uniform prior distribution for :math:`\alpha` and a Gamma prior distribution for :math:`\beta`.
#

# %%

alpha_min, alpha_max = 0.5, 3.8
a_beta, b_beta = 2, 2e-4

priorCopula = ot.IndependentCopula(2)  # prior independence
priorMarginals = []  # prior marginals
priorMarginals.append(ot.Gamma(a_beta, b_beta))  # Gamma prior for beta
priorMarginals.append(ot.Uniform(alpha_min,
                                 alpha_max))  # uniform prior for alpha
prior = ot.ComposedDistribution(priorMarginals, priorCopula)
prior.setDescription(['beta', 'alpha'])

# %%
# We select prior means as the initial point of the Metropolis-Hastings algorithm.
#

# %%

# NOTE(review): 0.5 * (alpha_max - alpha_min) is the half-range of the
# uniform prior, not its mean 0.5 * (alpha_min + alpha_max) — confirm
# which starting point is intended given the comment above.
initialState = [a_beta / b_beta, 0.5 * (alpha_max - alpha_min)]

# %%
# For our random walk proposal distributions, we choose normal steps, with standard deviation equal to roughly :math:`10\%` of the prior range (for the uniform prior) or standard deviation (for the normal prior).
Esempio n. 6
0
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

# Exercise GammaFactory: fit Gamma distributions to samples drawn from
# known Gamma references and check the various build() overloads.
ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

sampleSize = 10000
reference = ot.Gamma(0.2, 1.0, 1.0)
data = reference.getSample(sampleSize)
factory = ot.GammaFactory()

# Fit with a small shape parameter (k = 0.2).
estimated = factory.build(data)
print("distribution=", repr(reference))
print("Estimated distribution=", repr(estimated))

# Fit with a larger shape parameter (k = 2.3).
reference = ot.Gamma(2.3, 1.0, 1.0)
data = reference.getSample(sampleSize)
estimated = factory.build(data)
print("distribution=", repr(reference))
print("Estimated distribution=", repr(estimated))

# Same fit again, printed via str() instead of repr().
reference = ot.Gamma(2.3, 1.0, 1.0)
data = reference.getSample(sampleSize)
estimated = factory.build(data)
print("Distribution          =", reference)
print("Estimated distribution=", estimated)

# Default build (no data) and build from a parameter vector.
estimated = factory.build()
print("Default distribution=", estimated)
estimated = factory.build(reference.getParameter())
print("Distribution from parameters=", estimated)

# Typed variant returning a Gamma object.
estimatedGamma = factory.buildAsGamma(data)
print("Gamma          =", reference)
# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create a function R^n --> R^p
# For example R^4 --> R
myModel = ot.SymbolicFunction(['x1', 'x2', 'x3', 'x4'], ['1+x1*x2 + 2*x3^2+x4^4'])

# Create a distribution of dimension n
# for example n=4 with independent components
distribution = ot.ComposedDistribution(
    [ot.Normal(), ot.Uniform(), ot.Gamma(2.75, 1.0), ot.Beta(2.5, 1.0, -1.0, 2.0)])

# %%
# Prepare the input/output samples
sampleSize = 250
X = distribution.getSample(sampleSize)
Y = myModel(X)
dimension = X.getDimension()

# %%
# build the orthogonal basis: one polynomial family per input marginal,
# combined through a linear enumeration of the multi-indices
coll = [ot.StandardDistributionPolynomialFactory(distribution.getMarginal(i)) for i in range(dimension)]
enumerateFunction = ot.LinearEnumerateFunction(dimension)
productBasis = ot.OrthogonalProductPolynomialFactory(coll, enumerateFunction)

# %%
Esempio n. 8
0
import openturns as ot
from math import fabs
import openturns.testing as ott

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

# Collections that partition the test distributions by support type.
continuousDistributionCollection = ot.DistributionCollection()
discreteDistributionCollection = ot.DistributionCollection()
distributionCollection = ot.DistributionCollection()

# Continuous test distributions; each one is registered in both the
# global collection and the continuous collection, in the same order.
beta = ot.Beta(2.0, 1.0, 0.0, 1.0)
gamma = ot.Gamma(1.0, 2.0, 3.0)
gumbel = ot.Gumbel(1.0, 2.0)
lognormal = ot.LogNormal(1.0, 1.0, 2.0)
logistic = ot.Logistic(1.0, 1.0)
for candidate in (beta, gamma, gumbel, lognormal, logistic):
    distributionCollection.add(candidate)
    continuousDistributionCollection.add(candidate)
Esempio n. 9
0
ot.RandomGenerator.SetSeed(0)
ot.ResourceMap.SetAsUnsignedInteger("RandomMixture-DefaultMaxSize", 4000000)
# Deactivate the simplification mechanism as we want to test the Poisson formula
# based algorithm here
ot.ResourceMap.SetAsBool("RandomMixture-SimplifyAtoms", False)

# Create a collection of test-cases and the associated references:
# each list of atoms sums to a known closed-form distribution.
numberOfTests = 3
testCases = list()
references = list()
# Sum of two Uniform(-1, 3) variables -> Triangular(-2, 2, 6)
testCases.append([ot.Uniform(-1.0, 3.0)] * 2)
references.append(ot.Triangular(-2.0, 2.0, 6.0))
# Sum of three independent normals -> Normal(-1, 3)
testCases.append([ot.Normal(), ot.Normal(1.0, 2.0), ot.Normal(-2.0, 2.0)])
references.append(ot.Normal(-1.0, 3.0))
# Sum of three unit exponentials -> Gamma(3, 1, 0)
testCases.append([ot.Exponential()] * 3)
references.append(ot.Gamma(3.0, 1.0, 0.0))
print("testCases=", testCases)
print("references=", references)
for testIndex in range(len(testCases)):
    # Instantiate one distribution object
    distribution = ot.RandomMixture(testCases[testIndex])
    distribution.setBlockMin(5)
    distribution.setBlockMax(20)
    distributionReference = references[testIndex]
    print("Distribution ", repr(distribution))
    print("Distribution ", distribution)

    # Is this distribution elliptical ?
    print("Elliptical = ", distribution.isElliptical())

    # Is this distribution continuous ?
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
# Class-name dispatch (looks template-generated): ot.Gamma's class name is
# 'Gamma', so the branches below never match and the else branch is taken.
if ot.Gamma().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.Gamma().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.Gamma().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    # Branch actually taken for Gamma
    distribution = ot.Gamma()
dimension = distribution.getDimension()
if dimension == 1:
    # Side-by-side PDF and CDF for a 1-d distribution
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()
Esempio n. 11
0
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

# Overlay the PDF and CDF of several Gamma(k, l, 0) distributions,
# one color per parameter pair, on a shared pair of axes.
pdf_graph = ot.Graph('PDF graph', 'x', 'PDF', True, 'topleft')
cdf_graph = ot.Graph('CDF graph', 'x', 'CDF', True, 'topleft')
colors = ot.Drawable.BuildDefaultPalette(10)
params = [(1, 0.5), (2, 0.5), (3, 0.5), (5, 1), (9, 2)]
for idx, (shape, rate) in enumerate(params):
    dist = ot.Gamma(shape, rate, 0.0)
    pdf_curve = dist.drawPDF().getDrawable(0)
    cdf_curve = dist.drawCDF().getDrawable(0)
    label = 'k,l={},{}'.format(shape, rate)
    for curve in (pdf_curve, cdf_curve):
        curve.setColor(colors[idx])
        curve.setLegend(label)
    pdf_graph.add(pdf_curve)
    cdf_graph.add(cdf_curve)
fig = plt.figure(figsize=(10, 4))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=True)
View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=True)
fig.suptitle('Gamma(k,l,0)')
Esempio n. 12
0
iMax = 5
# Distributions whose orthonormal polynomial families are exercised below.
distributionCollection = [
    ot.Laplace(1.0, 0.0),
    ot.Logistic(0.0, 1.0),
    ot.Normal(0.0, 1.0),
    ot.Normal(1.0, 1.0),
    ot.Rayleigh(1.0),
    ot.Student(22.0),
    ot.Triangular(-1.0, 0.3, 1.0),
    ot.Uniform(-1.0, 1.0),
    ot.Uniform(-1.0, 3.0),
    ot.Weibull(1.0, 3.0),
    ot.Beta(1.0, 3.0, -1.0, 1.0),
    ot.Beta(0.5, 1.0, -1.0, 1.0),
    ot.Beta(0.5, 1.0, -2.0, 3.0),
    ot.Gamma(1.0, 3.0),
    ot.Arcsine()
]
# Iterate directly over the distributions (the index was only used for
# element access, so `for n in range(len(...))` was an anti-pattern).
for distribution in distributionCollection:
    name = distribution.getClassName()
    # Build the orthonormal polynomial family via adaptive Stieltjes.
    polynomialFactory = ot.StandardDistributionPolynomialFactory(
        ot.AdaptiveStieltjesAlgorithm(distribution))
    print("polynomialFactory(", name, "=", polynomialFactory, ")")
    # Print the first iMax polynomials, then the roots and the
    # Gauss nodes/weights of the degree-(iMax - 1) polynomial.
    for i in range(iMax):
        print(name, " polynomial(", i, ")=", clean(polynomialFactory.build(i)))
    roots = polynomialFactory.getRoots(iMax - 1)
    print(name, " polynomial(", iMax - 1, ") roots=", roots)
    nodes, weights = polynomialFactory.getNodesAndWeights(iMax - 1)
    print(name, " polynomial(", iMax - 1, ") nodes=", nodes, " and weights=",
          weights)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
# Class-name dispatch (looks template-generated): ot.Gamma's class name is
# 'Gamma', so neither branch below matches and the else branch is taken.
if (ot.Gamma().__class__.__name__ == 'ComposedDistribution'):
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.Gamma().__class__.__name__ == 'CumulativeDistributionNetwork'):
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
else:
    # Branch actually taken for Gamma
    distribution = ot.Gamma()
dimension = distribution.getDimension()
if dimension <= 2:
    if distribution.getDimension() == 1:
        # Side-by-side PDF and CDF for a 1-d distribution
        distribution.setDescription(['$x$'])
        pdf_graph = distribution.drawPDF()
        cdf_graph = distribution.drawCDF()
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(121)
        cdf_axis = fig.add_subplot(122)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
        View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
    else:
        distribution.setDescription(['$x_1$', '$x_2$'])
        pdf_graph = distribution.drawPDF()
        fig = plt.figure(figsize=(10, 5))
Esempio n. 14
0
from __future__ import print_function
import openturns as ot
from math import pi

# Check that MetaModelAlgorithm.BuildDistribution infers a model for
# samples drawn from a variety of source distributions.
ot.TESTPREAMBLE()
ot.Log.Show(0)
ot.PlatformInfo.SetNumericalPrecision(3)

dim = 2
corr = ot.CorrelationMatrix(2)
corr[0, 1] = 0.5
candidates = [
    ot.ComposedDistribution([ot.Uniform(-pi, pi)] * dim),
    ot.ComposedDistribution([ot.Normal(4.0, 2.0)] * dim),
    ot.ComposedDistribution([ot.Gamma()] * dim),
    ot.ComposedDistribution([ot.Gamma(1.5, 2.5, -0.5)] * dim),
    ot.ComposedDistribution([ot.Arcsine(5.2, 11.6)] * dim),
    ot.Normal([3.0] * dim, [2.0] * dim, corr),
]

for source in candidates:
    inferred = ot.MetaModelAlgorithm.BuildDistribution(source.getSample(2000))
    print(inferred)

# A deterministic, equally spaced 1-d sample.
grid = ot.Sample([[0], [142.857], [285.714], [428.571], [571.429], [714.286],
                  [857.143], [1000.0]])
print(ot.MetaModelAlgorithm.BuildDistribution(grid))
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt

ot.Log.Show(ot.Log.NONE)

# %%
# An introductory example
# -----------------------
#

# %%
# We create the data from a Gamma distribution :
ot.RandomGenerator.SetSeed(0)
distribution = ot.Gamma(6.0, 1.0)
sample = distribution.getSample(800)

# %%
# We define the kernel smoother and build the smoothed estimate.
# (default settings: bandwidth selected automatically by KernelSmoothing)
kernel = ot.KernelSmoothing()
estimated = kernel.build(sample)

# %%
# We can draw the original distribution vs the kernel smoothing.
# The estimate's PDF curve is recolored so both are distinguishable.
graph = distribution.drawPDF()
graph.setTitle("Kernel smoothing vs original")
kernel_plot = estimated.drawPDF().getDrawable(0)
kernel_plot.setColor("blue")
graph.add(kernel_plot)
graph.setLegends(["original", "KS"])