Example #1
import openturns as ot

# Lists of test distributions and their reference counterparts
# (initialized here so the fragment runs standalone)
distribution = []
referenceDistribution = []

# add a 2-d test
dimension = 2
# This distribution takes too much time for the test
#size = 70
#ref = ot.Normal(dimension)
#sample = ref.getSample(size)
#ks = ot.KernelSmoothing().build(sample)
# Use a multivariate Normal distribution instead
ks = ot.Normal(2)
truncatedKS = ot.TruncatedDistribution(
    ks, ot.Interval([-0.5] * dimension, [2.0] * dimension))
distribution.append(truncatedKS)
referenceDistribution.append(ks)  # N/A
# Add a non-truncated example
weibull = ot.WeibullMin(2.0, 3.0)
distribution.append(ot.TruncatedDistribution(weibull))
referenceDistribution.append(weibull)
ot.RandomGenerator.SetSeed(0)

for testCase in range(len(distribution)):
    print('Distribution ', distribution[testCase])

    # Is this distribution elliptical?
    print('Elliptical = ', distribution[testCase].isElliptical())

    # Is this distribution continuous?
    print('Continuous = ', distribution[testCase].isContinuous())

    # Test for realization of distribution
    oneRealization = distribution[testCase].getRealization()
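    # Hedged completion of the truncated fragment: inspect the realization
    print('oneRealization=', oneRealization)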
Example #2
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

grid = ot.GridLayout(2, 3)
palette = ot.Drawable.BuildDefaultPalette(10)
for j in range(grid.getNbColumns()):
    alpha = 1.0 + j
    pdf_curve = ot.WeibullMin(1.0, alpha, 0.0).drawPDF()
    cdf_curve = ot.WeibullMin(1.0, alpha, 0.0).drawCDF()
    pdf_curve.setColors([palette[j]])
    cdf_curve.setColors([palette[j]])
    pdf_curve.setLegends(['alpha={}'.format(alpha)])
    cdf_curve.setLegends(['alpha={}'.format(alpha)])
    grid.setGraph(0, j, pdf_curve)
    grid.setGraph(1, j, cdf_curve)
graph = grid

fig = plt.figure(figsize=(12, 8))
View(graph, figure=fig, add_legend=True)
plt.show()
Example #3

import openturns as ot

# Hypothetical setup (the defining lines are not shown in this fragment):
# a sample whose range yields the known truncation bounds
size = 100
sample = ot.TruncatedNormal(0.0, 1.0, -1.0, 1.5).getSample(size)
xMin = sample.getMin()[0]
xMax = sample.getMax()[0]
delta = xMax - xMin
a = xMin - delta / (size + 2)
b = xMax + delta / (size + 2)
distribution = ot.TruncatedNormal()
factory = ot.MethodOfMomentsFactory(distribution)
factory.setKnownParameter([a, b], [2, 3])  # fix the truncation bounds a, b (indices 2, 3)
solver = factory.getOptimizationAlgorithm()
sampleMean = sample.computeMean()[0]
sampleSigma = sample.computeStandardDeviation()[0]
startingPoint = [sampleMean, sampleSigma]
solver.setStartingPoint(startingPoint)
factory.setOptimizationAlgorithm(solver)
# The first component is unbounded (its finite flags below are False),
# so its bound values are placeholders.
lowerBound = [-1.0, 0.0]
upperBound = [-1.0, 1.5]
finiteLowerBound = [False, True]
finiteUpperBound = [False, True]
bounds = ot.Interval(lowerBound, upperBound, finiteLowerBound,
                     finiteUpperBound)
factory = ot.MethodOfMomentsFactory(distribution, bounds)
factory.setKnownParameter([a, b], [2, 3])
factory.setOptimizationBounds(bounds)
inf_distribution = factory.build(sample)
print('estimated distribution=', inf_distribution)

# setKnownParameter+buildEstimator
sample = ot.Normal(2.0, 1.0).getSample(10)
factory = ot.MethodOfMomentsFactory(ot.WeibullMin())
factory.setBootstrapSize(4)
factory.setKnownParameter([1.0], [1])  # fix the shape parameter alpha (index 1) to 1.0
result = factory.buildEstimator(sample)
print('ok')
Example #4

# %%
import openturns as ot
import openturns.viewer as viewer

# %%
# Assumed parameters (their defining lines are not shown in this fragment).
alpha = 10.0
beta = 1.0

# %%
# The WeibullMin quantile function Q(u) = beta * (-ln(1 - u))^(1 / alpha),
# written as a symbolic function with the assumed parameters substituted.
quantile = ot.SymbolicFunction(
    ['u'], ['%g * (-ln(1 - u)) ^ (1 / %g)' % (beta, alpha)])

# %%
# We draw a sample of uniform random numbers in [0, 1].
U = ot.Uniform(0.0, 1.0)
n = 1000
uniformSample = U.getSample(n)

# %%
# To generate the numbers, we evaluate the quantile function on the uniform numbers.

# %%
weibullSample = quantile(uniformSample)

# %%
# In order to compare the results, we use the `WeibullMin` class (using the default value of the location parameter :math:`\gamma=0`).

# %%
W = ot.WeibullMin(beta, alpha)

# %%
histo = ot.HistogramFactory().build(weibullSample).drawPDF()
histo.setTitle("Weibull alpha=%s, beta=%s, n=%d" % (alpha, beta, n))
histo.setLegends(["Sample"])
wpdf = W.drawPDF()
wpdf.setColors(["blue"])
wpdf.setLegends(["Weibull"])
histo.add(wpdf)
view = viewer.View(histo)

# %%
# We see that the empirical histogram of the generated outcomes is close to the exact density of the Weibull distribution.
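
# %%
# A hedged check, not part of the original example: quantify this agreement
# with a Kolmogorov-Smirnov test of the generated sample against W.
test_result = ot.FittingTest.Kolmogorov(weibullSample, W)
print('Kolmogorov p-value: %.4f' % test_result.getPValue())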

# %%
Example #5
import openturns as ot

# Collections gathering all the test distributions
# (initialized here so the fragment runs standalone)
distributionCollection = ot.DistributionCollection()
continuousDistributionCollection = ot.DistributionCollection()
discreteDistributionCollection = ot.DistributionCollection()

# assumed parameters; the defining line is not shown in this fragment
truncatednormal = ot.TruncatedNormal(1.0, 1.0, 0.0, 3.0)
distributionCollection.add(truncatednormal)
continuousDistributionCollection.add(truncatednormal)

student = ot.Student(10.0, 10.0)
distributionCollection.add(student)
continuousDistributionCollection.add(student)

triangular = ot.Triangular(-1.0, 2.0, 4.0)
distributionCollection.add(triangular)
continuousDistributionCollection.add(triangular)

uniform = ot.Uniform(1.0, 2.0)
distributionCollection.add(uniform)
continuousDistributionCollection.add(uniform)

weibull = ot.WeibullMin(1.0, 1.0, 2.0)
distributionCollection.add(weibull)
continuousDistributionCollection.add(weibull)

geometric = ot.Geometric(0.5)
distributionCollection.add(geometric)
discreteDistributionCollection.add(geometric)

binomial = ot.Binomial(10, 0.25)
distributionCollection.add(binomial)
discreteDistributionCollection.add(binomial)

zipf = ot.ZipfMandelbrot(20, 5.25, 2.5)
distributionCollection.add(zipf)
discreteDistributionCollection.add(zipf)
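
# A hedged sketch, not in the original fragment, of how such collections are
# typically used: rank the candidate models against a sample with a
# Kolmogorov goodness-of-fit test.
testSample = ot.Normal().getSample(100)
bestModel, bestResult = ot.FittingTest.BestModelKolmogorov(
    testSample, continuousDistributionCollection)
print('best continuous model:', bestModel)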
Example #6

import openturns as ot

# The first case is reconstructed; the fragment starts mid-sequence and
# presumably began with the product of two LogNormal distributions.
result = ot.LogNormal() * ot.LogNormal()
print('logn*logn:', result)
graph = result.drawPDF()

result = ot.LogUniform() * ot.LogUniform()
print('logu*logu:', result)
graph = result.drawPDF()

result = ot.LogUniform() * ot.LogNormal()
print('logu*logn:', result)
graph = result.drawPDF()

result = ot.LogNormal() * ot.LogUniform()
print('logn*logu:', result)
graph = result.drawPDF()

# For ticket #917
result = ot.WeibullMin() + ot.Exponential()
print('WeibullMin+Exponential:', result)
print('result.CDF(1.0)=%.6f' % result.computeCDF(1.0))
result = -1.0 * ot.WeibullMin() + ot.Exponential()
print('-WeibullMin+Exponential:', result)
print('result.CDF(1.0)=%.6f' % result.computeCDF(1.0))
result = ot.WeibullMin() - ot.Exponential()
print('WeibullMin-Exponential:', result)
print('result.CDF(1.0)=%.6f' % result.computeCDF(1.0))
result = -1.0 * ot.WeibullMin() - ot.Exponential()
print('-WeibullMin-Exponential:', result)
print('result.CDF(-1.0)=%.6f' % result.computeCDF(-1.0))

# 2-d
print(ot.Normal(2) + ot.Normal(2))
print(ot.Normal(2) + 3.0)
Example #7

# In this example we are going to perform a visual goodness-of-fit test for a 1-d distribution with the QQ plot.

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create data
ot.RandomGenerator.SetSeed(0)
distribution = ot.Gumbel(0.2, 0.5)
sample = distribution.getSample(100)
sample.setDescription(['Sample'])

# %%
# Fit a distribution
distribution = ot.GumbelFactory().build(sample)

# %%
# Draw QQ plot
graph = ot.VisualTest.DrawQQplot(sample, distribution)
view = viewer.View(graph)

# %%
# An incorrect candidate distribution: the points deviate from the diagonal
graph = ot.VisualTest.DrawQQplot(sample, ot.WeibullMin())
view = viewer.View(graph)
plt.show()
Example #8
# %%
# In this example we are going to use distribution algebra and distribution transformation via functions.

# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# We define some classical distributions:

# %%
distribution1 = ot.Uniform(0.0, 1.0)
distribution2 = ot.Uniform(0.0, 2.0)
distribution3 = ot.WeibullMin(1.5, 2.0)

# %%
# Sum & difference of distributions
# ---------------------------------
#
# It is easy to compute the sum of distributions. For example:

# %%
distribution = distribution1 + distribution2
print(distribution)
graph = distribution.drawPDF()
view = viewer.View(graph)

# %%
# We can also use subtraction, even with scalar values:
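
# %%
# The fragment is truncated here; a hedged sketch of the continuation,
# subtracting distributions and scalar values:
distribution = 3.0 - distribution1 - distribution2
print(distribution)
graph = distribution.drawPDF()
view = viewer.View(graph)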
Example #9
import openturns as ot


# Round tiny polynomial coefficients to zero for stable printing.
# The head of this helper is reconstructed; the fragment starts mid-function.
def clean(polynomial):
    coefficients = polynomial.getCoefficients()
    for i in range(coefficients.getDimension()):
        if abs(coefficients[i]) < 1.0e-12:
            coefficients[i] = 0.0
    return ot.UniVariatePolynomial(coefficients)


iMax = 5
distributionCollection = [
    ot.Laplace(0.0, 1.0),
    ot.Logistic(0.0, 1.0),
    ot.Normal(0.0, 1.0),
    ot.Normal(1.0, 1.0),
    ot.Rayleigh(1.0),
    ot.Student(22.0),
    ot.Triangular(-1.0, 0.3, 1.0),
    ot.Uniform(-1.0, 1.0),
    ot.Uniform(-1.0, 3.0),
    ot.WeibullMin(1.0, 3.0),
    ot.Beta(1.0, 2.0, -1.0, 1.0),
    ot.Beta(0.5, 0.5, -1.0, 1.0),
    ot.Beta(0.5, 0.5, -2.0, 3.0),
    ot.Gamma(1.0, 3.0),
    ot.Arcsine()
]
for n in range(len(distributionCollection)):
    distribution = distributionCollection[n]
    name = distribution.getClassName()
    polynomialFactory = ot.StandardDistributionPolynomialFactory(
        ot.AdaptiveStieltjesAlgorithm(distribution))
    print("polynomialFactory(", name, "=", polynomialFactory, ")")
    for i in range(iMax):
        print(name, " polynomial(", i, ")=", clean(polynomialFactory.build(i)))
    roots = polynomialFactory.getRoots(iMax - 1)
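    # Not shown in the original fragment: display the roots, which would
    # otherwise be discarded
    print(name, " roots=", roots)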
Example #10

# Continuation of a kernel smoothing example: `graph` holds the PDF plot
# comparing several bandwidth selection rules.
graph.setLegendPosition("topright")
view = viewer.View(graph)

# %%
# As expected, the Silverman rule seriously overfits the data, and the other rules are more accurate.

# %%
# Boundary corrections
# --------------------
#
# We finish this example with an advanced feature of kernel smoothing: boundary corrections.
#

# %%
# We consider a Weibull distribution:
myDist = ot.WeibullMin(2.0, 1.5, 1.0)

# %%
# We generate a sample from the defined distribution:
sample = myDist.getSample(2000)

# %%
# We draw the exact Weibull distribution:
graph = myDist.drawPDF()

# %%
# We use two different kernels:
#
# - a standard normal kernel
# - the same kernel with a boundary correction
#
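# %%
# A minimal sketch under these assumptions (`sample` and the PDF `graph` are
# defined above); setBoundaryCorrection activates the boundary treatment.
kernel1 = ot.KernelSmoothing(ot.Normal())
fitted1 = kernel1.build(sample)
kernel2 = ot.KernelSmoothing(ot.Normal())
kernel2.setBoundaryCorrection(True)
fitted2 = kernel2.build(sample)
# Overlay both estimates on the exact PDF for comparison
curve1 = fitted1.drawPDF().getDrawable(0)
curve1.setLegend('no boundary correction')
curve2 = fitted2.drawPDF().getDrawable(0)
curve2.setLegend('with boundary correction')
graph.add(curve1)
graph.add(curve2)
view = viewer.View(graph)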
Example #11
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View

pdf_graph = ot.Graph('PDF graph', 'x', 'PDF', True, 'topleft')
cdf_graph = ot.Graph('CDF graph', 'x', 'CDF', True, 'topleft')
palette = ot.Drawable.BuildDefaultPalette(10)
for i, p in enumerate([1.0, 2.0, 3.0]):
    distribution = ot.WeibullMin(1.0, p, 0.0)
    pdf_curve = distribution.drawPDF().getDrawable(0)
    cdf_curve = distribution.drawCDF().getDrawable(0)
    pdf_curve.setColor(palette[i])
    cdf_curve.setColor(palette[i])
    pdf_curve.setLegend('alpha={}'.format(p))
    cdf_curve.setLegend('alpha={}'.format(p))
    pdf_graph.add(pdf_curve)
    cdf_graph.add(cdf_curve)
fig = plt.figure(figsize=(10, 4))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=True)
View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=True)
fig.suptitle('WeibullMin(1,alpha,0)')
plt.show()