#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

# Instantiate one distribution object
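# MaximumEntropyOrderStatisticsDistribution is the maximum-entropy joint
# distribution whose marginals are the given collection, under the
# almost-sure ordering constraint X1 <= X2 <= X3 <= X4.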
distribution = ot.MaximumEntropyOrderStatisticsDistribution([
    ot.Trapezoidal(-2.0, -1.1, -1.0, 1.0),
    ot.LogUniform(1.0, 1.2),
    ot.Triangular(3.0, 4.5, 5.0),
    ot.Beta(2.5, 3.5, 4.7, 5.2)
])

dim = distribution.getDimension()
print("Distribution ", distribution)

# Is this distribution elliptical?
print("Elliptical = ", distribution.isElliptical())

# Test for realization of distribution
oneRealization = distribution.getRealization()
print("oneRealization=", repr(oneRealization))

# Test for sampling
size = 10000
oneSample = distribution.getSample(size)
print("oneSample first=", repr(oneSample[0]), " last=",
      repr(oneSample[size - 1]))

# Example #2
import openturns as ot

if ot.LogNormal().__class__.__name__ == 'ComposedDistribution':
    copula = ot.IndependentCopula(2)
    marginals = [ot.Uniform(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, copula)
elif ot.LogNormal().__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2),ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(coll, ot.BipartiteGraph([[0,1], [0,1]]))
elif ot.LogNormal().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.LogNormal().__class__.__name__ == 'KernelMixture':
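    # KernelMixture: a kernel density built from the sample, with one
    # Uniform kernel per sample point and bandwidth 1.0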
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwidth = [1.0]
    distribution = ot.KernelMixture(kernel, bandwidth, sample)
elif ot.LogNormal().__class__.__name__ == 'MaximumDistribution':
    coll = [ot.Uniform(2.5, 3.5), ot.LogUniform(1.0, 1.2), ot.Triangular(2.0, 3.0, 4.0)]
    distribution = ot.MaximumDistribution(coll)
elif ot.LogNormal().__class__.__name__ == 'Multinomial':
    distribution = ot.Multinomial(5, [0.2])
elif ot.LogNormal().__class__.__name__ == 'RandomMixture':
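    # RandomMixture: the affine combination 3.0 + 0.8*X1 + 0.2*X2 of the
    # independent distributions in coll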
    coll = [ot.Triangular(0.0, 1.0, 5.0), ot.Uniform(-2.0, 2.0)]
    weights = [0.8, 0.2]
    cst = 3.0
    distribution = ot.RandomMixture(coll, weights, cst)
elif ot.LogNormal().__class__.__name__ == 'TruncatedDistribution':
    distribution = ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0, 4.0)
elif ot.LogNormal().__class__.__name__ == 'UserDefined':
    distribution = ot.UserDefined([[0.0], [1.0], [2.0]], [0.2, 0.7, 0.1])
elif ot.LogNormal().__class__.__name__ == 'ZipfMandelbrot':
    distribution = ot.ZipfMandelbrot(10, 2.5, 0.3)
else:
    distribution = ot.LogNormal()

# Example #3
import openturns as ot

# dist1 and dist2 were defined above the point where this fragment starts;
# they are assumed to be independent 1-d distributions, and the two
# definitions below are illustrative placeholders only.
dist1 = ot.Normal(1.0, 1.0)
dist2 = ot.Uniform(0.5, 1.5)

result = dist1 - dist2
print('dist1-dist2:', result)
graph = result.drawPDF()

result = dist1 * dist2
print('dist1*dist2:', result)
graph = result.drawPDF()

result = dist1 / dist2
print('dist1/dist2:', result)
# graph = result.drawPDF()

result = ot.LogNormal() * ot.LogNormal()
print('logn*logn:', result)
graph = result.drawPDF()

result = ot.LogUniform() * ot.LogUniform()
print('logu*logu:', result)
graph = result.drawPDF()

result = ot.LogUniform() * ot.LogNormal()
print('logu*logn:', result)
graph = result.drawPDF()

result = ot.LogNormal() * ot.LogUniform()
print('logn*logu:', result)
graph = result.drawPDF()

# For ticket #917
result = ot.Weibull() + ot.Exponential()
print('Weibull+Exponential:', result)
print('result.CDF(1.0)=%.6f' % result.computeCDF(1.0))
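# Arithmetic on independent 1-d distributions returns a new distribution
# object (a sum such as this one is typically represented as a RandomMixture),
# so computeCDF can be evaluated on the result directly.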

# Example #4
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.LogUniform().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.LogUniform().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.LogUniform().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.LogUniform()
dimension = distribution.getDimension()
if dimension == 1:
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()

# Example #5
import openturns as ot
import numpy as np
import matplotlib.pyplot as plt
import math
from openturns.viewer import View

distribution = ot.LogUniform(-1.0, 1.0)
sample = distribution.getSample(5)

#print(distribution.drawLogPDF(-1.0, 1.0, 10000 ))

View(distribution.drawLogPDF(-1.0, 1.0, 10000))
"""
plt.xscale("log")
plt.plot(distribution)
plt.show()
"""
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.LogUniform().__class__.__name__ == 'Bernoulli':
    distribution = ot.Bernoulli(0.7)
elif ot.LogUniform().__class__.__name__ == 'Binomial':
    distribution = ot.Binomial(5, 0.2)
elif ot.LogUniform().__class__.__name__ == 'ComposedDistribution':
    copula = ot.IndependentCopula(2)
    marginals = [ot.Uniform(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, copula)
elif ot.LogUniform().__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(
        coll, ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.LogUniform().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.LogUniform().__class__.__name__ == 'KernelMixture':
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwidth = [1.0]
    distribution = ot.KernelMixture(kernel, bandwidth, sample)
elif ot.LogUniform().__class__.__name__ == 'MaximumDistribution':
    coll = [
        ot.Uniform(2.5, 3.5),
        ot.LogUniform(1.0, 1.2),
        ot.Triangular(2.0, 3.0, 4.0)
    ]
    distribution = ot.MaximumDistribution(coll)
elif ot.LogUniform().__class__.__name__ == 'Multinomial':
    distribution = ot.Multinomial(5, [0.2])
else:
    distribution = ot.LogUniform()

#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)


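# Helper: check whether the collection of marginals can be ordered almost
# surely (X_1 <= X_2 <= ...), as required to build an order-statistics
# distribution, and print the partition returned by buildPartition().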
def checkMarginals(coll):
    osmc = ot.OrderStatisticsMarginalChecker(coll)
    print("marginals=", coll)
    print("isCompatible=", osmc.isCompatible())
    print("partition=", osmc.buildPartition())

coll = [ot.Uniform(-1.0, 1.0), ot.LogUniform(1.0, 1.2),
        ot.Triangular(3.0, 4.0, 5.0), ot.Uniform(5.0, 6.0), ot.Uniform(5.5, 6.5)]
checkMarginals(coll)
coll.append(ot.Uniform(0.0, 1.0))
checkMarginals(coll)
import openturns as ot
import openturns.testing as ott

# The beginning of this fragment was lost: conditional, y_obs, model, x_obs
# and thetaTrue are assumed to be defined above, and the sampler construction
# below (including the names prior and initialState) is a hedged
# reconstruction of the truncated statement.
RWMHsampler = ot.RandomWalkMetropolisHastings(prior, initialState,
                                              instrumental)
RWMHsampler.setLikelihood(conditional, y_obs, model, x_obs)
print("Log-likelihood of thetaTrue = {!r}".format(
    RWMHsampler.computeLogLikelihood(thetaTrue)))
# produces an error with current master branch
real_504 = RWMHsampler.getRealization()
print("With 504 observations, getRealization() produces {!r}".format(
    real_504[0]))
ott.assert_almost_equal(real_504[0], 2.0)

# this example throws in ot 1.19 as it tries to evaluate the likelihood outside the support of the prior
# see MetropolisHastingsImplementation::computeLogPosterior
obs = ot.TruncatedNormal(0.5, 0.5, 0.0, 10.0).getSample(50)
likelihood = ot.GeneralizedPareto()
prior = ot.ComposedDistribution(
    [ot.LogUniform(-1.40, 4.0),
     ot.Normal(), ot.Normal()])
proposals = [
    ot.Uniform(-prior.getMarginal(k).getStandardDeviation()[0],
               +prior.getMarginal(k).getStandardDeviation()[0])
    for k in range(prior.getDimension())
]
initialState = prior.getMean()
mh_coll = [
    ot.RandomWalkMetropolisHastings(prior, initialState, proposals[i], [i])
    for i in range(2)
]
for mh in mh_coll:
    mh.setLikelihood(likelihood, obs)
sampler = ot.Gibbs(mh_coll)
parameters_sample = sampler.getSample(200)
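# Each RandomWalkMetropolisHastings block updates one component of the state
# (here only the first two of the three prior components); Gibbs chains the
# blocks to produce a sample from the posterior.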

# Example #9
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if (ot.LogUniform().__class__.__name__ == 'ComposedDistribution'):
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.LogUniform().__class__.__name__ == 'CumulativeDistributionNetwork'):
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
else:
    distribution = ot.LogUniform()
dimension = distribution.getDimension()
if dimension <= 2:
    if distribution.getDimension() == 1:
        distribution.setDescription(['$x$'])
        pdf_graph = distribution.drawPDF()
        cdf_graph = distribution.drawCDF()
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(121)
        cdf_axis = fig.add_subplot(122)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
        View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
    else:
        distribution.setDescription(['$x_1$', '$x_2$'])
        pdf_graph = distribution.drawPDF()
        fig = plt.figure(figsize=(10, 5))