import numpy as np
import openturns as ot
import scipy.integrate as spi


def fisher_triangular_inv(c, a, b):
    # Inverse Fisher information of a Triangular(a, c, b) distribution with
    # respect to its second parameter (the mode c), by numerical integration.
    grid = np.linspace(a, b, 100)
    f = ot.Triangular(a, c, b)
    val = [
        f.computeLogPDFGradient([t])[1]**2 * f.computePDF([t]) for t in grid
    ]
    fi = spi.simpson(val, x=grid)  # scipy.integrate.simps was renamed to simpson
    return 1 / fi
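# A minimal usage sketch with illustrative values (not from the original source):
# the returned value is the Cramer-Rao lower bound for an estimator of the mode.
print(fisher_triangular_inv(0.3, 0.0, 1.0))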
Example #2
#
# The weights are automatically normalized.
#
# It is also possible to create a mixture of copulas (a short sketch is given at the end of this example).

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# create a collection of distributions and the associated weights
distributions = [
    ot.Triangular(1.0, 2.0, 4.0),
    ot.Normal(-1.0, 1.0),
    ot.Uniform(5.0, 6.0)
]
weights = [0.4, 1.0, 0.2]

# %%
# create the mixture
distribution = ot.Mixture(distributions, weights)
print(distribution)

# %%
# draw PDF
graph = distribution.drawPDF()
view = viewer.View(graph)
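# %%
# A short sketch added for illustration (not part of the original example): the
# weights above sum to 1.6 and are normalized internally; `getWeights` (an
# assumed accessor) lets you inspect what the mixture stores. A mixture of
# copulas is built the same way, since copulas are ordinary distributions.
print(distribution.getWeights())
copulaMixture = ot.Mixture(
    [ot.GumbelCopula(4.5), ot.ClaytonCopula(2.3)], [0.3, 0.7])
print(copulaMixture)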
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

# Instantiate one distribution object
distribution = ot.MaximumEntropyOrderStatisticsDistribution([
    ot.Trapezoidal(-2.0, -1.1, -1.0, 1.0),
    ot.LogUniform(1.0, 1.2),
    ot.Triangular(3.0, 4.5, 5.0),
    ot.Beta(2.5, 3.5, 4.7, 5.2)
])

dim = distribution.getDimension()
print("Distribution ", distribution)

# Is this distribution elliptical ?
print("Elliptical = ", distribution.isElliptical())

# Test for realization of distribution
oneRealization = distribution.getRealization()
print("oneRealization=", repr(oneRealization))

# Test for sampling
size = 10000
oneSample = distribution.getSample(size)
print("oneSample first=", repr(oneSample[0]), " last=",
      repr(oneSample[size - 1]))
# %%
from __future__ import print_function
import openturns as ot
import matplotlib.pyplot as plt
ot.RandomGenerator.SetSeed(0)
ot.Log.Show(ot.Log.NONE)

# %%
# Create an ARMA process

tMin = 0.0
n = 1000
timeStep = 0.1
myTimeGrid = ot.RegularGrid(tMin, timeStep, n)

myWhiteNoise = ot.WhiteNoise(ot.Triangular(-1.0, 0.0, 1.0), myTimeGrid)
myARCoef = ot.ARMACoefficients([0.4, 0.3, 0.2, 0.1])
myMACoef = ot.ARMACoefficients([0.4, 0.3])
arma = ot.ARMA(myARCoef, myMACoef, myWhiteNoise)

tseries = ot.TimeSeries(arma.getRealization())

# Create a sample of N time series from the process
N = 100
sample = arma.getSample(N)

# %%
# CASE 1: we specify a (p, q) order

# Specify the order (p,q)
p = 4
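q = 2

# %%
# The snippet is cut off above; as a sketch (assuming the `ot.WhittleFactory`
# estimator, not necessarily what the original code used), an ARMA(p, q) model
# can be fitted to the simulated time series:
factory = ot.WhittleFactory(p, q)
fittedARMA = factory.build(tseries)
print(fittedARMA)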
Example #5
distribution = ot.Normal(2)
ott.assert_almost_equal(distribution.getRoughness(),
                        compute_roughness_sampling(distribution))

# 2D Normal with scale & correlation
# This allows checking that Normal::getRoughness is well implemented
corr = ot.CorrelationMatrix(2)
corr[1, 0] = 0.3
distribution = ot.Normal([0, 0], [1, 2], corr)
ott.assert_almost_equal(distribution.getRoughness(),
                        compute_roughness_sampling(distribution))
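# A cross-check added here (not in the original test): a d-dimensional Normal has
# the closed-form roughness (4*pi)**(-d/2) / sqrt(det(Sigma)); this sketch assumes
# the computeDeterminant() method of the covariance matrix.
import math
d = distribution.getDimension()
det = distribution.getCovariance().computeDeterminant()
ott.assert_almost_equal(distribution.getRoughness(),
                        (4.0 * math.pi) ** (-d / 2.0) / math.sqrt(det))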

distribution = ot.Epanechnikov()
ott.assert_almost_equal(distribution.getRoughness(), 3/5)

distribution = ot.Triangular()
ott.assert_almost_equal(distribution.getRoughness(), 2/3)

distribution = ot.Distribution(Quartic())
ott.assert_almost_equal(distribution.getRoughness(), 5/7)

# Testing Histogram ==> getSingularities
distribution = ot.HistogramFactory().buildAsHistogram(ot.Uniform(0, 1).getSample(100000))
ott.assert_almost_equal(distribution.getRoughness(), 1.0, 5e-4, 1e-5)
# Compute the roughness using width and height
width = distribution.getWidth()
height = distribution.getHeight()
roughness = sum([width[i] * height[i]**2 for i in range(len(height))])
ott.assert_almost_equal(distribution.getRoughness(), roughness)

# Large dimension with independent copula
Example #6
# %%
# In this example we are going to create a deterministic weighted design experiment using Gauss product.

# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt

ot.Log.Show(ot.Log.NONE)

# %%
# Define the underlying distribution and the marginal degrees (number of nodes per marginal)
distribution = ot.ComposedDistribution(
    [ot.Exponential(), ot.Triangular(-1.0, -0.5, 1.0)])
marginalSizes = [15, 8]

# %%
# Create the design
experiment = ot.GaussProductExperiment(distribution, marginalSizes)
sample = experiment.generate()

# %%
# Plot the design
graph = ot.Graph("GP design", "x1", "x2", True, "")
cloud = ot.Cloud(sample, "blue", "fsquare", "")
graph.add(cloud)
view = viewer.View(graph)
plt.show()
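# %%
# A short sketch added for illustration (not in the original example): the design
# has 15 * 8 = 120 nodes, and the quadrature weights can be retrieved together
# with the nodes, assuming the `generateWithWeights` method of the experiment.
nodes, weights = experiment.generateWithWeights()
print(nodes.getSize(), len(weights))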
Example #7
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

distribution = ot.Triangular(1.0, 2.5, 4.0)
size = 10000
sample = distribution.getSample(size)
factory = ot.TriangularFactory()
estimatedDistribution = factory.build(sample)
print("distribution=", repr(distribution))
print("Estimated distribution=", repr(estimatedDistribution))
estimatedDistribution = factory.build()
print("Default distribution=", estimatedDistribution)
estimatedDistribution = factory.build(
    distribution.getParameter())
print("Distribution from parameters=", estimatedDistribution)
estimatedTriangular = factory.buildAsTriangular(sample)
print("Triangular          =", distribution)
print("Estimated triangular=", estimatedTriangular)
estimatedTriangular = factory.buildAsTriangular()
print("Default triangular=", estimatedTriangular)
estimatedTriangular = factory.buildAsTriangular(
    distribution.getParameter())
print("Triangular from parameters=", estimatedTriangular)
Example #8
# %%
from __future__ import print_function
import openturns as ot
import math as m
ot.Log.Show(ot.Log.NONE)

# %%
# generate some multivariate data to estimate, with correlation
cop1 = ot.AliMikhailHaqCopula(0.6)
cop2 = ot.ClaytonCopula(2.5)
copula = ot.ComposedCopula([cop1, cop2])
marginals = [
    ot.Uniform(5.0, 6.0),
    ot.Arcsine(),
    ot.Normal(-40.0, 3.0),
    ot.Triangular(100.0, 150.0, 300.0)
]
distribution = ot.ComposedDistribution(marginals, copula)
sample = distribution.getSample(10000).getMarginal([0, 2, 3, 1])

# %%
# estimate marginals
dimension = sample.getDimension()
marginalFactories = []
for factory in ot.DistributionFactory.GetContinuousUniVariateFactories():
    if str(factory).startswith('Histogram'):
        # skip the non-parametric histogram factory
        continue
    marginalFactories.append(factory)
estimated_marginals = [
    ot.FittingTest.BestModelBIC(sample.getMarginal(i), marginalFactories)[0]
    for i in range(dimension)
]
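# A short follow-up sketch (not part of the original snippet): inspect which
# parametric model the BIC criterion selected for each component.
print(estimated_marginals)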
Example #9
    coefficients = polynomial.getCoefficients()
    for i in range(coefficients.getDimension()):
        if abs(coefficients[i]) < 1.0e-12:
            coefficients[i] = 0.0
    return ot.UniVariatePolynomial(coefficients)


iMax = 5
distributionCollection = [
    ot.Laplace(1.0, 0.0),
    ot.Logistic(0.0, 1.0),
    ot.Normal(0.0, 1.0),
    ot.Normal(1.0, 1.0),
    ot.Rayleigh(1.0),
    ot.Student(22.0),
    ot.Triangular(-1.0, 0.3, 1.0),
    ot.Uniform(-1.0, 1.0),
    ot.Uniform(-1.0, 3.0),
    ot.Weibull(1.0, 3.0),
    ot.Beta(1.0, 3.0, -1.0, 1.0),
    ot.Beta(0.5, 1.0, -1.0, 1.0),
    ot.Beta(0.5, 1.0, -2.0, 3.0),
    ot.Gamma(1.0, 3.0),
    ot.Arcsine()
]
for n in range(len(distributionCollection)):
    distribution = distributionCollection[n]
    name = distribution.getClassName()
    polynomialFactory = ot.StandardDistributionPolynomialFactory(
        ot.AdaptiveStieltjesAlgorithm(distribution))
    print("polynomialFactory(", name, "=", polynomialFactory, ")")
Example #10
    return [H, S, C]


myFunction = ot.PythonFunction(8, 3, functionCrue)

# 2. Create the Input and Output random variables
myParam = ot.GumbelAB(1013., 558.)
QGumbel = ot.ParametrizedDistribution(myParam)
Q = ot.TruncatedDistribution(QGumbel, 0, ot.TruncatedDistribution.LOWER)
KsNormal = ot.Normal(30.0, 7.5)
Ks = ot.TruncatedDistribution(KsNormal, 0, ot.TruncatedDistribution.LOWER)
Zv = ot.Uniform(49.0, 51.0)
Zm = ot.Uniform(54.0, 56.0)
#
Hd = ot.Uniform(7., 9.)  # Hd = 3.0;
Zb = ot.Triangular(55.0, 55.5, 56.0)  # Zb = 55.5
L = ot.Triangular(4990, 5000., 5010.)  # L = 5.0e3;
B = ot.Triangular(295., 300., 305.)  # B = 300.0

Q.setDescription(["Q (m3/s)"])
Ks.setDescription(["Ks (m^(1/3)/s)"])
Zv.setDescription(["Zv (m)"])
Zm.setDescription(["Zm (m)"])
Hd.setDescription(["Hd (m)"])
Zb.setDescription(["Zb (m)"])
L.setDescription(["L (m)"])
B.setDescription(["B (m)"])

# 3. Create the joint distribution
inputDistribution = ot.ComposedDistribution((Q, Ks, Zv, Zm, Hd, Zb, L, B))
inputRandomVector = ot.RandomVector(inputDistribution)
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.Triangular().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif ot.Triangular().__class__.__name__ == 'CumulativeDistributionNetwork':
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.Triangular().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.Triangular()
dimension = distribution.getDimension()
if dimension == 1:
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
    pdf_graph = distribution.drawPDF()
Example #12
margins = distribution.getMarginal(indices)
print("margins=", margins)
print("margins PDF=%.5f" % margins.computePDF(point))
print("margins CDF=%.5f" % margins.computeCDF(point))
quantile = margins.computeQuantile(0.95)
print("margins quantile=", quantile)
print("margins CDF(quantile)=%.5f" % margins.computeCDF(quantile))
print("margins realization=", margins.getRealization())
# Tests of the isoprobabilistic transformation
# General case with normal standard distribution
print("isoprobabilistic transformation (general normal)=",
      distribution.getIsoProbabilisticTransformation())
# General case with non-normal standard distribution
collection[0] = ot.SklarCopula(ot.Student(
    3.0, [1.0]*2, [3.0]*2, ot.CorrelationMatrix(2)))
collection.append(ot.Triangular(2.0, 3.0, 4.0))
distribution = ot.BlockIndependentDistribution(collection)
print("isoprobabilistic transformation (general non-normal)=",
      distribution.getIsoProbabilisticTransformation())
dim = distribution.getDimension()
x = 0.6
y = [0.2] * (dim - 1)
print("conditional PDF=%.5f" % distribution.computeConditionalPDF(x, y))
print("conditional CDF=%.5f" % distribution.computeConditionalCDF(x, y))
print("conditional quantile=%.5f" %
      distribution.computeConditionalQuantile(x, y))
pt = ot.Point(dim)
for i in range(dim):
    pt[i] = 0.1 * i + 0.05
print("sequential conditional PDF=",
      distribution.computeSequentialConditionalPDF(pt))
#! /usr/bin/env python

from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)


def checkMarginals(coll):
    osmc = ot.OrderStatisticsMarginalChecker(coll)
    print("marginals=", coll)
    print("isCompatible=", osmc.isCompatible())
    print("partition=", osmc.buildPartition())

coll = [ot.Uniform(-1.0, 1.0), ot.LogUniform(1.0, 1.2),
        ot.Triangular(3.0, 4.0, 5.), ot.Uniform(5.0, 6.0), ot.Uniform(5.5, 6.5)]
checkMarginals(coll)
coll.append(ot.Uniform(0.0, 1.0))
checkMarginals(coll)
     "round": 4,
 },
 "shipping_time": {
     "marg": ot.Poisson(3.0),  # mean is 3 days
     "corr": -0.3,  # The longer, the less CR
     "bounds": [1, 14],
     "round": 4,
 },
 "nb_rating": {
     "marg": ot.Geometric(0.02),  # mean is 50
     "corr": 0.2,  # The more the nb of rating, the better trust
     "bounds": None,
     "round": 0,  # Already an integer (casted as double)
 },
 "avg_rating": {
     "marg": ot.Triangular(1.0, 4.0, 5.0),  # mode is 4.0
     "corr": 0.25,  # The better rating, the better the CR
     "bounds": None,
     "round": 2,
 },
 "nb_provider_rating": {
     "marg": ot.Geometric(0.001),  # mean is 1000
     "corr": 0.08,  # Weak positive correlation
     "bounds": None,
     "round": 0,  # Already an integer (casted as double)
 },
 "avg_provider_rating": {
     "marg": ot.Triangular(2.5, 4.0, 4.8),  # mode is 4.0
     "corr": 0.01,  # Weak positive correlation
     "bounds": None,
     "round": 2,
from __future__ import print_function
import openturns as ot
import math as m

# ot.Log.Show(ot.Log.ALL)

coll = []

# case 1: no transformation
coll.append([ot.Normal(), ot.Normal()])

# case 2: same copula
left = ot.ComposedDistribution([ot.Normal(), ot.Gumbel()],
                               ot.IndependentCopula(2))
right = ot.ComposedDistribution([ot.Triangular()] * 2, ot.IndependentCopula(2))
coll.append([left, right])

# case 3: same standard space
left = ot.ComposedDistribution([ot.Normal(), ot.Gumbel()],
                               ot.IndependentCopula(2))
right = ot.ComposedDistribution([ot.Triangular()] * 2, ot.GumbelCopula())
coll.append([left, right])

# TODO case 4: different standard space

for left, right in coll:
    transformation = ot.DistributionTransformation(left, right)
    print('left=', left)
    print('right=', right)
    print('transformation=', transformation)
#! /usr/bin/env python

import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)


def checkMarginals(coll):
    osmc = ot.OrderStatisticsMarginalChecker(coll)
    print("marginals=", coll)
    print("isCompatible=", osmc.isCompatible())
    print("partition=", osmc.buildPartition())


coll = [
    ot.Uniform(-1.0, 1.0),
    ot.LogUniform(1.0, 1.2),
    ot.Triangular(3.0, 4.0, 5.),
    ot.Uniform(5.0, 6.0),
    ot.Uniform(5.5, 6.5)
]
checkMarginals(coll)
coll.append(ot.Uniform(0.0, 1.0))
checkMarginals(coll)
Example #17
# moisture has 2 attributes : Dry and Wet
moisture.addLabel("Dry")
moisture.addLabel("Wet")

# height is a discretized variable
[height.addTick(i) for i in range(0, 150, 10)]
height.domainSize()

# height has a conditional probability table
# We give here its conditional distributions
# We use some OT distributions

# distribution when Dim and Dry
heightWhenDimAndDry = ot.Uniform(0.0, 20.0)
# distribution when Dim and Wet
heightWhenDimAndWet = ot.Triangular(15.0, 30.0, 50.0)
# distribution when Bright and Dry
heightWhenBrightAndDry = ot.Triangular(0.0, 15.0, 30.0)
# distribution when Bright and Wet
heightWhenBrightAndWet = ot.Normal(90.0, 10.0)

# Create the net
bn = gum.BayesNet("Plant Growth")

# Add variables
indexLight = bn.add(light)
indexMoisture = bn.add(moisture)
indexHeight = bn.add(height)

# Add arcs
bn.addArc(indexLight, indexMoisture)
Example #18
#
# - graphically if :math:`\underline{X}` is of dimension 1, by drawing the residual couples (:math:`\varepsilon_i, \varepsilon_{i+1}`), where the residual :math:`\varepsilon_i` is evaluated on the samples :math:`(X, Y)`.
# - numerically with the LinearModelResidualMean test, which tests, under the hypothesis of a Gaussian sample, whether the mean of the residuals is equal to zero. It is based on the Student test (equality of means of two Gaussian samples).
#

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Generate X,Y samples
N = 1000
Xsample = ot.Triangular(1.0, 5.0, 10.0).getSample(N)
Ysample = Xsample * 3.0 + ot.Normal(0.5, 1.0).getSample(N)

# %%
# Generate a particular scalar sampleX
particularXSample = ot.Triangular(1.0, 5.0, 10.0).getSample(N)

# %%
# Create the linear model from Y,X samples
result = ot.LinearModelAlgorithm(Xsample, Ysample).getResult()

# Get the coefficients ai
print("coefficients of the linear regression model = ",
      result.getCoefficients())

# Get the confidence intervals of the ai coefficients
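# %%
# The snippet is cut off above. As a hedged sketch (not necessarily the original
# code), the residual-mean test mentioned in the introduction can be run through
# ot.LinearModelTest, assuming its two-sample form:
test_result = ot.LinearModelTest.LinearModelResidualMean(Xsample, Ysample)
print(test_result)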
Example #19
# %%
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create processes to aggregate
myMesher = ot.IntervalMesher([100, 10])
lowerbound = [0.0, 0.0]
upperBound = [2.0, 4.0]
myInterval = ot.Interval(lowerbound, upperBound)
myMesh = myMesher.build(myInterval)
myProcess1 = ot.WhiteNoise(ot.Normal(), myMesh)
myProcess2 = ot.WhiteNoise(ot.Triangular(), myMesh)

# %%
# Draw the values of a realization of the first process
marginal = ot.HistogramFactory().build(myProcess1.getRealization().getValues())
graph = marginal.drawPDF()
view = viewer.View(graph)

# %%
# Create an aggregated process
myAggregatedProcess = ot.AggregatedProcess([myProcess1, myProcess2])

# %%
# Draw the values of the realization of the aggregated process on its first marginal
marginal = ot.HistogramFactory().build(
    myAggregatedProcess.getRealization().getValues().getMarginal(0))
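# The snippet is cut off here; drawing the histogram of this marginal follows the
# same pattern as above (a sketch mirroring the earlier lines):
graph = marginal.drawPDF()
view = viewer.View(graph)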
import openturns as ot
import math as m

ot.Log.Show(ot.Log.NONE)

# %%
# Create an ARMA process

# Create the mesh
tMin = 0.
time_step = 0.1
n = 100
time_grid = ot.RegularGrid(tMin, time_step, n)

# Create the distribution of dimension 1 or 3
# Caution: the white noise distribution must have zero mean
myDist_1 = ot.Triangular(-1., 0.0, 1.)
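# A quick check added here (not in the original snippet): Triangular(-1, 0, 1)
# indeed has zero mean, as required for the white noise of an ARMA process.
print(myDist_1.getMean())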

# Create  a white noise of dimension 1
myWN_1d = ot.WhiteNoise(myDist_1, time_grid)

# Create the ARMA model : ARMA(4,2) in dimension 1
myARCoef = ot.ARMACoefficients([0.4, 0.3, 0.2, 0.1])
myMACoef = ot.ARMACoefficients([0.4, 0.3])
arma = ot.ARMA(myARCoef, myMACoef, myWN_1d)

# %%
# Check the linear recurrence
arma

# %%
# Get the coefficients of the recurrence
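# (The snippet is cut off here; a sketch of a plausible continuation using the
# standard ARMA accessors:)
print("AR coefficients:", arma.getARCoefficients())
print("MA coefficients:", arma.getMACoefficients())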
Example #21
dim = 2
meanPoint = [0.5, -0.5]
sigma = [2.0, 3.0]
R = ot.CorrelationMatrix(dim)
for i in range(1, dim):
    R[i, i - 1] = 0.5

distribution = ot.Normal(meanPoint, sigma, R)
discretization = 100
kernel = ot.KernelSmoothing()
sample = distribution.getSample(discretization)
kernels = ot.DistributionCollection(0)
kernels.add(ot.Normal())
kernels.add(ot.Epanechnikov())
kernels.add(ot.Uniform())
kernels.add(ot.Triangular())
kernels.add(ot.Logistic())
kernels.add(ot.Beta(2.0, 2.0, -1.0, 1.0))
kernels.add(ot.Beta(3.0, 3.0, -1.0, 1.0))
meanExact = distribution.getMean()
covarianceExact = distribution.getCovariance()
for i in range(kernels.getSize()):
    kernel = kernels[i]
    print("kernel=", kernel.getName())
    smoother = ot.KernelSmoothing(kernel)
    smoothed = smoother.build(sample)
    bw = smoother.getBandwidth()
    print("kernel bandwidth=[ %.6g" % bw[0], ",  %.6g" % bw[1], "]")
    meanSmoothed = smoothed.getMean()
    print("mean(smoothed)=[ %.6g" % meanSmoothed[0],
          ",  %.6g" % meanSmoothed[1], "] mean(exact)=[", meanExact[0], ", ",
# - to draw some curves

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create a 1-d distribution
dist_1 = ot.Normal()

# Create a 2-d distribution
dist_2 = ot.ComposedDistribution(
    [ot.Normal(), ot.Triangular(0.0, 2.0, 3.0)], ot.ClaytonCopula(2.3))

# Create a 3-d distribution
copula_dim3 = ot.Student(5.0, 3).getCopula()
dist_3 = ot.ComposedDistribution([ot.Normal(), ot.Triangular(
    0.0, 2.0, 3.0), ot.Exponential(0.2)], copula_dim3)

# %%
# Get the dimension of the distribution
dist_2.getDimension()

# %%
# Get the 2nd marginal
dist_2.getMarginal(1)

# %%
Example #23
    Zm = X[3]  # m
    Hd = 0.  # m
    Zb = 55.5  # m
    S = Zv + (Q / (Ks * B * m.sqrt((Zm - Zv) / L)))**(3. / 5) - (Hd + Zb)
    return [S]


function = ot.PythonFunction(dim, 1, flood_model)

Q_law = ot.TruncatedDistribution(
    ot.Gumbel(1. / 558., 1013., ot.Gumbel.ALPHABETA), 0.,
    ot.TruncatedDistribution.LOWER)
# alpha=1/b, beta=a | you can use Gumbel(a, b, Gumbel.AB) starting from OT 1.2
Ks_law = ot.TruncatedDistribution(ot.Normal(30.0, 7.5), 0.,
                                  ot.TruncatedDistribution.LOWER)
Zv_law = ot.Triangular(49., 50., 51.)
Zm_law = ot.Triangular(54., 55., 56.)
coll = ot.DistributionCollection([Q_law, Ks_law, Zv_law, Zm_law])
distribution = ot.ComposedDistribution(coll)

x = list(map(lambda dist: dist.computeQuantile(0.5)[0], coll))
fx = function(x)

for k in [0.0, 2.0, 5.0, 8.][0:1]:
    randomVector = ot.RandomVector(distribution)
    composite = ot.RandomVector(function, randomVector)

    print('--------------------')
    print('model flood S <', k, 'gamma=', end=' ')
    print('f(', ot.NumericalPoint(x), ')=', fx)
Example #24
import openturns as ot
from openturns.viewer import View

N = 1000
# create a sample X
dist = ot.Triangular(1.0, 5.0, 10.0)
# create a Y sample : Y = 0.5 + 3 * X + eps
eps = ot.Normal(0.0, 1.0)
sample = ot.ComposedDistribution([dist, eps]).getSample(N)
f = ot.SymbolicFunction(['x', 'eps'], ['0.5+3.0*x+eps'])
sampleY = f(sample)
sampleX = sample.getMarginal(0)
sampleX.setName('X')
# Fit this linear model
factory = ot.LinearModelFactory()
regressionModel = factory.build(sampleX, sampleY, 0.9)
# Test the linear model fitting
graph = ot.VisualTest.DrawLinearModel(sampleX, sampleY, regressionModel)
cloud = graph.getDrawable(0)
cloud.setPointStyle('times')
graph.setDrawable(cloud, 0)
graph.setTitle('')
View(graph)
elif ot.VonMises().__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(
        coll, ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.VonMises().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.VonMises().__class__.__name__ == 'KernelMixture':
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwith = [1.0]
    distribution = ot.KernelMixture(kernel, bandwith, sample)
elif ot.VonMises().__class__.__name__ == 'MaximumDistribution':
    coll = [
        ot.Uniform(2.5, 3.5),
        ot.LogUniform(1.0, 1.2),
        ot.Triangular(2.0, 3.0, 4.0)
    ]
    distribution = ot.MaximumDistribution(coll)
elif ot.VonMises().__class__.__name__ == 'Multinomial':
    distribution = ot.Multinomial(5, [0.2])
elif ot.VonMises().__class__.__name__ == 'RandomMixture':
    coll = [ot.Triangular(0.0, 1.0, 5.0), ot.Uniform(-2.0, 2.0)]
    weights = [0.8, 0.2]
    cst = 3.0
    distribution = ot.RandomMixture(coll, weights, cst)
elif ot.VonMises().__class__.__name__ == 'TruncatedDistribution':
    distribution = ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0, 4.0)
elif ot.VonMises().__class__.__name__ == 'UserDefined':
    distribution = ot.UserDefined([[0.0], [1.0], [2.0]], [0.2, 0.7, 0.1])
elif ot.VonMises().__class__.__name__ == 'ZipfMandelbrot':
    distribution = ot.ZipfMandelbrot(10, 2.5, 0.3)
Example #26
import openturns as ot
from math import sqrt, pi, exp, log

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)
ot.ResourceMap.SetAsUnsignedInteger("RandomMixture-DefaultMaxSize", 4000000)
# Deactivate the simplification mechanism as we want to test the Poisson formula
# based algorithm here
ot.ResourceMap.SetAsBool("RandomMixture-SimplifyAtoms", False)

# Create a collection of test-cases and the associated references
numberOfTests = 3
testCases = list()
references = list()
testCases.append([ot.Uniform(-1.0, 3.0)] * 2)
references.append(ot.Triangular(-2.0, 2.0, 6.0))
testCases.append([ot.Normal(), ot.Normal(1.0, 2.0), ot.Normal(-2.0, 2.0)])
references.append(ot.Normal(-1.0, 3.0))
testCases.append([ot.Exponential()] * 3)
references.append(ot.Gamma(3.0, 1.0, 0.0))
print("testCases=", testCases)
print("references=", references)
for testIndex in range(len(testCases)):
    # Instantiate one distribution object
    distribution = ot.RandomMixture(testCases[testIndex])
    distribution.setBlockMin(5)
    distribution.setBlockMax(20)
    distributionReference = references[testIndex]
    print("Distribution ", repr(distribution))
    print("Distribution ", distribution)
# - to draw some curves

# %%
from __future__ import print_function
import openturns as ot
import openturns.viewer as viewer
from matplotlib import pylab as plt
ot.Log.Show(ot.Log.NONE)

# %%
# Create a 1-d distribution
dist_1 = ot.Normal()

# Create a 2-d distribution
dist_2 = ot.ComposedDistribution(
    [ot.Normal(), ot.Triangular(0.0, 2.0, 3.0)], ot.ClaytonCopula(2.3))

# Create a 3-d distribution
copula_dim3 = ot.Student(5.0, 3).getCopula()
dist_3 = ot.ComposedDistribution(
    [ot.Normal(),
     ot.Triangular(0.0, 2.0, 3.0),
     ot.Exponential(0.2)], copula_dim3)

# %%
# Get the dimension of the distribution
dist_2.getDimension()

# %%
# Get the 2nd marginal
dist_2.getMarginal(1)
Example #28
distributionCollection.add(logistic)
continuousDistributionCollection.add(logistic)

normal = ot.Normal(1.0, 2.0)
distributionCollection.add(normal)
continuousDistributionCollection.add(normal)

truncatednormal = ot.TruncatedNormal(1.0, 1.0, 0.0, 3.0)
distributionCollection.add(truncatednormal)
continuousDistributionCollection.add(truncatednormal)

student = ot.Student(10.0, 10.0)
distributionCollection.add(student)
continuousDistributionCollection.add(student)

triangular = ot.Triangular(-1.0, 2.0, 4.0)
distributionCollection.add(triangular)
continuousDistributionCollection.add(triangular)

uniform = ot.Uniform(1.0, 2.0)
distributionCollection.add(uniform)
continuousDistributionCollection.add(uniform)

weibull = ot.WeibullMin(1.0, 1.0, 2.0)
distributionCollection.add(weibull)
continuousDistributionCollection.add(weibull)

geometric = ot.Geometric(0.5)
distributionCollection.add(geometric)
discreteDistributionCollection.add(geometric)
Example #29
elif ot.LogNormal().__class__.__name__ == 'ComposedDistribution':
    copula = ot.IndependentCopula(2)
    marginals = [ot.Uniform(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, copula)
elif ot.LogNormal().__class__.__name__ == 'CumulativeDistributionNetwork':
    coll = [ot.Normal(2),ot.Dirichlet([0.5, 1.0, 1.5])]
    distribution = ot.CumulativeDistributionNetwork(coll, ot.BipartiteGraph([[0,1], [0,1]]))
elif ot.LogNormal().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
elif ot.LogNormal().__class__.__name__ == 'KernelMixture':
    kernel = ot.Uniform()
    sample = ot.Normal().getSample(5)
    bandwith = [1.0]
    distribution = ot.KernelMixture(kernel, bandwith, sample)
elif ot.LogNormal().__class__.__name__ == 'MaximumDistribution':
    coll = [ot.Uniform(2.5, 3.5), ot.LogUniform(1.0, 1.2), ot.Triangular(2.0, 3.0, 4.0)]
    distribution = ot.MaximumDistribution(coll)
elif ot.LogNormal().__class__.__name__ == 'Multinomial':
    distribution = ot.Multinomial(5, [0.2])
elif ot.LogNormal().__class__.__name__ == 'RandomMixture':
    coll = [ot.Triangular(0.0, 1.0, 5.0), ot.Uniform(-2.0, 2.0)]
    weights = [0.8, 0.2]
    cst = 3.0
    distribution = ot.RandomMixture(coll, weights, cst)
elif ot.LogNormal().__class__.__name__ == 'TruncatedDistribution':
    distribution = ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0, 4.0)
elif ot.LogNormal().__class__.__name__ == 'UserDefined':
    distribution = ot.UserDefined([[0.0], [1.0], [2.0]], [0.2, 0.7, 0.1])
elif ot.LogNormal().__class__.__name__ == 'ZipfMandelbrot':
    distribution = ot.ZipfMandelbrot(10, 2.5, 0.3)
else:
    distribution = ot.LogNormal()

distribution = []
lower = constraints.Lower()
upper = constraints.Upper()

# Variable #10 Q
distribution.append(ot.Gumbel(0.00524, 626.14))
distribution[0].setParameter(ot.GumbelAB()([1013, 558]))
distribution[0] = ot.TruncatedDistribution(distribution[0], float(lower[0]),
                                           float(upper[0]))
# Variable #22 Ks
distribution.append(ot.Normal(30, 7.5))
distribution[1] = ot.TruncatedDistribution(distribution[1], float(lower[1]),
                                           float(upper[1]))
# Variable #25 Zv
distribution.append(ot.Triangular(49, 50, 51))
# Variable #2 Zm
distribution.append(ot.Triangular(54, 54.5, 55))

# =============================================================================
# ================================= RUN =======================================
# =============================================================================

# Denote the input index
indexNumber = 3
# 1 for the first-order index, 0 for the total-order index
indexChoice = 1
# -1 for maximization, 1 for minimization
MINMAX = -1

Res = []