Example #1
def logLikelihood(X):
    '''
    Evaluate the log-likelihood of a TruncatedNormal on a sample.
    `sample` is a module-level ot.Sample; X = [mu, sigma].
    '''
    samplesize = sample.getSize()
    mu = X[0]
    sigma = X[1]
    a = sample.getMin()[0]
    b = sample.getMax()[0]
    # Widen the truncation bounds slightly so that every observation lies
    # strictly inside the support of the TruncatedNormal.
    delta = (b - a) / samplesize
    a -= delta
    b += delta
    distribution = ot.TruncatedNormal(mu, sigma, a, b)
    samplelogpdf = distribution.computeLogPDF(sample)
    loglikelihood = samplelogpdf.computeMean() * samplesize
    return loglikelihood
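
# A minimal usage sketch, not part of the original snippet: logLikelihood
# reads a module-level `sample`, so we assume one here (the parameters of the
# TruncatedNormal below are arbitrary illustration values) and evaluate the
# log-likelihood at a candidate (mu, sigma).
import openturns as ot
sample = ot.TruncatedNormal(2.0, 1.5, 1.0, 4.0).getSample(100)
print('log-likelihood at [2.0, 1.5]:', logLikelihood([2.0, 1.5]))
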
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if (ot.TruncatedNormal().__class__.__name__ == 'ComposedDistribution'):
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.TruncatedNormal().__class__.__name__ ==
      'CumulativeDistributionNetwork'):
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
else:
    distribution = ot.TruncatedNormal()
dimension = distribution.getDimension()
if dimension <= 2:
    if distribution.getDimension() == 1:
        distribution.setDescription(['$x$'])
        pdf_graph = distribution.drawPDF()
        cdf_graph = distribution.drawCDF()
        fig = plt.figure(figsize=(10, 4))
        plt.suptitle(str(distribution))
        pdf_axis = fig.add_subplot(121)
        cdf_axis = fig.add_subplot(122)
        View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
        View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
    else:
        distribution.setDescription(['$x_1$', '$x_2$'])
        pdf_graph = distribution.drawPDF()
Example #3
def cleanPoint(inPoint):
    dim = inPoint.getDimension()
    for i in range(dim):
        if (m.fabs(inPoint[i]) < 1.e-10):
            inPoint[i] = 0.0
    return inPoint


def cleanCovariance(inCovariance):
    dim = inCovariance.getDimension()
    for j in range(dim):
        for i in range(dim):
            if (m.fabs(inCovariance[i, j]) < 1.e-10):
                inCovariance[i, j] = 0.0
    return inCovariance


# Instantiate one distribution object
referenceDistribution = [
    ot.TruncatedNormal(2.0, 1.5, 1.0, 4.0),
    ot.TruncatedNormal(2.0, 1.5, 1.0, 200.0),
    ot.TruncatedNormal(2.0, 1.5, -200.0, 4.0),
    ot.TruncatedNormal(2.0, 1.5, 1.0, 4.0)
]
distribution = [
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0, 4.0),
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0,
                             ot.TruncatedDistribution.LOWER),
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 4.0,
                             ot.TruncatedDistribution.UPPER),
    ot.TruncatedDistribution(ot.Normal(2.0, 1.5),
                             ot.Interval([1.0], [4.0], [True], [True]))
]

# add a 2-d test
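# A hedged sketch of what such a 2-d test could look like (not from the
# original snippet): truncate a bivariate standard Normal to the unit box via
# an ot.Interval; the names and values here are illustrative assumptions.
dimension = 2
truncated2d = ot.TruncatedDistribution(
    ot.Normal(dimension), ot.Interval([-1.0] * dimension, [1.0] * dimension))
print(truncated2d.getRange())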
Example #4
def cleanPoint(inPoint):
    dim = inPoint.getDimension()
    for i in range(dim):
        if (m.fabs(inPoint[i]) < 1.e-10):
            inPoint[i] = 0.0
    return inPoint

def cleanCovariance(inCovariance):
    dim = inCovariance.getDimension()
    for j in range(dim):
        for i in range(dim):
            if (m.fabs(inCovariance[i, j]) < 1.e-10):
                inCovariance[i, j] = 0.0
    return inCovariance


# Instantiate one distribution object
referenceDistribution = [ot.TruncatedNormal(2.0, 1.5, 1.0, 4.0),
                         ot.TruncatedNormal(2.0, 1.5, 1.0, 200.0),
                         ot.TruncatedNormal(2.0, 1.5, -200.0, 4.0),
                         ot.TruncatedNormal(2.0, 1.5, 1.0, 4.0)]
distribution = [ot.TruncatedDistribution(ot.Normal(2.0, 1.5), 1.0, 4.0),
                ot.TruncatedDistribution(
                    ot.Normal(2.0, 1.5), 1.0, ot.TruncatedDistribution.LOWER),
                ot.TruncatedDistribution(
                    ot.Normal(2.0, 1.5), 4.0, ot.TruncatedDistribution.UPPER),
                ot.TruncatedDistribution(
                    ot.Normal(2.0, 1.5),
                    ot.Interval([1.0], [4.0], [True], [True]))]

# add a 2-d test
dimension = 2
# This distribution takes too much time for the test
#size = 70
#ref = ot.Normal(dimension)
Example #5
def showInformation(bn):
    try:
        # fails outside notebook
        import pyAgrum.lib.notebook as gnb
        gnb.showInformation(bn)
    except ImportError:
        pass

def showInference(model, evs=None, size=None):
    try:
        # fails outside notebook
        import pyAgrum.lib.notebook as gnb
        gnb.showInference(model, evs=evs, size=size)
    except ImportError:
        pass

# **Probabilistic model**

# Marginal distributions
Torque = ot.LogNormal(0.0, 0.25)
Angle = ot.TruncatedNormal(0.0, 2.0, -8.0, 8.0)
Joint = ot.Uniform(1.8, 2.2)

# Dependence
rho = 0.5
TorqueAngleCopula = ot.NormalCopula(ot.CorrelationMatrix(2, [1.0, rho, rho, 1.0]))
copula = ot.ComposedCopula([TorqueAngleCopula, ot.IndependentCopula(1)])

# Joint distribution if needed
TorqueAngle = ot.ComposedDistribution([Torque, Angle], TorqueAngleCopula)
fullDistribution = ot.ComposedDistribution([Torque, Angle, Joint], copula)

# Leakage angle (rad)
angleMax = 5.0
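
# Illustration only, not part of the original snippet: with the Angle marginal
# assumed above, the exceedance probability at angleMax comes directly from
# the distribution.
print('P(Angle > angleMax) =', Angle.computeComplementaryCDF(angleMax))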

# Leakage joint (mm)
inf_distribution = factory.build(sample)
print('estimated distribution=', inf_distribution)

# with bounds
data = [
    0.6852, 0.9349, 0.5884, 1.727, 1.581, 0.3193, -0.5701, 1.623, 2.210,
    -0.3440, -0.1646
]
sample = ot.Sample([[x] for x in data])
size = sample.getSize()
xMin = sample.getMin()[0]
xMax = sample.getMax()[0]
delta = xMax - xMin
a = xMin - delta / (size + 2)
b = xMax + delta / (size + 2)
distribution = ot.TruncatedNormal()
factory = ot.MethodOfMomentsFactory(distribution)
factory.setKnownParameter([a, b], [2, 3])
solver = factory.getOptimizationAlgorithm()
sampleMean = sample.computeMean()[0]
sampleSigma = sample.computeStandardDeviation()[0]
startingPoint = [sampleMean, sampleSigma]
solver.setStartingPoint(startingPoint)
factory.setOptimizationAlgorithm(solver)
# Bounds for the (mu, sigma) optimization: mu is unbounded (its -1.0 values
# are placeholders since the corresponding finite flags are False), while
# sigma is constrained to [0, 1.5].
lowerBound = [-1.0, 0.0]
upperBound = [-1.0, 1.5]
finiteLowerBound = [False, True]
finiteUpperBound = [False, True]
bounds = ot.Interval(lowerBound, upperBound, finiteLowerBound,
                     finiteUpperBound)
factory = ot.MethodOfMomentsFactory(distribution, bounds)
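# The snippet stops here; a plausible continuation, assuming the same sample,
# solver and data-driven bounds a and b as above, would re-apply the known
# parameters to the bounded factory and run the estimation:
factory.setKnownParameter([a, b], [2, 3])
factory.setOptimizationAlgorithm(solver)
inf_distribution = factory.build(sample)
print('estimated distribution with bounds=', inf_distribution)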
Example #7
distributionCollection.add(gumbel)
continuousDistributionCollection.add(gumbel)

lognormal = ot.LogNormal(1.0, 1.0, 2.0)
distributionCollection.add(lognormal)
continuousDistributionCollection.add(lognormal)

logistic = ot.Logistic(1.0, 1.0)
distributionCollection.add(logistic)
continuousDistributionCollection.add(logistic)

normal = ot.Normal(1.0, 2.0)
distributionCollection.add(normal)
continuousDistributionCollection.add(normal)

truncatednormal = ot.TruncatedNormal(1.0, 1.0, 0.0, 3.0)
distributionCollection.add(truncatednormal)
continuousDistributionCollection.add(truncatednormal)

student = ot.Student(10.0, 10.0)
distributionCollection.add(student)
continuousDistributionCollection.add(student)

triangular = ot.Triangular(-1.0, 2.0, 4.0)
distributionCollection.add(triangular)
continuousDistributionCollection.add(triangular)

uniform = ot.Uniform(1.0, 2.0)
distributionCollection.add(uniform)
continuousDistributionCollection.add(uniform)
from __future__ import print_function
import openturns as ot

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)


mu = [0.0] * 4
sigma = [1.0] * 4
a = [-4., -1., 1., 3.]
b = [4., 4., 2., 6.]

ot.PlatformInfo.SetNumericalPrecision(2)
for i in range(4):
    distribution = ot.TruncatedNormal(mu[i], sigma[i], a[i], b[i])
    size = 10000
    sample = distribution.getSample(size)
    factory = ot.TruncatedNormalFactory()
    estimatedDistribution = factory.build(sample)
    print("distribution=", repr(distribution))
    print("Estimated distribution=", repr(estimatedDistribution))
    estimatedDistribution = factory.build()
    print("Default distribution=", estimatedDistribution)
    estimatedDistribution = factory.build(
        distribution.getParameter())
    print("Distribution from parameters=", estimatedDistribution)
    estimatedTruncatedNormal = factory.buildAsTruncatedNormal(sample)
    print("TruncatedNormal          =", distribution)
    print("Estimated TruncatedNormal=", estimatedTruncatedNormal)
    estimatedTruncatedNormal = factory.buildAsTruncatedNormal()
        5 * 1e-5, 1e-5, 5 * 1e-6, 1e-6, 5 * 1e-7, 1e-7, 5 * 1e-8
    ])
    coeff = 1.0 / (2.0 * np.linalg.norm(gvec_tilde, ord=1))
    gvec = gvec_tilde * coeff
    dotprod = np.dot(gvec, yvec)
    return 1.0 / (1 + dotprod)


# number of RVs
N = 16

# construct joint pdf
z = []
for i in range(N):
    if i % 2 == 0:
        z.append(ot.TruncatedNormal(0, 1, 0, 3))
    else:
        z.append(ot.TruncatedNormal(0, 1, -3, 0))
jpdf = ot.ComposedDistribution(z)
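
# Quick sanity check (illustration only, not in the original snippet): the
# joint distribution built above has N independent marginals whose supports
# alternate between [0, 3] and [-3, 0].
print(jpdf.getDimension())
print(jpdf.getRange())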

# generate cross-validation set
Ncv = 100000
ot.RandomGenerator.SetSeed(13)
cv_test_points, cv_values = get_ed(qoi_mero, jpdf, Ncv, 'R')

### results storage
meanz = []
varz = []
err_cv_max_m = []
err_cv_mean_m = []
err_cv_rms_m = []
Example #10
ot.RandomGenerator.SetSeed(0)
ot.Log.Show(ot.Log.NONE)

# %%
# Generate a sample
# -----------------

# %%
# We create a `TruncatedNormal` and generate a small sample.

# %%
a = -1
b = 2.5
mu = 2.0
sigma = 3.0
distribution = ot.TruncatedNormal(mu, sigma, a, b)
sample = distribution.getSample(11)

# %%
# In order to see the distribution and the sample, we draw the PDF of the distribution and generate a cloud whose X coordinates are the sample values.

# %%
graph = distribution.drawPDF()
graph.setLegends(["TruncatedNormal"])
graph.setColors(["red"])
zeros = ot.Sample(sample.getSize(), 1)
cloud = ot.Cloud(sample, zeros)
cloud.setLegend("Sample")
graph.add(cloud)
graph.setLegendPosition("topleft")
view = viewer.View(graph)
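
# A possible follow-up, not shown in the original snippet: fit a
# TruncatedNormal back to the small sample with the dedicated factory and
# compare it to the true distribution (with only 11 points the estimate is
# expected to be rough).
fitted = ot.TruncatedNormalFactory().build(sample)
print('fitted distribution:', fitted)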
Example #11
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if ot.TruncatedNormal().__class__.__name__ == 'ComposedDistribution':
    correlation = ot.CorrelationMatrix(2)
    correlation[1, 0] = 0.25
    aCopula = ot.NormalCopula(correlation)
    marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
    distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.TruncatedNormal().__class__.__name__ ==
      'CumulativeDistributionNetwork'):
    distribution = ot.CumulativeDistributionNetwork(
        [ot.Normal(2), ot.Dirichlet([0.5, 1.0, 1.5])],
        ot.BipartiteGraph([[0, 1], [0, 1]]))
elif ot.TruncatedNormal().__class__.__name__ == 'Histogram':
    distribution = ot.Histogram([-1.0, 0.5, 1.0, 2.0], [0.45, 0.4, 0.15])
else:
    distribution = ot.TruncatedNormal()
dimension = distribution.getDimension()
if dimension == 1:
    distribution.setDescription(['$x$'])
    pdf_graph = distribution.drawPDF()
    cdf_graph = distribution.drawCDF()
    fig = plt.figure(figsize=(10, 4))
    plt.suptitle(str(distribution))
    pdf_axis = fig.add_subplot(121)
    cdf_axis = fig.add_subplot(122)
    View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
    View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
elif dimension == 2:
    distribution.setDescription(['$x_1$', '$x_2$'])
y_obs = ot.Normal(thetaTrue[0], 1.0).getSample(obsSize)
x_obs = y_obs
RWMHsampler = ot.RandomWalkMetropolisHastings(prior, initialState,
                                              instrumental)
RWMHsampler.setLikelihood(conditional, y_obs, model, x_obs)
print("Log-likelihood of thetaTrue = {!r}".format(
    RWMHsampler.computeLogLikelihood(thetaTrue)))
# produces an error with current master branch
real_504 = RWMHsampler.getRealization()
print("With 504 observations, getRealization() produces {!r}".format(
    real_504[0]))
ott.assert_almost_equal(real_504[0], 2.0)

# this example throws in ot 1.19 as it tries to evaluate the likelihood outside the support of the prior
# see MetropolisHastingsImplementation::computeLogPosterior
obs = ot.TruncatedNormal(0.5, 0.5, 0.0, 10.0).getSample(50)
likelihood = ot.GeneralizedPareto()
prior = ot.ComposedDistribution(
    [ot.LogUniform(-1.40, 4.0),
     ot.Normal(), ot.Normal()])
proposals = [
    ot.Uniform(-prior.getMarginal(k).getStandardDeviation()[0],
               +prior.getMarginal(k).getStandardDeviation()[0])
    for k in range(prior.getDimension())
]
initialState = prior.getMean()
mh_coll = [
    ot.RandomWalkMetropolisHastings(prior, initialState, proposals[i], [i])
    for i in range(2)
]
for mh in mh_coll:
Example #13
from __future__ import print_function
import openturns as ot
import openturns.testing as ott
from math import sqrt

ot.TESTPREAMBLE()
ot.RandomGenerator.SetSeed(0)

mu = [0.0] * 4
sigma = [1.0] * 4
a = [-4., -1., 1., 3.]
b = [4., 4., 2., 6.]

for i in range(4):
    ot.PlatformInfo.SetNumericalPrecision(1 if i == 2 else 2)
    distribution = ot.TruncatedNormal(mu[i], sigma[i], a[i], b[i])
    size = 10000
    sample = distribution.getSample(size)
    factory = ot.TruncatedNormalFactory()
    estimatedDistribution = factory.build(sample)
    print("distribution=", repr(distribution))
    print("Estimated distribution=", repr(estimatedDistribution))
    estimatedDistribution = factory.build()
    print("Default distribution=", estimatedDistribution)
    estimatedDistribution = factory.build(distribution.getParameter())
    print("Distribution from parameters=", estimatedDistribution)
    estimatedTruncatedNormal = factory.buildAsTruncatedNormal(sample)
    print("TruncatedNormal          =", distribution)
    print("Estimated TruncatedNormal=", estimatedTruncatedNormal)
    estimatedTruncatedNormal = factory.buildAsTruncatedNormal()
    print("Default TruncatedNormal=", estimatedTruncatedNormal)