# To initialize the LatinHypercubeSampling class, the user must define a list containing one distribution
# for each parameter to be sampled.
#
# In addition to the distributions list, the number of samples :code:`nsamples` to be drawn is required.
# The :code:`random_state` parameter sets the seed of the random number generator.
#
# Finally, the user can choose a design criterion; the default is :class:`.Random`.
# For details on the various criteria, refer to the documentation of
# :class:`.Random`, :class:`.Centered`, :class:`.Maximin`, and :class:`.MinCorrelation`.
#
# For a user-defined criterion, an instance of the :class:`.UserCriterion` class is passed instead of one of the
# built-in criteria.

# %%

dist1 = Uniform(loc=0., scale=1.)
dist2 = Uniform(loc=0., scale=1.)

lhs_user_defined = LatinHypercubeSampling(distributions=[dist1, dist2],
                                          nsamples=5,
                                          criterion=UserCriterion())
print(lhs_user_defined.samples)

# %% md
#
# Plot the samples
# ------------------------------------
#
# The samples generated using the LatinHypercube sampling method can be retrieved using the *samples* attribute. This
# attribute is a :any:`numpy.ndarray`.
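# %% md
#
# A minimal plotting sketch (assuming matplotlib is available): scatter the two
# coordinates of the generated design.

# %%
import matplotlib.pyplot as plt

plt.scatter(lhs_user_defined.samples[:, 0], lhs_user_defined.samples[:, 1])
plt.xlabel(r'$x_1$')
plt.ylabel(r'$x_2$')
plt.show()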
# %% md
#
# Example 2
# ------------------------------------
# %%


def sinusoidal_function(x):
    return x * np.sin(x) / 10.0


# %% md
#
# Create a distribution object, generate samples and evaluate the function at the samples.

# %%

np.random.seed(1)

dist = Uniform(loc=0, scale=10)
n_samples = 200
x = dist.rvs(n_samples)
y = sinusoidal_function(x)

# %% md
#
# Create an object from the PCE class, construct a total-degree polynomial basis given a maximum polynomial degree, and
# compute the PCE coefficients using least squares regression.

# %%

max_degree = 15
polynomial_basis = TotalDegreeBasis(dist, max_degree)
least_squares = LeastSquareRegression()
pce_lstsq = PolynomialChaosExpansion(polynomial_basis=polynomial_basis,
                                     regression_method=least_squares)
pce_lstsq.fit(x, y)
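# %% md
#
# As a quick check (a sketch; the fitted PCE is evaluated through its
# :code:`predict` method), compare the surrogate against the true function on
# fresh validation samples.

# %%
x_val = dist.rvs(100)
y_val = pce_lstsq.predict(x_val)
print(np.max(np.abs(y_val.flatten() - sinusoidal_function(x_val).flatten())))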
# %% md
#
# Example 3
# ------------------------------------

# %%

import matplotlib.pyplot as plt

# %% md
#
# Set up the problem with the g-function.

# %%

a_values = [0.001, 89.9, 5.54, 42.10, 0.78, 1.26, 0.04, 0.79, 74.51, 4.32, 82.51, 41.62]
na = len(a_values)

model = PythonModel(model_script='local_pfn.py', model_object_name='gfun_sensitivity', delete_files=True,
                    a_values=a_values, var_names=['X{}'.format(i) for i in range(na)])
runmodel_object = RunModel(model=model)

dist_object = [Uniform(), ] * na

sens = MorrisSensitivity(runmodel_object=runmodel_object,
                         distributions=dist_object,
                         n_levels=20,
                         maximize_dispersion=True)
sens.run(n_trajectories=10)

print(['a{}={}'.format(i + 1, ai) for i, ai in enumerate(a_values)])

fig, ax = plt.subplots()
ax.scatter(sens.mustar_indices, sens.sigma_indices)
for i, (mu, sig) in enumerate(zip(sens.mustar_indices, sens.sigma_indices)):
    ax.text(x=mu + 0.01, y=sig + 0.01, s='X{}'.format(i + 1))
ax.set_xlabel(r'$\mu^{\star}$', fontsize=14)
ax.set_ylabel(r'$\sigma$', fontsize=14)
# %% md
#
# Example 4
# ------------------------------------

# %%
    v4 = x[:, 4] * np.sin(x[:, 0]) + x[:, 5] * np.sin(np.sum(
        x[:, :2], axis=1)) + x[:, 6] * np.sin(np.sum(
            x[:, :3], axis=1)) + x[:, 7] * np.sin(np.sum(x[:, :4], axis=1))

    return (u1 + u2 + u3 + u4)**2 + (v1 + v2 + v3 + v4)**2


# %% md
#
# Create a distribution object, generate samples and evaluate the function at the samples.

# %%

np.random.seed(1)

dist_1 = Uniform(loc=0, scale=2 * np.pi)
dist_2 = Uniform(loc=0, scale=1)

marg = [dist_1] * 4
marg_1 = [dist_2] * 4
marg.extend(marg_1)

joint = JointIndependent(marginals=marg)

n_samples = 9000
x = joint.rvs(n_samples)
y = function(x)

# %% md
#
# Create an object from the PCE class. Compute PCE coefficients using least squares regression.
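# %% md
#
# The construction code is cut off in this excerpt; a minimal sketch, mirroring
# the earlier least-squares example (the imports from :code:`UQpy.surrogates`
# and the value of :code:`max_degree` are assumptions):

# %%
max_degree = 6
polynomial_basis = TotalDegreeBasis(joint, max_degree)
least_squares = LeastSquareRegression()
pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis,
                               regression_method=least_squares)
pce.fit(x, y)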
# %% md
#
# Example 5
# ------------------------------------

# %%
                            cores_per_task=1)
print('Example: Created the model object.')

# %% md
#
# We now define the sampling scheme.
# The fire load density is assumed to be uniformly distributed between 50 :math:`MJ/m^2` and 450 :math:`MJ/m^2`.
# The yield strength is assumed to be normally distributed, with mean 250 :math:`MPa`
# and a coefficient of variation of :math:`7\%`.
#
# Creating samples using MCS.

# %%

d_u = Uniform(loc=50, scale=400)        # fire load density ~ U(50, 450) MJ/m^2
d_n = Normal(loc=2.50e8, scale=1.75e7)  # yield strength ~ N(250 MPa, 17.5 MPa)
x_mcs = MonteCarloSampling(distributions=[d_u, d_n],
                           nsamples=100,
                           random_state=987979)

# %% md
#
# Running simulations using the previously defined model object and samples

# %%

sample_points = x_mcs.samples
abaqus_sfe_model.run(samples=sample_points)

# %% md
#
# %% md
#
# Example 6
# ------------------------------------
# %%

def function(x, y):
    return (4 - 2.1 * x ** 2 + x ** 4 / 3) * x ** 2 + x * y + (-4 + 4 * y ** 2) * y ** 2


# %% md
#
# Create a distribution object, generate samples and evaluate the function at the samples.

# %%

np.random.seed(1)

dist_1 = Uniform(loc=-2, scale=4)
dist_2 = Uniform(loc=-1, scale=2)

marg = [dist_1, dist_2]
joint = JointIndependent(marginals=marg)

n_samples = 250
x = joint.rvs(n_samples)
y = function(x[:, 0], x[:, 1])

# %% md
#
# Visualize the 2D function.

# %%
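# A sketch of the visualization (matplotlib assumed to be imported as plt):
import matplotlib.pyplot as plt

x1_grid, x2_grid = np.meshgrid(np.linspace(-2, 2, 200), np.linspace(-1, 1, 200))
plt.contourf(x1_grid, x2_grid, function(x1_grid, x2_grid), levels=30)
plt.colorbar()
plt.scatter(x[:, 0], x[:, 1], s=5, c='k')
plt.show()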
# %% md
#
# Example 7
# ------------------------------------
#
# Import the necessary libraries.

# %%
from UQpy.distributions import Uniform
from UQpy.run_model.RunModel import RunModel
from UQpy.sampling import MonteCarloSampling

# %% md
#
# Define the distribution objects.

# %%

d1 = Uniform(loc=0.02, scale=0.06)
d2 = Uniform(loc=0.02, scale=0.01)
d3 = Uniform(loc=0.02, scale=0.01)
d4 = Uniform(loc=0.0025, scale=0.0075)
d5 = Uniform(loc=0.02, scale=0.06)
d6 = Uniform(loc=0.02, scale=0.01)
d7 = Uniform(loc=0.02, scale=0.01)
d8 = Uniform(loc=0.0025, scale=0.0075)

# %% md
#
# Draw the samples using MCS.

# %%

x = MonteCarloSampling(distributions=[d1, d2, d3, d4, d5, d6, d7, d8],
                       nsamples=5)  # nsamples value chosen for illustration
# %% md
#
# Example 8
# ------------------------------------
# %%


def function(x, y):
    return x**2 + y**2


# %% md
#
# Create a distribution object, generate samples and evaluate the function at the samples.

# %%

np.random.seed(1)

dist_1 = Uniform(loc=-5.12, scale=10.24)
dist_2 = Uniform(loc=-5.12, scale=10.24)

marg = [dist_1, dist_2]
joint = JointIndependent(marginals=marg)

n_samples = 100
x = joint.rvs(n_samples)
y = function(x[:, 0], x[:, 1])

# %% md
#
# Visualize the 2D function.

# %%
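# A sketch of the visualization (matplotlib assumed to be imported as plt):
import matplotlib.pyplot as plt

x1_grid, x2_grid = np.meshgrid(np.linspace(-5.12, 5.12, 200),
                               np.linspace(-5.12, 5.12, 200))
plt.contourf(x1_grid, x2_grid, function(x1_grid, x2_grid), levels=30)
plt.colorbar()
plt.scatter(x[:, 0], x[:, 1], s=5, c='k')
plt.show()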
# %% md
#
# Example 9
# ------------------------------------
#
# Import the necessary libraries.

# %%
import numpy as np

from UQpy.distributions import Uniform
from UQpy.run_model.RunModel import RunModel
from UQpy.sampling import MonteCarloSampling

# %% md
#
# Define the distribution objects.

# %%

dist1 = Uniform(loc=15000, scale=10000)
dist2 = Uniform(loc=450000, scale=80000)
dist3 = Uniform(loc=2.0e8, scale=0.5e8)

# %% md
#
# Draw the samples using MCS.

# %%

x = MonteCarloSampling(distributions=[dist1, dist2, dist3] * 6,
                       nsamples=5,
                       random_state=938475)
samples = np.array(x.samples).round(2)

# %% md
#
# Example 10
# ------------------------------------
# %%


def function(x):
    return 100 * (np.exp(-2 / (x[:, 0]**1.75)) + np.exp(-2 / (x[:, 1]**1.5)) +
                  np.exp(-2 / (x[:, 2]**1.25)))


# %% md
#
# Define the input probability distributions.

# %%

# input distributions
dist = Uniform(loc=0, scale=1)
marg = [dist] * 3
joint = JointIndependent(marginals=marg)

# %% md
#
# Compute reference mean and variance values using Monte Carlo sampling.

# %%

# reference moments via Monte Carlo Sampling
n_samples_mc = 1000000
xx = joint.rvs(n_samples_mc)
yy = function(xx)
mean_ref = yy.mean()
var_ref = yy.var()
#%% md
#
# Example 11
# ------------------------------------

#%%

np.random.seed(100)
mu, sigma = 10, 1  # true mean and standard deviation
data = np.random.normal(mu, sigma, 100).reshape((-1, 1))
np.random.seed()

# plot the data and true distribution
count, bins, ignored = plt.hist(data, 30, density=True)
plt.plot(bins,
         1 / (sigma * np.sqrt(2 * np.pi)) * np.exp(-(bins - mu)**2 /
                                                   (2 * sigma**2)),
         linewidth=2,
         color='r')
plt.title('data as histogram and true distribution to be estimated')
plt.show()

#%% md
#
# In a Bayesian setting, the definition of a prior pdf is a key point. The prior for the parameters must be defined in
# the model. Note that if no prior is given, an improper, uninformative prior is chosen, :math:`p(\theta)=1` for all
# :math:`\theta`.

#%%

p0 = Uniform(loc=0., scale=15)
p1 = Lognormal(s=1., loc=0., scale=1.)
prior = JointIndependent(marginals=[p0, p1])

candidate_model = DistributionModel(distributions=Normal(loc=None, scale=None),
                                    n_parameters=2,
                                    prior=prior)

# Learn the unknown parameters using MCMC
from UQpy.sampling import MetropolisHastings

mh1 = MetropolisHastings(jump=10,
                         burn_length=10,
                         seed=[1.0, 0.2],
                         random_state=123)
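#%% md
#
# A sketch of how this sampler can be passed to Bayesian parameter estimation
# (the remainder of the original example is not shown; :code:`nsamples` is
# chosen for illustration):

#%%
from UQpy.inference import BayesParameterEstimation

bayes_estimator = BayesParameterEstimation(inference_model=candidate_model,
                                           data=data,
                                           sampling_class=mh1,
                                           nsamples=500)
posterior_samples = bayes_estimator.sampler.samples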
# %% md
#
# Example 12
# ------------------------------------

# %%

def ishigami(xx):
    a = 7  # standard Ishigami coefficient; assumed here, as the original line is cut off
    b = 0.1
    term1 = np.sin(xx[0])
    term2 = a * np.sin(xx[1])**2
    term3 = b * xx[2]**4 * np.sin(xx[0])
    return term1 + term2 + term3


# %% md
#
# The Ishigami function has three independent random inputs, which are uniformly distributed on the
# interval :math:`[-\pi, \pi]`.

# %%

# input distributions
dist1 = Uniform(loc=-np.pi, scale=2 * np.pi)
dist2 = Uniform(loc=-np.pi, scale=2 * np.pi)
dist3 = Uniform(loc=-np.pi, scale=2 * np.pi)
marg = [dist1, dist2, dist3]
joint = JointIndependent(marginals=marg)

# %% md
#
# We now define our complete PCE, which will later be used by the best-model selection algorithm.

# %%

# %% md
#
# We must now select a polynomial basis. Here we opt for a total-degree (TD) basis, such that the univariate
# polynomials have a maximum degree equal to :math:`P` and all multivariate polynomials have a total degree
# no greater than :math:`P`.
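# %%

# A minimal sketch of the basis construction (assuming TotalDegreeBasis has
# been imported from UQpy.surrogates; the value of P is illustrative):
P = 6
polynomial_basis = TotalDegreeBasis(joint, P)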
#%% md
#
# Example 13
# ------------------------------------

#%%
np.random.seed(100)
mu, sigma = 10, 1  # true mean and standard deviation
data = np.random.normal(mu, sigma, 100).reshape((-1, 1))
np.random.seed()

# plot the data and true distribution
count, bins, ignored = plt.hist(data, 30, density=True)
plt.plot(bins,
         1 / (sigma * np.sqrt(2 * np.pi)) * np.exp(-(bins - mu)**2 /
                                                   (2 * sigma**2)),
         linewidth=2,
         color='r')
plt.title('data as histogram and true distribution to be estimated')
plt.show()

p0 = Uniform(loc=0., scale=15)
p1 = Lognormal(s=1., loc=0., scale=1.)
prior = JointIndependent(marginals=[p0, p1])

# create an instance of class Model
candidate_model = DistributionModel(distributions=Normal(loc=None, scale=None),
                                    n_parameters=2,
                                    prior=prior)

#%% md
#
# Learn the unknown parameters using :class:`.ImportanceSampling`. If no proposal is given, the samples are sampled
# from the prior.

#%%
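# A sketch of this step (assuming BayesParameterEstimation accepts an
# ImportanceSampling instance through its sampling_class argument; nsamples is
# chosen for illustration):
from UQpy.inference import BayesParameterEstimation
from UQpy.sampling import ImportanceSampling

bayes_estimator = BayesParameterEstimation(inference_model=candidate_model,
                                           data=data,
                                           sampling_class=ImportanceSampling(),
                                           nsamples=5000)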
#%% md
#
# Example 14
# ------------------------------------

#%%
import matplotlib.pyplot as plt

#%% md
#
# Set up the problem with the g-function.

#%%

model = PythonModel(model_script='local_pfn.py',
                    model_object_name='fun2_sensitivity',
                    delete_files=True,
                    var_names=['X{}'.format(i) for i in range(5)])
runmodel_object = RunModel(model=model)

dist_object = [Uniform()] * 5

sens = MorrisSensitivity(runmodel_object=runmodel_object,
                         distributions=dist_object,
                         n_levels=20,
                         maximize_dispersion=True)
sens.run(n_trajectories=10)

fig, ax = plt.subplots(figsize=(5, 3.5))
ax.scatter(sens.mustar_indices, sens.sigma_indices, s=60)
for i, (mu, sig) in enumerate(zip(sens.mustar_indices, sens.sigma_indices)):
    ax.text(x=mu + 0.01, y=sig + 0.01, s='X{}'.format(i + 1), fontsize=14)
ax.set_xlabel(r'$\mu^{\star}$', fontsize=18)
ax.set_ylabel(r'$\sigma$', fontsize=18)
# ax.set_title('Morris sensitivity indices', fontsize=16)
# %% md
#
# Example 15
# ------------------------------------

# %%

def setup():
    model = PythonModel(model_script='pfn.py', model_object_name='gfun_sensitivity', delete_files=True,
                        a_values=[0.001, 99.], var_names=['X{}'.format(i) for i in range(2)])
    runmodel_object = RunModel(model=model)
    dist_object = [Uniform(), Uniform()]
    yield runmodel_object, dist_object
# %% md
#
# Example 16
# ------------------------------------

# %%
from UQpy.surrogates import GaussianProcessRegression
from UQpy.sampling import MonteCarloSampling, AdaptiveKriging
from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Uniform
from local_BraninHoo import function
import time
from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer

# %% md
#
# Use the UQpy :class:`.MonteCarloSampling` class to generate samples for two random variables that are
# uniformly distributed.

# %%

marginals = [Uniform(loc=-5, scale=15), Uniform(loc=0, scale=15)]
x = MonteCarloSampling(distributions=marginals, nsamples=20)

# %% md
#
# :class:`.RunModel` class is used to define an object to evaluate the model at sample points.

# %%

model = PythonModel(model_script='local_BraninHoo.py',
                    model_object_name='function')
rmodel = RunModel(model=model)

# %% md
#
# The :class:`.GaussianProcessRegression` class defines an object to generate a surrogate model for a given set of data.
# %% md
#
# Example 17
# ------------------------------------

# %%
def S_to_R(S, w, t):
    """Compute the autocorrelation function R(t) from the one-sided power
    spectrum S(w) by Simpson's-rule integration over frequency."""
    dw = w[1] - w[0]
    fac = np.ones(len(w))
    fac[1:len(w) - 1:2] = 4
    fac[2:len(w) - 2:2] = 2
    fac = fac * dw / 3  # Simpson's rule weights
    R = np.zeros(len(t))
    for i in range(len(t)):
        R[i] = 2 * np.dot(fac, S * np.cos(w * t[i]))
    return R


R_g = S_to_R(S, w, t)

distribution = Uniform(0, 1)

Translate_object = Translation(distributions=distribution,
                               time_interval=dt,
                               frequency_interval=dw,
                               n_time_intervals=nt,
                               n_frequency_intervals=nw,
                               correlation_function_gaussian=R_g,
                               samples_gaussian=samples)
samples_ng = Translate_object.samples_non_gaussian


def test_samples_ng_shape():
    assert samples_ng.shape == samples.shape

# %% md
#
# Example 18
# ------------------------------------

# %%
            nsamples=5000)
print(x.samples.shape)

plt.figure()
plt.plot(x.samples[:, 0], x.samples[:, 1], 'o')
plt.show()

# %% md
# DREAM algorithm: compare with :class:`.MetropolisHastings` (input parameters are set to their default values)
# ---------------------------------------------------------------------------------------------------------------------

# %%

# Define a function to sample seeds uniformly distributed in the 2d box [-2, 2] x [-2, 2]
prior_sample = lambda nsamples: np.array([[-2, -2]]) + np.array(
    [[4, 4]]) * JointIndependent([Uniform(), Uniform()]).rvs(nsamples=nsamples)

fig, ax = plt.subplots(ncols=2, figsize=(12, 4))
seed = prior_sample(nsamples=7)

x = MetropolisHastings(dimension=2,
                       burn_length=500,
                       jump=50,
                       seed=seed.tolist(),
                       log_pdf_target=log_Rosenbrock,
                       nsamples=1000)
ax[0].plot(x.samples[:, 0], x.samples[:, 1], 'o')

x = DREAM(dimension=2,
          burn_length=500,
          jump=50,
          seed=seed.tolist(),
          log_pdf_target=log_Rosenbrock,
          nsamples=1000)  # remaining arguments mirror the MetropolisHastings run above
ax[1].plot(x.samples[:, 0], x.samples[:, 1], 'o')
#%% md
#
# Example 19
# ------------------------------------
#
# where :math:`\tilde{w}` are the normalized weights.
#
# [1] *Sequential Monte Carlo Methods in Practice*, A. Doucet, N. de Freitas, and N. Gordon, 2001, Springer, New York

from UQpy.distributions import Uniform, JointIndependent
from UQpy.sampling import ImportanceSampling
import matplotlib.pyplot as plt
import numpy as np


def log_Rosenbrock(x, param):
    return (-(100 * (x[:, 1] - x[:, 0]**2)**2 + (1 - x[:, 0])**2) / param)


proposal = JointIndependent(
    [Uniform(loc=-8, scale=16),
     Uniform(loc=-10, scale=60)])
print(proposal.get_parameters())

w = ImportanceSampling(log_pdf_target=log_Rosenbrock,
                       args_target=(20, ),
                       proposal=proposal,
                       nsamples=5000)

#%% md
#
# Look at distribution of weights
# -------------------------------

#%%
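# A sketch of the weights inspection (ImportanceSampling stores the normalized
# weights in its ``weights`` attribute):
plt.hist(w.weights, bins=50)
plt.xlabel('weight')
plt.show()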
#%% md
#
# Example 20
# ------------------------------------

#%%
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import numpy as np
# from scipy.spatial import Delaunay
from scipy.spatial import voronoi_plot_2d
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern

# UQpy classes used below (import paths are assumed for the installed UQpy version)
from UQpy.distributions import Uniform
from UQpy.sampling import TrueStratifiedSampling
from UQpy.sampling.stratified_sampling.strata import VoronoiStrata

#%% md
#
# Create the distribution objects and the Voronoi strata.

#%%

marginals = [Uniform(loc=0., scale=1.), Uniform(loc=0., scale=1.)]
strata = VoronoiStrata(seeds_number=16, dimension=2)

#%% md
#
# Use the UQpy :class:`.TrueStratifiedSampling` class to generate samples for two random variables that are
# uniformly distributed between :math:`0` and :math:`1`.

#%%

x = TrueStratifiedSampling(distributions=marginals, strata_object=strata, nsamples_per_stratum=1, random_state=1)

#%% md
#
# This plot shows the samples and strata generated by the :class:`.TrueStratifiedSampling` class.
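#%%

# A sketch of the plot: scatter the generated sample points (the strata
# outlines could additionally be drawn with voronoi_plot_2d, imported above,
# from the diagram stored by the strata object):
plt.scatter(x.samples[:, 0], x.samples[:, 1])
plt.xlabel(r'$x_1$')
plt.ylabel(r'$x_2$')
plt.show()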
# %% md
#
# Example 21
# ------------------------------------
# %% md
#
# The algorithm-specific parameters for :class:`.MetropolisHastings` are :code:`proposal` and :code:`proposal_is_symmetric`
# ---------------------------------------------------------------------------------------------------------------------------
# The default proposal is standard normal (symmetric).

# %%

# Define a few proposals to try out
from UQpy.distributions import JointIndependent, Normal, Uniform

proposals = [
    JointIndependent([Normal(), Normal()]),
    JointIndependent(
        [Uniform(loc=-0.5, scale=1.5),
         Uniform(loc=-0.5, scale=1.5)]),
    Normal()
]

proposals_is_symmetric = [True, False, False]

fig, ax = plt.subplots(ncols=3, figsize=(16, 4))
for i, (proposal, symm) in enumerate(zip(proposals, proposals_is_symmetric)):
    print(i)
    try:
        x = MetropolisHastings(dimension=2,
                               burn_length=500,
                               jump=100,
                               log_pdf_target=log_pdf_target,
                               proposal=proposal,