Example #1
def test_derivatives_1_no_samples():
    dist1 = Normal(loc=200, scale=20)
    dist2 = Normal(loc=150, scale=10)
    rx = np.array([[1.0, 0.0], [0.0, 1.0]])
    ntf_obj = Nataf(distributions=[dist1, dist2], corr_x=rx)
    for file_name in glob.glob("Model_Runs_*"):
        shutil.rmtree(file_name)
    with pytest.raises(Exception):
        assert TaylorSeries._derivatives(nataf_object=ntf_obj, runmodel_object=model_i)
Example #2
def test_sorm(setup):
    dist1 = Normal(loc=500, scale=100)
    dist2 = Normal(loc=1000, scale=100)
    dist = [dist1, dist2]
    form_obj = FORM(distributions=dist, runmodel_object=setup)
    form_obj.run()
    sorm_obj = SORM(form_object=form_obj)
    for file_name in glob.glob("Model_Runs_*"):
        shutil.rmtree(file_name)
    np.testing.assert_allclose(sorm_obj.failure_probability, 2.8803e-7, rtol=1e-02)
Example #3
def test_derivatives_6_second():
    model = PythonModel(model_script='pfn2.py', model_object_name='model_j', delete_files=True)
    h_func = RunModel(model=model)
    dist1 = Normal(loc=500, scale=100)
    dist2 = Normal(loc=1000, scale=100)
    point_u = np.array([1.73673009, 0.16383283])
    rx = np.array([[1.0, 0.0], [0.0, 1.0]])
    ntf_obj = Nataf(distributions=[dist1, dist2], corr_x=rx)
    hessian = TaylorSeries._derivatives(point_u=point_u, runmodel_object=h_func,
                                        nataf_object=ntf_obj, order='second')
    np.testing.assert_allclose(hessian, [[-0.00720754, 0.00477726], [0.00477726, -0.00316643]], rtol=1e-04)
Example #4
def test_derivatives_5_run_model(setup):
    dist1 = Normal(loc=200, scale=20)
    dist2 = Normal(loc=150, scale=10)
    point_u = np.array([-2, 1])
    rx = np.array([[1.0, 0.0], [0.0, 1.0]])
    ntf_obj = Nataf(distributions=[dist1, dist2], corr_x=rx)
    gradient, qoi, array_of_samples = TaylorSeries._derivatives(point_u=point_u,
                                                                runmodel_object=setup,
                                                                nataf_object=ntf_obj)
    for file_name in glob.glob("Model_Runs_*"):
        shutil.rmtree(file_name)
    np.testing.assert_allclose(array_of_samples[0], [160, 160], rtol=1e-09)
    np.testing.assert_allclose(gradient, [20, -10], rtol=1e-09)
Example #5
    def get_polys(self, x):
        """
        Calculates the normalized Hermite polynomials evaluated at sample points.

        :param x: :class:`numpy.ndarray` containing the samples.
        :return: A list of :class:`numpy.ndarray` with the design matrix and the
                    normalized polynomials.
        """
        a, b = -np.inf, np.inf
        mean_ = Polynomials.get_mean(self)
        std_ = Polynomials.get_std(self)
        x_ = Polynomials.standardize_normal(x, mean_, std_)

        norm = Normal(0, 1)
        pdf_st = norm.pdf

        p = []
        for i in range(self.degree):
            p.append(special.hermitenorm(i, monic=False))

        return Polynomials.normalized(self.degree, x_, a, b, pdf_st, p)
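# As a numerical cross-check of the normalization used above (a sketch, separate
# from the class): probabilists' Hermite polynomials from scipy.special.hermitenorm
# satisfy integral of He_m(t) * He_n(t) * phi(t) dt = n! * delta_mn under the
# standard normal weight phi, so dividing by sqrt(n!) yields an orthonormal family.
import numpy as np
from scipy import special
from scipy.integrate import quad
from scipy.stats import norm

for m in range(4):
    for n in range(4):
        he_m, he_n = special.hermitenorm(m), special.hermitenorm(n)
        inner, _ = quad(lambda t: he_m(t) * he_n(t) * norm.pdf(t), -10, 10)
        expected = special.factorial(n) if m == n else 0.0
        assert np.isclose(inner, expected, atol=1e-8)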
Example #6
    def __init__(
        self,
        pdf_target: Union[Callable, list[Callable]] = None,
        log_pdf_target: Union[Callable, list[Callable]] = None,
        args_target: tuple = None,
        burn_length: Annotated[int, Is[lambda x: x >= 0]] = 0,
        jump: int = 1,
        dimension: int = None,
        seed: list = None,
        save_log_pdf: bool = False,
        concatenate_chains: bool = True,
        n_chains: int = None,
        proposal: Distribution = None,
        proposal_is_symmetric: bool = False,
        random_state: RandomStateType = None,
        nsamples: PositiveInteger = None,
        nsamples_per_chain: PositiveInteger = None,
    ):
        """
        Metropolis-Hastings algorithm :cite:`MCMC1` :cite:`MCMC2`

        :param pdf_target: Target density function from which to draw random samples. Either `pdf_target` or
         `log_pdf_target` must be provided (the latter should be preferred for better numerical stability).

         If `pdf_target` is a callable, it refers to the joint pdf to sample from; it must take at least one input
         **x**, the point(s) at which to evaluate the pdf. Within :class:`.MCMC` the pdf_target is evaluated
         as:
         :code:`p(x) = pdf_target(x, \*args_target)`

         where **x** is a :class:`numpy.ndarray` of shape :code:`(nsamples, dimension)` and `args_target` are
         additional positional arguments that are provided to :class:`.MCMC` via its `args_target` input.

         If `pdf_target` is a list of callables, it refers to independent marginals to sample from. The marginal in
         dimension :code:`j` is evaluated as:
         :code:`p_j(xj) = pdf_target[j](xj, \*args_target[j])` where **x** is a :class:`numpy.ndarray` of shape
         :code:`(nsamples, dimension)`
        :param log_pdf_target: Logarithm of the target density function from which to draw random samples.
         Either `pdf_target` or `log_pdf_target` must be provided (the latter should be preferred for better numerical
         stability).

         Same comments as for input `pdf_target`.
        :param args_target: Positional arguments of the pdf / log-pdf target function. See `pdf_target`
        :param burn_length: Length of burn-in - i.e., number of samples at the beginning of the chain to discard (note:
         no thinning during burn-in). Default is :math:`0`, no burn-in.
        :param jump: Thinning parameter, used to reduce correlation between samples. Setting :code:`jump=n` corresponds
         to skipping :code:`n-1` states between accepted states of the chain. Default is :math:`1` (no thinning).
        :param dimension: A scalar value defining the dimension of the target density function. Either `dimension` and
         `n_chains` or `seed` must be provided.
        :param seed: Seed of the Markov chain(s), shape :code:`(n_chains, dimension)`.
         Default: :code:`zeros(n_chains x dimension)`.

         If seed is not provided, both n_chains and dimension must be provided.
        :param save_log_pdf: Boolean that indicates whether to save log-pdf values along with the samples.
         Default: :any:`False`
        :param concatenate_chains: Boolean that indicates whether to concatenate the chains after a run, i.e., samples
         are stored as a :class:`numpy.ndarray` of shape :code:`(nsamples * n_chains, dimension)` if :any:`True`,
         :code:`(nsamples, n_chains, dimension)` if :any:`False`.
         Default: :any:`True`
        :param n_chains: The number of Markov chains to generate. Either dimension and `n_chains` or `seed` must be
         provided.
        :param proposal: Proposal distribution, must have a log_pdf/pdf and rvs method. Default: standard
         multivariate normal
        :param proposal_is_symmetric: Indicates whether the proposal distribution is symmetric; this affects the
         computation of the acceptance probability alpha. Default: :any:`False`; set to :any:`True` if the default
         proposal is used.
        :param random_state: Random seed used to initialize the pseudo-random number generator. Default is
         :any:`None`.

        :param nsamples: Number of samples to generate.
        :param nsamples_per_chain: Number of samples to generate per chain.
        """
        self.nsamples = nsamples
        self.nsamples_per_chain = nsamples_per_chain
        super().__init__(
            pdf_target=pdf_target,
            log_pdf_target=log_pdf_target,
            args_target=args_target,
            dimension=dimension,
            seed=seed,
            burn_length=burn_length,
            jump=jump,
            save_log_pdf=save_log_pdf,
            concatenate_chains=concatenate_chains,
            random_state=random_state,
            n_chains=n_chains,
        )

        self.logger = logging.getLogger(__name__)
        # Initialize algorithm specific inputs
        self.proposal = proposal
        self.proposal_is_symmetric = proposal_is_symmetric
        if self.proposal is None:
            if self.dimension is None:
                raise ValueError(
                    "UQpy: Either input proposal or dimension must be provided."
                )
            from UQpy.distributions import JointIndependent, Normal

            self.proposal = JointIndependent([Normal()] * self.dimension)
            self.proposal_is_symmetric = True
        else:
            self._check_methods_proposal(self.proposal)

        self.logger.info("\nUQpy: Initialization of " +
                         self.__class__.__name__ + " algorithm complete.")

        if (nsamples is not None) or (nsamples_per_chain is not None):
            self.run(
                nsamples=nsamples,
                nsamples_per_chain=nsamples_per_chain,
            )
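# A minimal usage sketch for the constructor documented above, on a hypothetical
# two-dimensional target (the names `log_pdf` and `mh` are illustrative, not from
# the source). The log-pdf callable follows the (nsamples, dimension) input
# convention described in the docstring.
import numpy as np
from UQpy.sampling import MetropolisHastings

def log_pdf(x):
    # x has shape (nsamples, dimension); return one log-density per row
    return -0.5 * np.sum(x ** 2, axis=1)  # standard bivariate normal, up to a constant

mh = MetropolisHastings(log_pdf_target=log_pdf, dimension=2, n_chains=4,
                        burn_length=100, jump=5, nsamples=1000)
print(mh.samples.shape)  # samples from all chains, concatenated by default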
Example #7
import shutil

import numpy as np
from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Normal
from UQpy.reliability import FORM
from UQpy.reliability import SORM
from UQpy.distributions import Lognormal

m0 = 7    # target mean of the lognormal variable
v0 = 1.4  # target standard deviation of the lognormal variable
# Moment matching: sigma^2 = ln(1 + (v0/m0)^2) and mu = ln(m0) - sigma^2/2,
# with the scipy/UQpy parametrization s = sigma and scale = exp(mu)
mu = np.log(m0) - np.log(np.sqrt(1 + (v0 / m0) ** 2))
scale = np.exp(mu)
s = np.sqrt(np.log(1 + (v0 / m0) ** 2))
loc_ = 0.0

dist1 = Normal(loc=20., scale=2)
dist2 = Lognormal(s=s, loc=0.0, scale=scale)
model = PythonModel(model_script='pfn.py', model_object_name="example4",)
RunModelObject4 = RunModel(model=model)
form = FORM(distributions=[dist1, dist2], runmodel_object=RunModelObject4)
form.run()
Q0 = SORM(form_object=form)


# print results
print('SORM probability of failure: %s' % Q0.failure_probability)
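# Sanity check of the moment matching above (a sketch): UQpy's Lognormal wraps
# scipy.stats.lognorm with the same s/loc/scale parametrization, so the target
# mean m0 = 7 and standard deviation v0 = 1.4 should be recovered exactly.
from scipy.stats import lognorm
print(lognorm.mean(s, loc=loc_, scale=scale))  # ~7.0
print(lognorm.std(s, loc=loc_, scale=scale))   # ~1.4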

Example #8
#%%
import shutil

import numpy as np
import matplotlib.pyplot as plt
from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Normal
from UQpy.reliability import FORM


model = PythonModel(model_script='pfn.py', model_object_name="example1")
RunModelObject = RunModel(model=model)

dist1 = Normal(loc=200., scale=20.)
dist2 = Normal(loc=150, scale=10.)
Q = FORM(distributions=[dist1, dist2], runmodel_object=RunModelObject, tol1=1e-5, tol2=1e-5)
Q.run()


# print results
print('Design point in standard normal space: %s' % Q.DesignPoint_U)
print('Design point in original space: %s' % Q.DesignPoint_X)
print('Hasofer-Lind reliability index: %s' % Q.beta)
print('FORM probability of failure: %s' % Q.failure_probability)
print(Q.dg_u_record)


# Supporting function: multivariate Gaussian density evaluated on a grid
def multivariate_gaussian(pos, mu, Sigma):
    from scipy.stats import multivariate_normal
    # `pos` holds the grid coordinates along its last axis
    return multivariate_normal(mean=mu, cov=Sigma).pdf(pos)
Example #9
# :math:`g(U) = -\frac{1}{\sqrt{d}}\sum_{i=1}^{d} u_i + \beta`
#
# The probability of failure in this case is :math:`P(F) \approx 10^{-3}` for :math:`\beta = 3.0902`
#
# Initially we have to import the necessary modules.

#%%

import shutil

from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Normal
from UQpy.reliability import FORM

dist1 = Normal(loc=0., scale=1.)
dist2 = Normal(loc=0., scale=1.)

model = PythonModel(model_script='pfn.py', model_object_name="example2")
RunModelObject2 = RunModel(model=model)

Z = FORM(distributions=[dist1, dist2], runmodel_object=RunModelObject2)
Z.run()

# print results
print('Design point in standard normal space: %s' % Z.DesignPoint_U)
print('Design point in original space: %s' % Z.DesignPoint_X)
print('Hasofer-Lind reliability index: %s' % Z.beta)
print('FORM probability of failure: %s' % Z.failure_probability)
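# The model script pfn.py is not shown in these snippets. A plausible sketch of
# the `example2` limit state it might contain, assuming the function receives an
# (n, d) array of samples and returns one value per row (the body below is an
# assumption, not the source's code):
import numpy as np

def example2(samples=None):
    d = 2
    beta = 3.0902  # Phi(-3.0902) ~ 1e-3, matching the stated failure probability
    return beta - np.sum(samples, axis=1) / np.sqrt(d)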

Example #10
         the Ne eigenvalues
    """
    ev = [(m * np.pi / lx)**2 + (n * np.pi / ly)**2 for m in range(1, Ne + 1)
          for n in range(1, Ne + 1)]
    ev = np.array(ev)

    return ev[:Ne]


# %% md
#
# Create a distribution object.

# %%

pdf_lx = Normal(loc=2, scale=0.02)
pdf_ly = Normal(loc=1, scale=0.01)
margs = [pdf_lx, pdf_ly]
joint = JointIndependent(marginals=margs)

# %% md
#
# Define the number of input dimensions and choose the number of output dimensions (number of eigenvalues).

# %%

dim_in = 2
dim_out = 10

# %% md
#
Example #11
k_hi = (omega - epsilon)**2 * m
k_lo = (omega + epsilon)**2 * m

# %% md
#
# Plot the failure domain

# %%

x = np.linspace(2, 8, 1000)
y = np.linspace(25, 225, 1000)

X, Y = np.meshgrid(x, y)
Z = np.zeros((1000, 1000))

d1 = Normal(loc=5, scale=1)
d2 = Normal(loc=125, scale=20)

dist = JointIndependent(marginals=[d1, d2])

for i in range(len(x)):
    Z[i, :] = dist.pdf(
        np.append(np.atleast_2d(X[i, :]), np.atleast_2d(Y[i, :]), 0).T)
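# The row-by-row loop above can equivalently be written as one vectorized call,
# since JointIndependent.pdf accepts an (n, 2) array (a sketch):
points = np.column_stack([X.ravel(), Y.ravel()])
Z = dist.pdf(points).reshape(X.shape)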

fig, ax = plt.subplots()
CS = ax.contour(X, Y, Z, 15)
plt.plot(m, k_hi, 'k')
plt.plot(m, k_lo, 'k')
# plt.fill_between(m,k_lo,k_hi)
plt.xlim([mu_m - 3 * sigma_m, mu_m + 3 * sigma_m])
plt.ylim([mu_k - 3 * sigma_k, mu_k + 3 * sigma_k])
Example #12
count, bins, ignored = plt.hist(data_1, 30, density=True)
plt.plot(bins, 1 / (sigma * np.sqrt(2 * np.pi)) * np.exp(- (bins - mu) ** 2 / (2 * sigma ** 2)),
         linewidth=2, color='r')
plt.title('Histogram of the data')
plt.show()

#%% md
#
# Create an instance of the DistributionModel class. The user must define the number of parameters to be estimated,
# in this case 2 (mean and standard deviation), and set those parameters to None when instantiating the distribution
# object. For maximum likelihood estimation, no prior pdf is required.

#%%

# set parameters to be learnt as None
dist = Normal(loc=None, scale=None)
candidate_model = DistributionModel(n_parameters=2, distributions=dist)

ml_estimator = MLE(inference_model=candidate_model, data=data_1, n_optimizations=3)
print('ML estimates of the mean={0:.3f} (true=0.) and std. dev={1:.3f} (true=0.1)'.format(
    ml_estimator.mle[0], ml_estimator.mle[1]))

#%% md
#
# We can also fix one of the parameters and learn the remaining one

#%%

d = Normal(loc=0., scale=None)
candidate_model = DistributionModel(n_parameters=1, distributions=d)
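# The snippet stops after defining the one-parameter model; running the
# estimation mirrors the earlier call (a sketch, reusing data_1 from above):
ml_estimator = MLE(inference_model=candidate_model, data=data_1, n_optimizations=3)
print('ML estimate of the std. dev. (true=0.1): {0:.3f}'.format(ml_estimator.mle[0]))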
Example #13
#%%

param_true = np.array([1.0, 2.0]).reshape(1, -1)
var_n = 1
error_covariance = var_n * np.eye(50)
print(param_true.shape)

z = RunModel(samples=param_true,
             model_script='local_pfn_models.py',
             model_object_name='model_quadratic',
             vec=False,
             var_names=['theta_1', 'theta_2'])
data_clean = z.qoi_list[0].reshape((-1, ))
data = data_clean + Normal(scale=np.sqrt(var_n)).rvs(nsamples=data_clean.size,
                                                     random_state=456).reshape(
                                                         (-1, ))

#%% md
#
# Define the models, compute the true values of the evidence.
#
# For all three models, a Gaussian prior is chosen for the parameters, with mean and covariance matrix of the
# appropriate dimensions. Each model is given prior probability :math:`P(m_{j}) = 1/3`.

#%%

model_names = ['model_linear', 'model_quadratic', 'model_cubic']
model_n_params = [1, 2, 3]
model_prior_means = [[0.], [0., 0.], [0., 0., 0.]]
model_prior_stds = [[10.], [1., 1.], [1., 2., 0.25]]
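# One way to turn these lists into the Gaussian priors described above
# (a sketch; `model_priors` is an illustrative name, not from the source):
from UQpy.distributions import JointIndependent, Normal

model_priors = [JointIndependent([Normal(loc=m, scale=sd) for m, sd in zip(means, stds)])
                for means, stds in zip(model_prior_means, model_prior_stds)]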
Example #14
                            verbose=True,
                            cores_per_task=1)
print('Example: Created the model object.')

# %% md
#
# Towards defining the sampling scheme:
# The fire load density is assumed to be uniformly distributed between 50 :math:`MJ/m^2` and 450 :math:`MJ/m^2`.
# The yield strength is assumed to be normally distributed with mean :math:`250 MPa` and a coefficient of
# variation of :math:`7\%`.
#
# Creating samples using MCS.

# %%

# Fire load density: Uniform between 50 and 450 MJ/m^2
d_u = Uniform(loc=50, scale=400)
# Yield strength: Normal with mean 2.50e8 Pa and std 1.75e7 Pa (7% c.o.v.)
d_n = Normal(loc=2.50e8, scale=1.75e7)
x_mcs = MonteCarloSampling(distributions=[d_u, d_n],
                           samples_number=100,
                           random_state=987979)

# %% md
#
# Running simulations using the previously defined model object and samples

# %%

sample_points = x_mcs.samples
abaqus_sfe_model.run(samples=sample_points)

Example #15
# %% md
# Example of a multivariate distribution from joint independent marginals
# ------------------------------------------------------------------------

#%%
from UQpy.distributions import Normal, JointIndependent
from UQpy.distributions import Gumbel, JointCopula

#%% md
#
# Define a Copula
# ---------------
# The definition of a bivariate distribution with a copula is similar to defining a multivariate distribution from
# independent marginals. In both cases a list of marginals needs to be defined. In case of

#%%
marginals = [Normal(loc=0., scale=1), Normal(loc=0., scale=1)]
copula = Gumbel(theta=3.)

# dist_1 is a multivariate normal with independent marginals
dist_1 = JointIndependent(marginals)
print('Does the distribution with independent marginals have an rvs method?')
print(hasattr(dist_1, 'rvs'))

# dist_2 exhibits dependence between the two dimensions, defined using a Gumbel copula
dist_2 = JointCopula(marginals=marginals, copula=copula)
print('Does the distribution with copula have an rvs method?')
print(hasattr(dist_2, 'rvs'))

#%% md
#
# Plot the pdf of the distribution before and after the copula
Example #16
# %% md
#
# Step-by-step: continuous univariate distribution
# -------------------------------------------------
# First, we import UQpy's normal distribution class.

# %%
from UQpy.distributions import Normal

# %% md
#
# We'll start by constructing two identical standard normal distributions, :code:`normal1` and :code:`normal2`.

# %%

normal1 = normal2 = Normal()

# %% md
#
# Next, we'll construct a :code:`MonteCarloSampling` object :code:`mc` to generate random samples following those
# distributions. Here, we specify an optional initial number of samples, :code:`nsamples`, to be generated at the
# object's construction. For the purposes of this demonstration, we also supply a random seed :code:`random_state`.
#
# We access the generated samples via the :code:`samples` attribute.

# %%

mc = MonteCarloSampling(distributions=[normal1, normal2],
                        nsamples=5,
                        random_state=RandomState(123))
Example #17
plt.title('data as histogram and true distribution to be estimated')
plt.show()

#%% md
#
# In a Bayesian setting, the definition of a prior pdf is a key point. The prior for the parameters must be defined in
# the model. Note that if no prior is given, an improper, uninformative prior is chosen, :math:`p(\theta)=1` for all
# :math:`\theta`.

#%%

p0 = Uniform(loc=0., scale=15)
p1 = Lognormal(s=1., loc=0., scale=1.)
prior = JointIndependent(marginals=[p0, p1])

candidate_model = DistributionModel(distributions=Normal(loc=None, scale=None),
                                    n_parameters=2,
                                    prior=prior)
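# For contrast with the note above: omitting `prior` falls back to the improper,
# uninformative prior p(theta) = 1 (a sketch; `candidate_model_flat` is an
# illustrative name):
candidate_model_flat = DistributionModel(distributions=Normal(loc=None, scale=None),
                                         n_parameters=2)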

# Learn the unknown parameters using MCMC
from UQpy.sampling import MetropolisHastings

mh1 = MetropolisHastings(jump=10,
                         burn_length=10,
                         seed=[1.0, 0.2],
                         random_state=123)

bayes_estimator = BayesParameterEstimation(inference_model=candidate_model,
                                           data=data_1,
                                           nsamples=500,
                                           sampling_class=mh1)
Example #18
import matplotlib.pyplot as plt
from UQpy.inference import DistributionModel, MLE
from UQpy.distributions import Normal
from UQpy.inference import MinimizeOptimizer
from UQpy.distributions import JointIndependent, JointCopula, Gumbel
from UQpy.sampling import ImportanceSampling

#%% md
#
# First, data is generated from a true model. A distribution with a copula does not possess a fit method, so sampling
# is performed using importance sampling/resampling.

#%%

# dist_true exhibits dependence between the two dimensions, defined using a Gumbel copula
dist_true = JointCopula(marginals=[Normal(), Normal()],
                        copula=Gumbel(theta=2.))

# generate data using importance sampling: sample from a bivariate gaussian without copula, then weight samples
u = ImportanceSampling(
    proposal=JointIndependent(marginals=[Normal(), Normal()]),
    log_pdf_target=dist_true.log_pdf,
    nsamples=500)
print(u.samples.shape)
print(u.weights.shape)
# Resample to obtain 5,000 data points
u.resample(nsamples=5000)
data_2 = u.unweighted_samples
print('Shape of data: {}'.format(data_2.shape))

fig, ax = plt.subplots()
Example #19
from UQpy.reliability import SubsetSimulation
from UQpy.run_model.RunModel import RunModel
from UQpy.sampling import ModifiedMetropolisHastings, Stretch
# First import the file that contains the newly defined Rosenbrock distribution
from local_Rosenbrock import Rosenbrock

# %% md
#
# :class:`.ModifiedMetropolisHastings` Initial Samples

# %%

m = PythonModel(model_script='local_Rosenbrock_pfn.py', model_object_name="RunPythonModel")
model = RunModel(model=m)
dist = Rosenbrock(p=100.)
dist_prop1 = Normal(loc=0, scale=1)
dist_prop2 = Normal(loc=0, scale=10)

x = stats.norm.rvs(loc=0, scale=1, size=(100, 2), random_state=83276)

mcmc_init1 = ModifiedMetropolisHastings(dimension=2, log_pdf_target=dist.log_pdf, seed=x.tolist(),
                                        burn_length=1000, proposal=[dist_prop1, dist_prop2],
                                        random_state=8765)
mcmc_init1.run(10000)

sampling = Stretch(log_pdf_target=dist.log_pdf, dimension=2, n_chains=1000, random_state=38546)
x_ss_MMH = SubsetSimulation(sampling=sampling, runmodel_object=model, conditional_probability=0.1,
                            nsamples_per_subset=10000, samples_init=mcmc_init1.samples)

for i in range(len(x_ss_MMH.performance_function_per_level)):
    plt.scatter(x_ss_MMH.samples[i][:, 0], x_ss_MMH.samples[i][:, 1], marker='o')
Example #20
rstate = np.random.RandomState(123)
data_noisy = data_clean + rstate.randn(*data_clean.shape)

from scipy.stats import norm


def log_target(x, data, x_domain):
    log_target_value = np.zeros(x.shape[0])
    for i, xx in enumerate(x):
        h_xx = xx[0] * x_domain + xx[1] * x_domain**2
        log_target_value[i] = np.sum(
            [norm.logpdf(hxi - datai) for hxi, datai in zip(h_xx, data)])
    return log_target_value


proposal = JointIndependent([Normal(scale=0.1), Normal(scale=0.05)])

sampler = MetropolisHastings(nsamples=500,
                             dimension=2,
                             log_pdf_target=log_target,
                             burn_length=10,
                             jump=10,
                             n_chains=1,
                             args_target=(data_noisy, domain),
                             proposal=proposal)

print(sampler.samples.shape)
samples = sampler.samples

plt.plot(samples[:, 0], samples[:, 1], 'o', alpha=0.5)
plt.plot(1., 2., marker='x', color='orange')
Example #21
# The probability of failure in this case is :math:`P_f \approx 0.079` for :math:`\beta = 1.413`

# %%

# %% md
#
# Initially we have to import the necessary modules.

# %%

from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Normal
from UQpy.reliability import FORM

dist1 = Normal(loc=20., scale=3.5)
dist2 = Normal(loc=5., scale=0.8)
dist3 = Normal(loc=4., scale=0.4)

model = PythonModel(
    model_script='pfn.py',
    model_object_name="example3",
)
RunModelObject3 = RunModel(model=model)

Z0 = FORM(distributions=[dist1, dist2, dist3], runmodel_object=RunModelObject3)
Z0.run()

print('Design point in standard normal space: %s' % Z0.DesignPoint_U)
print('Design point in original space: %s' % Z0.DesignPoint_X)
print('Hasofer-Lind reliability index: %s' % Z0.beta)
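# The failure probability quoted at the top of this example follows from the
# FORM relation P_f = Phi(-beta):
from scipy.stats import norm
print(norm.cdf(-1.413))  # ~0.079, matching the stated probability of failure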
Example #22
    ax.plot(np.sqrt(adaptive_covariance[:, 0, i, i]),
            label='dimension {}'.format(i))
ax.set_title('Adaptive proposal std. dev. in both dimensions')
ax.legend()
plt.show()

# %% md
# MMH: target pdf is given as a joint pdf
# ----------------------------------------
# The proposal should be a one-dimensional distribution or a list of 1D distributions, one per dimension.

# %%

from UQpy.distributions import Normal

proposal = [Normal(), Normal()]
proposal_is_symmetric = [False, False]

x = ModifiedMetropolisHastings(dimension=2,
                               burn_length=500,
                               jump=50,
                               log_pdf_target=log_Rosenbrock,
                               proposal=proposal,
                               proposal_is_symmetric=proposal_is_symmetric,
                               n_chains=1,
                               nsamples=500)

fig, ax = plt.subplots()
ax.plot(x.samples[:, 0], x.samples[:, 1], linestyle='none', marker='.')

# %% md
Example #23
# %%


def oakley_function(x):
    return 5 + x + np.cos(x)


# %% md
#
# Create a distribution object, generate samples and evaluate the function at the samples.

# %%

np.random.seed(1)

dist = Normal(loc=0, scale=2)
n_samples = 100
x = dist.rvs(n_samples)
y = oakley_function(x)

# %% md
#
# Create an object from the PCE class, construct a total-degree polynomial basis given a maximum polynomial degree, and
# compute the PCE coefficients using least squares regression.

# %%

max_degree = 8
polynomial_basis = TotalDegreeBasis(dist, max_degree)
least_squares = LeastSquareRegression()
pce_lstsq = PolynomialChaosExpansion(polynomial_basis=polynomial_basis,
                                     regression_method=least_squares)
pce_lstsq.fit(x, y)
Example #24
#%%

# Generate data

param_true = np.array([1.0, 2.0]).reshape((1, -1))
print('Shape of true parameter vector: {}'.format(param_true.shape))

model = PythonModel(model_script='local_pfn_models.py', model_object_name='model_quadratic', delete_files=True,
                    var_names=['theta_0', 'theta_1'])
h_func = RunModel(model=model)
h_func.run(samples=param_true)

# Add noise
error_covariance = 1.
data_clean = np.array(h_func.qoi_list[0])
noise = Normal(loc=0., scale=np.sqrt(error_covariance)).rvs(nsamples=50).reshape((50,))
data_3 = data_clean + noise
print('Shape of data: {}'.format(data_3.shape))


#%% md
#
# Then we create an instance of the ComputationalModel class and perform maximum likelihood estimation of the two
# parameters.

#%%

candidate_model = ComputationalModel(n_parameters=2, runmodel_object=h_func, error_covariance=error_covariance)

optimizer = MinimizeOptimizer(method='nelder-mead')
ml_estimator = MLE(inference_model=candidate_model, data=data_3, n_optimizations=1)
Example #25
from UQpy.sampling import MonteCarloSampling, AdaptiveKriging
from UQpy.run_model.RunModel import RunModel
from UQpy.distributions import Normal
from local_series import series
import matplotlib.pyplot as plt
import time
from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer

# %% md
#
# Use UQpy's :class:`.MonteCarloSampling` class to generate samples of two random variables, each normally
# distributed with mean :math:`0` and standard deviation :math:`4` (matching the marginals defined below).

# %%

marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1)

# %% md
#
# The RunModel class is used to define an object that evaluates the model at the sample points.

# %%

model = PythonModel(model_script='local_series.py', model_object_name='series')
rmodel = RunModel(model=model)

# %% md
#
# :class:`.Kriging` class defines an object to generate a surrogate model for a given set of data.
Example #26
pick_model = 'all'

# %% md
#
# Example 1: Three scalar random variables
# ----------------------------------------------------
# In this example, we pass three scalar random variables. Note that this is different from assigning a single variable
# with three components, which will be handled in the following example.
#
# Here we will pass the samples both as an ndarray and as a list. Recall that UQpy converts all samples into an ndarray
# of at least two dimensions internally.

# %%

if pick_model in {'scalar', 'vector', 'all'}:
    d = Normal(loc=0, scale=1)
    x_mcs = MonteCarloSampling(distributions=[d, d, d],
                               nsamples=5,
                               random_state=987979)
    names = ['var1', 'var11', 'var111']

    # UQpy returns samples as an ndarray. Convert them to a list for part 1.2
    x_mcs_list = list(x_mcs.samples)
    print(
        "Monte Carlo samples of three random variables from a standard normal distribution."
    )
    print('Samples stored as an array:')
    print('Data type:', type(x_mcs.samples))
    print('Number of samples:', len(x_mcs.samples))
    print('Dimensions of samples:', np.shape(x_mcs.samples))
    print('Samples')
Example #27
# Generate data
param_true = np.array([1.0, 2.0]).reshape((1, -1))
print('Shape of true parameter vector: {}'.format(param_true.shape))

model = PythonModel(model_script='local_pfn_models.py',
                    model_object_name='model_quadratic',
                    var_names=['theta_0', 'theta_1'])
h_func = RunModel(model=model)
h_func.run(samples=param_true)
data_clean = np.array(h_func.qoi_list[0])
print(data_clean.shape)

# Add noise, use a RandomState for reproducible results
error_covariance = 1.
noise = Normal(loc=0.,
               scale=np.sqrt(error_covariance)).rvs(nsamples=50,
                                                    random_state=123).reshape(
                                                        (50, ))
data_3 = data_clean + noise
print('Shape of data: {}'.format(data_3.shape))
print(data_3[:4])

p0 = Normal()
p1 = Normal()
prior = JointIndependent(marginals=[p0, p1])

inference_model = ComputationalModel(n_parameters=2,
                                     runmodel_object=h_func,
                                     error_covariance=error_covariance,
                                     prior=prior)

proposal = JointIndependent([Normal(scale=0.1), Normal(scale=0.05)])
Example #28
param_true = np.array([1.0, 2.0]).reshape((1, -1))
print('Shape of true parameter vector: {}'.format(param_true.shape))

model = PythonModel(model_script='local_pfn_models.py',
                    model_object_name='model_quadratic',
                    delete_files=True,
                    var_names=['theta_0', 'theta_1'])
h_func = RunModel(model=model)
h_func.run(samples=param_true)

# Add noise
error_covariance = 1.
data_clean = np.array(h_func.qoi_list[0])
noise = Normal(loc=0.,
               scale=np.sqrt(error_covariance)).rvs(nsamples=50).reshape(
                   (50, ))
data_3 = data_clean + noise
print('Shape of data: {}'.format(data_3.shape))

inference_model = ComputationalModel(n_parameters=2,
                                     runmodel_object=h_func,
                                     error_covariance=error_covariance)

sampling = ImportanceSampling(proposal=JointIndependent([Normal(scale=2)] * 2))
bayes_estimator = BayesParameterEstimation(inference_model=inference_model,
                                           data=data_3,
                                           sampling_class=sampling,
                                           nsamples=5000)
Example #29
print(seed)
print(x.samples[0, :, :])

# %% md
#
# The algorithm-specific parameters for MetropolisHastings are proposal and proposal_is_symmetric
# -------------------------------------------------------------------------------------------------
# The default proposal is standard normal (symmetric).

# %%

# Define a few proposals to try out
from UQpy.distributions import JointIndependent, Normal, Uniform

proposals = [
    JointIndependent([Normal(), Normal()]),
    JointIndependent(
        [Uniform(loc=-0.5, scale=1.5),
         Uniform(loc=-0.5, scale=1.5)]),
    Normal()
]

proposals_is_symmetric = [True, False, False]

fig, ax = plt.subplots(ncols=3, figsize=(16, 4))
for i, (proposal, symm) in enumerate(zip(proposals, proposals_is_symmetric)):
    print(i)
    try:
        x = MetropolisHastings(dimension=2,
                               burn_length=500,
                               jump=100,