Example #1
def test_regression_model():
    param_true = np.array([1.0, 2.0]).reshape((1, -1))
    from UQpy.run_model.model_execution.PythonModel import PythonModel
    model = PythonModel(model_script='pfn_models.py',
                        model_object_name='model_quadratic',
                        var_names=['theta_0', 'theta_1'])
    h_func = RunModel(model=model)
    h_func.run(samples=param_true)

    # Add noise
    error_covariance = 1.
    data_clean = np.array(h_func.qoi_list[0])
    noise = Normal(loc=0., scale=np.sqrt(error_covariance)).rvs(
        nsamples=4, random_state=1).reshape((4, ))
    data_3 = data_clean + noise

    candidate_model = ComputationalModel(n_parameters=2,
                                         runmodel_object=h_func,
                                         error_covariance=error_covariance)

    optimizer = MinimizeOptimizer(method='nelder-mead')
    ml_estimator = MLE(inference_model=candidate_model,
                       data=data_3,
                       n_optimizations=1,
                       random_state=1,
                       optimizer=optimizer)

    assert ml_estimator.mle[0] == 0.8689097631871134
    assert ml_estimator.mle[1] == 2.0030767805841143
Example #2
def test_akmcs_expected_feasibility():
    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

    marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
    x = MonteCarloSampling(distributions=marginals,
                           nsamples=20,
                           random_state=1)
    model = PythonModel(model_script='series.py', model_object_name="series")
    rmodel = RunModel(model=model)
    regression_model = LinearRegression()
    correlation_model = ExponentialCorrelation()
    K = Kriging(
        regression_model=regression_model,
        correlation_model=correlation_model,
        optimizations_number=10,
        correlation_model_parameters=[1, 1],
        optimizer=MinimizeOptimizer('l-bfgs-b'),
    )
    # OPTIONS: 'U', 'EFF', 'Weighted-U'
    learning_function = ExpectedFeasibility(eff_a=0,
                                            eff_epsilon=2,
                                            eff_stop=0.001)
    a = AdaptiveKriging(distributions=marginals,
                        runmodel_object=rmodel,
                        surrogate=K,
                        learning_nsamples=10**3,
                        n_add=1,
                        learning_function=learning_function,
                        random_state=2)
    a.run(nsamples=25, samples=x.samples)

    assert a.samples[23, 0] == 1.366058523912817
    assert a.samples[20, 1] == -12.914668932772358
Example #3
def test_akmcs_samples_error():
    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

    marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
    x = MonteCarloSampling(distributions=marginals,
                           nsamples=20,
                           random_state=0)
    model = PythonModel(model_script='series.py', model_object_name="series")
    rmodel = RunModel(model=model)
    regression_model = LinearRegression()
    correlation_model = ExponentialCorrelation()
    K = Kriging(regression_model=regression_model,
                correlation_model=correlation_model,
                optimizer=MinimizeOptimizer('l-bfgs-b'),
                optimizations_number=10,
                correlation_model_parameters=[1, 1],
                random_state=1)
    # OPTIONS: 'U', 'EFF', 'Weighted-U'
    learning_function = WeightedUFunction(weighted_u_stop=2)
    with pytest.raises(NotImplementedError):
        a = AdaptiveKriging(distributions=[Normal(loc=0., scale=4.)] * 3,
                            runmodel_object=rmodel,
                            surrogate=K,
                            learning_nsamples=10**3,
                            n_add=1,
                            learning_function=learning_function,
                            random_state=2,
                            samples=x.samples)
Example #4
def test_akmcs_u():
    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

    marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
    x = MonteCarloSampling(distributions=marginals,
                           nsamples=20,
                           random_state=1)
    model = PythonModel(model_script='series.py', model_object_name="series")
    rmodel = RunModel(model=model)
    regression_model = LinearRegression()
    correlation_model = ExponentialCorrelation()
    K = Kriging(regression_model=regression_model,
                correlation_model=correlation_model,
                optimizer=MinimizeOptimizer('l-bfgs-b'),
                optimizations_number=10,
                correlation_model_parameters=[1, 1],
                random_state=0)
    # OPTIONS: 'U', 'EFF', 'Weighted-U'
    learning_function = UFunction(u_stop=2)
    a = AdaptiveKriging(distributions=marginals,
                        runmodel_object=rmodel,
                        surrogate=K,
                        learning_nsamples=10**3,
                        n_add=1,
                        learning_function=learning_function,
                        random_state=2)
    a.run(nsamples=25, samples=x.samples)

    assert a.samples[23, 0] == -4.141979058326188
    assert a.samples[20, 1] == -1.6476534435429009
Example #5
def test_akmcs_expected_improvement_global_fit():
    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

    marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
    x = MonteCarloSampling(distributions=marginals,
                           nsamples=20,
                           random_state=1)
    model = PythonModel(model_script='series.py', model_object_name="series")
    rmodel = RunModel(model=model)
    regression_model = LinearRegression()
    correlation_model = ExponentialCorrelation()
    K = Kriging(
        regression_model=regression_model,
        correlation_model=correlation_model,
        optimizations_number=10,
        correlation_model_parameters=[1, 1],
        optimizer=MinimizeOptimizer('l-bfgs-b'),
    )
    # OPTIONS: 'U', 'EFF', 'Weighted-U'
    learning_function = ExpectedImprovementGlobalFit()
    a = AdaptiveKriging(distributions=marginals,
                        runmodel_object=rmodel,
                        surrogate=K,
                        learning_nsamples=10**3,
                        n_add=1,
                        learning_function=learning_function,
                        random_state=2)
    a.run(nsamples=25, samples=x.samples)

    assert a.samples[23, 0] == 11.939859785098493
    assert a.samples[20, 1] == -8.429899469300118
Example #6
def test_subset():  # Define the structural problem
    n_variables = 2
    model = 'pfn5.py'
    Example = 'Example1'

    omega = 6
    epsilon = 0.01
    mu_m = 5
    sigma_m = 1
    mu_k = 125
    sigma_k = 25
    m = np.linspace(mu_m - 3 * sigma_m, mu_m + 3 * sigma_m, 101)
    d_m = Normal(loc=mu_m, scale=sigma_m)
    d_k = Normal(loc=mu_k, scale=sigma_k)
    dist_nominal = JointIndependent(marginals=[d_m, d_k])

    from UQpy.sampling import Stretch

    n_samples_set = 1000
    p_cond = 0.1
    n_chains = int(n_samples_set * p_cond)

    mc = MonteCarloSampling(distributions=dist_nominal,
                            nsamples=n_samples_set,
                            random_state=1)

    init_sus_samples = mc.samples
    from UQpy.run_model.RunModel import RunModel
    from UQpy.run_model.model_execution.PythonModel import PythonModel

    model = PythonModel(model_script=model, model_object_name=Example)
    RunModelObject_SuS = RunModel(model=model)

    sampling = Stretch(pdf_target=dist_nominal.pdf,
                       dimension=2,
                       n_chains=1000,
                       random_state=0)

    SuS_object = SubsetSimulation(sampling=sampling,
                                  runmodel_object=RunModelObject_SuS,
                                  conditional_probability=p_cond,
                                  nsamples_per_subset=n_samples_set,
                                  samples_init=init_sus_samples)

    print(SuS_object.failure_probability)
    assert SuS_object.failure_probability == 3.1200000000000006e-05
Example #7
def test_form_example():
    path = os.path.abspath(os.path.dirname(__file__))
    os.chdir(path)
    model = PythonModel(model_script='pfn3.py',
                        model_object_name='example1',
                        delete_files=True)
    RunModelObject = RunModel(model=model)
    dist1 = Normal(loc=200., scale=20.)
    dist2 = Normal(loc=150, scale=10.)
    Q = FORM(distributions=[dist1, dist2],
             runmodel_object=RunModelObject,
             tol1=1e-5,
             tol2=1e-5)
    Q.run()

    # print results
    np.allclose(Q.DesignPoint_U, np.array([-2., 1.]))
    np.allclose(Q.DesignPoint_X, np.array([160., 160.]))
    assert Q.beta[0] == 2.236067977499917
    assert Q.failure_probability[0] == 0.012673659338729965
    np.allclose(Q.dg_u_record, np.array([0., 0.]))
Example #8
fig, ax = plt.subplots()
CS = ax.contour(X, Y, Z, 15)
plt.plot(m, k_hi, 'k')
plt.plot(m, k_lo, 'k')
# plt.fill_between(m,k_lo,k_hi)
plt.xlim([3.5, 4.5])
plt.ylim([130, 150])
plt.xlabel(r'Mass ($m$)')
plt.ylabel(r'Stiffness ($k$)')
plt.grid(True)
plt.tight_layout()
plt.show()

m = PythonModel(model_script='local_Resonance_pfn.py',
                model_object_name="RunPythonModel")
model = RunModel(model=m)

# %% md
#
# Monte Carlo Simulation

# %%

x_mcs = MonteCarloSampling(distributions=[d1, d2])
x_mcs.run(nsamples=1000000)

model.run(samples=x_mcs.samples)

A = np.asarray(model.qoi_list) < 0
pf = np.shape(np.asarray(
    model.qoi_list)[np.asarray(model.qoi_list) < 0])[0] / 1000000
#%%

Example #9
fig = voronoi_plot_2d(x.strata_object.voronoi)
plt.title('Stratified Samples (U(0,1)) - Voronoi Stratification')
plt.plot(x.samples[:, 0], x.samples[:, 1], 'dm')
plt.ylim(0, 1)
plt.xlim(0, 1)
plt.show()

#%% md
#
# :class:`.RunModel` class is used to define an object to evaluate the model at sample points.

#%%

rmodel = RunModel(model_script='local_python_model_function.py')

#%% md
#
# This figure shows the actual function defined in the Python model script.

#%%

rmodel1 = RunModel(model_script='local_python_model_function.py')
rmodel1.run(samples=x.samples)

num = 50
x1 = np.linspace(0, 1, num)
x2 = np.linspace(0, 1, num)
x1v, x2v = np.meshgrid(x1, x2)
y_act = np.zeros([num, num])
Example #10
x = TrueStratifiedSampling(distributions=marginals,
                           strata_object=strata,
                           nsamples_per_stratum=1,
                           random_state=1)

# %% md
#
# RunModel is used to evaluate function values at sample points. The model is defined as a function in the Python file
# 'local_python_model_function.py'.

# %%

model = PythonModel(model_script='local_python_model_function.py',
                    model_object_name="y_func")
rmodel = RunModel(model=model)

rmodel.run(samples=x.samples)

# %% md
#
# Using the UQpy GaussianProcessRegression class to generate a surrogate for the generated data. In this illustration, a
# constant regression model and a Matern (nu=0.5) kernel are used.

# %%

regression_model = ConstantRegression()
kernel = Matern(nu=0.5)

from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer
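
# The listing is truncated at this point. Below is a sketch of how the surrogate might be
# assembled and fit, following the pattern of Example #24; the import path, optimizer bounds
# and initial hyperparameters are illustrative assumptions, not the original values.
from UQpy.surrogates import GaussianProcessRegression  # import path assumed

optimizer = MinimizeOptimizer(method='L-BFGS-B',
                              bounds=[[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**2]])
K = GaussianProcessRegression(regression_model=regression_model,
                              kernel=kernel,
                              optimizer=optimizer,
                              optimizations_number=20,
                              hyperparameters=[1, 1, 0.1],
                              random_state=2)
K.fit(samples=x.samples, values=rmodel.qoi_list)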
Example #11
# :math:`g(U) = -\frac{1}{\sqrt{d}}\sum_{i=1}^{d} u_i + \beta`
#
# The probability of failure in this case is :math:`P(F) \approx 10^{-3}` for :math:`\beta = 3.0902`
#
# Initially we have to import the necessary modules.

#%%

import shutil

from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Normal
from UQpy.reliability import FORM

dist1 = Normal(loc=0., scale=1.)
dist2 = Normal(loc=0., scale=1.)

model = PythonModel(model_script='pfn.py', model_object_name="example2")
RunModelObject2 = RunModel(model=model)

Z = FORM(distributions=[dist1, dist2], runmodel_object=RunModelObject2)
Z.run()

# print results
print('Design point in standard normal space: %s' % Z.DesignPoint_U)
print('Design point in original space: %s' % Z.DesignPoint_X)
print('Hasofer-Lind reliability index: %s' % Z.beta)
print('FORM probability of failure: %s' % Z.failure_probability)
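
# For orientation, a minimal sketch (assumption) of the limit-state function that 'pfn.py' /
# 'example2' is expected to implement for d = 2 and beta = 3.0902, as described at the top of
# this example; the script actually shipped with the example may differ in detail.
import numpy as np

def example2_sketch(samples=None):
    d, beta = 2, 3.0902
    # g(U) = -(1/sqrt(d)) * sum_i u_i + beta, evaluated for each sample row
    return [-np.sum(s) / np.sqrt(d) + beta for s in samples]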

Example #12
# %% md
#
# 1.1 Pass samples as ndarray, Python class called, serial execution
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This example uses the following files:
# - model_script = python_model.py

# %%

if pick_model in {'scalar', 'all'}:
    # Call to RunModel - Here we run the model while instantiating the RunModel object.
    t = time.time()
    m11 = RunModel(ntasks=1,
                   model_script='python_model.py',
                   model_object_name='SumRVs',
                   model_dir='Python_Runs',
                   verbose=True)
    m11.run(samples=x_mcs.samples, )
    t_ser_python = time.time() - t
    print("\nTime for serial execution:")
    print(t_ser_python)
    print()
    print("The values returned from the Matlab simulation:")
    print(m11.qoi_list)

# %% md
#
# 1.2 Pass samples as list, Python function called, parallel execution
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This example uses the following files:
Example #13
# Import this newly defined Rosenbrock distribution into the Distributions module
from UQpy.distributions import Normal
from UQpy.reliability import SubsetSimulation
from UQpy.run_model.RunModel import RunModel
from UQpy.sampling import ModifiedMetropolisHastings, Stretch
# First import the file that contains the newly defined Rosenbrock distribution
from local_Rosenbrock import Rosenbrock

# %% md
#
# :class:`.ModifiedMetropolisHastings` Initial Samples

# %%

m = PythonModel(model_script='local_Rosenbrock_pfn.py', model_object_name="RunPythonModel")
model = RunModel(model=m)
dist = Rosenbrock(p=100.)
dist_prop1 = Normal(loc=0, scale=1)
dist_prop2 = Normal(loc=0, scale=10)

x = stats.norm.rvs(loc=0, scale=1, size=(100, 2), random_state=83276)

mcmc_init1 = ModifiedMetropolisHastings(dimension=2, log_pdf_target=dist.log_pdf, seed=x.tolist(),
                                        burn_length=1000, proposal=[dist_prop1, dist_prop2],
                                        random_state=8765)
mcmc_init1.run(10000)

sampling = Stretch(log_pdf_target=dist.log_pdf, dimension=2, n_chains=1000, random_state=38546)
x_ss_MMH = SubsetSimulation(sampling=sampling, runmodel_object=model, conditional_probability=0.1,
                            nsamples_per_subset=10000, samples_init=mcmc_init1.samples)
Example #14
def setup():
    model = PythonModel(model_script='pfn1.py',
                        model_object_name='model_i',
                        delete_files=True)
    h_func = RunModel(model=model)
    yield h_func
Example #15
def setup():
    model = PythonModel(model_script='pfn.py', model_object_name='gfun_sensitivity', delete_files=True,
                        a_values=[0.001, 99.], var_names=['X{}'.format(i) for i in range(2)])
    runmodel_object = RunModel(model=model)
    dist_object = [Uniform(), Uniform()]
    yield runmodel_object, dist_object
Example #16
plt.legend(['SROM Approximation', 'Gamma CDF'],
           loc=5,
           prop={'size': 12},
           bbox_to_anchor=(1, 0.75))
plt.show()

# %% md
#
# Run the model 'local_eigenvalue_model.py' for each sample generated through the :class:`.TrueStratifiedSampling` class.
# This model defines the stiffness matrix corresponding to each sample and estimates the eigenvalues of the matrix.

# %%

m = PythonModel(model_script='local_eigenvalue_model.py',
                model_object_name="RunPythonModel")
model = RunModel(model=m)
# model = RunModel(model_script='local_eigenvalue_model.py')
model.run(samples=y.samples)
r_srom = model.qoi_list
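
# For reference, a rough sketch (assumption) of the kind of model that 'local_eigenvalue_model.py'
# implements: assemble a stiffness matrix from each sample of spring stiffnesses and return its
# eigenvalues. The 2-DOF layout and the name 'eigenvalue_sketch' are illustrative, not the actual
# script contents.
import numpy as np

def eigenvalue_sketch(samples=None):
    eigenvalues = []
    for k1, k2 in samples:
        K_matrix = np.array([[k1 + k2, -k2],
                             [-k2, k2]])
        eigenvalues.append(np.linalg.eigvals(K_matrix))
    return eigenvalues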

# %% md
#
# :class:`.MonteCarloSampling` class is used to generate 1000 samples.

# %%

x_mcs = MonteCarloSampling(distributions=marginals, nsamples=1000)

# %% md
#
# Run the model 'local_eigenvalue_model.py' for each sample generated through the :class:`.MonteCarloSampling` class.
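
# The listing stops here; by analogy with the SROM run above, a sketch of the follow-up step
# (the names model_mcs and r_mcs are assumptions):
model_mcs = RunModel(model=PythonModel(model_script='local_eigenvalue_model.py',
                                       model_object_name="RunPythonModel"))
model_mcs.run(samples=x_mcs.samples)
r_mcs = model_mcs.qoi_list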
Example #17
from scipy.stats import multivariate_normal, norm

#%% md
#
# Generate data from a quadratic function

#%%

param_true = np.array([1.0, 2.0]).reshape(1, -1)
var_n = 1
error_covariance = var_n * np.eye(50)
print(param_true.shape)

z = RunModel(samples=param_true,
             model_script='local_pfn_models.py',
             model_object_name='model_quadratic',
             vec=False,
             var_names=['theta_1', 'theta_2'])
data_clean = z.qoi_list[0].reshape((-1, ))
data = data_clean + Normal(scale=np.sqrt(var_n)).rvs(nsamples=data_clean.size,
                                                     random_state=456).reshape(
                                                         (-1, ))

#%% md
#
# Define the models, compute the true values of the evidence.
#
# For all three models, a Gaussian prior is chosen for the parameters, with mean and covariance matrix of the
# appropriate dimensions. Each model is given prior probability :math:`P(m_{j}) = 1/3`.

#%%
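
# The listing is truncated here. A sketch of how one of the three candidate models could be set
# up, following the ComputationalModel pattern of Example #1 and attaching the Gaussian prior
# described above; the prior mean/covariance, the import paths and the model name are
# assumptions, not the original values.
from UQpy.inference import ComputationalModel
from UQpy.distributions import MultivariateNormal

prior_quadratic = MultivariateNormal(mean=[0., 0.], cov=10. * np.eye(2))
model_quadratic = ComputationalModel(n_parameters=2,
                                     runmodel_object=z,
                                     error_covariance=error_covariance,
                                     prior=prior_quadratic,
                                     name='model_quadratic')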
Example #18
def setup():
    path = os.path.abspath(os.path.dirname(__file__))
    os.chdir(path)
    model = PythonModel(model_script='pfn4.py', model_object_name='model_k', delete_files=True)
    h_func = RunModel(model=model)
    yield h_func
Example #19
# Using the UQpy :class:`.MonteCarloSampling` class to generate samples for two random variables, which are normally
# distributed with mean :math:`0` and standard deviation :math:`4`.

# %%

marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1)

# %% md
#
# RunModel class is used to define an object to evaluate the model at sample points.

# %%

model = PythonModel(model_script='local_series.py', model_object_name='series')
rmodel = RunModel(model=model)

# %% md
#
# :class:`.GaussianProcessRegression` class defines an object to generate a surrogate model for a given set of data.

# %%

from UQpy.surrogates.gaussian_process.regression_models import LinearRegression
from UQpy.surrogates.gaussian_process.kernels import RBF

bounds = [[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**2]]
optimizer = MinimizeOptimizer(method="L-BFGS-B", bounds=bounds)
K = GaussianProcessRegression(regression_model=LinearRegression(),
                              kernel=RBF(),
                              optimizer=optimizer,
Example #20
# %%

x = MonteCarloSampling(distributions=[dist1, dist2, dist3] * 6,
                       nsamples=5,
                       random_state=938475)
samples = np.array(x.samples).round(2)

# %% md
#
# Run the model.

# %%

names_ = [
    'fc1', 'fy1', 'Es1', 'fc2', 'fy2', 'Es2', 'fc3', 'fy3', 'Es3', 'fc4',
    'fy4', 'Es4', 'fc5', 'fy5', 'Es5', 'fc6', 'fy6', 'Es6'
]

opensees_rc6_model = RunModel(samples=samples,
                              ntasks=5,
                              model_script='opensees_model.py',
                              input_template='import_variables.tcl',
                              var_names=names_,
                              model_object_name="opensees_run",
                              output_script='process_opensees_output.py',
                              output_object_name='read_output')

outputs = opensees_rc6_model.qoi_list
print(outputs)
Example #21
import shutil

import numpy as np
from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Normal
from UQpy.reliability import FORM
from UQpy.reliability import SORM
from UQpy.distributions import Lognormal

m0 = 7      # target mean of the lognormal variable
v0 = 1.4    # target standard deviation of the lognormal variable
# Convert (mean, std) to the scipy/UQpy lognormal parameters:
# sigma_ln^2 = ln(1 + (v0/m0)^2),  mu_ln = ln(m0) - sigma_ln^2 / 2,  scale = exp(mu_ln)
mu = np.log(m0) - np.log(np.sqrt(1 + (v0 / m0) ** 2))
scale = np.exp(mu)
s = np.sqrt(np.log(1 + (v0 / m0) ** 2))
loc_ = 0.0

dist1 = Normal(loc=20., scale=2)
dist2 = Lognormal(s=s, loc=0.0, scale=scale)
model = PythonModel(model_script='pfn.py', model_object_name="example4",)
RunModelObject4 = RunModel(model=model)
form = FORM(distributions=[dist1, dist2], runmodel_object=RunModelObject4)
form.run()
Q0 = SORM(form_object=form)


# print results
print('SORM probability of failure: %s' % Q0.failure_probability)

Example #22
#
# 1.1 Pass samples as ndarray, specify format in generated input file, serial execution
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This example uses the following files:
#
# - model_script = matlab_model_sum_scalar.py
# - input_template = sum_scalar.m
# - output_script = process_matlab_output.py

# %%

if pick_model == 'scalar' or pick_model == 'all':
    # Call to RunModel - Here we run the model while instantiating the RunModel object.
    t = time.time()
    m = RunModel(ntasks=1, model_script='matlab_model_sum_scalar.py',
                 input_template='sum_scalar.m', var_names=names, model_object_name="matlab",
                 output_script='process_matlab_output.py', output_object_name='read_output',
                 resume=False, model_dir='Matlab_Model', fmt="{:>10.4f}", verbose=True)
    m.run(x_mcs.samples)
    t_ser_matlab = time.time() - t
    print("\nTime for serial execution:")
    print(t_ser_matlab)
    print()
    print("The values returned from the Matlab simulation:")
    print(m.qoi_list)


# %% md
#
# 1.2 Samples passed as list, no format specification, parallel execution
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This example uses the following files:
Example #23
from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Uniform
from UQpy.sensitivity import MorrisSensitivity
import matplotlib.pyplot as plt

#%% md
#
# Set-up problem with g-function.

#%%

model = PythonModel(model_script='local_pfn.py',
                    model_object_name='fun2_sensitivity',
                    delete_files=True,
                    var_names=['X{}'.format(i) for i in range(5)])
runmodel_object = RunModel(model=model)

dist_object = [
    Uniform(),
] * 5

sens = MorrisSensitivity(runmodel_object=runmodel_object,
                         distributions=dist_object,
                         n_levels=20,
                         maximize_dispersion=True)
sens.run(n_trajectories=10)

fig, ax = plt.subplots(figsize=(5, 3.5))
ax.scatter(sens.mustar_indices, sens.sigma_indices, s=60)
for i, (mu, sig) in enumerate(zip(sens.mustar_indices, sens.sigma_indices)):
    ax.text(x=mu + 0.01, y=sig + 0.01, s='X{}'.format(i + 1), fontsize=14)
Example #24
x = TrueStratifiedSampling(distributions=marginals,
                           strata_object=strata,
                           nsamples_per_stratum=1,
                           random_state=2)

# %% md
#
# RunModel is used to evaluate function values at sample points. The model is defined as a function in the Python file
# 'local_python_model_1Dfunction.py'.

# %%

model = PythonModel(model_script='local_python_model_1Dfunction.py',
                    model_object_name='y_func',
                    delete_files=True)
rmodel = RunModel(model=model)
rmodel.run(samples=x.samples)

from UQpy.surrogates.gaussian_process.regression_models import LinearRegression
from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer

bounds = [[10**(-3), 10**3], [10**(-3), 10**2]]
optimizer = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds)

K = GaussianProcessRegression(regression_model=LinearRegression(),
                              kernel=RBF(),
                              optimizer=optimizer,
                              optimizations_number=20,
                              hyperparameters=[1, 0.1],
                              random_state=2)
K.fit(samples=x.samples, values=rmodel.qoi_list)
Example #25
# distributed

# %%

marginals = [Uniform(loc=-5, scale=15), Uniform(loc=0, scale=15)]
x = MonteCarloSampling(distributions=marginals, nsamples=20)

# %% md
#
# :class:`.RunModel` class is used to define an object to evaluate the model at sample points.

# %%

model = PythonModel(model_script='local_BraninHoo.py',
                    model_object_name='function')
rmodel = RunModel(model=model)

# %% md
#
# :class:`.GaussianProcessRegression` class defines an object to generate a surrogate model for a given set of data.

# %%

from UQpy.surrogates.gaussian_process.regression_models import LinearRegression
from UQpy.surrogates.gaussian_process.kernels import RBF

bounds = [[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**2]]
optimizer = MinimizeOptimizer(method="L-BFGS-B", bounds=bounds)
K = GaussianProcessRegression(regression_model=LinearRegression(),
                              kernel=RBF(),
                              optimizer=optimizer,
Example #26
fig1 = x.strata_object.plot_2d()
plt.title("STS samples U(0,1) and space stratification")
plt.plot(x.samples[:16, 0], x.samples[:16, 1], 'ro')
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.show()

#%% md
#
# RunModel class is used to estimate the function value at sample points generated using
# :class:`.TrueStratifiedSampling` class.

#%%

rmodel = RunModel(model_script='local_python_model_function.py', vec=False)

#%% md
#
# This figure shows the actual function defined in the Python model script.

#%%

rmodel1 = RunModel(model_script='local_python_model_function.py', vec=False)
rmodel1.run(samples=x.samples)
num = 100
x1 = np.linspace(0, 1, num)
x2 = np.linspace(0, 1, num)
x1v, x2v = np.meshgrid(x1, x2)
y_act = np.zeros([num, num])
r1model = RunModel(model_script='local_python_model_function.py')
Example #27
from UQpy.run_model.RunModel import RunModel

#%% md
#
# First we generate synthetic data, and add some noise to it.

#%%

# Generate data

param_true = np.array([1.0, 2.0]).reshape((1, -1))
print('Shape of true parameter vector: {}'.format(param_true.shape))

model = PythonModel(model_script='local_pfn_models.py', model_object_name='model_quadratic', delete_files=True,
                    var_names=['theta_0', 'theta_1'])
h_func = RunModel(model=model)
h_func.run(samples=param_true)

# Add noise
error_covariance = 1.
data_clean = np.array(h_func.qoi_list[0])
noise = Normal(loc=0., scale=np.sqrt(error_covariance)).rvs(nsamples=50).reshape((50,))
data_3 = data_clean + noise
print('Shape of data: {}'.format(data_3.shape))


#%% md
#
# Then we create an instance of the :class:`.ComputationalModel` class and perform maximum likelihood estimation
# of the two parameters.
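
#%%

# A sketch of that step, mirroring Example #1 (the actual continuation of this example is not
# shown in the listing; the import paths are assumed):
from UQpy.inference import ComputationalModel, MLE
from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer

candidate_model = ComputationalModel(n_parameters=2,
                                     runmodel_object=h_func,
                                     error_covariance=error_covariance)
ml_estimator = MLE(inference_model=candidate_model,
                   data=data_3,
                   n_optimizations=1,
                   optimizer=MinimizeOptimizer(method='nelder-mead'))
print('ML estimates: {}'.format(ml_estimator.mle))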
Example #28
d6 = Uniform(loc=0.02, scale=0.01)
d7 = Uniform(loc=0.02, scale=0.01)
d8 = Uniform(loc=0.0025, scale=0.0075)

# %% md
#
# Draw the samples using MCS.

# %%

x = MonteCarloSampling(distributions=[d1, d2, d3, d4, d5, d6, d7, d8],
                       nsamples=12,
                       random_state=349875)

# %% md
#
# Run the model.

# %%

run_ = RunModel(samples=x.samples,
                ntasks=6,
                model_script='dyna_script.py',
                input_template='dyna_input.k',
                var_names=['x0', 'y0', 'z0', 'R0', 'x1', 'y1', 'z1', 'R1'],
                model_dir='dyna_test',
                cluster=True,
                verbose=False,
                fmt='{:>10.4f}',
                cores_per_task=12)
Example #29
# %%

# %% md
#
# Initially we have to import the necessary modules.

# %%

from UQpy.run_model.RunModel import RunModel
from UQpy.run_model.model_execution.PythonModel import PythonModel
from UQpy.distributions import Normal
from UQpy.reliability import FORM

dist1 = Normal(loc=20., scale=3.5)
dist2 = Normal(loc=5., scale=0.8)
dist3 = Normal(loc=4., scale=0.4)

model = PythonModel(
    model_script='pfn.py',
    model_object_name="example3",
)
RunModelObject3 = RunModel(model=model)

Z0 = FORM(distributions=[dist1, dist2, dist3], runmodel_object=RunModelObject3)
Z0.run()

print('Design point in standard normal space: %s' % Z0.DesignPoint_U)
print('Design point in original space: %s' % Z0.DesignPoint_X)
print('Hasofer-Lind reliability index: %s' % Z0.beta)
print('FORM probability of failure: %s' % Z0.failure_probability)
Example #30
# %% md
#
# Set-up problem with g-function.

# %%

a_values = [0.001, 99.]
na = len(a_values)

model = PythonModel(model_script='local_pfn.py',
                    model_object_name='gfun_sensitivity',
                    delete_files=True,
                    a_values=a_values,
                    var_names=['X{}'.format(i) for i in range(na)])
runmodel_object = RunModel(model=model)

dist_object = [
    Uniform(),
] * na
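
# For reference, a sketch (assumption) of the Sobol' g-function that 'gfun_sensitivity'
# presumably implements; the shipped 'local_pfn.py' may differ:
#   g(x) = prod_i (|4 x_i - 2| + a_i) / (1 + a_i)
def gfun_sketch(samples, a_values):
    g = np.ones(samples.shape[0])
    for i, a in enumerate(a_values):
        g *= (np.abs(4 * samples[:, i] - 2) + a) / (1 + a)
    return g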

# %% md
#
# First, plot the contour of the function; clearly X2 has little influence on the function compared to X1.

# %%

x = np.arange(0, 1, 0.02)
y = np.arange(0, 1, 0.02)
xx, yy = np.meshgrid(x, y, sparse=False)
runmodel_object.run(samples=np.vstack([xx.reshape((-1, )),