# # Using UQpy :class:`MonteCarloSampling` class to generate samples for two random variables, which are uniformly # distributed # %% marginals = [Uniform(loc=-5, scale=15), Uniform(loc=0, scale=15)] x = MonteCarloSampling(distributions=marginals, nsamples=20) # %% md # # :class:`.RunModel` class is used to define an object to evaluate the model at sample points. # %% model = PythonModel(model_script='local_BraninHoo.py', model_object_name='function') rmodel = RunModel(model=model) # %% md # # :class:`.Kriging` class defines an object to generate a surrogate model for a given set of data. # %% from UQpy.surrogates.gaussian_process.regression_models import LinearRegression from UQpy.surrogates.gaussian_process.kernels import RBF bounds = [[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**2]] optimizer = MinimizeOptimizer(method="L-BFGS-B", bounds=bounds) K = GaussianProcessRegression(regression_model=LinearRegression(), kernel=RBF(),
# NOTE(review): this fragment was collapsed onto one physical line; code tokens
# below are unchanged from the original, only re-wrapped and re-commented.
from UQpy.distributions import Normal
# NOTE(review): sibling examples import MinimizeOptimizer from
# UQpy.utilities.MinimizeOptimizer — confirm this inference-module path is intended.
from UQpy.inference import MinimizeOptimizer
from UQpy.run_model.RunModel import RunModel

#%% md
#
# First we generate synthetic data, and add some noise to it.

#%%

# Generate data: true parameter vector with shape (1, 2) so RunModel treats it
# as a single sample of two inputs.
param_true = np.array([1.0, 2.0]).reshape((1, -1))
print('Shape of true parameter vector: {}'.format(param_true.shape))

model = PythonModel(model_script='local_pfn_models.py', model_object_name='model_quadratic',
                    delete_files=True, var_names=['theta_0', 'theta_1'])
h_func = RunModel(model=model)
h_func.run(samples=param_true)

# Add i.i.d. Gaussian noise with variance `error_covariance` to the clean model output.
error_covariance = 1.
data_clean = np.array(h_func.qoi_list[0])
noise = Normal(loc=0., scale=np.sqrt(error_covariance)).rvs(nsamples=50).reshape((50,))
data_3 = data_clean + noise
print('Shape of data: {}'.format(data_3.shape))

#%% md
#
# Then we create an instance of the Model class, using model_type='python', and we perform
# maximum likelihood estimation of the two parameters.
# # Using UQpy :class:`.MonteCarloSampling` class to generate samples for two random variables, which are normally # distributed with mean :math:`0` and variance :math:`1`. # %% marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)] x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1) # %% md # # RunModel class is used to define an object to evaluate the model at sample points. # %% model = PythonModel(model_script='local_series.py', model_object_name='series') rmodel = RunModel(model=model) # %% md # # :class:`.Kriging` class defines an object to generate a surrogate model for a given set of data. # %% from UQpy.surrogates.gaussian_process.regression_models import LinearRegression from UQpy.surrogates.gaussian_process.kernels import RBF bounds = [[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**2]] optimizer = MinimizeOptimizer(method="L-BFGS-B", bounds=bounds) K = GaussianProcessRegression(regression_model=LinearRegression(), kernel=RBF(),
# %% x = TrueStratifiedSampling(distributions=marginals, strata_object=strata, nsamples_per_stratum=1, random_state=1) # %% md # # RunModel is used to evaluate function values at sample points. Model is defined as a function in python file # 'python_model_function.py'. # %% model = PythonModel(model_script='local_python_model_function.py', model_object_name="y_func") rmodel = RunModel(model=model) rmodel.run(samples=x.samples) # %% md # # Using UQpy GaussianProcessRegression class to generate a surrogate for generated data. In this illustration, Quadratic regression model and # Exponential correlation model are used. # %% regression_model = ConstantRegression() kernel = Matern(nu=0.5) from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer
# NOTE(review): this fragment was collapsed onto one physical line; code tokens
# below are unchanged from the original, only re-wrapped and re-commented.
from UQpy.inference import BIC
import matplotlib.pyplot as plt
from UQpy.distributions import Normal
from UQpy.inference import ComputationalModel

#%% md
#
# First we generate synthetic data using the quadratic model, and add some noise to it.

#%%

# True parameter vector with shape (1, 2): one sample of two parameters.
param_true = np.array([1.0, 2.0]).reshape((1, -1))
print('Shape of true parameter vector: {}'.format(param_true.shape))

model = PythonModel(model_script='pfn_models.py', model_object_name='model_quadratic',
                    var_names=['theta_0', 'theta_1'])
h_func = RunModel(model=model)
h_func.run(samples=param_true)

# Add i.i.d. Gaussian noise with variance `error_covariance` to the clean model output.
error_covariance = 1.
data_clean = np.array(h_func.qoi_list[0])
noise = Normal(loc=0., scale=np.sqrt(error_covariance)).rvs(nsamples=50).reshape( (50, ))
data_1 = data_clean + noise
print('Shape of data: {}'.format(data_1.shape))

#%% md
#
# %% x = TrueStratifiedSampling(distributions=marginals, strata_object=strata, nsamples_per_stratum=1, random_state=2) # %% md # # RunModel is used to evaluate function values at sample points. Model is defined as a function in python file # 'python_model_function.py'. # %% model = PythonModel(model_script='local_python_model_1Dfunction.py', model_object_name='y_func', delete_files=True) rmodel = RunModel(model=model) rmodel.run(samples=x.samples) from UQpy.surrogates.gaussian_process.regression_models import LinearRegression from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer bounds = [[10**(-3), 10**3], [10**(-3), 10**2]] optimizer = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds) K = GaussianProcessRegression(regression_model=LinearRegression(), kernel=RBF(), optimizer=optimizer, optimizations_number=20, hyperparameters=[1, 0.1],
# NOTE(review): this fragment was collapsed onto one physical line and begins
# mid-statement — the opening of the first plotting call lies before this excerpt.
# Code tokens below are unchanged from the original, only re-wrapped and re-commented.
         gamma.cdf(np.arange(3, 12, 0.05), a=2, loc=3, scale=1), linewidth=l)
plt.legend(['SROM Approximation', 'Gamma CDF'], loc=5, prop={'size': 12},
           bbox_to_anchor=(1, 0.75))
plt.show()

# %% md
#
# Run the model 'eigenvalue_model.py' for each sample generated through
# :class:`.TrueStratifiedSampling` class. This model defines the stiffness matrix
# corresponding to each sample and estimates the eigenvalues of the matrix.

# %%

m = PythonModel(model_script='local_eigenvalue_model.py', model_object_name="RunPythonModel")
model = RunModel(model=m)
# `y` is the stratified-sampling object created earlier in the original example,
# outside this excerpt.
model.run(samples=y.samples)
r_srom = model.qoi_list

# %% md
#
# :class:`MonteCarloSampling` class is used to generate 1000 samples.

# %%

x_mcs = MonteCarloSampling(distributions=marginals, nsamples=1000)

# %% md
#
from UQpy import PythonModel # Import this newly defined Rosenbrock distribution into the Distributions module from UQpy.distributions import Normal from UQpy.reliability import SubsetSimulation from UQpy.run_model.RunModel import RunModel from UQpy.sampling import ModifiedMetropolisHastings, Stretch # First import the file that contains the newly defined Rosenbrock distribution from local_Rosenbrock import Rosenbrock # %% md # # :class:`.ModifiedMetropolisHastings` Initial Samples # %% m = PythonModel(model_script='local_Rosenbrock_pfn.py', model_object_name="RunPythonModel") model = RunModel(model=m) dist = Rosenbrock(p=100.) dist_prop1 = Normal(loc=0, scale=1) dist_prop2 = Normal(loc=0, scale=10) x = stats.norm.rvs(loc=0, scale=1, size=(100, 2), random_state=83276) mcmc_init1 = ModifiedMetropolisHastings(dimension=2, log_pdf_target=dist.log_pdf, seed=x.tolist(), burn_length=1000, proposal=[dist_prop1, dist_prop2], random_state=8765) mcmc_init1.run(10000) sampling=Stretch(log_pdf_target=dist.log_pdf, dimension=2, n_chains=1000, random_state=38546) x_ss_MMH = SubsetSimulation(sampling=sampling, runmodel_object=model, conditional_probability=0.1, nsamples_per_subset=10000, samples_init=mcmc_init1.samples)
# NOTE(review): this fragment was collapsed onto one physical line; code tokens
# below are unchanged from the original, only re-wrapped and re-commented.
plt.show()

# Contour of the limit-state surface with the mass/stiffness band overlaid.
# `X`, `Y`, `Z`, `m`, `k_hi`, `k_lo` are defined earlier in the original example,
# outside this excerpt.
fig, ax = plt.subplots()
CS = ax.contour(X, Y, Z, 15)
plt.plot(m, k_hi, 'k')
plt.plot(m, k_lo, 'k')
plt.xlim([3.5, 4.5])
plt.ylim([130, 150])
plt.xlabel(r'Mass ($m$)')
plt.ylabel(r'Stiffness ($k$)')
plt.grid(True)
plt.tight_layout()
plt.show()

# NOTE(review): `m` was just used as plot data above and is rebound to a model
# object here — consider renaming one of the two to avoid shadowing.
m = PythonModel(model_script='local_Resonance_pfn.py', model_object_name="RunPythonModel")
model = RunModel(model=m)

# %% md
#
# Monte Carlo Simulation

# %%

x_mcs = MonteCarloSampling(distributions=[d1, d2])
x_mcs.run(nsamples=1000000)
model.run(samples=x_mcs.samples)

# Failure indicator per sample: model output below zero counts as failure.
A = np.asarray(model.qoi_list) < 0
# NOTE(review): this statement is truncated here — the probability-of-failure
# computation continues beyond this excerpt.
pf = np.shape(np.asarray(