# %% md
#
# Using UQpy GaussianProcessRegression class to generate a surrogate for generated data. In this illustration, Constant
# regression model and Exponential (Matern, nu=0.5) correlation model are used.

# %%

# Constant trend + Matern kernel with nu=0.5 (equivalent to the exponential kernel).
regression_model = ConstantRegression()
kernel = Matern(nu=0.5)

from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer

# L-BFGS-B supports the box bounds used for hyperparameter MLE.
optimizer = MinimizeOptimizer(method="L-BFGS-B")

# 20 random restarts of the MLE optimization; initial hyperparameter guess [1, 1, 0.1].
K = GaussianProcessRegression(regression_model=regression_model, optimizer=optimizer, kernel=kernel,
                              optimizations_number=20, hyperparameters=[1, 1, 0.1])
# Train the surrogate on the sample points and the model evaluations at those points.
K.fit(samples=x.samples, values=rmodel.qoi_list)
print(K.hyperparameters)

# %% md
#
# This plot shows the actual model which is used to evaluate the samples to identify the function values.

# %%

# Build a 25x25 evaluation grid over the unit square.
num = 25
x1 = np.linspace(0, 1, num)
x2 = np.linspace(0, 1, num)
# Wrap the computational model so samples can be evaluated through RunModel.
rmodel = RunModel(model=model)

# %% md
#
# :class:`.Kriging` class defines an object to generate a surrogate model for a given set of data.

# %%

from UQpy.surrogates.gaussian_process.regression_models import LinearRegression
from UQpy.surrogates.gaussian_process.kernels import RBF

# Box bounds for the three hyperparameters during MLE.
bounds = [[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**2]]
optimizer = MinimizeOptimizer(method="L-BFGS-B", bounds=bounds)
# Linear trend + RBF kernel; 10 random restarts of the MLE optimization.
K = GaussianProcessRegression(regression_model=LinearRegression(), kernel=RBF(), optimizer=optimizer,
                              hyperparameters=[1, 1, 0.1], optimizations_number=10)

# %% md
#
# Choose an appropriate learning function.

# %%

from UQpy.sampling.adaptive_kriging_functions.ExpectedImprovement import ExpectedImprovement

# %% md
#
# :class:`AdaptiveKriging` class is used to generate new sample using :class:`UFunction` as active learning function.
# Finish the surface plot of the true model.
fig1.colorbar(surf, shrink=0.5, aspect=5)
plt.show()

#%% md
#
# :class:`.Kriging` class generated a surrogate model using :class:`.TrueStratifiedSampling` samples and function value
# at those points.

#%%

from UQpy.surrogates.gaussian_process.regression_models import LinearRegression
from UQpy.surrogates.gaussian_process.kernels import RBF

# Box bounds for the three hyperparameters during MLE.
bounds = [[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**2]]
# Linear trend + RBF kernel; 20 random restarts of the MLE optimization.
K = GaussianProcessRegression(regression_model=LinearRegression(), kernel=RBF(),
                              optimizer=MinimizeOptimizer(method="L-BFGS-B", bounds=bounds),
                              hyperparameters=[1, 1, 0.1], optimizations_number=20)
# Train the surrogate on the stratified samples and their model evaluations.
K.fit(samples=x.samples, values=rmodel1.qoi_list)
print(K.hyperparameters)

#%% md
#
# This figure shows the surrogate model generated using :class:`.Kriging` class from initial samples.

#%%

# Build a 25x25 evaluation grid over the unit square for plotting the surrogate.
num = 25
x1 = np.linspace(0, 1, num)
x2 = np.linspace(0, 1, num)
x1v, x2v = np.meshgrid(x1, x2)
y = np.zeros([num, num])
# Define the 1-D test function as a Python model and evaluate it at the sample points.
model = PythonModel(model_script='local_python_model_1Dfunction.py', model_object_name='y_func', delete_files=True)
rmodel = RunModel(model=model)
rmodel.run(samples=x.samples)

from UQpy.surrogates.gaussian_process.regression_models import LinearRegression
from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer

# Box bounds for the two hyperparameters during MLE.
bounds = [[10**(-3), 10**3], [10**(-3), 10**2]]
optimizer = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds)
# Linear trend + RBF kernel; fixed random_state=2 makes the restarts reproducible.
K = GaussianProcessRegression(regression_model=LinearRegression(), kernel=RBF(), optimizer=optimizer,
                              optimizations_number=20, hyperparameters=[1, 0.1], random_state=2)
# Train the surrogate on the sample points and the model evaluations at those points.
K.fit(samples=x.samples, values=rmodel.qoi_list)
print(K.hyperparameters)

# %% md
#
# RunModel is used to evaluate function values at sample points. Model is defined as a function in python file
# 'python_model_function.py'.

# %%

# Dense 1-D grid spanning the sampled range, used to plot the surrogate prediction.
num = 1000
x1 = np.linspace(min(x.samples), max(x.samples), num)
# %%

# Box bounds for the two hyperparameters during MLE.
bounds_1 = [[10**(-4), 10**3], [10**(-3), 10**2]]
optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)

# %% md
#
# Define the 'GaussianProcessRegressor' class object, the input attributes defined here are kernel, optimizer, initial
# estimates of hyperparameters and number of times MLE is identified using random starting point.

# %%

# Noise-free GPR (noise=False) with a linear trend; 10 random restarts of the MLE optimization.
gpr1 = GaussianProcessRegression(kernel=kernel1, hyperparameters=[10**(-3), 10**(-2)], optimizer=optimizer1,
                                 optimizations_number=10, noise=False, regression_model=LinearRegression())

# %% md
#
# Call the 'fit' method to train the surrogate model (GPR).

# %%

gpr1.fit(X_train, y_train)

# %% md
#
# The maximum likelihood estimates of the hyperparameters are as follows:
# %%

# Box bounds for the three hyperparameters (third entry is the noise term) during MLE.
bounds_2 = [[10**(-3), 10**3], [10**(-3), 10**2], [10**(-3), 10**(2)]]
optimizer2 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_2)

# %% md
#
# Define the 'GaussianProcessRegressor' class object, the input attributes defined here are kernel, optimizer, initial
# estimates of hyperparameters and number of times MLE is identified using random starting point.

# %%

# Noisy GPR (noise=True) with a linear trend; 10 random restarts of the MLE optimization.
gpr2 = GaussianProcessRegression(kernel=kernel2, hyperparameters=[1, 1, 0.1], optimizer=optimizer2,
                                 optimizations_number=10, noise=True, regression_model=LinearRegression())

# %% md
#
# Call the 'fit' method to train the surrogate model (GPR).

# %%

gpr2.fit(X_train, y_train)

# %% md
#
# The maximum likelihood estimates of the hyperparameters are as follows:
# %%

# Non-negativity constraint enforced at the constraint points X_c during hyperparameter optimization.
cons = NonNegative(constraint_points=X_c, observed_error=0.03, z_value=2)

# %% md
#
# Define the 'GaussianProcessRegressor' class object, the input attributes defined here are kernel, optimizer, initial
# estimates of hyperparameters and number of times MLE is identified using random starting point.

# %%

# Constrained, noisy GPR with a quadratic trend; 10 random restarts of the MLE optimization.
gpr3 = GaussianProcessRegression(kernel=kernel3, hyperparameters=[10**(-3), 10**(-2), 10**(-10)],
                                 optimizer=optimizer3, optimizations_number=10, optimize_constraints=cons,
                                 bounds=bounds_3, noise=True, regression_model=QuadraticRegression())

# %% md
#
# Call the 'fit' method to train the surrogate model (GPR).

# %%

gpr3.fit(X_train, y_train)

# %% md
#