Example #1
0
print(K.hyperparameters)

# %% md
#
# This plot shows the actual model which is used to evaluate the samples to identify the function values.

# %%

# Evaluate the true model on a 25x25 grid over the unit square [0, 1] x [0, 1].
num = 25
x1 = np.linspace(0, 1, num)
x2 = np.linspace(0, 1, num)

x1g, x2g = np.meshgrid(x1, x2)
# Flatten the grid into (num*num, 1) column vectors so they can be stacked
# into the (n_samples, 2) layout expected by predict() and RunModel.
x1gv, x2gv = x1g.reshape(x1g.size, 1), x2g.reshape(x2g.size, 1)

# Surrogate predictions at the grid points, reshaped back onto the grid.
y2 = K.predict(np.concatenate([x1gv, x2gv],
                              1)).reshape(x1g.shape[0], x1g.shape[1])
# Run the actual model at the same points for comparison with the surrogate.
model = PythonModel(model_script='local_python_model_function.py',
                    model_object_name="y_func")
r2model = RunModel(model=model)
r2model.run(samples=np.concatenate([x1gv, x2gv], 1))
y_act = np.array(r2model.qoi_list).reshape(x1g.shape[0], x1g.shape[1])

fig1 = plt.figure()
# Figure.gca() stopped accepting keyword arguments in Matplotlib 3.4 and the
# old `gca(projection='3d')` form was removed in 3.6 — create the 3D axes
# explicitly instead.
ax = fig1.add_subplot(projection='3d')
surf = ax.plot_surface(x1g,
                       x2g,
                       y_act,
                       cmap=cm.coolwarm,
                       linewidth=0,
                       antialiased=False)
ax.set_zlim(-1, 15)
Example #2
0
print(K.hyperparameters)

#%% md
#
# This figure shows the surrogate model generated using :class:`.Kriging` class from initial samples.

#%%

# Evaluate the Kriging surrogate point-by-point on a 25x25 grid over the
# unit square.
num = 25
x1 = np.linspace(0, 1, num)
x2 = np.linspace(0, 1, num)
x1v, x2v = np.meshgrid(x1, x2)
y = np.zeros([num, num])
for i in range(num):
    for j in range(num):
        # NOTE(review): a 1-D array is passed here while the other examples
        # pass (n_samples, 2) arrays — presumably predict() reshapes a single
        # point internally; confirm against the Kriging API.
        y[i, j] = K.predict(np.array([x1v[i, j], x2v[i, j]]))

fig2 = plt.figure()
# Figure.gca() no longer accepts keyword arguments (the `gca(projection='3d')`
# form was removed in Matplotlib 3.6) — create the 3D axes explicitly.
ax2 = fig2.add_subplot(projection='3d')
# Plot for estimated values
kr = ax2.plot_wireframe(x1v, x2v, y, color='Green', label='Kriging interpolate')

# Plot for scattered data
ID = ax2.scatter3D(x.samples[:, 0], x.samples[:, 1], rmodel1.qoi_list, color='Red', label='Input data')
plt.legend(handles=[kr, ID])
plt.show()

#%% md
#
# A :class:`.RefinedStratifiedSampling` class object is initiated by using the :class:`.TrueStratifiedSampling`,
# :class:`.RunModel` and :class:`.Kriging` object.
Example #3
0
                              hyperparameters=[1, 0.1],
                              random_state=2)
# Fit the Kriging surrogate to the sampled inputs and their model responses,
# then report the optimized hyperparameters.
K.fit(samples=x.samples, values=rmodel.qoi_list)
print(K.hyperparameters)

# %% md
#
# RunModel is used to evaluate function values at sample points. Model is defined as a function in python file
# 'python_model_function.py'.

# %%

# Dense 1-D prediction grid spanning the range of the training samples.
# NOTE(review): min()/max() are applied to x.samples directly — presumably a
# (n, 1) array, so the endpoints are length-1 arrays and x1 has shape
# (num, 1); confirm against the sampler's output shape.
num = 1000
x1 = np.linspace(min(x.samples), max(x.samples), num)

# Surrogate mean prediction and standard error at the grid points.
y, y_sd = K.predict(x1.reshape([num, 1]), return_std=True)

# %% md
#
# Actual model is evaluated at all points to compare it with kriging surrogate.

# %%

# Re-run the actual model on the prediction grid, replacing (not appending to)
# the previously evaluated samples.
rmodel.run(samples=x1, append_samples=False)

# %% md
#
# This plot shows the input data as blue dot, blue curve is actual function and orange curve represents response curve.
# This plot also shows the gradient and 95% confidence interval of the kriging surrogate.

# %%
Example #4
0
# %%

# Display the optimized hyperparameters of the fitted GPR surrogate.
# print() added for consistency with the sibling examples (gpr2/gpr3) and so
# the value is actually shown when this runs as a plain script — the original
# bare expression statement discarded it.
print(gpr1.hyperparameters)

print('Length Scale: ', gpr1.hyperparameters[0])
print('Process Variance: ', gpr1.hyperparameters[1])

# %% md
#
# Use 'predict' method to compute surrogate prediction at the test samples. The attribute 'return_std' is a boolean
# indicator. If 'True', 'predict' method also returns the standard error at the test samples.

# %%

# Surrogate mean prediction and standard error at the test points.
y_pred1, y_std1 = gpr1.predict(X_test, return_std=True)

# %% md
#
# The plot shows the test function in dashed red line and 13 training points are represented by blue dots. Also, blue
# curve shows the GPR prediction for :math:`x \in (0, 1)` and yellow shaded region represents 95% confidence interval.

# %%

# Overlay the exact test function and the training data.
fig, ax = plt.subplots(figsize=(8.5, 7))
ax.plot(X_test, y_test, 'r--', linewidth=2, label='Test Function')
ax.plot(X_train,
        y_train,
        'bo',
        markerfacecolor='b',
        markersize=10,
Example #5
0
# %%

# Report the optimized hyperparameters of the fitted GPR surrogate; this
# kernel also carries a noise term (third hyperparameter).
print(gpr2.hyperparameters)

print('Length Scale: ', gpr2.hyperparameters[0])
print('Process Variance: ', gpr2.hyperparameters[1])
print('Noise Variance: ', gpr2.hyperparameters[2])

# %% md
#
# Use 'predict' method to compute surrogate prediction at the test samples. The attribute 'return_std' is a boolean
# indicator. If 'True', 'predict' method also returns the standard error at the test samples.

# %%

# Surrogate mean prediction and standard error at the test points.
y_pred2, y_std2 = gpr2.predict(X_test, return_std=True)

# %% md
#
# The plot shows the test function in dashed red line and 13 training points are represented by blue dots. Also, blue
# curve shows the GPR prediction for :math:`x \in (0, 1)` and yellow shaded region represents 95% confidence interval.

# %%

# Overlay the exact test function and the training data.
fig, ax = plt.subplots(figsize=(8.5, 7))
ax.plot(X_test, y_test, 'r--', linewidth=2, label='Test Function')
ax.plot(X_train,
        y_train,
        'bo',
        markerfacecolor='b',
        markersize=10,
Example #6
0
# %%

# Report the optimized hyperparameters of the fitted GPR surrogate; this
# kernel also carries a noise term (third hyperparameter).
print(gpr3.hyperparameters)

print('Length Scale: ', gpr3.hyperparameters[0])
print('Process Variance: ', gpr3.hyperparameters[1])
print('Noise Variance: ', gpr3.hyperparameters[2])

# %% md
#
# Use 'predict' method to compute surrogate prediction at the test samples. The attribute 'return_std' is a boolean
# indicator. If 'True', 'predict' method also returns the standard error at the test samples.

# %%

# Surrogate mean prediction and standard error at the test points.
y_pred3, y_std3 = gpr3.predict(X_test, return_std=True)

# %% md
#
# The plot shows the test function in dashed red line and 13 training points are represented by blue dots. Also, blue
# curve shows the GPR prediction for :math:`x \in (0, 1)` and yellow shaded region represents 95% confidence interval.

# %%

# Overlay the exact test function and the training data.
fig, ax = plt.subplots(figsize=(8.5, 7))
ax.plot(X_test, y_test, 'r--', linewidth=2, label='Test Function')
ax.plot(X_train,
        y_train,
        'bo',
        markerfacecolor='b',
        markersize=10,