def cokriging(i):
    """Multi-fidelity co-kriging prediction for the i-th stress case.

    Trains an MFK model on a cheap (low-fidelity) dataset and an
    expensive (high-fidelity) dataset, then predicts values at the
    high-fidelity coordinates.

    Parameters
    ----------
    i : int
        Index of the stress case: selects the row of
        ``arr_stress_high_train`` and of the low-fidelity predictions.

    Returns
    -------
    numpy.ndarray
        Flattened 1-D array of predicted values at ``arr_xy_high``.
    """
    # Expensive (high-fidelity) sample locations.
    Xt_e = np.array([
        [50.0891841, 11.000994],
        [49.9108168, 38.999006],
        [84.200043, 41.5655806],
        [16.0943345, 7.97111717],
        [15.7955774, 41.584603],
        [83.9713379, 7.94023456],
    ])
    # Cheap (low-fidelity) sample locations -- module-level grid,
    # assumed defined elsewhere in this file.
    Xt_c = arr_xy_low
    yt_e = arr_stress_high_train[i]
    yt_c = np.array(low_predict_data_by_force()[0][i])

    # One theta per input dimension; suppress SMT's console output.
    sm = MFK(theta0=np.array(Xt_e.shape[1] * [1.0]), print_global=False)
    # Low-fidelity dataset names are integers from 0 to level-1.
    sm.set_training_values(Xt_c, yt_c, name=0)
    # The high-fidelity dataset is registered without a name.
    sm.set_training_values(Xt_e, yt_e)
    sm.train()

    # Predict at the high-fidelity coordinates and return as a flat vector.
    y = sm.predict_values(arr_xy_high)
    return y.flatten()
def run_mfk_example(self):
    """Run the 1-D multi-fidelity kriging (MFK) demo and plot the result.

    Builds a cheap (LF) and an expensive (HF) design of experiments on
    [0, 1], trains an MFK surrogate, predicts on a dense grid and shows
    the reference curve, the surrogate mean, and both DOEs.
    """
    import numpy as np
    import matplotlib.pyplot as plt
    from smt.applications import MFK

    def LF_function(x):
        # Cheap, biased approximation of the high-fidelity function.
        return (0.5 * ((x * 6 - 2) ** 2) * np.sin((x * 6 - 2) * 2)
                + (x - 0.5) * 10.0 - 5)

    def HF_function(x):
        # Expensive reference function.
        return ((x * 6 - 2) ** 2) * np.sin((x * 6 - 2) * 2)

    # Problem set up: 4 expensive points, 11 cheap points on [0, 1].
    ndim = 1
    Xt_e = np.linspace(0, 1, 4, endpoint=True).reshape(-1, ndim)
    Xt_c = np.linspace(0, 1, 11, endpoint=True).reshape(-1, ndim)

    # Evaluate the HF and LF functions at their DOE points.
    yt_e = HF_function(Xt_e)
    yt_c = LF_function(Xt_c)

    # One theta per input dimension.
    sm = MFK(theta0=np.array(Xt_e.shape[1] * [1.0]))
    # Low-fidelity dataset names are integers from 0 to level-1.
    sm.set_training_values(Xt_c, yt_c, name=0)
    # The high-fidelity dataset is registered without a name.
    sm.set_training_values(Xt_e, yt_e)
    # Train the model.
    sm.train()

    # Query the surrogate on a dense grid.
    x = np.linspace(0, 1, 101, endpoint=True).reshape(-1, 1)
    y = sm.predict_values(x)
    # API demonstration calls (results intentionally unused by the plot).
    MSE = sm.predict_variances(x)
    der = sm.predict_derivatives(x, kx=0)

    plt.figure()
    plt.plot(x, HF_function(x), label="reference")
    plt.plot(x, y, linestyle="-.", label="mean_gp")
    plt.scatter(Xt_e, yt_e, marker="o", color="k", label="HF doe")
    plt.scatter(Xt_c, yt_c, marker="*", color="g", label="LF doe")
    plt.legend(loc=0)
    plt.ylim(-10, 17)
    plt.xlim(-0.1, 1.1)
    plt.xlabel(r"$x$")
    plt.ylabel(r"$y$")
    plt.show()
# Script fragment: depends on names defined earlier in the file
# (sm, Xt_e, Xt_c, yt_e, yt_c, HF_function, LF_function, np, plt).
print(Xt_e.shape[1] * [1.0])
# low-fidelity dataset names being integers from 0 to level-1
sm.set_training_values(Xt_c, yt_c, name=0)
print(Xt_c)
# high-fidelity dataset without name
sm.set_training_values(Xt_e, yt_e)
print(Xt_e)
# train the model
sm.train()
x = np.linspace(0, 1, 101, endpoint=True).reshape(-1, 1)
# query the outputs
y = sm.predict_values(x)
MSE = sm.predict_variances(x)
der = sm.predict_derivatives(x, kx=0)
plt.figure()
plt.plot(x, HF_function(x), label="reference")
# Fix: the LF curve previously reused the label "reference", producing a
# duplicate legend entry; give it a distinct label.
plt.plot(x, LF_function(x), label="LF reference")
plt.plot(x, y, linestyle="-.", label="mean_gp")
plt.scatter(Xt_e, yt_e, marker="o", color="k", label="HF doe")
plt.scatter(Xt_c, yt_c, marker="*", color="g", label="LF doe")
plt.legend(loc=0)
plt.ylim(-10, 17)
plt.xlim(-0.1, 1.1)
plt.xlabel(r"$x$")