def f(x):
    return x * np.sin(4 * np.pi * x)

X = lb + (ub - lb) * lhs(D, N)
y = f(X) + noise * np.random.randn(N, 1)

# Generate test data
N_star = 400
X_star = lb + (ub - lb) * np.linspace(0, 1, N_star)[:, None]
y_star = f(X_star)

# Normalize Input Data
if ModelInfo["Normalize_input_data"] == 1:
    X_m = np.mean(X, axis=0)
    X_s = np.std(X, axis=0)
    X = Normalize(X, X_m, X_s)
    X_star = Normalize(X_star, X_m, X_s)

# Normalize Output Data
if ModelInfo["Normalize_output_data"] == 1:
    y_m = np.mean(y, axis=0)
    y_s = np.std(y, axis=0)
    y = Normalize(y, y_m, y_s)
    y_star = Normalize(y_star, y_m, y_s)

ModelInfo.update({"X": X})
ModelInfo.update({"y": y})

# Training
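# Normalize is used above but not defined in this snippet. A minimal sketch,
# assuming it performs column-wise z-score scaling with the supplied mean and
# standard deviation (an assumption, not necessarily the repository's version):
def Normalize(X, X_m, X_s):
    # Subtract the per-column mean and divide by the per-column std
    return (X - X_m) / X_s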
def f(x):
    return (x < -0.5) + 1.0 + 1.5 * (x > 0.5)

X = lb + (ub - lb) * lhs(X_dim, N)
Y = f(X) + noise * np.random.randn(N, Y_dim)

# Generate test data
N_star = 400
X_star = lb + (ub - lb) * np.linspace(0, 1, N_star)[:, None]
Y_star = f(X_star)

# Normalize Input Data
if Normalize_input_data == 1:
    X_m = np.mean(X, axis=0)
    X_s = np.std(X, axis=0)
    X = Normalize(X, X_m, X_s)
    X_star = Normalize(X_star, X_m, X_s)

# Normalize Output Data
if Normalize_output_data == 1:
    Y_m = np.mean(Y, axis=0)
    Y_s = np.std(Y, axis=0)
    Y = Normalize(Y, Y_m, Y_s)
    Y_star = Normalize(Y_star, Y_m, Y_s)

# Model creation
model = ConditionalVariationalAutoencoders(X, Y, layers_encoder_0, layers_encoder_1, layers_decoder,
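# The constructor call above is truncated in this excerpt. Since Y and Y_star
# were standardized, any predictions the model later produces live on the
# normalized scale. A minimal sketch of the inverse transform; Denormalize is a
# hypothetical helper introduced here for illustration, not part of the snippet:
def Denormalize(Y_norm, Y_m, Y_s):
    # Inverse of Normalize: restore the original mean and spread
    return Y_norm * Y_s + Y_m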
layers_decoder = np.array([Z_dim, 50, 100, Y_dim])

# Generate synthetic data
def f(z):
    return z / 10 + z / np.linalg.norm(z, 2, axis=1, keepdims=True)

Z = np.random.randn(N, 2)
Y = f(Z)

Normalize_data = 1

# Normalize Output Data
if Normalize_data == 1:
    Y_m = np.mean(Y, axis=0)
    Y_s = np.std(Y, axis=0)
    Y = Normalize(Y, Y_m, Y_s)

# Model creation
model = VariationalAutoencoders(Y, layers_encoder, layers_decoder,
                                max_iter=5000, N_batch=200,
                                monitor_likelihood=10, lrate=1e-3)

model.train()

mean_star, var_star = model.generate_samples(1000)

plt.figure(figsize=(10, 5))
plt.rcParams.update({'font.size': 14})
plt.subplot(1, 2, 1)
plt.scatter(Y[:, 0], Y[:, 1], color='blue')
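# The figure above is set up with two panels, but only the first scatter plot
# (the training data) appears in this excerpt. A hedged guess at how the second
# panel plausibly completes the comparison using the generated samples; the
# panel titles are illustrative assumptions:
plt.title('Training data')
plt.subplot(1, 2, 2)
plt.scatter(mean_star[:, 0], mean_star[:, 1], color='red')
plt.title('Generated samples')
plt.show()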
# Generate training data
def f(x):
    return x * np.sin(4 * np.pi * x)

X = lb + (ub - lb) * lhs(D, N)
y = f(X) + noise * np.random.randn(N, 1)

# Generate test data
N_star = 400
X_star = lb + (ub - lb) * np.linspace(0, 1, N_star)[:, None]
y_star = f(X_star)

# Normalize Input Data
if Normalize_input_data == 1:
    X_m = np.mean(X, axis=0)
    X_s = np.std(X, axis=0)
    X = Normalize(X, X_m, X_s)
    X_star = Normalize(X_star, X_m, X_s)

# Normalize Output Data
if Normalize_output_data == 1:
    y_m = np.mean(y, axis=0)
    y_s = np.std(y, axis=0)
    y = Normalize(y, y_m, y_s)
    y_star = Normalize(y_star, y_m, y_s)

# Model creation
M = 8
pgp = PGP(X, y, M, max_iter=6000, N_batch=1, monitor_likelihood=10, lrate=1e-3)
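# A hedged sketch of the training and prediction step that typically follows
# model creation; the train() and predict() method names on PGP are assumptions
# based on the usage pattern in the other snippets, not confirmed here:
pgp.train()
mean_star, var_star = pgp.predict(X_star)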
y = data["Y_H"] data_star = scipy.io.loadmat('SSTData/XStarMap.mat') XStarMap = data_star["XStar"] data_star = scipy.io.loadmat('SSTData/XStarBuoy.mat') XStarBuoy = data_star["XStar"] Normalize_input_data = 1 Normalize_output_data = 1 # Normalize Input Data if Normalize_input_data == 1: X_m = np.mean(X, axis=0) X_s = np.std(X, axis=0) X = Normalize(X, X_m, X_s) XStarMap = Normalize(XStarMap, X_m, X_s) XStarBuoy = Normalize(XStarBuoy, X_m, X_s) # Normalize Output Data if Normalize_output_data == 1: y_m = np.mean(y, axis=0) y_s = np.std(y, axis=0) y = Normalize(y, y_m, y_s) # Model creation M = 2000 pgp = PGP(X, y, M, max_iter=2000,