def ci_mll(n, gen_func, X_test):
    """Estimate mean log-likelihood with a confidence interval.

    Draws `n` batches of 2000 samples from `gen_func`, scores each batch
    against the (squeezed) test set, and returns (mean, error) of the
    scores via `mean_err`.
    """
    reference = np.squeeze(X_test)
    scores = np.array([
        mean_log_likelihood(gen_func(2000), reference)
        for _ in range(n)
    ])
    return mean_err(scores)
def ci_mmd(n, gen_func, X_test):
    """Estimate maximum mean discrepancy with a confidence interval.

    Draws `n` batches of 2000 samples from `gen_func`, computes the MMD of
    each batch against the (squeezed) test set, and returns (mean, error)
    of the scores via `mean_err`.
    """
    reference = np.squeeze(X_test)
    scores = np.array([
        maximum_mean_discrepancy(gen_func(2000), reference)
        for _ in range(n)
    ])
    return mean_err(scores)
def ci_rsmth(n, gen_func, X_test):
    """Estimate the relative smoothness ratio with a confidence interval.

    For each of `n` trials, samples 2000 designs from `gen_func` and takes
    the ratio variation(test set) / variation(generated batch); returns
    (mean, error) of the ratios via `mean_err`.
    """
    test_variation = variation(np.squeeze(X_test))
    ratios = np.array([
        test_variation / variation(gen_func(2000))
        for _ in range(n)
    ])
    return mean_err(ratios)
def ci_prc(n, gen_func, feasibility_func, n_points):
    """Estimate generation precision with a confidence interval.

    For each of `n` trials, samples 2000 designs from `gen_func` and
    computes `precision` over `n_points` points using `feasibility_func`;
    returns (mean, error) of the scores via `mean_err`.
    """
    scores = np.array([
        precision(gen_func(2000), n_points, feasibility_func)
        for _ in range(n)
    ])
    return mean_err(scores)
def ci_rdiv(n, X_train, gen_func, d=None, k=None, bounds=None):
    """Estimate relative diversity (rdiv) with a confidence interval.

    Runs `n` trials. In each trial, generates as many designs as there are
    training samples and computes `rdiv(X_train, X_gen)`.

    If `d`, `k`, and `bounds` are all given, the generator is instead fed an
    explicit latent batch in which every latent dimension is held at one
    random constant drawn from `bounds` while only dimension `k` varies —
    this isolates the diversity contributed by latent dimension `k`.

    Returns (mean, error) of the rdiv scores via `mean_err`.
    """
    m = X_train.shape[0]
    rdivs = np.zeros(n)
    for i in range(n):
        if d is None or k is None or bounds is None:
            X_gen = gen_func(m)
        else:
            # One scalar constant for all dims, then re-randomize only dim k.
            latent = np.random.uniform(bounds[0], bounds[1]) * np.ones((m, d))
            latent[:, k] = np.random.uniform(bounds[0], bounds[1], size=m)
            X_gen = gen_func(latent)
        rdivs[i] = rdiv(X_train, X_gen)
    # NOTE(review): removed commented-out debug plotting (shape_plot import
    # and plot_samples call) that was left inside the loop body.
    return mean_err(rdivs)
def ci_rssim(n, X_train, gen_func):
    """Estimate relative structural similarity (rssim) with a confidence
    interval.

    Runs `n` trials; each trial generates as many designs as there are
    training samples and scores `rssim(X_train, X_gen)`. Returns
    (mean, error) of the scores via `mean_err`.
    """
    # NOTE(review): removed a trailing block of commented-out experiment
    # code (np.load of airfoil data, ssim/avg_dist prints) that was dead.
    m = X_train.shape[0]
    rssims = np.zeros(n)
    for i in range(n):
        X_gen = gen_func(m)
        rssims[i] = rssim(X_train, X_gen)
    return mean_err(rssims)
def ci_cons(n, gen_func, latent_dim=2, bounds=(0.0, 1.0)):
    """Estimate latent-space consistency with a confidence interval.

    Runs `consistency(gen_func, latent_dim, bounds)` `n` times and returns
    (mean, error) of the scores via `mean_err`.

    NOTE(review): another `ci_cons` with a different signature
    (`d`, `bounds`, `basis`) appears later in this source; if both live in
    the same module, the later definition shadows this one — confirm these
    come from separate files.
    """
    scores = np.array([
        consistency(gen_func, latent_dim, bounds)
        for _ in range(n)
    ])
    return mean_err(scores)
# Persist per-run optimal airfoils and optimization histories.
np.save('{}/opt_airfoil.npy'.format(save_dir), opt_airfoil_runs)
np.save('{}/opt_history.npy'.format(save_dir), opt_perfs_runs)

# Plot optimization history averaged across runs.
mean_perfs_runs = np.mean(opt_perfs_runs, axis=0)
plt.figure()
# BUG FIX: the across-run mean was computed but never used — the plot drew
# `opt_perfs` (a single run's history) instead of `mean_perfs_runs`.
plt.plot(np.arange(n_eval + 1, dtype=int), mean_perfs_runs)
plt.title('Optimization History')
plt.xlabel('Number of Evaluations')
plt.ylabel('Optimal CL/CD')
plt.savefig('{}/opt_history.svg'.format(save_dir))
plt.close()

# Overlay the optimal airfoil of every run; alpha scales with run count so
# overlapping shapes darken where runs agree.
mean_time_runs, err_time_runs = mean_err(time_runs)
mean_final_perf_runs, err_final_perf_runs = mean_err(opt_perfs_runs[:, -1])
plt.figure()
for opt_airfoil in opt_airfoil_runs:
    plt.plot(opt_airfoil[:, 0], opt_airfoil[:, 1], '-', c='k',
             alpha=1.0 / n_runs)
plt.title('CL/CD: %.2f+/-%.2f time: %.2f+/-%.2f min' %
          (mean_final_perf_runs, err_final_perf_runs,
           mean_time_runs / 60, err_time_runs / 60))
plt.axis('equal')
plt.savefig('{}/opt_airfoil.svg'.format(save_dir))
plt.close()
def ci_cons(n, gen_func, d=2, bounds=(0.0, 1.0), basis='cartesian'):
    """Estimate latent-space consistency with a confidence interval.

    Runs `consistency(gen_func, d, bounds, basis=basis)` `n` times and
    returns (mean, error) of the scores via `mean_err`.
    """
    scores = np.array([
        consistency(gen_func, d, bounds, basis=basis)
        for _ in range(n)
    ])
    return mean_err(scores)
print('######################################################') directory = 'trained_gan/{}_{}/{}'.format( latent_dim, noise_dim, i) model = GAN(latent_dim, noise_dim, X_train.shape[1], bezier_degree, bounds) model.restore(directory=directory) mmd = maximum_mean_discrepancy(model.synthesize, X_test) list_mmd.append(mmd) print(get_n_vars()) tf.keras.backend.clear_session() print(get_n_vars()) mmd_mean, mmd_err = mean_err(list_mmd) list_mmd_mean.append(mmd_mean) list_mmd_err.append(mmd_err) ax_mmd.bar(np.array(latent_dims) + p_list[j] * width, list_mmd_mean, width, yerr=list_mmd_err, label=str(noise_dim), color=c_list[j]) ax_mmd.legend(frameon=False, title='Noise dim.') ax_mmd.set_xticks(latent_dims) ax_mmd.set_xlabel('Latent dimension') ax_mmd.set_ylabel('MMD')