def plot_evaluate_active_subspace_density_1d_step(
        line, points_for_eval_in_interval, rotated_vertices, density_fn,
        points_for_eval, cnt_all, W, mapped_vertices, density_vals):
    """Visualize one step of evaluating a density on a 1D active subspace.

    Left panel: the rotated 2D domain with its boundary, the current
    integration line, the evaluation points, and the rotated density as a
    filled contour plot (masked outside the original domain). Right panel:
    the accumulated 1D density values along the active direction.

    Parameters
    ----------
    line : array (2, nline)
        Points defining the current integration line in rotated coordinates.
    points_for_eval_in_interval : array
        Evaluation points on the current interval (plotted on the x-axis).
    rotated_vertices : array (2, nverts)
        Vertices of the rotated domain boundary.
    density_fn : callable
        Density in the original coordinates; called on W.T @ x here.
    points_for_eval : array
        All 1D evaluation points; only the first ``cnt_all`` are plotted.
    cnt_all : int
        Number of evaluation points computed so far.
    W : array
        Rotation matrix defining the active subspace.
    mapped_vertices : array
        Vertices mapped to the active direction; sets the right panel x-limits.
    density_vals : array
        Density values at ``points_for_eval``; first ``cnt_all`` are plotted.
    """
    import copy
    f, axs = plt.subplots(1, 2, sharey=False, figsize=(16, 6))
    # the current 1D integration line in the rotated coordinates
    axs[0].plot(line[0, :], line[1, :], '^b-', ms=10)
    axs[0].plot(points_for_eval_in_interval, 0, 'rs')
    # boundary of the rotated domain (the original code plotted this
    # identical artist twice; once suffices)
    axs[0].plot(rotated_vertices[0, :], rotated_vertices[1, :], 'o-k')
    ss_samples = np.vstack(
        (points_for_eval[:cnt_all], np.zeros(cnt_all, float)))
    axs[0].plot(ss_samples[0, :], ss_samples[1, :], 'r-')
    # join the first and fourth vertices to close the boundary
    # NOTE(review): hard-codes a 4-vertex domain — confirm for general use
    I = [0, 3]
    axs[0].plot(rotated_vertices[0, I], rotated_vertices[1, I], 'o-k')

    def rotated_density_fn(x):
        # density expressed in the rotated coordinates
        return density_fn(np.dot(W.T, x))

    limits = [
        rotated_vertices[0, :].min(), rotated_vertices[0, :].max(),
        rotated_vertices[1, :].min(), rotated_vertices[1, :].max()]
    X, Y, Z = get_meshgrid_function_data(rotated_density_fn, limits, 51)
    xx = np.vstack((X.flatten()[np.newaxis, :], Y.flatten()[np.newaxis, :]))
    # mask grid points that map outside the original domain [-1, 1]^d
    I = np.where(np.absolute(np.dot(W.T, xx)) > 1)[1]
    Z = Z.flatten()
    levels = np.linspace(Z.min(), Z.max(), 30)
    Z[I] = np.nan
    # copy before calling set_bad: mutating a registered colormap changes
    # global matplotlib state and raises an error in matplotlib >= 3.7
    cmap = copy.copy(plt.cm.coolwarm)
    cmap.set_bad('white', 1.)
    Z = Z.reshape(X.shape[0], X.shape[1])
    axs[0].contourf(X, Y, Z, extent=limits, cmap=cmap, levels=levels)
    # accumulated 1D density along the active direction
    axs[1].set_xlim([mapped_vertices.min(), mapped_vertices.max()])
    axs[1].set_ylim([0, 2])
    axs[1].plot(points_for_eval[:cnt_all], density_vals[:cnt_all], 'k')
def plot_tensor_product_lagrange_basis_2d(level, ii, jj, ax=None):
    """Plot the (ii, jj)-th tensor-product Lagrange basis function in 2D.

    Builds the barycentric Lagrange interpolant of data that is one at a
    single Clenshaw-Curtis grid point and zero at all others, then draws
    it as a 3D surface with: a filled contour plot projected below the
    surface, the training grid as blue dots on the contour plane, and the
    two 1D slices of the basis through the active point (red curves with
    red dots at the 1D abscissa) projected onto the back walls.

    Parameters
    ----------
    level : int
        Level of the 1D Clenshaw-Curtis rule used in each dimension.
    ii, jj : int
        1D indices (x and y respectively) of the grid point at which the
        basis function equals one.
    ax : 3D axis, optional
        Axis to draw on; a new 3D axis is created when None.
    """
    abscissa, tmp = clenshaw_curtis_pts_wts_1D(level)
    # same 1D rule in both dimensions
    abscissa_1d = [abscissa, abscissa]
    barycentric_weights_1d = [
        compute_barycentric_weights_1d(abscissa_1d[0]),
        compute_barycentric_weights_1d(abscissa_1d[1])]
    training_samples = cartesian_product(abscissa_1d, 1)
    # indicator data: one at the (ii, jj) grid point, zero elsewhere
    fn_vals = np.zeros((training_samples.shape[1], 1))
    # flat index of the active point; assumes the cartesian product orders
    # samples with the first dimension varying fastest — TODO confirm
    idx = jj*abscissa_1d[1].shape[0]+ii
    fn_vals[idx] = 1.

    def f(samples):
        # evaluate the interpolant (the basis function) at samples
        return multivariate_barycentric_lagrange_interpolation(
            samples, abscissa_1d, barycentric_weights_1d, fn_vals,
            np.array([0, 1]))

    plot_limits = [-1, 1, -1, 1]
    num_pts_1d = 101
    X, Y, Z = get_meshgrid_function_data(f, plot_limits, num_pts_1d)
    if ax is None:
        ax = create_3d_axis()
    # NOTE(review): this assignment is unused — cmap is immediately passed
    # explicitly below and reassigned to gray before the contour plot
    cmap = mpl.cm.coolwarm
    plot_surface(X, Y, Z, ax, axis_labels=None, limit_state=None,
                 alpha=0.3, cmap=mpl.cm.coolwarm, zorder=3, plot_axes=False)
    num_contour_levels = 30
    # project the contour plot on a plane below the surface
    offset = -(Z.max()-Z.min())/2
    cmap = mpl.cm.gray
    ax.contourf(
        X, Y, Z, zdir='z', offset=offset,
        levels=np.linspace(Z.min(), Z.max(), num_contour_levels),
        cmap=cmap, zorder=-1)
    # training grid drawn on the contour plane
    ax.plot(training_samples[0, :], training_samples[1, :],
            offset*np.ones(training_samples.shape[1]), 'o',
            zorder=100, color='b')
    # 1D slice through the active point along x, projected onto the
    # back wall at y = Y.max()
    x = np.linspace(-1, 1, 100)
    y = training_samples[1, idx]*np.ones((x.shape[0]))
    z = f(np.vstack((x[np.newaxis, :], y[np.newaxis, :])))[:, 0]
    ax.plot(x, Y.max()*np.ones((x.shape[0])), z, '-r')
    ax.plot(abscissa_1d[0], Y.max()*np.ones(
        (abscissa_1d[0].shape[0])), np.zeros(abscissa_1d[0].shape[0]), 'or')
    # 1D slice through the active point along y, projected onto the
    # side wall at x = X.min()
    y = np.linspace(-1, 1, 100)
    x = training_samples[0, idx]*np.ones((y.shape[0]))
    z = f(np.vstack((x[np.newaxis, :], y[np.newaxis, :])))[:, 0]
    ax.plot(X.min()*np.ones((x.shape[0])), y, z, '-r')
    ax.plot(X.min()*np.ones(
        (abscissa_1d[1].shape[0])), abscissa_1d[1],
        np.zeros(abscissa_1d[1].shape[0]), 'or')
def plot_2d(function, num_XX_test_1d, bounds, XX_train_values=None, ax=None):
    """Filled-contour plot of a bivariate function, with a colorbar.

    Parameters
    ----------
    function : callable
        Function evaluated on samples stored row-wise, i.e. it is called
        as ``function(XX)`` with ``XX`` of shape (nsamples, 2).
    num_XX_test_1d : int
        Number of grid points in each dimension.
    bounds : iterable
        Plot limits [xmin, xmax, ymin, ymax] passed to
        ``get_meshgrid_function_data``.
    XX_train_values : array (ntrain, 2), optional
        Training points overlaid as black dots when provided.
    ax : matplotlib axis, optional
        Axis to draw on; a new figure/axis is created when None.

    Returns
    -------
    ax : matplotlib axis
        The axis the plot was drawn on.
    """
    # get_meshgrid_function_data passes samples column-wise
    # (nvars, nsamples); transpose to the row-wise convention of function
    def gp_mean_func(XX):
        return function(XX.T)

    num_contour_levels = 20
    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=(8, 6))
    X, Y, Z = get_meshgrid_function_data(gp_mean_func, bounds, num_XX_test_1d)
    cset = ax.contourf(
        X, Y, Z, levels=np.linspace(Z.min(), Z.max(), num_contour_levels))
    if XX_train_values is not None:
        ax.plot(XX_train_values[:, 0], XX_train_values[:, 1], 'ko')
    plt.colorbar(cset, ax=ax)
    return ax
def plot(fun, ax):
    """Filled-contour plot of a 2D density ``fun`` on the unit square.

    Prints a crude Riemann-sum estimate of the integral of ``fun`` (which
    should converge to 1 as ``num_samples_1d`` grows) and clips the color
    range so an isolated sharp peak does not wash out the rest of the plot.

    Relies on the names ``plot_limits``, ``num_samples_1d``,
    ``num_contour_levels`` and ``get_meshgrid_function_data`` being defined
    in the enclosing scope — TODO confirm against the surrounding script.

    Parameters
    ----------
    fun : callable
        Density function evaluated by ``get_meshgrid_function_data``.
    ax : matplotlib axis
        Axis to draw on.
    """
    X, Y, Z = get_meshgrid_function_data(fun, plot_limits, num_samples_1d)
    # approximate cell widths for the Riemann-sum integral check
    dx = (plot_limits[1] - plot_limits[0]) / num_samples_1d
    dy = (plot_limits[3] - plot_limits[2]) / num_samples_1d
    # fix 2to3 artifact: print message and value, not a tuple repr
    print('integral of func', Z.sum() * (dx * dy))
    # clip color levels at the 99.9th percentile unless the maximum is
    # within a factor of 10 of it (i.e. no extreme outlier peak)
    z_max = np.percentile(Z.flatten(), 99.9)
    if Z.max() / z_max < 10:
        z_max = Z.max()
    levels = np.linspace(0, z_max, num_contour_levels)
    cset = ax.contourf(X, Y, Z, levels=levels, cmap=mpl.cm.coolwarm)
    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    ax.set_xlabel('$z_1$')
    ax.set_ylabel('$z_2$')
def plot_optimization_objective_and_constraints_2D(
        constraints, objective, plot_limits):
    """Plot each 2D constraint and the objective as filled contour plots.

    Creates one panel per constraint plus a final panel for the objective.
    On every panel the zero level set of the currently plotted function is
    overlaid on all constraint panels: solid on the function's own panel,
    dashed on the others.

    Parameters
    ----------
    constraints : object
        Callable as ``constraints(sample, [ii])`` returning the value of
        the ii-th constraint at a single sample (column vector), and
        exposing the list ``constraints.constraints``.
    objective : callable
        Objective evaluated on samples stored column-wise, shape
        (nvars, nsamples), returning one value per sample.
    plot_limits : iterable
        Limits [xmin, xmax, ymin, ymax] passed to
        ``get_meshgrid_function_data``.

    Returns
    -------
    fig, axs : matplotlib figure and array of axes
    """
    from pyapprox.visualization import get_meshgrid_function_data
    num_pts_1d = 100
    num_contour_levels = 30
    # NOTE(review): hard-codes 3 panels — assumes exactly two constraints
    # plus the objective; confirm against callers
    fig, axs = plt.subplots(1, 3, figsize=(3 * 8, 6))
    # last iteration plots the objective, earlier ones the constraints
    for ii in range(len(constraints.constraints) + 1):
        if ii == len(constraints.constraints):
            function = objective
        else:
            def function(design_samples):
                # evaluate the ii-th constraint one column (sample) at a
                # time; ``ii`` is read from the enclosing loop at call time
                vals = np.empty((design_samples.shape[1]))
                for jj in range(design_samples.shape[1]):
                    vals[jj] = constraints(design_samples[:, jj], [ii])
                return vals
        X, Y, Z = get_meshgrid_function_data(
            function, plot_limits, num_pts_1d)
        norm = None
        cset = axs[ii].contourf(
            X, Y, Z,
            levels=np.linspace(Z.min(), Z.max(), num_contour_levels),
            cmap=mpl.cm.coolwarm, norm=norm)
        # overlay this function's zero contour on every constraint panel
        # NOTE(review): when ii indexes the objective this also draws the
        # objective's zero level set (dashed) on all panels — confirm that
        # is intended and not a loop-bound slip
        for kk in range(len(constraints.constraints)):
            if ii == kk:
                ls = '-'
            else:
                ls = '--'
            axs[kk].contour(X, Y, Z, levels=[0], colors='k', linestyles=ls)
        plt.colorbar(cset, ax=axs[ii])
    return fig, axs