def plot_map_of_elites(self, perfs, log_dir_name):
    """Render the given performance grid as a heatmap and save it.

    Args:
        perfs: 2D grid of elite performances to visualise.
        log_dir_name: directory/path passed through to the plotting
            helper as ``savefig_path``.
    """
    # Axis labels come from the behavioural feature dimensions; note the
    # second dimension labels the x-axis and the first the y-axis.
    x_label = self.feature_dimensions[1].name
    y_label = self.feature_dimensions[0].name
    plot_heatmap(perfs, x_label, y_label, savefig_path=log_dir_name)
def plot_map_of_elites(self):
    """Plot the stored performance map as a heatmap.

    Uses ``self.performances`` as the data grid and saves the figure to
    ``self.log_dir_path``.
    """
    dims = self.feature_dimensions
    # dims[1] labels the x-axis, dims[0] the y-axis.
    plot_heatmap(
        self.performances,
        dims[1].name,
        dims[0].name,
        savefig_path=self.log_dir_path,
    )
def plot_map_of_elites(self, iteration):
    """Plot a heatmap of elites for the given iteration.

    Args:
        iteration: iteration index forwarded to the plotting helper
            (used there, e.g., to tag the saved figure).
    """
    # Stringify the bin edges so they can serve as tick labels on the
    # plot axes.
    # NOTE(review): descriptors[5]/[6] are hard-coded as the plotted
    # pair of behavioural dimensions — confirm these indices are the
    # intended ones (an earlier variant used [0]/[1]).
    x_ax = [str(d) for d in self.bins['bin_{}'.format(self.descriptors[5])]]
    y_ax = [str(d) for d in self.bins['bin_{}'.format(self.descriptors[6])]]
    plot_heatmap(
        self.performances,
        x_ax,
        y_ax,
        v_min=0.0,
        v_max=5.0,
        savefig_path=self.log_dir_path,
        iteration=iteration,
        # Plain string: the original had an f-prefix with no
        # placeholders (F541); the rendered text is identical.
        title="MAP-Elites for the city of Boston",
        **self.plot_args)
if len(diff) != 1: name_parts.append("differences" if run.diff == "diff" else "no differences") ddiff = "a" if run.diff == "nodiff" else "b" data = datas.setdefault( (run.dataset, run.bounds, run.method, ddiff), { "name": " ".join(name_parts), "runs": set() }) data["runs"].add(run) if figure == "success": plot_heatmap(datas, fig, map2success, "Successes (out of 5)", flat=args.flat, grid=grid, vmin=0, vmax=5) elif figure == "normdist": plot_heatmap(datas, fig, map2normdist, "Normdist", flat=args.flat, grid=grid) elif figure == "row": plot_heatmap(datas, fig, map2row, "Row", flat=args.flat, grid=grid) elif figure == "blocks": plot_heatmap(datas, fig,
"#FF8031", "#02D4F9", "#4F4C4B", ] sns.set_palette(flatui) sns.palplot(sns.color_palette()) colors = plt.rcParams['axes.prop_cycle'].by_key()['color'] if not os.path.exists('figures'): os.makedirs('figures') plot_utils.plot_heatmap( 'saves/mnist_grid/grid_mnist_mlp_lr:inverse-time-decay_sgdm_T_5000_N_100/seed_3/result.pkl', key='train_sum_loss', xlabel='Decay', ylabel='Initial LR', cmap=plt.cm.Purples_r, levels=10, figsize=(8, 6), ) plt.xticks([-2, -1, 0, 1, 2], fontsize=18) plt.yticks([-3, -2, -1, 0, 1], fontsize=18) es_K100 = plot_utils.load_log( 'saves/mnist_lr_decay/train_sum_loss/es-mnist-mlp-obj:train_sum_loss-tune:lr:itd-T:5000-K:100-N:100-sigma:0.1-olr:0.1-seed:3', fname='frequent.csv') es_K10 = plot_utils.load_log( 'saves/mnist_lr_decay/train_sum_loss/es-mnist-mlp-obj:train_sum_loss-tune:lr:itd-T:5000-K:10-N:100-sigma:0.1-olr:0.01-seed:3', fname='frequent.csv') es_K1 = plot_utils.load_log(
sns.palplot(sns.color_palette()) colors = plt.rcParams['axes.prop_cycle'].by_key()['color'] figure_dir = 'figures/mnist_heatmaps' if not os.path.exists(figure_dir): os.makedirs(figure_dir) # Plot train loss heatmap # --------------------------------------------------------------------------- plot_utils.plot_heatmap( 'saves/mnist_grid_sgdm/grid_mnist_mlp_lr:inverse-time-decay_sgdm_T_5000_N_40/seed_3/result.pkl', key='unroll_obj', xlabel='Log LR Decay', ylabel='Log Init LR', cmap=plt.cm.Purples_r, levels=30, sigma=1.0, use_smoothing=True, show_contours=True, contour_alpha=0.2, figsize=(8, 6), ) es_K10 = plot_utils.load_log( 'saves/mnist_lr_decay/train_sum_loss/es-mnist-mlp-obj:train_sum_loss-tune:lr:inverse-time-decay-T:5000-K:10-nc:1-npc:1000-sigma:0.1-olr:0.01-ob1:0.9-ob2:0.999-ic:-1-oc:-1-seed:3', fname='frequent.csv') pes_K10 = plot_utils.load_log( 'saves/mnist_lr_decay/train_sum_loss/pes-mnist-mlp-obj:train_sum_loss-tune:lr:inverse-time-decay-T:5000-K:10-nc:1-npc:1000-sigma:0.1-olr:0.01-ob1:0.9-ob2:0.999-ic:-1-oc:-1-seed:3', fname='frequent.csv') es_K100 = plot_utils.load_log(
def ftr(*, n_train=800, l_arr=None, var_0_arr=None, verbose=True):
    """Grid-search basis lengthscale and prior variance for a MAP/Laplace classifier.

    Loads ``X.txt`` / ``y.txt``, randomly partitions them into train and
    test sets, and for every pair ``(l, var_0)`` on the grid:

    * expands the inputs in the basis induced by lengthscale ``l``
      (centred on the training points),
    * fits the model via ``find_map`` and records the log model
      evidence ``log Z`` in a matrix,
    * optionally (``verbose``) reports train/test metrics and plots the
      MAP and Laplace predictive surfaces.

    Finally plots a heatmap of ``log Z`` over the whole grid.

    Args:
        n_train: number of points assigned to the training split.
        l_arr: candidate basis-function lengthscales; defaults to the
            original hard-coded grid ``[0.1, 0.35]``.
        var_0_arr: candidate prior variances; defaults to ``[1, 0.5]``.
        verbose: if True, also compute metrics and predictive plots for
            every grid point.
    """
    # Preserve the original hard-coded grids as defaults (None-sentinel
    # avoids mutable default arguments).
    l_arr = [0.1, 0.35] if l_arr is None else list(l_arr)
    var_0_arr = [1, 0.5] if var_0_arr is None else list(var_0_arr)

    print("Loading data...\n")
    X = np.loadtxt('X.txt')
    y = np.loadtxt('y.txt')

    print("Randomly partitioning into training and test sets...\n")
    X_train, y_train, X_test, y_test = randomly_partition(X, y, n_train)

    # Log model evidence for every (l, var_0) pair on the grid.
    Z_matrix = np.zeros(shape=(len(l_arr), len(var_0_arr)))

    for i, l in enumerate(l_arr):
        for j, var_0 in enumerate(var_0_arr):
            print("( l , var_0 ) = ( {} , {} )".format(l, var_0))

            # Feature expansion centred on the training inputs; the test
            # set is expanded against the *training* points on purpose.
            X_tilde_train = get_x_tilde(
                evaluate_basis_functions(l, X_train, X_train))
            X_tilde_test = get_x_tilde(
                evaluate_basis_functions(l, X_test, X_train))

            w_map, log_Z, predict_laplace = find_map(
                X_tilde_train, y_train, var_0)
            # ndarray tuple indexing instead of chained [i][j].
            Z_matrix[i, j] = log_Z
            print("Log Z = {}\n".format(log_Z))

            if verbose:
                # Named defs instead of name-bound lambdas (PEP 8 E731);
                # both close over the current w_map / l / X_train.
                def predict_map(X_tilde):
                    return predict(X_tilde=X_tilde, w=w_map)

                def expansion_function(x):
                    return evaluate_basis_functions(l, x, X_train)

                probs_test_laplace = predict_laplace(X_tilde_test)
                display_metrics(probs_test_laplace, y_test, "Test laplace")
                probs_train_laplace = predict_laplace(X_tilde_train)
                display_metrics(probs_train_laplace, y_train, "Train laplace")
                probs_test_map = predict_map(X_tilde_test)
                display_metrics(probs_test_map, y_test, "Test map")
                probs_train_map = predict_map(X_tilde_train)
                display_metrics(probs_train_map, y_train, "Train map")

                plot_predictive_general(
                    X, y, predict_map, expansion_function,
                    title="MAP predictions (s^2, l)=({}, {})".format(var_0, l))
                plot_predictive_general(
                    X, y, predict_laplace, expansion_function,
                    title="Laplace predictions (s^2, l)=({}, {})".format(
                        var_0, l))

    # Heatmap of the log evidence over the whole hyperparameter grid.
    plot_heatmap(Z_matrix, var_0_arr, l_arr)