def plot_active_learning_query(result, bo_iter, num_initial_points, query_points, num_query=1):
    """For each completed BO iteration, draw the contour of the model's
    predictive variance and overlay the points queried up to that iteration.

    NOTE(review): reads the module-level ``search_space`` rather than taking
    it as a parameter — confirm that is intentional.
    """
    for iteration in range(bo_iter):

        def predictive_variance(x):
            # Predictive variance of the OBJECTIVE model as recorded in the
            # optimization history at this iteration.
            _, variance = result.history[iteration].models["OBJECTIVE"].model.predict_f(x)
            return variance

        _, ax = plot_function_2d(
            predictive_variance,
            search_space.lower - 0.01,
            search_space.upper + 0.01,
            grid_density=100,
            contour=True,
            colorbar=True,
            figsize=(10, 6),
            title=["Variance contour with queried points at iter:" + str(iteration + 1)],
            xlabel="$X_1$",
            ylabel="$X_2$",
        )
        # Only the points acquired up to and including this iteration.
        plot_bo_points(
            query_points[: num_initial_points + (iteration * num_query)],
            ax[0, 0],
            num_initial_points,
        )
observations = dataset.observations.numpy() arg_min_idx = tf.squeeze(tf.argmin(observations, axis=0)) print(f"query point: {query_points[arg_min_idx, :]}") print(f"observation: {observations[arg_min_idx, :]}") # %% [markdown] # We can visualise how the optimizer performed by plotting all the acquired observations, along with the true function values and optima, either in a two-dimensional contour plot ... # %% from util.plotting import plot_bo_points, plot_function_2d _, ax = plot_function_2d(scaled_branin, search_space.lower, search_space.upper, grid_density=30, contour=True) plot_bo_points(query_points, ax[0, 0], num_initial_points, arg_min_idx) ax[0, 0].set_xlabel(r'$x_1$') ax[0, 0].set_xlabel(r'$x_2$') # %% [markdown] # ... or as a three-dimensional plot # %% from util.plotting_plotly import add_bo_points_plotly fig = plot_function_plotly(scaled_branin, search_space.lower, search_space.upper,
# Repair the observer, then resume the optimization from the saved history.
observer.manual_fix()

remaining_budget = 15 - len(history)
latest = history[-1]
result, new_history = bo.optimize(
    remaining_budget,
    latest.datasets,
    latest.models,
    acquisition_rule,
    latest.acquisition_state,
).astuple()
history.extend(new_history)

# %% [markdown]
# We can repeat this until we've spent our optimization budget, using a loop if appropriate. But here, we'll just plot the data if it exists, safely by using `result`'s `is_ok` attribute.

# %%
from util.plotting import plot_bo_points, plot_function_2d

if result.is_ok:
    data = result.unwrap().datasets[OBJECTIVE]
    arg_min_idx = tf.squeeze(tf.argmin(data.observations, axis=0))
    _, ax = plot_function_2d(
        branin, search_space.lower, search_space.upper, 30, contour=True
    )
    plot_bo_points(data.query_points.numpy(), ax[0, 0], 5, arg_min_idx)

# %% [markdown]
# ## LICENSE
#
# [Apache License 2.0](https://github.com/secondmind-labs/trieste/blob/develop/LICENSE)
# NOTE(review): this chunk opens mid-expression — the start of this statement
# (presumably `arg_min_idx = tf.squeeze(`) lies before the visible source, so
# the fragment below is kept exactly as found.
tf.argmin(result.datasets[OBJECTIVE].observations, axis=0))
# Report the best (minimum-observation) query point found.
print(
    f"query point: {result.datasets[OBJECTIVE].query_points[arg_min_idx, :]}")

# %% [markdown]
# We can visualise where the optimizer queried on a contour plot of the Branin with the failure region. The minimum observation can be seen along the bottom axis towards the right, outside of the failure region.

# %%
import matplotlib.pyplot as plt
from util.plotting import plot_gp_2d, plot_function_2d, plot_bo_points

# Flag points whose FAILURE observation equals 0 — presumably the failed
# queries, so the plotting helper can colour them differently; confirm
# against the observer's success/failure encoding.
mask_fail = result.datasets[FAILURE].observations.numpy().flatten().astype(
    int) == 0
fig, ax = plot_function_2d(masked_branin,
                           search_space.lower,
                           search_space.upper,
                           grid_density=50,
                           contour=True)
plot_bo_points(
    result.datasets[FAILURE].query_points.numpy(),
    ax=ax[0, 0],
    num_init=num_init_points,
    mask_fail=mask_fail,
)
plt.show()

# %% [markdown]
# We can also plot the mean and variance of the predictive distribution over the search space, first for the objective data and model ...

# %%
from util.plotting_plotly import plot_gp_plotly, add_bo_points_plotly
# Run the optimizer for 20 steps and fail fast if anything went wrong.
result = bo.optimize(20, initial_data, models, acquisition_rule=rule)
if result.error is not None:
    raise result.error

final_data = result.datasets
arg_min_idx = tf.squeeze(tf.argmin(final_data[OBJECTIVE].observations, axis=0))
print(f"query point: {final_data[OBJECTIVE].query_points[arg_min_idx, :]}")

# %% [markdown]
# We can visualise where the optimizer queried on a contour plot of the Branin with the failure region. The minimum observation can be seen along the bottom axis towards the right, outside of the failure region.

# %%
# Flag the points whose FAILURE observation is zero so the plotting helper
# can colour them differently from the successful queries.
failure_observations = final_data[FAILURE].observations.numpy()
mask_fail = failure_observations.flatten().astype(int) == 0

fig, ax = plot_function_2d(masked_branin, mins, maxs, grid_density=50, contour=True)
plot_bo_points(
    final_data[FAILURE].query_points.numpy(),
    ax=ax[0, 0],
    num_init=num_init_points,
    mask_fail=mask_fail,
)
plt.show()

# %% [markdown]
# We can also plot the mean and variance of the predictive distribution over the search space, first for the objective data and model ...

# %%
arg_min_idx = tf.squeeze(tf.argmin(final_data[OBJECTIVE].observations, axis=0))
fig = plot_gp_plotly(regression_model, mins, maxs, grid_density=50)
# Surface any error raised during optimization instead of continuing with a
# partial result.
if result.error is not None:
    raise result.error

dataset = result.datasets[OBJECTIVE]

# %% [markdown]
# ## Visualising the result
#
# We can take a look at where we queried the observer, both the original query points (crosses) and new query points (dots), and where they lie with respect to the contours of the Branin.

# %%
# Index of the best (minimum) observation seen during the run.
arg_min_idx = tf.squeeze(tf.argmin(dataset.observations, axis=0))
query_points = dataset.query_points.numpy()
observations = dataset.observations.numpy()

_, ax = plot_function_2d(
    branin, lower_bound.numpy(), upper_bound.numpy(), grid_density=30, contour=True
)
plot_bo_points(query_points, ax[0, 0], num_initial_data_points, arg_min_idx)

# %% [markdown]
# We can also visualise the observations on a three-dimensional plot of the Branin. We'll add the contours of the mean and variance of the model's predictive distribution as translucent surfaces.

# %%
fig = plot_gp_plotly(gpr, lower_bound.numpy(), upper_bound.numpy(), grid_density=30)
# NOTE(review): this chunk is cut off mid-call — the remaining keyword
# arguments and closing parenthesis of add_bo_points_plotly lie beyond the
# visible source.
fig = add_bo_points_plotly(
    x=query_points[:, 0],
    y=query_points[:, 1],
    z=observations[:, 0],
    num_init=num_initial_data_points,
    idx_best=arg_min_idx,
# %%
# Sample the initial design and evaluate it with the observer.
num_initial_points = 20
initial_query_points = search_space.sample(num_initial_points)
initial_data = observer(initial_query_points)

# %% [markdown]
# ... and visualise the data across the design space: each figure contains the contour lines of each objective function.

# %%
_, ax = plot_function_2d(
    vlmop2,
    mins,
    maxs,
    grid_density=100,
    contour=True,
    title=["Obj 1", "Obj 2"],
    figsize=(12, 6),
    colorbar=True,
    xlabel="$X_1$",
    ylabel="$X_2$",
)
# Overlay the sampled points on both objective contour panels.
for panel in (ax[0, 0], ax[0, 1]):
    plot_bo_points(initial_query_points, ax=panel, num_init=num_initial_points)
plt.show()

# %% [markdown]
# ... and in the objective space. The `plot_mobo_points_in_obj_space` will automatically search for non-dominated points and colours them in purple.

# %%
plot_mobo_points_in_obj_space(initial_data.observations)
plt.show()
# We can now get the best point found by the optimizer. Note this isn't necessarily the point that was last evaluated. # %% query_points = dataset.query_points.numpy() observations = dataset.observations.numpy() arg_min_idx = tf.squeeze(tf.argmin(observations, axis=0)) print(f"query point: {query_points[arg_min_idx, :]}") print(f"observation: {observations[arg_min_idx, :]}") # %% [markdown] # We can visualise how the optimizer performed by plotting all the acquired observations, along with the true function values and optima, either in a two-dimensional contour plot ... # %% _, ax = plot_function_2d(branin, mins, maxs, grid_density=30, contour=True) plot_bo_points(query_points, ax[0, 0], num_initial_points, arg_min_idx) # %% [markdown] # ... or as a three-dimensional plot # %% fig = plot_function_plotly(branin, mins, maxs, grid_density=20) fig.update_layout(height=500, width=500) fig = add_bo_points_plotly( x=query_points[:, 0], y=query_points[:, 1], z=observations[:, 0], num_init=num_initial_points, idx_best=arg_min_idx,