def plot_nees_comp(true_x, m_1, P_1, m_2, P_2):
    """Plot NEES sequences for two estimates (e.g. KF vs. SLR) side by side."""
    nees_1 = nees(true_x, m_1, P_1)
    nees_2 = nees(true_x, m_2, P_2)
    _, ax = plt.subplots()
    ax.plot(nees_1, "-b", label="kf")
    ax.plot(nees_2, "--g", label="slr")
    ax.legend()
    plt.show()
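# `nees` is defined elsewhere in the project. Below is a minimal sketch of the
# standard normalized estimation error squared, assuming the signature
# nees(true_x, means, covs) with shapes (K, D_x), (K, D_x) and (K, D_x, D_x);
# the exact signature is inferred from the calls in this file, and `np` is
# assumed to be numpy imported at module level.
def _nees_sketch(true_x, means, covs):
    # NEES_k = (x_k - m_k)^T P_k^{-1} (x_k - m_k) for every time step k.
    errs = true_x - means
    return np.array([err @ np.linalg.solve(cov, err) for err, cov in zip(errs, covs)])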
def run_smoothing(smoother, states, measurements, prior_mean, prior_cov, cost_fn, init_traj=None):
    """Run a smoother and collect per-iteration metrics.

    Some iterative smoothers may return early if they exceed the limit on the
    number of cost-improving trials. In that case the metric lists are padded
    with their last element so that they always have length `smoother.num_iter`.
    """
    if init_traj is not None:
        _, _, ms, Ps, iter_cost = smoother.filter_and_smooth_with_init_traj(
            measurements, prior_mean, prior_cov, init_traj, 1, cost_fn)
        stored_est = smoother.stored_estimates()
        # Skip the first stored estimate (the supplied initial trajectory).
        next(stored_est)
        stored_est = list(stored_est)
    else:
        _, _, ms, Ps, iter_cost = smoother.filter_and_smooth(
            measurements, prior_mean, prior_cov, cost_fn)
        stored_est = list(smoother.stored_estimates())
    rmses = calc_iter_metrics(
        lambda means, covs, states: rmse(means[:, :2], states),
        stored_est, states, smoother.num_iter)
    neeses = calc_iter_metrics(
        lambda means, covs, states: np.mean(
            nees(states, means[:, :2], covs[:, :2, :2])),
        stored_est, states, smoother.num_iter,
    )
    return ms, Ps, iter_cost, rmses, neeses
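# `calc_iter_metrics` is a project helper. Below is a hedged sketch of the
# behaviour described in the docstring above: evaluate the metric on every
# stored (means, covs) iterate and pad with the last value so the result always
# has length `num_iter`. The signature is an assumption based on the calls above.
def _calc_iter_metrics_sketch(metric_fn, stored_estimates, states, num_iter):
    metrics = [metric_fn(means, covs, states) for means, covs in stored_estimates]
    # Pad early-terminated runs so all metric sequences are directly comparable.
    metrics.extend([metrics[-1]] * (num_iter - len(metrics)))
    return np.array(metrics)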
def plot_nees_and_2d_est(true_x, meas, means_and_covs, sigma_level=3, skip_cov=1):
    """Plot per-time-step NEES and the 2D position estimates for several smoothers."""
    K, D_x = true_x.shape
    _, (ax_1, ax_2) = plt.subplots(1, 2)
    # Reference line: the expected NEES of a consistent estimator is D_x.
    ax_1.plot([0, K], [D_x, D_x], "--k", label="ref")
    ax_2.plot(true_x[:, 0], true_x[:, 1], ".k", label="true")
    if meas is not None:
        ax_2.plot(meas[:, 0], meas[:, 1], ".r", label="meas")
    for m, P, label in means_and_covs:
        traj_handle = plot_2d_mean_and_cov(ax_2, m[:, :2], P[:, :2, :2], sigma_level, label, skip_cov)
        filter_nees = nees(true_x, m, P)
        (nees_handle,) = ax_1.plot(filter_nees, "-b", label=label)
        # Reuse the trajectory colour so the NEES curve matches its estimate.
        nees_handle.set_color(traj_handle.get_color())
    ax_1.set_title("NEES")
    ax_1.set_xlabel("k")
    ax_1.set_ylabel(r"$\epsilon_{x, k}$")
    ax_1.legend()
    ax_2.set_title("Estimates")
    ax_2.set_xlabel("$pos_x$")
    ax_2.set_ylabel("$pos_y$")
    ax_2.legend()
    plt.show()
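# `plot_2d_mean_and_cov` is a project helper. Below is a hedged sketch of what
# it likely does, inferred from how it is called above: plot the 2D mean
# trajectory on `ax`, draw a `sigma_level` covariance ellipse every `skip_cov`
# steps, and return the trajectory line handle so its colour can be reused.
def _plot_2d_mean_and_cov_sketch(ax, means, covs, sigma_level, label, skip_cov):
    from matplotlib.patches import Ellipse

    (handle,) = ax.plot(means[:, 0], means[:, 1], label=label)
    for mean, cov in zip(means[::skip_cov], covs[::skip_cov]):
        # Ellipse axes and orientation from the eigendecomposition of the 2x2 cov.
        eig_vals, eig_vecs = np.linalg.eigh(cov)
        angle = np.degrees(np.arctan2(eig_vecs[1, 0], eig_vecs[0, 0]))
        width, height = 2 * sigma_level * np.sqrt(eig_vals)
        ax.add_patch(Ellipse(mean, width, height, angle=angle,
                             fill=False, color=handle.get_color()))
    return handle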
def gn_ieks(motion_model, meas_model, num_iter, states, measurements, prior_mean, prior_cov, cost_fn):
    """Run the Gauss-Newton IEKS and collect per-iteration RMSE and NEES."""
    smoother = Ieks(motion_model, meas_model, num_iter)
    _, _, ms, Ps, iter_cost = smoother.filter_and_smooth(
        measurements, prior_mean, prior_cov, cost_fn)
    rmses = calc_iter_metrics(
        lambda means, covs, states: rmse(means[:, :2], states),
        smoother.stored_estimates(), states, num_iter)
    neeses = calc_iter_metrics(
        lambda means, covs, states: np.mean(
            nees(states, means[:, :2], covs[:, :2, :2])),
        smoother.stored_estimates(), states, num_iter,
    )
    return ms, Ps, iter_cost, rmses, neeses
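# `rmse` is another project helper. A minimal sketch, assuming both inputs are
# (K, D) arrays of matching shape and that the metric is the root mean squared
# position error over all time steps (the exact normalization is an assumption):
def _rmse_sketch(estimates, states):
    return np.sqrt(np.mean(np.sum((estimates - states) ** 2, axis=1)))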
def lm_ieks(motion_model, meas_model, num_iter, states, measurements, prior_mean, prior_cov, cost_fn):
    """Run the Levenberg-Marquardt IEKS and collect per-iteration RMSE and NEES."""
    cost_improv_iter_lim = 10
    lambda_ = 1e-2
    nu = 10
    smoother = LmIeks(motion_model, meas_model, num_iter, cost_improv_iter_lim, lambda_, nu)
    _, _, ms, Ps, iter_cost = smoother.filter_and_smooth(
        measurements, prior_mean, prior_cov, cost_fn)
    rmses = calc_iter_metrics(
        lambda means, covs, states: rmse(means[:, :2], states),
        smoother.stored_estimates(), states, num_iter)
    neeses = calc_iter_metrics(
        lambda means, covs, states: np.mean(
            nees(states, means[:, :2], covs[:, :2, :2])),
        smoother.stored_estimates(), states, num_iter,
    )
    return ms, Ps, iter_cost, rmses, neeses
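# The `lambda_` and `nu` arguments to LmIeks presumably follow the usual
# Levenberg-Marquardt damping scheme: decrease the damping after an iteration
# that lowers the cost, increase it and retry otherwise, giving up after
# `cost_improv_iter_lim` failed attempts. A hedged sketch of that update rule
# (the actual logic lives inside LmIeks, which is not shown in this file):
def _lm_damping_update_sketch(lambda_, nu, cost_improved):
    # Small damping -> close to a Gauss-Newton step; large damping -> short,
    # gradient-descent-like steps.
    return lambda_ / nu if cost_improved else lambda_ * nu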