def test_merge_once_big(self):
    """Merge known landmark equivalences in a large graph and check that the
    resulting unique landmarks, correspondence groups, and observation system
    all agree with the simulation's ground truth."""
    sim = new_simulation(point_dim=3, landmark_dim=3, num_points=2000,
                         num_landmarks=120, seed=411)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    landmarks = sim.landmark_variables
    equiv_groups, equiv_pairs = sim.equivalences()
    fg.merge_landmarks(equiv_pairs)

    unique_landmarks = fg.landmarks
    # Map each surviving graph landmark back to its unique simulation index.
    unique_order = [sim.unique_index_map[landmarks.index(k)]
                    for k in unique_landmarks]

    # After merging, the correspondence map must have one entry per unique
    # landmark and its groups must match the simulated equivalence groups.
    self.assertEqual(len(fg.correspondence_map.set_map().keys()),
                     sim.num_unique_landmarks)
    fg_groups = set(frozenset(group)
                    for group in fg.correspondence_map.set_map().values())
    diff = equiv_groups.symmetric_difference(fg_groups)
    self.assertTrue(len(diff) == 0)

    # The linearized observation system must be dimensionally consistent and
    # satisfied exactly by the (noise-free) ground-truth state.
    Am, Ap, d, _ = fg.observation_system()
    m = np.ravel(sim.unique_landmark_positions[unique_order, :])
    p = np.ravel(sim.point_positions)
    self.assertEqual(d.size, Am.shape[0])
    self.assertEqual(d.size, Ap.shape[0])
    self.assertEqual(m.size + p.size, Am.shape[1] + Ap.shape[1])
    self.assertTrue(np.allclose(Am.dot(m) + Ap.dot(p), d))
def test_noise1(self):
    """Run least-squares on a noisy simulation and check that the optimizer's
    residual norms are bounded by the injected noise levels (3-sigma bound)."""
    observation_noise = 0.05
    odometry_noise = 0.06
    sim = new_simulation(point_dim=3, landmark_dim=1, seed=11,
                         observation_noise=observation_noise,
                         odometry_noise=odometry_noise)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    optimizer = optim.LeastSquares(fg, verbosity=True)
    optimizer.optimize()
    optimizer.update()

    m_hat = np.concatenate([lm.position for lm in fg.landmarks])
    p_hat = np.concatenate([pt.position for pt in fg.points])
    m, p = np.ravel(sim.landmark_positions), np.ravel(sim.point_positions)

    # Diagnostic error statistics; computed but not asserted on.
    mu_m_hat = np.mean(m - m_hat)
    sigma_m_hat = np.sqrt(np.linalg.norm(m - m_hat) ** 2 / len(m))
    mu_p_hat = np.mean(p - p_hat)
    sigma_p_hat = np.sqrt(np.linalg.norm(p - p_hat) ** 2 / len(p))

    # Residual energy should stay within 9 * sigma^2 per residual element.
    self.assertTrue(np.linalg.norm(optimizer.res_d) ** 2
                    < len(optimizer.res_d) * 9 * observation_noise ** 2)
    self.assertTrue(np.linalg.norm(optimizer.res_t) ** 2
                    < len(optimizer.res_t) * 9 * odometry_noise ** 2)
def test_no_measurements(self):
    """Optimizing an empty factor graph must not raise."""
    fg = factorgraph.GaussianFactorGraph()
    optimizer = optim.Occam(fg)
    optimizer.optimize()
    optimizer.update()
def occam_trial(seed=12, max_observations=4):
    """Run a single Occam optimization trial on a fresh simulation.

    Returns a ``(mean_error, time_elapsed)`` tuple where ``mean_error`` is the
    mean signed error over observed landmark and point positions, and
    ``time_elapsed`` is the wall-clock optimization time in seconds.
    """
    observation_noise = 0.01
    odometry_noise = 0.02
    sim = new_simulation(point_dim=3, landmark_dim=1,
                         num_points=NUM_POINTS, num_landmarks=NUM_LANDMARKS,
                         seed=seed,
                         observation_noise=observation_noise,
                         odometry_noise=odometry_noise,
                         noise_matrix='identity')
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors(max_observations=max_observations):
        fg.add_factor(factor_)

    # Time only the optimize/update phase, not graph construction.
    start = time.time()
    optimizer = optim.Occam(fg)
    optimizer.optimize()
    optimizer.update(merge=False)
    end = time.time()
    time_elapsed = end - start

    m_hat = np.concatenate([lm.position for lm in fg.landmarks])
    p_hat = np.concatenate([pt.position for pt in fg.points])
    # Compare only against landmarks actually observed by the graph.
    m = np.ravel(sim.landmark_positions[observed_landmark_index(fg, sim), :])
    p = np.ravel(sim.point_positions)

    me = np.mean(np.concatenate([(m - m_hat), (p - p_hat)]))
    return me, time_elapsed
def test_noise2(self):
    """Occam optimization under diagonal noise: residual norms must stay
    within a 2-sigma bound and recovered points must be close to truth."""
    observation_noise = 0.01
    odometry_noise = 0.02
    sim = new_simulation(point_dim=3, landmark_dim=3, seed=20,
                         observation_noise=observation_noise,
                         odometry_noise=odometry_noise,
                         noise_matrix='diag')
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    optimizer = optim.Occam(fg)
    optimizer.optimize()
    optimizer.update()

    m_hat = np.concatenate([lm.position for lm in fg.landmarks])
    p_hat = np.concatenate([pt.position for pt in fg.points])
    m, p = np.ravel(sim.landmark_positions), np.ravel(sim.point_positions)

    # Residual energy bounded by 4 * sigma^2 per residual element.
    self.assertTrue(np.linalg.norm(optimizer.res_d) ** 2
                    < len(optimizer.res_d) * 4 * observation_noise ** 2)
    self.assertTrue(np.linalg.norm(optimizer.res_t) ** 2
                    < len(optimizer.res_t) * 4 * odometry_noise ** 2)
    self.assertTrue(np.allclose(p, p_hat, atol=0.1))
def test_marginal_sequence(self):
    """Incrementally add factors partition by partition, fixing older points,
    and check the marginalized odometry system stays consistent.

    Fixes: ``dtype=np.int`` — the ``np.int`` alias was deprecated in NumPy
    1.20 and removed in 1.24; the builtin ``int`` is the documented
    replacement.  Also uses ``num_points`` instead of repeating the literal
    100 in ``np.linspace``.
    """
    num_points = 100
    num_free_points = 15
    sim = new_simulation(point_dim=3, landmark_dim=1,
                         num_points=num_points, seed=253)
    fg = factorgraph.GaussianFactorGraph(free_point_window=num_free_points)

    num_partitions = 10
    partition_indptr = np.linspace(0, num_points, num_partitions + 1,
                                   dtype=int)

    for i in range(num_partitions):
        if i > 0:
            # Points from the previous partition are marginalized out.
            sim.fix_points(
                list(range(partition_indptr[i - 1], partition_indptr[i])))
        for f in sim.factors(
                (partition_indptr[i], partition_indptr[i + 1])):
            fg.add_factor(f)

        Bp, t, _ = fg.odometry_system()
        # Only the points inside the free-point window remain as unknowns.
        p = np.ravel(sim.point_positions[
            max(0, partition_indptr[i + 1] - num_free_points):
            partition_indptr[i + 1], :])

        self.assertEqual(t.size, Bp.shape[0])
        self.assertEqual(p.size, Bp.shape[1])
        self.assertTrue(np.allclose(Bp.dot(p), t))
def test_no_noise2(self):
    """In a noise-free simulation, Occam must merge duplicated landmarks down
    to the unique set and recover landmark/point positions exactly."""
    sim = new_simulation(point_dim=3, landmark_dim=3, seed=42)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    landmarks = fg.landmarks
    # Before optimization the graph contains duplicated landmarks.
    self.assertTrue(sim.num_unique_landmarks < len(landmarks))

    optimizer = optim.Occam(fg)
    optimizer.optimize()
    optimizer.update()

    unique_landmarks = fg.landmarks
    self.assertEqual(sim.num_unique_landmarks, len(unique_landmarks))

    # Recovered correspondence groups must match the simulated ones exactly.
    sim_groups, _ = sim.equivalences()
    fg_groups = set(frozenset(group)
                    for group in fg.correspondence_map.set_map().values())
    diff = sim_groups.symmetric_difference(fg_groups)
    self.assertTrue(len(diff) == 0)

    m_hat = np.concatenate([lm.position for lm in fg.landmarks])
    p_hat = np.concatenate([pt.position for pt in fg.points])
    unique_order = [sim.unique_index_map[landmarks.index(k)]
                    for k in unique_landmarks]
    m = np.ravel(sim.unique_landmark_positions[unique_order, :])
    p = np.ravel(sim.point_positions)

    self.assertTrue(np.allclose(p, p_hat))
    self.assertTrue(np.allclose(m, m_hat))
def test_no_measurements(self):
    """Weighted least squares on an empty factor graph must not raise."""
    fg = factorgraph.GaussianFactorGraph()
    optimizer = optim.WeightedLeastSquares(fg)
    optimizer.optimize()
    optimizer.update()
def test_odometry_speed(self):
    """Smoke test: building the odometry system with a free-point window must
    complete without raising (no value assertions by design)."""
    sim = new_simulation(seed=2, point_dim=3, landmark_dim=1)
    fg = factorgraph.GaussianFactorGraph(free_point_window=10)
    for factor_ in sim.factors():
        fg.add_factor(factor_)
    Bp, t, _ = fg.odometry_system()
    # Reaching this line is the test; results are intentionally unchecked.
    self.assertTrue(True)
def test_sequential_noise(self):
    """Sequentially optimize a noisy simulation partition by partition with a
    free-point window, checking correspondence groups and recovered state
    after every partition.

    Fixes: ``dtype=np.int`` — the ``np.int`` alias was deprecated in NumPy
    1.20 and removed in 1.24; builtin ``int`` is the replacement.  Also
    passes the ``num_points`` variable to ``new_simulation`` instead of
    repeating the literal 1000, and drops dead commented-out code.
    """
    num_points = 1000
    observation_noise = 0.02
    odometry_noise = 0.01
    sim = new_simulation(point_dim=3, landmark_dim=3,
                         num_points=num_points, seed=67,
                         observation_noise=observation_noise,
                         odometry_noise=odometry_noise,
                         noise_matrix='diag')

    num_partitions = 100
    partition_indptr = np.linspace(0, num_points, num_partitions + 1,
                                   dtype=int)
    # Window large enough to always cover one full partition plus slack.
    fpw = int(np.max(partition_indptr[1:] - partition_indptr[:-1]) + 5)
    fg = factorgraph.GaussianFactorGraph(free_point_window=fpw)

    landmarks = sim.landmark_variables
    optimizer = optim.Occam(fg)

    for i in range(num_partitions):
        for f in sim.factors(
                (partition_indptr[i], partition_indptr[i + 1])):
            fg.add_factor(f)

        optimizer.optimize()
        optimizer.update()

        unique_landmarks = fg.landmarks
        unique_order = [sim.unique_index_map[landmarks.index(k)]
                        for k in unique_landmarks]

        # Correspondence groups so far must match the simulated equivalences
        # restricted to the points seen up to this partition.
        sim_groups, _ = sim.equivalences((0, partition_indptr[i + 1]))
        fg_groups = set(frozenset(g)
                        for g in fg.correspondence_map.set_map().values())
        diff = sim_groups.symmetric_difference(fg_groups)
        self.assertTrue(len(diff) == 0)

        p_hat = np.concatenate([p.position for p in fg.points])
        m_hat = np.concatenate([m.position for m in fg.landmarks])
        m = np.ravel(sim.unique_landmark_positions[unique_order, :])
        p = np.ravel(sim.point_positions[:partition_indptr[i + 1], :])

        self.assertTrue(np.allclose(p, p_hat, atol=0.1))
        self.assertTrue(np.allclose(m, m_hat, atol=0.1))
def test_observation_array1(self):
    """The observation vector assembled by the graph must equal the distance
    measurements produced by the simulation, in order."""
    sim = new_simulation(point_dim=1, landmark_dim=1, seed=111)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    _, _, d_sys, _ = fg.observation_system()
    # Third element of each simulated observation tuple is the measurement.
    d_sim = np.array(
        [d for _, _, d, _ in zip(*sim.observation_measurements())])

    self.assertEqual(len(d_sys), len(d_sim))
    self.assertTrue(np.allclose(d_sys, d_sim))
def test_no_marginal4(self):
    """Without marginalization, the odometry system must be dimensionally
    consistent and satisfied exactly by the true point positions."""
    sim = new_simulation(point_dim=3, landmark_dim=3, seed=314)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    A, b, _ = fg.odometry_system()
    x = np.ravel(sim.point_positions)

    self.assertEqual(b.size, A.shape[0])
    self.assertEqual(x.size, A.shape[1])
    self.assertTrue(np.allclose(A.dot(x), b))
def test_odometry_array3(self):
    """The odometry measurement vector must equal the anchor position followed
    by each rotated relative translation from the simulation."""
    sim = new_simulation(point_dim=3, landmark_dim=1, seed=123)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    _, t_sys, _ = fg.odometry_system()
    # First point anchors the trajectory; the rest are R.dot(t) increments.
    t_sim = np.concatenate(
        [sim.point_positions[0, :]]
        + [np.dot(R, t)
           for _, R, t, _ in zip(*sim.odometry_measurements())])

    self.assertEqual(len(t_sys), len(t_sim))
    self.assertTrue(np.allclose(t_sys, t_sim))
def test_no_marginal4(self):
    """Without marginalization, the observation system must be dimensionally
    consistent and satisfied by the true landmark and point positions."""
    sim = new_simulation(point_dim=3, landmark_dim=3, seed=214)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    Am, Ap, d, _ = fg.observation_system()
    m = np.ravel(sim.landmark_positions)
    p = np.ravel(sim.point_positions)

    self.assertEqual(d.size, Am.shape[0])
    self.assertEqual(d.size, Ap.shape[0])
    self.assertEqual(m.size + p.size, Am.shape[1] + Ap.shape[1])
    self.assertTrue(np.allclose(Am.dot(m) + Ap.dot(p), d))
def test_all_marginal2(self):
    """When every point is fixed, the odometry system must have zero point
    columns while keeping one row per measurement entry."""
    num_points = 2000
    num_fixed_points = num_points  # fix the entire trajectory
    num_free_points = 0
    sim = new_simulation(point_dim=3, landmark_dim=3,
                         num_points=num_points, seed=352)
    fg = factorgraph.GaussianFactorGraph(num_free_points)
    for factor_ in sim.fix_points(list(range(num_fixed_points))).factors():
        fg.add_factor(factor_)

    A, b, _ = fg.odometry_system()
    self.assertEqual(b.size, A.shape[0])
    self.assertEqual(0, A.shape[1])
def test_merge_sequence(self):
    """Incrementally merge landmark equivalences over random partitions and
    check the correspondence map and observation system after each step."""
    sim = new_simulation(point_dim=1, landmark_dim=3, seed=412)
    fg = factorgraph.GaussianFactorGraph()
    landmarks = sim.landmark_variables

    num_partitions = 5
    # NOTE(review): partition boundaries come from the global NumPy RNG
    # (unseeded here); the simulation itself is seeded — confirm this is
    # intentional.
    partition_indptr = np.sort(np.concatenate(
        [[0],
         np.random.choice(sim.num_points, num_partitions - 1),
         [sim.num_points]]))

    for i in range(num_partitions):
        for f in sim.factors(
                (partition_indptr[i], partition_indptr[i + 1])):
            fg.add_factor(f)

        equiv_groups, equiv_pairs = sim.equivalences(
            (0, partition_indptr[i + 1]))
        fg.merge_landmarks(equiv_pairs)

        unique_landmarks = fg.landmarks
        unique_order = [sim.unique_index_map[landmarks.index(k)]
                        for k in unique_landmarks]

        self.assertEqual(len(fg.correspondence_map.set_map().keys()),
                         len(unique_landmarks))
        fg_groups = set(frozenset(g)
                        for g in fg.correspondence_map.set_map().values())
        diff = equiv_groups.symmetric_difference(fg_groups)
        self.assertTrue(len(diff) == 0)

        Am, Ap, d, _ = fg.observation_system()
        m = np.ravel(sim.unique_landmark_positions[unique_order, :])
        p = np.ravel(sim.point_positions[:partition_indptr[i + 1], :])

        self.assertEqual(d.size, Am.shape[0])
        self.assertEqual(d.size, Ap.shape[0])
        self.assertEqual(m.size + p.size, Am.shape[1] + Ap.shape[1])
        self.assertTrue(np.allclose(Am.dot(m) + Ap.dot(p), d))
def test_no_noise2(self):
    """Weighted least squares on a noise-free simulation must recover landmark
    and point positions to tight tolerances."""
    sim = new_simulation(point_dim=3, landmark_dim=3, seed=23)
    fg = factorgraph.GaussianFactorGraph()
    for factor_ in sim.factors():
        fg.add_factor(factor_)

    optimizer = optim.WeightedLeastSquares(fg, verbosity=True)
    optimizer.optimize()
    optimizer.update()

    m_hat = np.concatenate([lm.position for lm in fg.landmarks])
    p_hat = np.concatenate([pt.position for pt in fg.points])
    m, p = np.ravel(sim.landmark_positions), np.ravel(sim.point_positions)

    self.assertTrue(np.allclose(m, m_hat, atol=1e-4))
    self.assertTrue(np.allclose(p, p_hat, atol=1e-3))
def test_under_marginal4(self):
    """With fewer fixed points than the free-point window allows, the odometry
    system must cover exactly the unfixed points and be satisfied by them."""
    num_points = 2000
    num_fixed_points = 111
    num_free_points = 642
    sim = new_simulation(point_dim=3, landmark_dim=3,
                         num_points=num_points, seed=344)
    fg = factorgraph.GaussianFactorGraph(num_free_points)
    for factor_ in sim.fix_points(list(range(num_fixed_points))).factors():
        fg.add_factor(factor_)

    A, b, _ = fg.odometry_system()
    # Unknowns are the trailing (num_points - num_fixed_points) points.
    x = np.ravel(sim.point_positions[-(num_points - num_fixed_points):, :])

    self.assertEqual(b.size, A.shape[0])
    self.assertEqual(x.size, A.shape[1])
    self.assertTrue(np.allclose(A.dot(x), b))
def test_sequential2(self):
    """Sequential weighted least squares over random partitions: residuals
    must stay within a 2-sigma bound and points close to ground truth."""
    observation_noise = 0.01
    odometry_noise = 0.02
    sim = new_simulation(point_dim=3, landmark_dim=3, num_points=100,
                         seed=268,
                         observation_noise=observation_noise,
                         odometry_noise=odometry_noise,
                         noise_matrix='diag')

    num_partitions = 10
    partition_indptr = np.sort(np.concatenate(
        [[0],
         np.random.choice(sim.num_points, num_partitions - 1),
         [sim.num_points]]))
    # Window large enough to always cover one full partition plus slack.
    fpw = int(np.max(partition_indptr[1:] - partition_indptr[:-1]) + 5)
    fg = factorgraph.GaussianFactorGraph(free_point_window=fpw)

    optimizer = optim.WeightedLeastSquares(fg)
    for i in range(num_partitions):
        for f in sim.factors(
                (partition_indptr[i], partition_indptr[i + 1])):
            fg.add_factor(f)

        optimizer.optimize()
        optimizer.update()

        p_hat = np.concatenate([pt.position for pt in fg.points])
        p = np.ravel(sim.point_positions[:partition_indptr[i + 1], :])

        self.assertTrue(np.linalg.norm(optimizer.res_d) ** 2
                        < len(optimizer.res_d) * 4 * observation_noise ** 2)
        self.assertTrue(np.linalg.norm(optimizer.res_t) ** 2
                        < len(optimizer.res_t) * 4 * odometry_noise ** 2)
        self.assertTrue(np.allclose(p, p_hat, atol=0.1))
def test_marginal_sequence(self):
    """Incrementally build the graph with a free-point window, fixing older
    points, and check the marginalized observation system each step.

    Fixes: ``dtype=np.int`` — the ``np.int`` alias was deprecated in NumPy
    1.20 and removed in 1.24; the builtin ``int`` is the documented
    replacement.  Also uses ``num_points`` instead of repeating the literal
    100 in ``np.linspace``.
    """
    num_points = 100
    num_free_points = 15
    sim = new_simulation(point_dim=3, landmark_dim=1,
                         num_points=num_points, seed=253)
    fg = factorgraph.GaussianFactorGraph(free_point_window=num_free_points)

    num_partitions = 10
    partition_indptr = np.linspace(0, num_points, num_partitions + 1,
                                   dtype=int)
    sim_landmarks = sim.landmark_variables

    for i in range(num_partitions):
        if i > 0:
            # Points from the previous partition are marginalized out.
            sim.fix_points(
                list(range(partition_indptr[i - 1], partition_indptr[i])))
        for f in sim.factors(
                (partition_indptr[i], partition_indptr[i + 1])):
            fg.add_factor(f)

        Am, Ap, d, _ = fg.observation_system()
        fg_landmarks = fg.landmarks
        # Order simulated landmark positions to match the graph's ordering.
        order = [sim_landmarks.index(k) for k in fg_landmarks]
        m = np.ravel(sim.landmark_positions[order, :])
        # Only points inside the free-point window remain as unknowns.
        p = np.ravel(sim.point_positions[
            max(0, partition_indptr[i + 1] - num_free_points):
            partition_indptr[i + 1], :])

        self.assertEqual(d.size, Am.shape[0])
        self.assertEqual(d.size, Ap.shape[0])
        self.assertEqual(m.size, Am.shape[1])
        self.assertEqual(p.size, Ap.shape[1])
        self.assertTrue(np.allclose(Am.dot(m) + Ap.dot(p), d))
def test_all_marginal2(self):
    """When every point is fixed, the observation system must have zero point
    columns and be satisfied by the landmark positions alone."""
    num_points = 2000
    num_fixed_points = num_points  # fix the entire trajectory
    num_free_points = 0
    sim = new_simulation(point_dim=3, landmark_dim=3,
                         num_points=num_points, seed=252)
    fg = factorgraph.GaussianFactorGraph(num_free_points)
    for factor_ in sim.fix_points(list(range(num_fixed_points))).factors():
        fg.add_factor(factor_)

    Am, Ap, d, _ = fg.observation_system()
    m = np.ravel(sim.landmark_positions)

    self.assertEqual(d.size, Am.shape[0])
    self.assertEqual(d.size, Ap.shape[0])
    self.assertEqual(m.size, Am.shape[1])
    self.assertEqual(0, Ap.shape[1])
    self.assertTrue(np.allclose(Am.dot(m), d))
def test_under_marginal4(self):
    """Partial fixing: the observation system must span all landmarks plus the
    unfixed points and be satisfied by the ground-truth values."""
    num_points = 2000
    num_fixed_points = 111
    num_free_points = 642
    sim = new_simulation(point_dim=3, landmark_dim=3,
                         num_points=num_points, seed=244)
    fg = factorgraph.GaussianFactorGraph(num_free_points)
    for factor_ in sim.fix_points(list(range(num_fixed_points))).factors():
        fg.add_factor(factor_)

    Am, Ap, d, _ = fg.observation_system()
    m = np.ravel(sim.landmark_positions)
    # Unknown points are the trailing (num_points - num_fixed_points) ones.
    p = np.ravel(sim.point_positions[-(num_points - num_fixed_points):, :])

    self.assertEqual(d.size, Am.shape[0])
    self.assertEqual(d.size, Ap.shape[0])
    self.assertEqual(m.size + p.size, Am.shape[1] + Ap.shape[1])
    self.assertTrue(np.allclose(Am.dot(m) + Ap.dot(p), d))
def test_odom_only(self):
    """With observation factors filtered out and zero noise, Occam must
    recover the trajectory from odometry alone."""
    import factor  # local import mirrors the original; used for type checks
    sim = new_simulation(point_dim=3, landmark_dim=1, seed=1776,
                         observation_noise=0.0, odometry_noise=0.0)
    fg = factorgraph.GaussianFactorGraph()
    for f in sim.factors():
        # Skip landmark observations; keep only odometry factors.
        if isinstance(f, factor.ObservationFactor):
            continue
        fg.add_factor(f)

    optimizer = optim.Occam(fg)
    optimizer.optimize()
    optimizer.update()

    p_hat = np.concatenate([pt.position for pt in fg.points])
    p = np.ravel(sim.point_positions)
    self.assertTrue(np.allclose(p, p_hat, atol=1e-3))
def test_merge_sequence_with_marginalization(self):
    """Combine incremental landmark merging with point marginalization and
    check both the observation and odometry systems after every partition.

    Fixes: ``dtype=np.int`` — the ``np.int`` alias was deprecated in NumPy
    1.20 and removed in 1.24; the builtin ``int`` is the documented
    replacement.  Dead commented-out simulation setup removed.
    """
    num_points = 100
    num_free_points = 15
    sim = new_simulation(point_dim=3, landmark_dim=1,
                         num_points=num_points, seed=66)
    fg = factorgraph.GaussianFactorGraph(num_free_points)
    landmarks = sim.landmark_variables

    num_partitions = 10
    partition_indptr = np.linspace(0, num_points, num_partitions + 1,
                                   dtype=int)

    for i in range(num_partitions):
        if i > 0:
            # Points from the previous partition are marginalized out.
            sim.fix_points(
                list(range(partition_indptr[i - 1], partition_indptr[i])))
        for f in sim.factors(
                (partition_indptr[i], partition_indptr[i + 1])):
            fg.add_factor(f)

        equiv_groups, equiv_pairs = sim.equivalences(
            (0, partition_indptr[i + 1]))
        fg.merge_landmarks(equiv_pairs)

        unique_landmarks = fg.landmarks
        unique_order = [sim.unique_index_map[landmarks.index(k)]
                        for k in unique_landmarks]

        self.assertEqual(len(fg.correspondence_map.set_map().keys()),
                         len(unique_landmarks))
        fg_groups = set(frozenset(g)
                        for g in fg.correspondence_map.set_map().values())
        diff = equiv_groups.symmetric_difference(fg_groups)
        self.assertTrue(len(diff) == 0)

        Am, Ap, d, _ = fg.observation_system()
        Bp, t, _ = fg.odometry_system()
        m = np.ravel(sim.unique_landmark_positions[unique_order, :])
        # Only points inside the free-point window remain as unknowns.
        p = np.ravel(sim.point_positions[
            max(0, partition_indptr[i + 1] - num_free_points):
            partition_indptr[i + 1], :])

        self.assertEqual(d.size, Am.shape[0])
        self.assertEqual(d.size, Ap.shape[0])
        self.assertEqual(m.size, Am.shape[1])
        self.assertEqual(p.size, Ap.shape[1])
        self.assertTrue(np.allclose(Am.dot(m) + Ap.dot(p), d))
        self.assertTrue(np.allclose(Bp.dot(p), t))