def test_perturbPoint2(self):
    """perturbPoint2 must move the stored Point2 away from its original value."""
    values = gtsam.Values()
    values.insert(0, gtsam.Pose3())
    values.insert(1, gtsam.Point2(1, 1))
    gtsam.utilities.perturbPoint2(values, 1.0)
    # The perturbed point should no longer match (1, 1).
    self.assertFalse(np.allclose(values.atPoint2(1), gtsam.Point2(1, 1)))
def test_perturbPose2(self):
    """perturbPose2 must displace the stored Pose2 from the identity."""
    values = gtsam.Values()
    values.insert(0, gtsam.Pose2())
    values.insert(1, gtsam.Point2(1, 1))
    gtsam.utilities.perturbPose2(values, 1, 1)
    # After perturbation the pose at key 0 differs from identity.
    self.assertNotEqual(values.atPose2(0), gtsam.Pose2())
def make_fsg_map(self):
    """Populate ``self.env`` with the FSG track cones as gtsam.Point2 landmarks.

    Landmarks are keyed by their row index in the cone array.
    """
    data_set = fsg_track_map()
    cones_full = data_set['cones']
    # enumerate replaces the original hand-rolled counter variable.
    for i, (x, y) in enumerate(zip(cones_full[:, 0], cones_full[:, 1])):
        self.env[i] = gtsam.Point2(x, y)
def add_point(self, pointsInitial, measurements, octave):
    """Add one landmark and its observation factor (mono or stereo) to the graph.

    Args:
        pointsInitial: initial 3D point estimate; its last component is the
            depth used for the mono/stereo decision.
        measurements: image measurement vector — presumably (uL, uR, v);
            confirm against the caller (mono uses indices 0 and 2).
        octave: keypoint pyramid octave; indexes the per-level inverse
            sigma^2 used to build the measurement information matrix.
    """
    # Far points carry little disparity information, so beyond the depth
    # threshold fall back to a monocular projection factor.
    if pointsInitial[-1] > self.depth_threshold:
        information = self.inv_lvl_sigma2[octave] * np.identity(2)
        stereo_model = gt.noiseModel_Diagonal.Information(information)
        # Huber robust kernel to down-weight outlier observations.
        huber = gt.noiseModel_mEstimator_Huber.Create(self.deltaMono)
        robust_model = gt.noiseModel_Robust(huber, stereo_model)
        factor = gt.GenericProjectionFactorCal3_S2(
            gt.Point2(measurements[0], measurements[2]), robust_model, X(1),
            L(self.counter), self.K_mono)
        self.is_stereo.append(False)
    else:
        information = self.inv_lvl_sigma2[octave] * np.identity(3)
        stereo_model = gt.noiseModel_Diagonal.Information(information)
        huber = gt.noiseModel_mEstimator_Huber.Create(self.deltaStereo)
        robust_model = gt.noiseModel_Robust(huber, stereo_model)
        factor = gt.GenericStereoFactor3D(
            gt.StereoPoint2(*tuple(measurements)), robust_model, X(1),
            L(self.counter), self.K_stereo)
        self.is_stereo.append(True)
    # Pin the landmark at its initial triangulation with a hard equality
    # constraint, then register the initial value and the measurement factor.
    self.graph.add(
        gt.NonlinearEqualityPoint3(L(self.counter),
                                   gt.Point3(pointsInitial)))
    self.initialEstimate.insert(L(self.counter), gt.Point3(pointsInitial))
    self.graph.add(factor)
    self.octave.append(octave)
    self.counter += 1
def step5_add_odometry_and_landmark_observations():
    """Extend the iSAM2 estimate with pose X3 (odometry from X2) and landmark L2."""
    new_factors = gtsam.NonlinearFactorGraph()

    # Keys for the variables touched in this step.
    X2, X3, L2 = X(2), X(3), L(2)

    # Track the newly introduced variable keys.
    pose_variables.append(X3)
    landmark_variables.append(L2)

    # Odometry: X3 sits 2 m straight ahead of X2.
    odometry_noise = gtsam.noiseModel.Diagonal.Sigmas(
        np.array([0.1, 0.1, 0.1]))
    new_factors.add(
        gtsam.BetweenFactorPose2(X2, X3, gtsam.Pose2(2.0, 0.0, 0.0),
                                 odometry_noise))

    # Landmark observation: L2 seen from X3 at 90 degrees, range 2.
    measurement_noise = gtsam.noiseModel.Diagonal.Sigmas(
        np.array([0.05, 0.1]))
    new_factors.add(
        gtsam.BearingRangeFactor2D(X3, L2, gtsam.Rot2.fromDegrees(90), 2.0,
                                   measurement_noise))

    # Initial guesses only for the variables introduced in this step.
    new_values = gtsam.Values()
    new_values.insert(X3, gtsam.Pose2(4.10, 0.10, 0.10))
    new_values.insert(L2, gtsam.Point2(4.10, 1.80))

    # Incrementally update the smoother.
    isam.update(new_factors, new_values)
def n2g(numpy_arr, obj):
    """Convert an array-like into the gtsam object named by *obj*.

    Supported names: Quaternion, Euler, Point2, Pose2, Point3, Pose3,
    imuBiasConstantBias, Vector.  Raises NotImplementedError for any other
    name.
    """
    if obj == "Quaternion":
        x, y, z, w = numpy_arr
        return gtsam.Rot3.Quaternion(w, x, y, z)
    if obj == "Euler":
        roll, pitch, yaw = numpy_arr
        return gtsam.Rot3.Ypr(yaw, pitch, roll)
    if obj == "Point2":
        x, y = numpy_arr
        return gtsam.Point2(x, y)
    if obj == "Pose2":
        x, y, yaw = numpy_arr
        return gtsam.Pose2(x, y, yaw)
    if obj == "Point3":
        x, y, z = numpy_arr
        return gtsam.Point3(x, y, z)
    if obj == "Pose3":
        x, y, z, roll, pitch, yaw = numpy_arr
        return gtsam.Pose3(gtsam.Rot3.Ypr(yaw, pitch, roll),
                           gtsam.Point3(x, y, z))
    if obj == "imuBiasConstantBias":
        imu_bias = numpy_arr
        # First three entries: accelerometer bias; last three: gyro bias.
        return gtsam.imuBias_ConstantBias(np.array(imu_bias[:3]),
                                          np.array(imu_bias[3:]))
    if obj == "Vector":
        return np.array(numpy_arr)
    raise NotImplementedError("Not implemented from numpy to " + obj)
def test_extractPoint2(self):
    """extractPoint2 should return only the Point2 entries, one per row."""
    values = gtsam.Values()
    pt = gtsam.Point2(0.0, 0.1)
    values.insert(1, gtsam.Pose2(0.0, 0.1, 0.1))
    values.insert(2, pt)
    extracted = gtsam.utilities.extractPoint2(values)
    # The Pose2 entry must be ignored; result is an (N, 2) array.
    np.testing.assert_equal(extracted, pt.reshape(-1, 2))
def test_allPose3s(self):
    """allPose3s should pick out exactly the two Pose3 entries."""
    values = gtsam.Values()
    values.insert(0, gtsam.Pose3())
    values.insert(2, gtsam.Point2(1, 1))
    values.insert(1, gtsam.Pose3())
    values.insert(3, gtsam.Point3(1, 2, 3))
    poses = gtsam.utilities.allPose3s(values)
    # Point2/Point3 entries are filtered out.
    self.assertEqual(2, poses.size())
def _get_landmark_position(state, distance, bearing):
    """Project a range/bearing observation from *state* into world coordinates.

    Returns the predicted landmark position as a gtsam.Point2.
    """
    heading = wrap_angle(state.theta() + bearing)
    dx = distance * np.cos(heading)
    dy = distance * np.sin(heading)
    return gtsam.Point2(state.x() + dx, state.y() + dy)
def random_map(self, size, limit):
    """Fill ``self.env`` with *size* uniformly random landmarks.

    Args:
        size: number of landmarks to generate.
        limit: bounding box given as (left, right, bottom, top).
    """
    left, right, bottom, top = limit
    self.env = {}
    for idx in range(size):
        # Draw x first, then y, to preserve the RNG call sequence.
        px = self.random_state.uniform(left, right)
        py = self.random_state.uniform(bottom, top)
        self.env[idx] = gtsam.Point2(px, py)
def test_localToWorld(self):
    """localToWorld should apply the base transform and honor user_keys."""
    base = gtsam.Pose2(1, 0, 0)
    local = gtsam.Values()
    local.insert(0, gtsam.Point2(10, 10))
    local.insert(1, gtsam.Pose2(6, 11, 0.0))

    # Transform every entry.
    world = gtsam.utilities.localToWorld(local, base)
    np.testing.assert_allclose(world.atPoint2(0), gtsam.Point2(11, 10))
    np.testing.assert_allclose(world.atPose2(1).matrix(),
                               gtsam.Pose2(7, 11, 0).matrix())

    # Restrict the transform to key 1 only.
    world = gtsam.utilities.localToWorld(local, base, [1])
    np.testing.assert_allclose(world.atPose2(1).matrix(),
                               gtsam.Pose2(7, 11, 0).matrix())
    # Key 0 was excluded, so looking it up must raise.
    self.assertRaises(RuntimeError, world.atPoint2, 0)
def test_tracks(self):
    """Test SfmTrack measurement bookkeeping."""
    # Measurements are (camera_idx, image_point) pairs for two cameras.
    cam_a, cam_b = 4, 5
    uv_a = gtsam.Point2(12.6, 82)
    # The same feature in the second image, translated along the X-axis.
    uv_b = gtsam.Point2(24.88, 82)
    self.tracks.add_measurement(cam_a, uv_a)
    self.tracks.add_measurement(cam_b, uv_b)
    # Both measurements should be stored.
    self.assertEqual(self.tracks.number_measurements(), 2)
    # The first stored measurement belongs to the first camera.
    idx, _ = self.tracks.measurement(0)
    self.assertEqual(idx, cam_a)
    np.testing.assert_array_almost_equal(gtsam.Point3(0., 0., 0.),
                                         self.tracks.point3())
def test_data(self):
    """Test SfmData track management.

    Fix: the original built a `measurements` list of (idx, uv) pairs that
    was never used — removed as dead code.
    """
    # Create a second track with three measurements of one 3D point.
    i1, i2, i3 = 3, 5, 6
    uv_i1 = gtsam.Point2(21.23, 45.64)
    # The same feature translated along the X-axis in subsequent images.
    uv_i2 = gtsam.Point2(45.7, 45.64)
    uv_i3 = gtsam.Point2(68.35, 45.64)
    pt = gtsam.Point3(1.0, 6.0, 2.0)
    track2 = gtsam.SfmTrack(pt)
    track2.add_measurement(i1, uv_i1)
    track2.add_measurement(i2, uv_i2)
    track2.add_measurement(i3, uv_i3)
    self.data.add_track(self.tracks)
    self.data.add_track(track2)
    # Both tracks should now be registered.
    self.assertEqual(self.data.number_tracks(), 2)
    # The first measurement of the second track comes from camera i1.
    cam_idx, img_measurement = self.data.track(1).measurement(0)
    self.assertEqual(cam_idx, i1)
def __init__(self, K=Cal3_S2(), nrCameras: int = 3, nrPoints: int = 4) -> None:
    """Set up simulated measurement storage and noise models.

    Args:
        K: camera calibration. NOTE(review): the default ``Cal3_S2()`` is
           evaluated once at definition time and shared across calls —
           confirm this is intended.
        nrCameras: number of camera poses in the simulation.
        nrPoints: number of landmarks observed per camera.
    """
    self.K = K
    # Z[i][j]: image measurement of point j from camera i. The [x[:] ...]
    # copy breaks the row aliasing that [[...]*n]*m would otherwise cause.
    self.Z = [x[:] for x in [[gtsam.Point2()] * nrPoints] * nrCameras]
    # J[i][j]: per-camera point bookkeeping — presumably landmark indices;
    # confirm against the simulation code that fills it.
    self.J = [x[:] for x in [[0] * nrPoints] * nrCameras]
    self.odometry = [Pose3()] * nrCameras

    # Set Noise parameters
    self.noiseModels = Data.NoiseModels()
    self.noiseModels.posePrior = gtsam.noiseModel.Diagonal.Sigmas(
        np.array([0.001, 0.001, 0.001, 0.1, 0.1, 0.1]))
    # noiseModels.odometry = gtsam.noiseModel.Diagonal.Sigmas(
    #     np.array([0.001,0.001,0.001,0.1,0.1,0.1]))
    self.noiseModels.odometry = gtsam.noiseModel.Diagonal.Sigmas(
        np.array([0.05, 0.05, 0.05, 0.2, 0.2, 0.2]))
    self.noiseModels.pointPrior = gtsam.noiseModel.Isotropic.Sigma(3, 0.1)
    self.noiseModels.measurement = gtsam.noiseModel.Isotropic.Sigma(2, 1.0)
def load_sdf_from_file_json(self, sdf_file):
    """Load a planar signed-distance field from a JSON file into ``self.sdf``.

    Expected JSON keys: grid_res, grid_size_x, grid_size_y, grid_data,
    grid_origin_x, grid_origin_y.

    Args:
        sdf_file: path to the JSON SDF description.
    """
    with open(sdf_file) as f:
        sdf_data = json.load(f)
    cell_size = sdf_data['grid_res']
    sdf_cols = sdf_data['grid_size_x']
    sdf_rows = sdf_data['grid_size_y']
    origin = gtsam.Point2(sdf_data['grid_origin_x'],
                          sdf_data['grid_origin_y'])
    # Build the (rows, cols) matrix in one vectorized call instead of the
    # original element-by-element copy loop; reshape validates that the
    # data matches the declared grid size.
    sdf_data_mat = np.asarray(sdf_data['grid_data'], dtype=float).reshape(
        sdf_rows, sdf_cols)
    self.sdf = leocpp.PlanarSDF(origin, cell_size, sdf_data_mat)
def step2_add_landmark_observation():
    """Add the first bearing-range observation of landmark L1 from pose X1."""
    new_factors = gtsam.NonlinearFactorGraph()

    # Keys for the variables involved in this step.
    X1, L1 = X(1), L(1)

    # Track the newly introduced landmark key.
    landmark_variables.append(L1)

    # L1 is observed from X1 at 45 degrees and range sqrt(8).
    measurement_noise = gtsam.noiseModel.Diagonal.Sigmas(
        np.array([0.05, 0.1]))
    new_factors.add(
        gtsam.BearingRangeFactor2D(X1, L1, gtsam.Rot2.fromDegrees(45),
                                   np.sqrt(4.0 + 4.0), measurement_noise))

    # Initial estimate only for the newly introduced landmark.
    new_values = gtsam.Values()
    new_values.insert(L1, gtsam.Point2(1.80, 2.10))

    # Incrementally update the smoother.
    isam.update(new_factors, new_values)
def callback(self, imu_msg, dvl_msg):
    """Fuse IMU orientation, DVL velocity, and depth into a dead-reckoned pose.

    Integrates DVL body-frame velocity (midpoint rule) in the yaw-aligned
    frame, stamps depth from the latest cached depth message, and appends a
    keyframe whenever translation/rotation since the last keyframe exceeds
    the configured thresholds.
    """
    depth_msg = self.depth_cache.getLast()
    if depth_msg is None:
        return
    # Warn (but continue) if the cached depth is more than 1 s stale.
    dd_delay = (depth_msg.header.stamp - dvl_msg.header.stamp).to_sec()
    if abs(dd_delay) > 1.0:
        logdebug("Missing depth message for {}".format(dd_delay))

    rot = r2g(imu_msg.orientation)
    # nRb = nRs * bRs^-1
    rot = rot.compose(self.imu_rot.inverse())
    # Zero the yaw at the first sample so headings are relative to start.
    if self.imu_yaw0 is None:
        self.imu_yaw0 = rot.yaw()
    rot = gtsam.Rot3.Ypr(rot.yaw() - self.imu_yaw0, rot.pitch(), rot.roll())

    vel = np.array(
        [dvl_msg.velocity.x, dvl_msg.velocity.y, dvl_msg.velocity.z])
    # Reject implausible DVL velocities; reuse the last good velocity while
    # the outlier condition persists, warning after 5 s.
    if np.any(np.abs(vel) > self.dvl_max_velocity):
        if self.pose:
            self.dvl_error_timer += (dvl_msg.header.stamp -
                                     self.prev_time).to_sec()
            if self.dvl_error_timer > 5.0:
                logwarn(
                    "DVL velocity ({:.1f}, {:.1f}, {:.1f}) exceeds max velocity {:.1f} for {:.1f} secs."
                    .format(
                        vel[0],
                        vel[1],
                        vel[2],
                        self.dvl_max_velocity,
                        self.dvl_error_timer,
                    ))
            vel = self.prev_vel
        else:
            # No pose yet and the first velocity is already bad: skip.
            return
    else:
        self.dvl_error_timer = 0.0

    if self.pose:
        dt = (dvl_msg.header.stamp - self.prev_time).to_sec()
        # Midpoint rule: average current and previous velocity over dt.
        dv = (vel + self.prev_vel) * 0.5
        trans = dv * dt
        local_point = gtsam.Point2(trans[0], trans[1])
        # Project the body-frame displacement through the current yaw.
        pose2 = gtsam.Pose2(self.pose.x(), self.pose.y(),
                            self.pose.rotation().yaw())
        point = pose2.transform_from(local_point)
        # z comes directly from the depth sensor, not from integration.
        self.pose = gtsam.Pose3(
            rot, gtsam.Point3(point.x(), point.y(), depth_msg.depth))
    else:
        self.pose = gtsam.Pose3(rot, gtsam.Point3(0, 0, depth_msg.depth))

    self.prev_time = dvl_msg.header.stamp
    self.prev_vel = vel
    omega = imu_msg.angular_velocity
    omega = np.array([omega.x, omega.y, omega.z])
    # Map angular velocity through imu_rot — presumably sensor-to-body;
    # confirm against the frame convention used elsewhere.
    self.prev_omega = self.imu_rot.matrix().dot(omega)

    # Decide whether this sample becomes a new keyframe.
    new_keyframe = False
    if not self.keyframes:
        new_keyframe = True
    else:
        duration = self.prev_time.to_sec() - self.keyframes[-1][0]
        if duration > self.keyframe_duration:
            odom = self.keyframes[-1][1].between(self.pose)
            odom = g2n(odom)
            translation = np.linalg.norm(odom[:3])
            rotation = abs(odom[-1])
            if (translation > self.keyframe_translation
                    or rotation > self.keyframe_rotation):
                new_keyframe = True
    if new_keyframe:
        self.keyframes.append((self.prev_time.to_sec(), self.pose))
    self.publish_pose(new_keyframe)
def mhjcbb(sim,
           num_tracks=10,
           prob=0.95,
           posterior_pose_md_threshold=1.5,
           prune2_skip=10,
           max_observed_diff=3):
    """Multi-hypothesis JCBB SLAM over a simulated trajectory.

    Maintains several ISAM2 instances ("tracks"), one per data-association
    hypothesis, expanding them each step via gtsam.da_MHJCBB2 and pruning
    with the external prune1/prune2 helpers.

    Args:
        sim: simulator yielding (odometry, observations) per step.
        num_tracks: maximum hypotheses kept by the matcher.
        prob: chi-square gate probability for association.
        posterior_pose_md_threshold: Mahalanobis threshold for matching/pruning.
        prune2_skip: landmark-count interval between prune2 passes.
        max_observed_diff: landmark-count gap passed to prune2.

    Returns:
        List of (trajectory, landmarks) numpy-array pairs, one per surviving
        hypothesis.
    """
    slams = [[gtsam.ISAM2(), set()]]
    prune2_count = 1
    observed = set()
    for x, (odom, obs) in enumerate(sim.step()):
        # Propagate every hypothesis with the new odometry (or the prior at
        # the first step).
        for isam2, observed in slams:
            graph = gtsam.NonlinearFactorGraph()
            values = gtsam.Values()
            if x == 0:
                prior_model = gtsam.noiseModel_Diagonal.Sigmas(
                    np.array([sim.sigma_x, sim.sigma_y, sim.sigma_theta]))
                prior_factor = gtsam.PriorFactorPose2(X(0), odom, prior_model)
                graph.add(prior_factor)
                values.insert(X(0), odom)
            else:
                odom_model = gtsam.noiseModel_Diagonal.Sigmas(
                    np.array([sim.sigma_x, sim.sigma_y, sim.sigma_theta]))
                odom_factor = gtsam.BetweenFactorPose2(
                    X(x - 1), X(x), odom, odom_model)
                graph.add(odom_factor)
                pose0 = isam2.calculateEstimatePose2(X(x - 1))
                values.insert(X(x), pose0.compose(odom))
            isam2.update(graph, values)
        ################################################################################
        # Run multi-hypothesis matching across all current hypotheses.
        mhjcbb = gtsam.da_MHJCBB2(num_tracks, prob,
                                  posterior_pose_md_threshold)
        for isam2, observed, in slams:
            mhjcbb.initialize(isam2)
        for l, br in obs.items():
            br_model = gtsam.noiseModel_Diagonal.Sigmas(
                np.array([sim.sigma_bearing, sim.sigma_range]))
            mhjcbb.add(gtsam.Rot2(br[0]), br[1], br_model)
        mhjcbb.match()
        ################################################################################
        # Spawn one new hypothesis per matcher output.
        new_slams = []
        for i in range(mhjcbb.size()):
            track, keys = mhjcbb.get(i)
            keys = [gtsam.symbolIndex(keys.at(i)) for i in range(keys.size())]
            # Clone the parent hypothesis into a fresh ISAM2 instance.
            isam2 = gtsam.ISAM2()
            isam2.update(slams[track][0].getFactorsUnsafe(),
                         slams[track][0].calculateEstimate())
            graph = gtsam.NonlinearFactorGraph()
            values = gtsam.Values()
            observed = set(slams[track][1])
            for (l_true, br), l in zip(obs.items(), keys):
                br_model = gtsam.noiseModel_Diagonal.Sigmas(
                    np.array([sim.sigma_bearing, sim.sigma_range]))
                br_factor = gtsam.BearingRangeFactor2D(
                    X(x), L(l), gtsam.Rot2(br[0]), br[1], br_model)
                graph.add(br_factor)
                if l not in observed:
                    # First sighting under this hypothesis: initialize the
                    # landmark from the current pose estimate.
                    pose1 = isam2.calculateEstimatePose2(X(x))
                    point = gtsam.Point2(br[1] * np.cos(br[0]),
                                         br[1] * np.sin(br[0]))
                    values.insert(L(l), pose1.transform_from(point))
                    observed.add(l)
            isam2.update(graph, values)
            new_slams.append([isam2, observed])
        slams = new_slams
        slams = prune1(slams, x, posterior_pose_md_threshold)
        # Periodically apply the second pruning pass as landmarks accumulate.
        if len(slams[0][1]) > prune2_count * prune2_skip:
            slams = prune2(slams, max_observed_diff)
            prune2_count += 1

    # Extract trajectory and landmark estimates for each surviving hypothesis.
    result = []
    for isam2, observed in slams:
        traj_est = [
            isam2.calculateEstimatePose2(X(x)) for x in range(len(sim.traj))
        ]
        traj_est = np.array([(p.x(), p.y(), p.theta()) for p in traj_est])
        landmark_est = [isam2.calculateEstimatePoint2(L(l)) for l in observed]
        landmark_est = np.array([(p.x(), p.y()) for p in landmark_est])
        result.append((traj_est, landmark_est))
    return result
def slam(sim, da='jcbb', prob=0.95):
    """Incremental ISAM2 SLAM with a selectable data-association strategy.

    Args:
        sim: simulator yielding (odometry, observations) per step.
        da: data association — 'dr' (every observation becomes a new
            landmark), 'perfect' (use ground-truth ids), or 'jcbb'.
        prob: chi-square gate probability for JCBB.

    Returns:
        [[trajectory, landmarks]] — a single-hypothesis result, as numpy
        arrays of (x, y, theta) poses and (x, y) landmark positions.
    """
    isam2 = gtsam.ISAM2()
    graph = gtsam.NonlinearFactorGraph()
    values = gtsam.Values()
    observed = set()
    for x, (odom, obs) in enumerate(sim.step()):
        # Odometry (or prior at the first step), then fold into ISAM2.
        if x == 0:
            prior_model = gtsam.noiseModel_Diagonal.Sigmas(
                np.array([sim.sigma_x, sim.sigma_y, sim.sigma_theta]))
            prior_factor = gtsam.PriorFactorPose2(X(0), odom, prior_model)
            graph.add(prior_factor)
            values.insert(X(0), odom)
        else:
            odom_model = gtsam.noiseModel_Diagonal.Sigmas(
                np.array([sim.sigma_x, sim.sigma_y, sim.sigma_theta]))
            odom_factor = gtsam.BetweenFactorPose2(X(x - 1), X(x), odom,
                                                   odom_model)
            graph.add(odom_factor)
            pose0 = isam2.calculateEstimatePose2(X(x - 1))
            values.insert(X(x), pose0.compose(odom))
        isam2.update(graph, values)
        graph.resize(0)
        values.clear()
        # NOTE(review): 'estimate' is computed but never used below.
        estimate = isam2.calculateEstimate()
        if da == 'dr':
            # Dead reckoning: every observation spawns a new landmark id.
            for l_true, br in obs.items():
                l = len(observed)
                br_model = gtsam.noiseModel_Diagonal.Sigmas(
                    np.array([sim.sigma_bearing, sim.sigma_range]))
                br_factor = gtsam.BearingRangeFactor2D(
                    X(x), L(l), gtsam.Rot2(br[0]), br[1], br_model)
                graph.add(br_factor)
                if l not in observed:
                    pose1 = isam2.calculateEstimatePose2(X(x))
                    point = gtsam.Point2(br[1] * np.cos(br[0]),
                                         br[1] * np.sin(br[0]))
                    values.insert(L(l), pose1.transform_from(point))
                    observed.add(l)
        elif da == 'perfect':
            # Ground-truth association: reuse the simulator's landmark ids.
            for l_true, br in obs.items():
                br_model = gtsam.noiseModel_Diagonal.Sigmas(
                    np.array([sim.sigma_bearing, sim.sigma_range]))
                br_factor = gtsam.BearingRangeFactor2D(
                    X(x), L(l_true), gtsam.Rot2(br[0]), br[1], br_model)
                graph.add(br_factor)
                if l_true not in observed:
                    pose1 = isam2.calculateEstimatePose2(X(x))
                    point = gtsam.Point2(br[1] * np.cos(br[0]),
                                         br[1] * np.sin(br[0]))
                    values.insert(L(l_true), pose1.transform_from(point))
                    observed.add(l_true)
        elif da == 'jcbb':
            ################################################################################
            # Joint compatibility branch and bound association.
            jcbb = gtsam.da_JCBB2(isam2, prob)
            for l, br in obs.items():
                br_model = gtsam.noiseModel_Diagonal.Sigmas(
                    np.array([sim.sigma_bearing, sim.sigma_range]))
                jcbb.add(gtsam.Rot2(br[0]), br[1], br_model)
            keys = jcbb.match()
            ################################################################################
            keys = [gtsam.symbolIndex(keys.at(i)) for i in range(keys.size())]
            for (l_true, br), l in zip(obs.items(), keys):
                br_model = gtsam.noiseModel_Diagonal.Sigmas(
                    np.array([sim.sigma_bearing, sim.sigma_range]))
                br_factor = gtsam.BearingRangeFactor2D(
                    X(x), L(l), gtsam.Rot2(br[0]), br[1], br_model)
                graph.add(br_factor)
                if l not in observed:
                    pose1 = isam2.calculateEstimatePose2(X(x))
                    point = gtsam.Point2(br[1] * np.cos(br[0]),
                                         br[1] * np.sin(br[0]))
                    values.insert(L(l), pose1.transform_from(point))
                    observed.add(l)
        isam2.update(graph, values)
        graph.resize(0)
        values.clear()
    # Extract the final trajectory and landmark estimates.
    traj_est = [
        isam2.calculateEstimatePose2(X(x)) for x in range(len(sim.traj))
    ]
    traj_est = np.array([(p.x(), p.y(), p.theta()) for p in traj_est])
    landmark_est = [isam2.calculateEstimatePoint2(L(l)) for l in observed]
    landmark_est = np.array([(p.x(), p.y()) for p in landmark_est])
    return [[traj_est, landmark_est]]
def get_estimate(self):
    """Return the current estimate as a gtsam Pose2 or Point2.

    Returns None for any other symbol type.
    """
    if self.type == SymbolType.POSE:
        return gtsam.Pose2(*self.estimate)
    if self.type == SymbolType.POINT:
        return gtsam.Point2(*self.estimate)
    return None
def from_request(cls, request):
    """Build an instance from a JSON-style request dict.

    Expected keys: prior_noise, odometry_noise, measurement_noise, symbols,
    priors, between_pose_factors, bearing_range_factors.
    """
    # Diagonal noise models from the request's sigma vectors.
    prior_noise = request['prior_noise']
    prior_noise = gtsam.noiseModel_Diagonal.Sigmas(np.array(prior_noise))
    odometry_noise = request['odometry_noise']
    odometry_noise = gtsam.noiseModel_Diagonal.Sigmas(
        np.array(odometry_noise))
    measurement_noise = request['measurement_noise']
    measurement_noise = gtsam.noiseModel_Diagonal.Sigmas(
        np.array(measurement_noise))
    # Map two-character symbol names (e.g. "x1") to gtsam keys.
    # NOTE(review): len(key) == 2 limits symbol indices to a single digit.
    symbols = request['symbols']
    unknowns = {}
    for symbol in symbols:
        key = symbol['key']
        assert len(key) == 2
        var = gtsam.symbol(ord(key[0]), int(key[1]))
        unknowns[key] = var
    # Pose priors.
    priors_request = request['priors']
    priors = []
    for prior in priors_request:
        key = prior['key']
        prior_val = prior['prior']
        priors.append(
            gtsam.PriorFactorPose2(unknowns[key], gtsam.Pose2(*prior_val),
                                   prior_noise))
    # Between-pose (odometry) factors.
    between_pose_factors_request = request['between_pose_factors']
    between_pose_factors = []
    for factor in between_pose_factors_request:
        var1 = unknowns[factor['connections'][0]]
        var2 = unknowns[factor['connections'][1]]
        pose = factor['pose']
        factor = BetweenFactorRequest(
            var1=var1, var2=var2, pose=pose,
            odometry_noise=odometry_noise).get_factor()
        between_pose_factors.append(factor)
    # Bearing-range (landmark observation) factors.
    bearing_range_factors_request = request['bearing_range_factors']
    bearing_range_factors = []
    for factor in bearing_range_factors_request:
        var1 = unknowns[factor['connections'][0]]
        var2 = unknowns[factor['connections'][1]]
        bearing = factor['bearing']
        # NOTE(review): 'range' shadows the builtin within this loop.
        range = factor['range']
        factor = BearingRangeFactorRequest(
            var1=var1,
            var2=var2,
            bearing=bearing,
            range=range,
            measurement_noise=measurement_noise,
        ).get_factor()
        bearing_range_factors.append(factor)
    # Initial estimates, typed per symbol ('pose' or 'point').
    initial_estimates = {}
    for symbol in symbols:
        variable = unknowns[symbol['key']]
        if symbol['type'] == 'pose':
            estimate = gtsam.Pose2(*symbol['estimate'])
        elif symbol['type'] == 'point':
            estimate = gtsam.Point2(*symbol['estimate'])
        else:
            raise Exception("Invalid type")
        initial_estimates[variable] = estimate
    return cls(unknowns, priors, between_pose_factors,
               bearing_range_factors, initial_estimates)
def main():
    """Build, print, and optimize a small 2D SLAM example, then print marginals."""
    # Factor graph over three poses and two landmarks.
    factor_graph = gtsam.NonlinearFactorGraph()

    # Keys for the unknown variables.
    X1, X2, X3 = X(1), X(2), X(3)
    L1, L2 = L(4), L(5)

    # Prior anchoring the first pose at the origin.
    factor_graph.add(
        gtsam.PriorFactorPose2(X1, gtsam.Pose2(0.0, 0.0, 0.0), PRIOR_NOISE))

    # Odometry between consecutive poses: 2 m straight ahead each time.
    for start, end in ((X1, X2), (X2, X3)):
        factor_graph.add(
            gtsam.BetweenFactorPose2(start, end, gtsam.Pose2(2.0, 0.0, 0.0),
                                     ODOMETRY_NOISE))

    # Bearing-range observations of the two landmarks.
    factor_graph.add(
        gtsam.BearingRangeFactor2D(X1, L1, gtsam.Rot2.fromDegrees(45),
                                   np.sqrt(4.0 + 4.0), MEASUREMENT_NOISE))
    factor_graph.add(
        gtsam.BearingRangeFactor2D(X2, L1, gtsam.Rot2.fromDegrees(90), 2.0,
                                   MEASUREMENT_NOISE))
    factor_graph.add(
        gtsam.BearingRangeFactor2D(X3, L2, gtsam.Rot2.fromDegrees(90), 2.0,
                                   MEASUREMENT_NOISE))

    print("Factor Graph:\n{}".format(factor_graph))

    # Deliberately inaccurate initial values.
    initial = gtsam.Values()
    initial.insert(X1, gtsam.Pose2(-0.25, 0.20, 0.15))
    initial.insert(X2, gtsam.Pose2(2.30, 0.10, -0.20))
    initial.insert(X3, gtsam.Pose2(4.10, 0.10, 0.10))
    initial.insert(L1, gtsam.Point2(1.80, 2.10))
    initial.insert(L2, gtsam.Point2(4.10, 1.80))

    print("Initial Estimate:\n{}".format(initial))

    # Levenberg-Marquardt with default parameters; see the GTSAM docs for
    # the full set of configuration options.
    params = gtsam.LevenbergMarquardtParams()
    optimizer = gtsam.LevenbergMarquardtOptimizer(factor_graph, initial,
                                                  params)
    result = optimizer.optimize()
    print("\nFinal Result:\n{}".format(result))

    # Marginal covariance for every variable.
    marginals = gtsam.Marginals(factor_graph, result)
    for (key, s) in [(X1, "X1"), (X2, "X2"), (X3, "X3"), (L1, "L1"),
                     (L2, "L2")]:
        print("{} covariance:\n{}\n".format(
            s, marginals.marginalCovariance(key)))
# Bearing-range observations: X1 sees L1 at 45 deg / sqrt(8) m;
# X2 sees L1 and X3 sees L2 at 90 deg / 2 m.
graph.add(gtsam.BearingRangeFactor2D(
    X1, L1, gtsam.Rot2.fromDegrees(45), np.sqrt(4.0+4.0), MEASUREMENT_NOISE))
graph.add(gtsam.BearingRangeFactor2D(
    X2, L1, gtsam.Rot2.fromDegrees(90), 2.0, MEASUREMENT_NOISE))
graph.add(gtsam.BearingRangeFactor2D(
    X3, L2, gtsam.Rot2.fromDegrees(90), 2.0, MEASUREMENT_NOISE))

# Print graph
print("Factor Graph:\n{}".format(graph))

# Create (deliberately inaccurate) initial estimate
initial_estimate = gtsam.Values()
initial_estimate.insert(X1, gtsam.Pose2(-0.25, 0.20, 0.15))
initial_estimate.insert(X2, gtsam.Pose2(2.30, 0.10, -0.20))
initial_estimate.insert(X3, gtsam.Pose2(4.10, 0.10, 0.10))
initial_estimate.insert(L1, gtsam.Point2(1.80, 2.10))
initial_estimate.insert(L2, gtsam.Point2(4.10, 1.80))

# Print
print("Initial Estimate:\n{}".format(initial_estimate))

# Optimize using Levenberg-Marquardt optimization. The optimizer
# accepts an optional set of configuration parameters, controlling
# things like convergence criteria, the type of linear system solver
# to use, and the amount of information displayed during optimization.
# Here we will use the default set of parameters. See the
# documentation for the full set of parameters.
params = gtsam.LevenbergMarquardtParams()
optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initial_estimate,
                                              params)
result = optimizer.optimize()
print("\nFinal Result:\n{}".format(result))
def test_constructors(self):
    """A Point2 built from a numpy vector should equal one built from doubles."""
    from_scalars = gtsam.Point2(1, 2)
    from_vector = gtsam.Point2(np.array([1, 2]))
    np.testing.assert_array_equal(from_vector, from_scalars)
# BUG FIX: the two branch bodies were swapped in the original — the
# recompute path immediately overwrote the fresh matches by reloading the
# stale cache ('rb'), and the load path tried to pickle.dump a variable
# that was never defined ('wb').
if RECOMPUTE_MATCHING:
    # Compute matches and cache them to disk.
    landmarks_to_images = detect_and_match(
        measurements.get_video_reader(video_frame_steps))
    with open(measurements.path / 'matching.pkl', 'wb') as f:
        pickle.dump(landmarks_to_images, f)
else:
    # Load precomputed matching from disk.
    with open(measurements.path / 'matching.pkl', 'rb') as f:
        landmarks_to_images = pickle.load(f)

# One projection factor per (landmark, image) observation.
for landmark_idx, landmark_occurences in enumerate(landmarks_to_images):
    for image_idx, landmark_pixel_coords in landmark_occurences:
        factor = gtsam.ProjectionFactorPPPCal3DS2(
            gtsam.Point2(*landmark_pixel_coords),
            gtsam.noiseModel_Isotropic.Sigma(2, 2.0),
            symbol('x', preintegration_steps[image_idx]), symbol('r', 0),
            symbol('m', landmark_idx), calibration_matrices[image_idx])
        factor_graph.push_back(factor)
def extract_kp(idx):
    """Return the keypoint stored at row *idx* of ``data`` as a Point2 (u, v)."""
    fields = [float(token) for token in data[idx].split()]
    return gtsam.Point2(fields[0], fields[1])  # pylint:disable=no-member
def batch_factorgraph_example():
    """Build and solve the classic 3-pose / 2-landmark 2D SLAM example.

    Optimizes with Levenberg-Marquardt, then extracts per-variable and
    joint marginal covariances and hands them to plot_result.
    """
    # Create an empty nonlinear factor graph.
    graph = gtsam.NonlinearFactorGraph()
    # Create the keys for the poses.
    X1 = X(1)
    X2 = X(2)
    X3 = X(3)
    pose_variables = [X1, X2, X3]
    # Create keys for the landmarks.
    L1 = L(1)
    L2 = L(2)
    landmark_variables = [L1, L2]
    # Add a prior on pose X1 at the origin.
    prior_noise = gtsam.noiseModel.Diagonal.Sigmas(np.array([0.1, 0.1, 0.1]))
    graph.add(
        gtsam.PriorFactorPose2(X1, gtsam.Pose2(0.0, 0.0, 0.0), prior_noise))
    # Add odometry factors between X1,X2 and X2,X3, respectively.
    odometry_noise = gtsam.noiseModel.Diagonal.Sigmas(
        np.array([0.1, 0.1, 0.1]))
    graph.add(
        gtsam.BetweenFactorPose2(X1, X2, gtsam.Pose2(2.0, 0.0, 0.0),
                                 odometry_noise))
    graph.add(
        gtsam.BetweenFactorPose2(X2, X3, gtsam.Pose2(2.0, 0.0, 0.0),
                                 odometry_noise))
    # Add Range-Bearing measurements to two different landmarks L1 and L2.
    measurement_noise = gtsam.noiseModel.Diagonal.Sigmas(
        np.array([0.05, 0.1]))
    graph.add(
        gtsam.BearingRangeFactor2D(X1, L1, gtsam.Rot2.fromDegrees(45),
                                   np.sqrt(4.0 + 4.0), measurement_noise))
    graph.add(
        gtsam.BearingRangeFactor2D(X2, L1, gtsam.Rot2.fromDegrees(90), 2.0,
                                   measurement_noise))
    graph.add(
        gtsam.BearingRangeFactor2D(X3, L2, gtsam.Rot2.fromDegrees(90), 2.0,
                                   measurement_noise))
    # Create (deliberately inaccurate) initial estimate.
    initial_estimate = gtsam.Values()
    initial_estimate.insert(X1, gtsam.Pose2(-0.25, 0.20, 0.15))
    initial_estimate.insert(X2, gtsam.Pose2(2.30, 0.10, -0.20))
    initial_estimate.insert(X3, gtsam.Pose2(4.10, 0.10, 0.10))
    initial_estimate.insert(L1, gtsam.Point2(1.80, 2.10))
    initial_estimate.insert(L2, gtsam.Point2(4.10, 1.80))
    # Create an optimizer (Levenberg-Marquardt, default parameters).
    params = gtsam.LevenbergMarquardtParams()
    optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initial_estimate,
                                                  params)
    # Solve the MAP problem.
    result = optimizer.optimize()
    # Calculate marginal covariances for all variables.
    marginals = gtsam.Marginals(graph, result)
    # Extract per-variable marginals as (mean, covariance) pairs.
    pose_marginals = []
    for var in pose_variables:
        pose_marginals.append(
            MultivariateNormalParameters(result.atPose2(var),
                                         marginals.marginalCovariance(var)))
    landmark_marginals = []
    for var in landmark_variables:
        landmark_marginals.append(
            MultivariateNormalParameters(result.atPoint2(var),
                                         marginals.marginalCovariance(var)))
    # You can extract the joint marginals like this.
    joint_all = marginals.jointMarginalCovariance(
        gtsam.KeyVector(pose_variables + landmark_variables))
    print("Joint covariance over all variables:")
    print(joint_all.fullMatrix())
    # Plot the marginals.
    plot_result(pose_marginals, landmark_marginals)