def test_perturbPose2(self):
    """perturbPose2 should move the Pose2 entry away from identity."""
    vals = gtsam.Values()
    vals.insert(0, gtsam.Pose2())
    vals.insert(1, gtsam.Point2(1, 1))
    # Perturb with translation and rotation sigma of 1.
    gtsam.utilities.perturbPose2(vals, 1, 1)
    self.assertTrue(vals.atPose2(0) != gtsam.Pose2())
def step(self):
    """
    Generator over the trajectory; one (odom, obs) pair per pose.

    return:
        odom: odom between two poses (initial pose is returned for the
            first step)
        obs: dict of (landmark -> (bearing, range))
    """
    for i in range(len(self.traj)):
        # Ground-truth relative motion; the first step has no predecessor,
        # so start from the identity pose.
        if i == 0:
            odom = gtsam.Pose2()
        else:
            odom = self.traj[i - 1].between(self.traj[i])
        # Corrupt the odometry with zero-mean Gaussian noise.
        # NOTE: the draw order (x, y, theta) fixes the RNG stream — do not
        # reorder these calls if reproducibility matters.
        nx = self.random_state.normal(0.0, self.sigma_x)
        ny = self.random_state.normal(0.0, self.sigma_y)
        nt = self.random_state.normal(0.0, self.sigma_theta)
        odom = odom.compose(gtsam.Pose2(nx, ny, nt))
        obs = {}
        for l, point in self.env.items():
            # Noisy bearing/range from the ground-truth pose to landmark l.
            b = self.traj[i].bearing(point).theta()
            r = self.traj[i].range(point)
            b += self.random_state.normal(0.0, self.sigma_bearing)
            r += self.random_state.normal(0.0, self.sigma_range)
            # Keep only landmarks inside the sensor's range/bearing limits.
            if 0 < r < self.max_range and abs(b) < self.max_bearing:
                obs[l] = b, r
        if i == 0:
            # First step: yield the (noise-perturbed) initial pose itself.
            yield self.traj[0].compose(odom), obs
        else:
            yield odom, obs
def step5_add_odometry_and_landmark_observations():
    """Extend the iSAM2 estimate with pose X3, landmark L2, and their factors."""
    # Keys for the variables involved in this step.
    X2, X3, L2 = X(2), X(3), L(2)

    # Track the newly-introduced variable keys.
    pose_variables.append(X3)
    landmark_variables.append(L2)

    # New factors: one odometry edge and one bearing/range observation.
    new_factors = gtsam.NonlinearFactorGraph()
    new_factors.add(
        gtsam.BetweenFactorPose2(
            X2, X3, gtsam.Pose2(2.0, 0.0, 0.0),
            gtsam.noiseModel.Diagonal.Sigmas(np.array([0.1, 0.1, 0.1]))))
    new_factors.add(
        gtsam.BearingRangeFactor2D(
            X3, L2, gtsam.Rot2.fromDegrees(90), 2.0,
            gtsam.noiseModel.Diagonal.Sigmas(np.array([0.05, 0.1]))))

    # Initial estimates only for the variables introduced in this step.
    new_values = gtsam.Values()
    new_values.insert(X3, gtsam.Pose2(4.10, 0.10, 0.10))
    new_values.insert(L2, gtsam.Point2(4.10, 1.80))

    # Incremental ISAM2 update with the new factors and values.
    isam.update(new_factors, new_values)
def add_mr_object_measurement(self,
                              stack,
                              class_realization,
                              new_input_object_prior=None,
                              new_input_object_covariance=None):
    """Fold multi-robot object measurements into the graphs and belief.

    stack: dict keyed by object id; each entry carries an 'expectation'
        (6-vector, apparently [yaw?, pitch?, roll?, x, y, z] given how it is
        unpacked below — TODO confirm), a 6x6 'covariance', and a log
        'weight'.
    class_realization: dict of object id -> class, copied into
        self.classRealization.
    new_input_object_prior / new_input_object_covariance: currently unused
        (only referenced by the commented-out block below).
    """
    graph_add = gtsam.NonlinearFactorGraph()
    initial_add = gtsam.Values()
    idx = 0
    for obj in stack:
        cov_noise_model = stack[obj]['covariance']
        # Build the expected Pose3: Ypr(yaw, pitch, roll) from indices
        # [2], [1], [0] and translation from indices [3:6].
        exp_gtsam = gtsam.Pose3(
            gtsam.Rot3.Ypr(stack[obj]['expectation'][2],
                           stack[obj]['expectation'][1],
                           stack[obj]['expectation'][0]),
            gtsam.Point3(stack[obj]['expectation'][3],
                         stack[obj]['expectation'][4],
                         stack[obj]['expectation'][5]))
        # Rotate the covariance into the body frame (transpose of the pose
        # rotation) before wrapping it in a Gaussian noise model.
        pose_rotation_matrix = exp_gtsam.rotation().matrix()
        cov_noise_model_rotated = self.rotate_cov_6x6(
            cov_noise_model, np.transpose(pose_rotation_matrix))
        cov_noise_model_rotated = gtsam.noiseModel.Gaussian.Covariance(
            cov_noise_model_rotated)
        # Updating the graph: the same prior factor goes into the main
        # graph, the incremental graph, and the propagated belief.
        self.graph.add(
            gtsam.PriorFactorPose3(self.XO(obj), exp_gtsam,
                                   cov_noise_model_rotated))
        graph_add.add(
            gtsam.PriorFactorPose3(self.XO(obj), exp_gtsam,
                                   cov_noise_model_rotated))
        self.prop_belief.add(
            gtsam.PriorFactorPose3(self.XO(obj), exp_gtsam,
                                   cov_noise_model_rotated))
        # Seed the propagated initial values with an identity pose if the
        # object has never been initialized.
        if self.prop_initial.exists(self.XO(obj)) is False:
            self.prop_initial.insert(
                self.XO(obj), gtsam.Pose3(gtsam.Pose2(0.0, 0.0, 0.0)))
        # if obj in new_input_object_prior and obj not in self.classRealization:
        #     self.graph.add(gtsam.PriorFactorPose3(self.XO(obj), new_input_object_prior[obj],
        #                                           new_input_object_covariance[obj]))
        #     self.prop_belief.add(gtsam.PriorFactorPose3(self.XO(obj), new_input_object_prior[obj],
        #                                                 new_input_object_covariance[obj]))
        self.classRealization[obj] = class_realization[obj]
        # Same identity seeding for the ISAM solver's values.
        if self.isam.value_exists(self.XO(obj)) is False:
            initial_add.insert(self.XO(obj),
                               gtsam.Pose3(gtsam.Pose2(0.0, 0.0, 0.0)))
        idx += 1
        # Accumulate the log-weight; clamp to -inf below the numeric floor
        # (-322 is near the double-precision log underflow limit).
        if stack[obj]['weight'] > -322:
            self.logWeight += stack[obj]['weight']  # + 150
        else:
            self.logWeight = -math.inf
def test_allPose2s(self):
    """allPose2s should select only the Pose2 entries from a Values."""
    vals = gtsam.Values()
    vals.insert(0, gtsam.Pose2())
    vals.insert(1, gtsam.Pose2(1, 1, 1))
    vals.insert(2, gtsam.Point2(1, 1))
    vals.insert(3, gtsam.Point3(1, 2, 3))
    # Only the two Pose2 values should survive the filter.
    self.assertEqual(gtsam.utilities.allPose2s(vals).size(), 2)
def compute_icp(self, source_points, target_points, guess=gtsam.Pose2()):
    """Align two point clouds with ICP, seeded by an initial guess.

    Returns (message, Pose2) where the pose is the estimated transform.
    """
    src = np.array(source_points, np.float32)
    tgt = np.array(target_points, np.float32)
    message, T = self.icp.compute(src, tgt, guess.matrix())
    # ICP covariance is too small
    # cov = self.icp.getCovariance()
    heading = np.arctan2(T[1, 0], T[0, 0])
    tx, ty = T[:2, 2]
    return message, gtsam.Pose2(tx, ty, heading)
def car_path(self):
    """Build self.traj from the 'car' track of the FSG map.

    Consecutive waypoints are turned into translation-only Pose2 deltas
    and composed onto a zero-heading start pose.
    """
    data_set = fsg_track_map()['car']
    x_pos, y_pos = data_set
    # Start at the first waypoint with zero heading.
    pose = gtsam.Pose2(x_pos[0], y_pos[0], 0.0)
    for index, data in enumerate(x_pos):
        self.traj.append(pose)
        if index != 0:
            # Translation-only delta between waypoints index-1 and index.
            u = data - x_pos[index - 1], y_pos[index] - y_pos[index - 1], 0.0
            pose = pose.compose(gtsam.Pose2(*u))
    # NOTE(review): the pose is appended *before* being advanced, so
    # self.traj[i] reflects waypoint i-1 and the final composed pose is
    # never appended — confirm this one-step lag is intentional.
def setUp(self):
    """Prepare a three-pose chain joined by two identical odometry factors."""
    odometry = gtsam.Pose2(2.0, 0.0, 0.0)
    noise = gtsam.noiseModel.Diagonal.Sigmas(np.array([0.2, 0.2, 0.1]))
    self.graph = gtsam.NonlinearFactorGraph()
    for i in (0, 1):
        self.graph.add(gtsam.BetweenFactorPose2(i, i + 1, odometry, noise))
    # Ground-truth-consistent values: poses spaced 2 m apart along x.
    self.values = gtsam.Values()
    for i in (0, 1, 2):
        self.values.insert_pose2(i, gtsam.Pose2(2.0 * i, 0., 0.))
def main():
    """Pose-graph SLAM on a g2o dataset, optimized with Gauss-Newton.

    Reads VERTEX_SE2/EDGE_SE2 lines from 'input_INTEL_g2o.g2o', builds a
    factor graph anchored by a prior on node 0, optimizes, and prints the
    resulting poses.
    """
    graph = gtsam.NonlinearFactorGraph()
    initialEstimate = gtsam.Values()

    # Anchor the first pose at the origin with a weak prior.
    priorNoise = gtsam.noiseModel.Diagonal.Sigmas(np.array([0.3, 0.3, 0.10]))
    graph.add(gtsam.PriorFactorPose2(0, gtsam.Pose2(0, 0, 0), priorNoise))

    # Read data from the .g2o file and initialize nodes/edges.
    data_file = 'input_INTEL_g2o.g2o'
    with open(data_file, 'r') as f:
        # The 'with' block closes the file; the redundant f.close() that
        # used to follow has been removed.
        for line in f:
            line_split = line.split()
            if line_split[0] == 'VERTEX_SE2':
                node = int(line_split[1])
                x, y, th = make_pose(line_split[2:])
                initialEstimate.insert(node, gtsam.Pose2(x, y, th))
            elif line_split[0] == 'EDGE_SE2':
                node1 = int(line_split[1])
                node2 = int(line_split[2])
                dx, dy, dth = make_pose(line_split[3:6])
                noise = gtsam.noiseModel.Gaussian.Information(
                    create_information_matrix(line_split[6:]))
                graph.add(
                    gtsam.BetweenFactorPose2(node1, node2,
                                             gtsam.Pose2(dx, dy, dth),
                                             noise))

    parameters = gtsam.GaussNewtonParams()
    # Stop iterating once the change in error between steps is less than this value.
    parameters.relativeErrorTol = 1e-5
    # Do not perform more than N iteration steps.
    parameters.maxIterations = 100
    # Create the optimizer and optimize.
    optimizer = gtsam.GaussNewtonOptimizer(graph, initialEstimate, parameters)
    result = optimizer.optimize()
    result.print("Final Result:\n")

    for key in result.keys():
        print(key)
        if result.exists(key):
            print(result.atPose2(key))
def get_factor(self):
    """Build the BetweenFactorPose2 this edge represents."""
    relative_pose = gtsam.Pose2(*self.pose)
    return gtsam.BetweenFactorPose2(self.var1, self.var2, relative_pose,
                                    self.odometry_noise)
def n2g(numpy_arr, obj):
    """Convert a numpy array to the gtsam type named by `obj`.

    Raises NotImplementedError for unsupported type names.
    """
    if obj == "Quaternion":
        qx, qy, qz, qw = numpy_arr
        return gtsam.Rot3.Quaternion(qw, qx, qy, qz)
    if obj == "Euler":
        r, p, y = numpy_arr
        return gtsam.Rot3.Ypr(y, p, r)
    if obj == "Point2":
        px, py = numpy_arr
        return gtsam.Point2(px, py)
    if obj == "Pose2":
        px, py, pyaw = numpy_arr
        return gtsam.Pose2(px, py, pyaw)
    if obj == "Point3":
        px, py, pz = numpy_arr
        return gtsam.Point3(px, py, pz)
    if obj == "Pose3":
        tx, ty, tz, r, p, y = numpy_arr
        return gtsam.Pose3(gtsam.Rot3.Ypr(y, p, r),
                           gtsam.Point3(tx, ty, tz))
    if obj == "imuBiasConstantBias":
        # First three entries: accelerometer bias; last three: gyro bias.
        imu_bias = numpy_arr
        return gtsam.imuBias_ConstantBias(np.array(imu_bias[:3]),
                                          np.array(imu_bias[3:]))
    if obj == "Vector":
        return np.array(numpy_arr)
    raise NotImplementedError("Not implemented from numpy to " + obj)
def __init__(self, initial_state, covariance, alphas, beta):
    """Set up the ISAM2-based SLAM state.

    initial_state: (x, y, theta) of the starting pose.
    covariance: scalar sigma used for all three prior components.
    alphas: odometry noise coefficients (squared and stored).
    beta: observation noise parameters; beta[0] is range-like, beta[1] is
        a bearing in degrees.
    """
    self._initial_state = gtsam.Pose2(initial_state[0], initial_state[1],
                                      initial_state[2])
    # Same sigma for x, y, and theta of the prior.
    self._prior_noise = gtsam.noiseModel_Diagonal.Sigmas(
        np.array([covariance, covariance, covariance]))
    # self.observation_noise = gtsam.noiseModel_Diagonal.Sigmas(np.array([beta[0] ** 2, np.deg2rad(beta[1]) ** 2]))
    # self.observation_noise = gtsam.noiseModel_Diagonal.Sigmas(np.array([beta[0] ** 2, np.deg2rad(beta[1]) ** 2]))
    # Observation noise: [bearing_variance, scaled_range_variance].
    # NOTE(review): beta[0] is divided by 100 here but not in the commented
    # variants above — confirm the scaling is intentional.
    self.observation_noise = gtsam.noiseModel_Diagonal.Sigmas(
        np.array(([np.deg2rad(beta[1]) ** 2, (beta[0] / 100) ** 2])))
    self.beta = beta
    self.alphas = alphas ** 2
    # Counters: poses start at 0; landmark keys are offset by 1000 so they
    # do not collide with pose keys — TODO confirm against observe().
    self.pose_num = 0
    self.observation_num = 1000
    self.landmark_indexes = list()
    self.states_new = np.array([[]])
    self.observation_new = np.array([[]])
    # Factor graph, estimates, and the incremental ISAM2 solver.
    self.graph = gtsam.NonlinearFactorGraph()
    self.estimations = gtsam.Values()
    self.result = gtsam.Values()
    self.parameters = gtsam.ISAM2Params()
    self.parameters.setRelinearizeThreshold(1e-4)
    self.parameters.setRelinearizeSkip(1)
    self.slam = gtsam.ISAM2(self.parameters)
    # Anchor the graph with the prior on the initial pose.
    self.graph.add(gtsam.PriorFactorPose2(self.pose_num, self._initial_state,
                                          self._prior_noise))
    self.estimations.insert(self.pose_num, self._initial_state)
def add_measurement(self, measurement, new_da_realization, graph, initial, da_realization, class_realization): """ :type graph: gtsam.NonlinearFactorGraph :type initial: gtsam.Values """ # Adding a factor per each measurement with corresponding data association realization measurement_number = 0 #path_length = len(da_realization) - 1 path_length = len(da_realization) for obj in new_da_realization: obj = int(obj) for obj_index in class_realization: if obj == obj_index[0]: obj_class = obj_index[1] logit_measurement = self.prob_vector_2_logit( measurement[measurement_number]) graph.add( cls_un_model_fake_1d_conf.ClsUModelFactor( self.X(path_length), self.XO(obj), logit_measurement, obj_class)) measurement_number += 1 # Initialization of object of the realization # TODO: CHANGE TO SOMETHING MORE ACCURATE / LESS BASIC if initial.exists(self.XO(obj)) is False: initial.insert(self.XO(obj), gtsam.Pose3(gtsam.Pose2(0.0, 0.0, 0.0)))
def __init__(self, p, q, r, alphas=np.array([0.001, 0.0001]),
             sensor_offset=np.zeros(2)):
    """SLAM front-end state: noise models, factor graph, and counters.

    p, q, r: prior, odometry, and measurement noise sigma vectors.
    """
    # Noise models for odometry and measurements.
    self.odometry_noise = gtsam.noiseModel.Diagonal.Sigmas(q)
    self.measurement_noise = gtsam.noiseModel.Diagonal.Sigmas(r)
    self.alphas = alphas
    self.sensor_offset = sensor_offset

    # Factor graph plus the values of the newest poses.
    self.graph = gtsam.NonlinearFactorGraph()
    self.poses = gtsam.Values()

    # Running counters used to enumerate poses (kx) and landmarks (kl).
    self.kx = 1
    self.kl = 1
    self.landmarks = np.empty(0)

    # Anchor the graph with a prior on the first pose.
    prior_noise = gtsam.noiseModel.Diagonal.Sigmas(p)
    self.graph.add(gtsam.PriorFactorPose2(X(self.kx),
                                          gtsam.Pose2(0.0, 0.0, 0.0),
                                          prior_noise))
def __init__(self, goal_pose=gtsam.Pose2(0.0, 0.0, 0.0), goal_weight=0,
             information_theoretic_weight=0, lambda_uncertainty_weight=0):
    """Store reward/objective weights for planning.

    goal_pose: target Pose2; defaults to the identity pose.
        BUG FIX: the previous default, gtsam.Pose2(gtsam.Pose3(0.0, 0.0, 0.0)),
        is invalid — Pose3 has no three-float constructor and Pose2 does not
        convert from Pose3, so evaluating the default raised at definition
        time. The identity Pose2 is the evident intent.
    goal_weight: weight of the distance-to-goal term.
    information_theoretic_weight: weight of the information-gain term.
    lambda_uncertainty_weight: weight of the uncertainty penalty term.
    """
    self.goal_pose = goal_pose
    self.goal_weight = goal_weight
    self.information_theoretic_weight = information_theoretic_weight
    self.lambda_uncertainty_weight = lambda_uncertainty_weight
def add_measurement(self, measurement, new_da_realization, graph, initial, da_realization): """ :type graph: gtsam.NonlinearFactorGraph :type initial: gtsam.Values """ # Adding a factor per each measurement with corresponding data association realization measurement_number = 0 path_length = len(da_realization) for realization in new_da_realization: realization = int(realization) graph.add( gtsam.BetweenFactorPose3(self.X(path_length), self.XO(realization), measurement[measurement_number], self.model_noise)) measurement_number += 1 # Initialization of object of the realization # TODO: CHANGE TO SOMETHING MORE ACCURATE / LESS BASIC if initial.exists(self.XO(realization)) is False: initial.insert(self.XO(realization), gtsam.Pose3(gtsam.Pose2(0.0, 0.0, 0.0))) # Saving the data association realization in a list da_realization[path_length - 1] = new_da_realization
def add_measurement(self, measurement, new_da_realization, graph, initial, da_realization, class_realization): """ :type graph: gtsam.NonlinearFactorGraph :type initial: gtsam.Values """ # Adding a factor per each measurement with corresponding data association realization measurement_number = 0 path_length = len(da_realization) - 1 for realization in new_da_realization: realization = int(realization) graph.add( libNewFactorFake_p5.ClsModelFactor3( self.X(path_length), self.XO(realization), self.ClsModelCore[class_realization[ new_da_realization[measurement_number] - 1] - 1], measurement[measurement_number])) measurement_number += 1 # Initialization of object of the realization # TODO: CHANGE TO SOMETHING MORE ACCURATE / LESS BASIC if initial.exists(self.XO(realization)) is False: initial.insert(self.XO(realization), gtsam.Pose3(gtsam.Pose2(0.0, 0.0, 0.0)))
def test_extractPoint3(self):
    """extractPoint3 should return only Point3 entries, one row each."""
    point3 = gtsam.Point3(0.0, 0.1, 0.0)
    vals = gtsam.Values()
    vals.insert(1, gtsam.Pose2(0.0, 0.1, 0.1))
    vals.insert(2, point3)
    extracted = gtsam.utilities.extractPoint3(vals)
    # The Pose2 entry is skipped; only the Point3 remains.
    np.testing.assert_equal(extracted, point3.reshape(-1, 3))
def test_perturbPoint3(self):
    """perturbPoint3 should move the Point3 off its original value."""
    origin = gtsam.Point3(0, 0, 0)
    vals = gtsam.Values()
    vals.insert(0, gtsam.Pose2())
    vals.insert(1, origin)
    # Perturb with sigma of 1.
    gtsam.utilities.perturbPoint3(vals, 1)
    self.assertTrue(not np.allclose(vals.atPoint3(1), origin))
def from_request(cls, request):
    """Build an instance from a request dict with 'lag', 'prior_mean',
    and 'prior_noise' entries."""
    lag = request['lag']
    mean = gtsam.Pose2(*request['prior_mean'])
    noise = gtsam.noiseModel_Diagonal.Sigmas(np.array(request['prior_noise']))
    return cls(lag, mean, noise)
def test_extractPose3(self):
    """extractPose3 should return flattened rows for Pose3 entries only."""
    # Identity Pose3 flattened: row-major rotation followed by translation.
    expected = np.asarray([1., 0., 0., 0., 1., 0., 0., 0., 1., 0., 0., 0.])
    vals = gtsam.Values()
    vals.insert(1, gtsam.Pose2(0.0, 0.1, 0.1))
    vals.insert(2, gtsam.Pose3())
    np.testing.assert_allclose(gtsam.utilities.extractPose3(vals),
                               expected.reshape(-1, 12))
def point_to_pose(self, points):
    """Convert (x, y, heading_deg) triples into Pose2 objects.

    Headings arrive in degrees and are converted to radians.
    """
    return [
        gtsam.Pose2(point[0], point[1], np.deg2rad(point[2]))
        for point in points
    ]
def test_extractPose2(self):
    """extractPose2 should return (x, y, theta) rows for Pose2 entries only."""
    expected = np.asarray((0.0, 0.1, 0.1))
    vals = gtsam.Values()
    vals.insert(1, gtsam.Pose2(*expected))
    vals.insert(2, gtsam.Point3(0.0, 0.1, 0.0))
    # The Point3 entry is skipped; only the Pose2 row remains.
    np.testing.assert_allclose(gtsam.utilities.extractPose2(vals),
                               expected.reshape(-1, 3))
def main():
    """Main runner: three-pose odometry chain, optimized with LM."""
    # Build the factor graph.
    graph = gtsam.NonlinearFactorGraph()

    # Prior on the first pose: anchor it at the origin.
    prior_mean = gtsam.Pose2(0.0, 0.0, 0.0)  # prior at origin
    graph.add(gtsam.PriorFactorPose2(1, prior_mean, PRIOR_NOISE))

    # Identical odometry (Between) factors between consecutive poses.
    odometry = gtsam.Pose2(2.0, 0.0, 0.0)
    for a, b in ((1, 2), (2, 3)):
        graph.add(gtsam.BetweenFactorPose2(a, b, odometry, ODOMETRY_NOISE))
    print("\nFactor Graph:\n{}".format(graph))

    # Deliberately-wrong initial estimates, to give the optimizer work.
    initial = gtsam.Values()
    initial.insert(1, gtsam.Pose2(0.5, 0.0, 0.2))
    initial.insert(2, gtsam.Pose2(2.3, 0.1, -0.2))
    initial.insert(3, gtsam.Pose2(4.1, 0.1, 0.1))
    print("\nInitial Estimate:\n{}".format(initial))

    # Optimize with Levenberg-Marquardt.
    params = gtsam.LevenbergMarquardtParams()
    result = gtsam.LevenbergMarquardtOptimizer(graph, initial,
                                               params).optimize()
    print("\nFinal Result:\n{}".format(result))

    # 5. Calculate and print marginal covariances for all variables.
    marginals = gtsam.Marginals(graph, result)
    for i in range(1, 4):
        print("X{} covariance:\n{}\n".format(i,
                                             marginals.marginalCovariance(i)))

    # Plot each pose with its covariance ellipse.
    for i in range(1, 4):
        gtsam_plot.plot_pose2(0, result.atPose2(i), 0.5,
                              marginals.marginalCovariance(i))
    plt.axis('equal')
    plt.show()
def initialize(self, priorMean=None, priorCov=None):
    """Anchor the graph with a prior factor on the current key.

    priorMean: (x, y, theta) of the prior pose; defaults to the origin.
    priorCov: diagonal sigmas of the prior noise; defaults to zeros.

    FIX: the defaults were mutable list literals ([0, 0, 0]); replaced
    with the None-sentinel idiom. Behavior is unchanged — the lists were
    only read, never mutated — but the shared-default hazard is gone.
    """
    if priorMean is None:
        priorMean = [0, 0, 0]
    if priorCov is None:
        priorCov = [0, 0, 0]
    # Build the prior pose and its noise model.
    priorMean = gtsam.Pose2(priorMean[0], priorMean[1], priorMean[2])
    priorCov = gtsam.noiseModel_Diagonal.Sigmas(np.array(priorCov))
    self.graph.add(
        gtsam.PriorFactorPose2(X(self.currentKey), priorMean, priorCov))
    self.initialValues.insert(X(self.currentKey), priorMean)
    return
def record_observation(observation):
    """Feed one observation into the global fixed-lag batch smoother and
    return the smoothed pose for the observation's current key."""
    global SMOOTHER_BATCH
    factor_graph = gtsam.NonlinearFactorGraph()
    new_values = gtsam.Values()
    new_timestamps = gtsam_unstable.FixedLagSmootherKeyTimestampMap()
    time = observation.time
    previous_key = observation.previous_key
    current_key = observation.current_key
    current_pose = observation.current_pose
    # Register the new variable and its timestamp for lag-based pruning.
    new_timestamps.insert(_timestamp_key_value(current_key, time))
    new_values.insert(current_key, current_pose)
    # for measurement, noise in zip(
    #         observation.odometry_measurements, observation.odometry_noise):
    #     factor_graph.push_back(gtsam.BetweenFactorPose2(
    #         previous_key, current_key, measurement, noise
    # NOTE(review): the loop above was replaced by the two hard-coded
    # odometry factors below (both spanning the same key pair), and numpy
    # is imported mid-function — both look like leftover debugging; confirm
    # before shipping.
    import numpy as np
    odometry_measurement_1 = gtsam.Pose2(0.61, -0.08, 0.02)
    odometry_noise_1 = gtsam.noiseModel_Diagonal.Sigmas(
        np.array([0.1, 0.1, 0.05]))
    factor_graph.push_back(
        gtsam.BetweenFactorPose2(previous_key, current_key,
                                 odometry_measurement_1, odometry_noise_1))
    odometry_measurement_2 = gtsam.Pose2(0.47, 0.03, 0.01)
    odometry_noise_2 = gtsam.noiseModel_Diagonal.Sigmas(
        np.array([0.05, 0.05, 0.05]))
    factor_graph.push_back(
        gtsam.BetweenFactorPose2(previous_key, current_key,
                                 odometry_measurement_2, odometry_noise_2))
    # Update the smoothers with the new factors.
    SMOOTHER_BATCH.update(factor_graph, new_values, new_timestamps)
    pose = SMOOTHER_BATCH.calculateEstimatePose2(current_key)
    output = FixedLagSmootherOutput(current_key, pose)
    return output
def test_localToWorld(self):
    """localToWorld should apply the base transform, honoring user_keys."""
    base = gtsam.Pose2(1, 0, 0)
    local = gtsam.Values()
    local.insert(0, gtsam.Point2(10, 10))
    local.insert(1, gtsam.Pose2(6, 11, 0.0))
    expected_point2 = gtsam.Point2(11, 10)
    expected_pose2 = gtsam.Pose2(7, 11, 0)

    # Transform every value.
    world = gtsam.utilities.localToWorld(local, base)
    np.testing.assert_allclose(world.atPoint2(0), expected_point2)
    np.testing.assert_allclose(
        world.atPose2(1).matrix(), expected_pose2.matrix())

    # Restrict the transform to a subset of keys.
    user_keys = [1]
    world = gtsam.utilities.localToWorld(local, base, user_keys)
    np.testing.assert_allclose(
        world.atPose2(1).matrix(), expected_pose2.matrix())
    # Raise error since 0 is not in user_keys.
    self.assertRaises(RuntimeError, world.atPoint2, 0)
def addObs(self, measurement, measurementNoise):
    """Attach a Pose2 prior factor for the current key from a measurement."""
    prior = gtsam.Pose2(float(measurement[0]), float(measurement[1]),
                        float(measurement[2]))
    noise = gtsam.noiseModel_Diagonal.Variances(np.array(measurementNoise))
    self.graph.add(gtsam.PriorFactorPose2(X(self.currentKey), prior, noise))
    return
def step(self, odometry, odometryNoise):
    """Add an odometry factor, seed the next pose, and advance the key."""
    relative = gtsam.Pose2(odometry[0], odometry[1], odometry[2])
    noise = gtsam.noiseModel_Diagonal.Variances(np.array(odometryNoise))
    key_now = X(self.currentKey)
    key_next = X(self.currentKey + 1)
    self.graph.add(
        gtsam.BetweenFactorPose2(key_now, key_next, relative, noise))
    # Seed the new pose with the motion-model prediction.
    predMean = self._motion_model(odometry)
    self.initialValues.insert(
        key_next, gtsam.Pose2(predMean[0], predMean[1], predMean[2]))
    # Increment the key and track the predicted current pose.
    self.currentKey += 1
    self.currentPose = predMean
    return
def _get_motion_gtsam_format(motion):
    """
    Predicts the landmark position based on a current state and observation
    distance and bearing.

    motion: (drot1, dtran, drot2) odometry triple.
    Returns a Pose2 with the translated offset and wrapped total rotation.
    """
    drot1, dtran, drot2 = motion
    theta = drot1 + drot2
    # NOTE(review): the standard odometry motion model translates along the
    # heading after the *first* rotation only (drot1); translating along
    # drot1 + drot2 here looks suspicious — confirm it is intentional.
    x = dtran * np.cos(theta)
    y = dtran * np.sin(theta)
    # Wrap the angle between [-pi, +pi].
    theta = wrap_angle(theta)
    return gtsam.Pose2(x, y, theta)