Example #1
    def test_bundle_adjustment(self):
        """Test Structure from Motion bundle adjustment solution with accurate prior value."""

        # Create the set of ground-truth landmarks
        expected_points = sfm_data.create_points()

        # Create the set of ground-truth poses
        expected_poses = sfm_data.create_poses()

        # Create the nrCameras*nrPoints feature data input for atrium_sfm()
        feature_data = sfm_data.Data(self.nrCameras, self.nrPoints)

        # Project points back to the camera to generate synthetic key points
        for i, pose in enumerate(expected_poses):
            for j, point in enumerate(expected_points):
                feature_data.J[i][j] = j
                camera = gtsam.PinholeCameraCal3_S2(pose, self.calibration)
                feature_data.Z[i][j] = camera.project(point)

        result = self.sfm.bundle_adjustment(feature_data, 2.5, 0, 0)

        # Compare output poses with ground truth poses
        for i in range(len(expected_poses)):
            pose_i = result.atPose3(symbol(ord('x'), i))
            self.assert_gtsam_equals(pose_i, expected_poses[i])

        # Compare output points with ground truth points
        for j in range(len(expected_points)):
            point_j = result.atPoint3(symbol(ord('p'), j))
            self.assert_gtsam_equals(point_j, expected_points[j])
Example #2
def log_step_push2d(tstep, logger, data, optimizer):

    num_poses = tstep + 1

    # log estimated poses
    poses_graph = optimizer.calculateEstimate()
    obj_pose_vec_graph = np.zeros((num_poses, 3))
    ee_pose_vec_graph = np.zeros((num_poses, 3))

    for i in range(0, num_poses):
        obj_key = gtsam.symbol(ord('o'), i)
        ee_key = gtsam.symbol(ord('e'), i)

        obj_pose2d = poses_graph.atPose2(obj_key)
        ee_pose2d = poses_graph.atPose2(ee_key)

        obj_pose_vec_graph[i, :] = [
            obj_pose2d.x(), obj_pose2d.y(),
            obj_pose2d.theta()
        ]
        ee_pose_vec_graph[i, :] = [
            ee_pose2d.x(), ee_pose2d.y(),
            ee_pose2d.theta()
        ]

    logger.log_step("graph/obj_poses2d", obj_pose_vec_graph, tstep)
    logger.log_step("graph/ee_poses2d", ee_pose_vec_graph, tstep)

    # log gt poses
    obj_pose_vec_gt = data['obj_poses_gt'][0:num_poses]
    ee_pose_vec_gt = data['ee_poses_gt'][0:num_poses]
    logger.log_step('gt/obj_poses2d', obj_pose_vec_gt, tstep)
    logger.log_step('gt/ee_poses2d', ee_pose_vec_gt, tstep)

    return logger
Example #3
    def test_VisualISAMExample(self):
        # Data Options
        options = generator.Options()
        options.triangle = False
        options.nrCameras = 20

        # iSAM Options
        isamOptions = visual_isam.Options()
        isamOptions.hardConstraint = False
        isamOptions.pointPriors = False
        isamOptions.batchInitialization = True
        isamOptions.reorderInterval = 10
        isamOptions.alwaysRelinearize = False

        # Generate data
        data, truth = generator.generate_data(options)

        # Initialize iSAM with the first pose and points
        isam, result, nextPose = visual_isam.initialize(
            data, truth, isamOptions)

        # Main loop for iSAM: stepping through all poses
        for currentPose in range(nextPose, options.nrCameras):
            isam, result = visual_isam.step(data, isam, result, truth,
                                            currentPose)

        for i in range(len(truth.cameras)):
            pose_i = result.atPose3(symbol('x', i))
            self.gtsamAssertEquals(pose_i, truth.cameras[i].pose(), 1e-5)

        for j in range(len(truth.points)):
            point_j = result.atPoint3(symbol('l', j))
            self.gtsamAssertEquals(point_j, truth.points[j], 1e-5)
Example #4
def sim3_transform_map(result, nrCameras, nrPoints):
    # Similarity transform
    s_poses = []
    s_points = []
    _sim3 = sim3.Similarity3()

    for i in range(nrCameras):
        pose_i = result.atPose3(symbol(ord('x'), i))
        s_poses.append(pose_i)

    for j in range(nrPoints):
        point_j = result.atPoint3(symbol(ord('p'), j))
        s_points.append(point_j)

    theta = np.radians(30)
    wRc = gtsam.Rot3(np.array([[-math.sin(theta), 0, math.cos(theta)],
                               [-math.cos(theta), 0, -math.sin(theta)],
                               [0, 1, 0]]))
    d_pose1 = gtsam.Pose3(
        wRc, gtsam.Point3(-math.sin(theta)*1.58, -math.cos(theta)*1.58, 1.2))
    d_pose2 = gtsam.Pose3(
        wRc, gtsam.Point3(-math.sin(theta)*1.58*3, -math.cos(theta)*1.58*3, 1.2))
    pose_pairs = [[s_poses[2], d_pose2], [s_poses[0], d_pose1]]

    _sim3.align_pose(pose_pairs)
    print('R', _sim3._R)
    print('t', _sim3._t)
    print('s', _sim3._s)

    s_map = (s_poses, s_points)
    d_map = _sim3.map_transform(s_map)

    return _sim3, d_map
Example #5
def solve_lqr(A, B, Q, R, X0=np.array([0., 0.]), num_time_steps=500):
    '''Solves a discrete, finite horizon LQR problem given system dynamics in
    state space representation.
    Arguments:
        A, B: nxn state transition matrix and nxp control input matrix
        Q, R: nxn state cost matrix and pxp control cost matrix
        X0: initial state (n-vector)
        num_time_steps: number of time steps, T
    Returns:
        x_sol, u_sol: Txn array of states and Txp array of controls
    '''
    n = np.size(A, 0)
    p = np.size(B, 1)

    # noise models
    prior_noise = gtsam.noiseModel_Constrained.All(n)
    dynamics_noise = gtsam.noiseModel_Constrained.All(n)
    q_noise = gtsam.dynamic_cast_noiseModel_Diagonal_noiseModel_Gaussian(
        gtsam.noiseModel_Gaussian.Information(Q))
    r_noise = gtsam.dynamic_cast_noiseModel_Diagonal_noiseModel_Gaussian(
        gtsam.noiseModel_Gaussian.Information(R))
    # note: GTSAM 4.0.2 python wrapper doesn't have 'Information'
    # wrapper, use this instead if you are not on develop branch:
    #   `gtsam.noiseModel_Gaussian.SqrtInformation(np.sqrt(Q)))`

    # Create an empty Gaussian factor graph
    graph = gtsam.GaussianFactorGraph()

    # Create the keys corresponding to unknown variables in the factor graph
    X = []
    U = []
    for i in range(num_time_steps):
        X.append(gtsam.symbol(ord('x'), i))
        U.append(gtsam.symbol(ord('u'), i))

    # set initial state as prior
    graph.add(X[0], np.eye(n), X0, prior_noise)

    # Add dynamics constraint as ternary factor
    #   A.x1 + B.u1 - I.x2 = 0
    for i in range(num_time_steps - 1):
        graph.add(X[i], A, U[i], B, X[i + 1], -np.eye(n), np.zeros((n)),
                  dynamics_noise)

    # Add cost functions as unary factors
    for x in X:
        graph.add(x, np.eye(n), np.zeros(n), q_noise)
    for u in U:
        graph.add(u, np.eye(p), np.zeros(p), r_noise)

    # Solve
    result = graph.optimize()
    x_sol = np.zeros((num_time_steps, n))
    u_sol = np.zeros((num_time_steps, p))
    for i in range(num_time_steps):
        x_sol[i, :] = result.at(X[i])
        u_sol[i, :] = result.at(U[i])

    return x_sol, u_sol
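
A minimal usage sketch for solve_lqr (not part of the original snippet): the double-integrator dynamics and cost weights below are illustrative values chosen only to show the calling convention and output shapes.

import numpy as np

dt = 0.1
A = np.array([[1., dt], [0., 1.]])        # double-integrator state transition
B = np.array([[0.5 * dt ** 2], [dt]])     # acceleration input
Q = np.eye(2)                             # state cost
R = 0.1 * np.eye(1)                       # control cost

x_sol, u_sol = solve_lqr(A, B, Q, R, X0=np.array([1., 0.]), num_time_steps=100)
print(x_sol.shape, u_sol.shape)           # (100, 2) (100, 1)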
Example #6
    def test_SFMExample(self):
        options = generator.Options()
        options.triangle = False
        options.nrCameras = 10

        [data, truth] = generator.generate_data(options)

        measurementNoiseSigma = 1.0
        pointNoiseSigma = 0.1
        poseNoiseSigmas = np.array([0.001, 0.001, 0.001, 0.1, 0.1, 0.1])

        graph = gtsam.NonlinearFactorGraph()

        # Add factors for all measurements
        measurementNoise = gtsam.noiseModel_Isotropic.Sigma(2, measurementNoiseSigma)
        for i in range(len(data.Z)):
            for k in range(len(data.Z[i])):
                j = data.J[i][k]
                graph.add(gtsam.GenericProjectionFactorCal3_S2(
                    data.Z[i][k], measurementNoise,
                    symbol(ord('x'), i), symbol(ord('p'), j), data.K))

        posePriorNoise = gtsam.noiseModel_Diagonal.Sigmas(poseNoiseSigmas)
        graph.add(gtsam.PriorFactorPose3(symbol(ord('x'), 0),
                                   truth.cameras[0].pose(), posePriorNoise))
        pointPriorNoise = gtsam.noiseModel_Isotropic.Sigma(3, pointNoiseSigma)
        graph.add(gtsam.PriorFactorPoint3(symbol(ord('p'), 0),
                                    truth.points[0], pointPriorNoise))

        # Initial estimate
        initialEstimate = gtsam.Values()
        for i in range(len(truth.cameras)):
            pose_i = truth.cameras[i].pose()
            initialEstimate.insert(symbol(ord('x'), i), pose_i)
        for j in range(len(truth.points)):
            point_j = truth.points[j]
            initialEstimate.insert(symbol(ord('p'), j), point_j)

        # Optimization
        optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initialEstimate)
        for i in range(5):
            optimizer.iterate()
        result = optimizer.values()

        # Marginalization
        marginals = gtsam.Marginals(graph, result)
        marginals.marginalCovariance(symbol(ord('p'), 0))
        marginals.marginalCovariance(symbol(ord('x'), 0))

        # Check optimized results, should be equal to ground truth
        for i in range(len(truth.cameras)):
            pose_i = result.atPose3(symbol(ord('x'), i))
            self.assertTrue(pose_i.equals(truth.cameras[i].pose(), 1e-5))

        for j in range(len(truth.points)):
            point_j = result.atPoint3(symbol(ord('p'), j))
            self.assertTrue(point_j.equals(truth.points[j], 1e-5))
Example #7
    def test_bundle_adjustment_with_error(self):
        """Test Structure from Motion solution with ill estimated prior value."""

        # Create the set of actual points and poses
        actual_points = []
        actual_poses = []

        # Create the set of ground-truth landmarks
        points = sfm_data.create_points()

        # Create the set of ground-truth poses
        poses = sfm_data.create_poses()

        # Create the nrCameras*nrPoints feature point data input for atrium_sfm()
        feature_data = sfm_data.Data(self.nrCameras, self.nrPoints)

        # Project points back to the camera to generate feature points
        for i, pose in enumerate(poses):
            for j, point in enumerate(points):
                feature_data.J[i][j] = j
                camera = gtsam.PinholeCameraCal3_S2(pose, self.calibration)
                feature_data.Z[i][j] = camera.project(point)

        result = self.sfm.bundle_adjustment(
            feature_data, 2.5, self.rotation_error, self.translation_error)

        # Similarity transform
        s_poses = []
        s_points = []
        _sim3 = sim3.Similarity3()

        for i in range(len(poses)):
            pose_i = result.atPose3(symbol(ord('x'), i))
            s_poses.append(pose_i)

        for j in range(len(points)):
            point_j = result.atPoint3(symbol(ord('p'), j))
            s_points.append(point_j)

        pose_pairs = [[s_poses[2], poses[2]], [s_poses[1], poses[1]], [s_poses[0], poses[0]]]

        _sim3.sim3_pose(pose_pairs)

        s_map = (s_poses, s_points)
        actual_poses, actual_points = _sim3.map_transform(s_map)

        # Compare output poses with ground truth poses
        for i, pose in enumerate(actual_poses):
            self.assert_gtsam_equals(pose, poses[i], 1e-2)

        # Compare output points with ground truth points
        for i, point in enumerate(actual_points):
            self.assert_gtsam_equals(point, points[i], 1e-2)
Example #8
def create_lti_fg(A, B, X0=np.array([]), u=np.array([]), num_time_steps=500):
    '''Creates a factor graph with system dynamics constraints in state-space
            representation:
            x_{t+1} = Ax_t + Bu_t
    Arguments:
        A: nxn State matrix
        B: nxp Input matrix
        X0: initial state (n-vector)
        u: Txp control inputs (optional, if not specified, no control input will
            be used)
        num_time_steps: number of time steps, T, to run the system
    Returns:
        graph, X, U: A factor graph and lists of keys X & U for the states and
            control inputs respectively
    '''
    # Create noise models
    prior_noise = gtsam.noiseModel_Constrained.All(np.size(A, 0))
    dynamics_noise = gtsam.noiseModel_Constrained.All(np.size(A, 0))
    control_noise = gtsam.noiseModel_Constrained.All(1)

    # Create an empty Gaussian factor graph
    graph = gtsam.GaussianFactorGraph()

    # Create the keys corresponding to unknown variables in the factor graph
    X = []
    U = []
    for i in range(num_time_steps):
        X.append(gtsam.symbol(ord('x'), i))
        U.append(gtsam.symbol(ord('u'), i))

    # set initial state as prior
    if X0.size > 0:
        if X0.size != np.size(A, 0):
            raise ValueError("X0 dim does not match state dim")
        graph.add(X[0], np.eye(X0.size), X0, prior_noise)

    # Add dynamics constraint as ternary factor
    #   A.x1 + B.u1 - I.x2 = 0
    for i in range(num_time_steps - 1):
        graph.add(X[i], A, U[i], B, X[i + 1], -np.eye(np.size(A, 0)),
                  np.zeros((np.size(A, 0))), dynamics_noise)

    # Add control inputs
    for i in range(len(u)):
        if np.shape(u) != (num_time_steps, np.size(B, 1)):
            raise ValueError("control input is wrong size")
        graph.add(U[i], np.eye(np.size(B, 1)), u[i, :], control_noise)

    return graph, X, U
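
A hedged usage sketch for create_lti_fg (not from the original source): it simulates a simple discrete system under a constant control input by solving the constrained Gaussian factor graph; the system matrices are illustrative.

import numpy as np

A = np.array([[1., 0.1], [0., 1.]])
B = np.array([[0.], [0.1]])
T = 50
u = np.ones((T, 1))                       # constant control input over T steps

graph, X, U = create_lti_fg(A, B, X0=np.array([0., 0.]), u=u, num_time_steps=T)
result = graph.optimize()                 # VectorValues holding the simulated trajectory
states = np.vstack([result.at(X[i]) for i in range(T)])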
Example #9
def circlePose3(numPoses=8, radius=1.0, symbolChar='\0'):
    """
    circlePose3 generates a set of poses in a circle. This function
    returns those poses inside a gtsam.Values object, with sequential
    keys starting from 0. An optional character may be provided, which
    will be stored in the msb of each key (i.e. gtsam.Symbol).

    We use aerospace/navlab convention, X forward, Y right, Z down
    First pose will be at (R,0,0)
    ^y   ^ X
    |    |
    z-->xZ--> Y  (z pointing towards viewer, Z pointing away from viewer)
    Vehicle at p0 is looking towards y axis (X-axis points towards world y)
    """

    values = gtsam.Values()
    theta = 0.0
    dtheta = 2 * pi / numPoses
    gRo = gtsam.Rot3(
        np.array([[0., 1., 0.], [1., 0., 0.], [0., 0., -1.]], order='F'))
    for i in range(numPoses):
        key = gtsam.symbol(symbolChar, i)
        gti = gtsam.Point3(radius * cos(theta), radius * sin(theta), 0)
        oRi = gtsam.Rot3.Yaw(
            -theta)  # negative yaw goes counterclockwise, with Z down !
        gTi = gtsam.Pose3(gRo.compose(oRi), gti)
        values.insert(key, gTi)
        theta = theta + dtheta
    return values
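
A brief usage sketch for circlePose3 (illustrative, not part of the original snippet). Note that, depending on the GTSAM Python wrapper version, gtsam.symbol may expect ord('c') rather than the single-character string used here.

poses = circlePose3(numPoses=8, radius=2.0, symbolChar='c')
for i in range(8):
    pose_i = poses.atPose3(gtsam.symbol('c', i))
    print(i, pose_i.translation())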
Example #10
def create_graph():
    """Create a basic linear factor graph for testing"""
    graph = gtsam.GaussianFactorGraph()

    x0 = gtsam.symbol(ord('x'), 0)
    x1 = gtsam.symbol(ord('x'), 1)
    x2 = gtsam.symbol(ord('x'), 2)

    BETWEEN_NOISE = gtsam.noiseModel_Diagonal.Sigmas(np.ones(1))
    PRIOR_NOISE = gtsam.noiseModel_Diagonal.Sigmas(np.ones(1))

    graph.add(x1, np.eye(1), x0, -np.eye(1), np.ones(1), BETWEEN_NOISE)
    graph.add(x2, np.eye(1), x1, -np.eye(1), 2 * np.ones(1), BETWEEN_NOISE)
    graph.add(x0, np.eye(1), np.zeros(1), PRIOR_NOISE)

    return graph, (x0, x1, x2)
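
A short sketch of how the test graph above might be solved and inspected (assumed usage, not from the original): the prior fixes x0 at 0, and the two between factors place x1 and x2 at roughly 1 and 3.

graph, (x0, x1, x2) = create_graph()
result = graph.optimize()                 # VectorValues for the linear problem
print(result.at(x0), result.at(x1), result.at(x2))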
Example #11
    def test_dot(self):
        """Check that dot works with position hints."""
        fragment = DiscreteBayesNet()
        fragment.add(Either, [Tuberculosis, LungCancer], "F T T T")
        MyAsia = gtsam.symbol('a', 0), 2  # use a symbol!
        fragment.add(Tuberculosis, [MyAsia], "99/1 95/5")
        fragment.add(LungCancer, [Smoking], "99/1 90/10")

        # Make sure we can *update* position hints
        writer = gtsam.DotWriter()
        ph: dict = writer.positionHints
        ph.update({'a': 2})  # hint at symbol position
        writer.positionHints = ph

        # Check the output of dot
        actual = fragment.dot(writer=writer)
        expected_result = """\
            digraph {
              size="5,5";

              var3[label="3"];
              var4[label="4"];
              var5[label="5"];
              var6[label="6"];
              var6989586621679009792[label="a0", pos="0,2!"];

              var4->var6
              var6989586621679009792->var3
              var3->var5
              var6->var5
            }"""
        self.assertEqual(actual, textwrap.dedent(expected_result))
Example #12
def log_step_df_push2d(tstep, dataframe, data, optimizer):

    num_poses = tstep + 1

    # estimated poses
    poses_graph = optimizer.calculateEstimate()
    obj_pose_vec_graph = np.zeros((num_poses, 3))
    ee_pose_vec_graph = np.zeros((num_poses, 3))

    for i in range(0, num_poses):
        obj_key = gtsam.symbol(ord('o'), i)
        ee_key = gtsam.symbol(ord('e'), i)

        obj_pose2d = poses_graph.atPose2(obj_key)
        ee_pose2d = poses_graph.atPose2(ee_key)

        obj_pose_vec_graph[i, :] = [
            obj_pose2d.x(), obj_pose2d.y(),
            obj_pose2d.theta()
        ]
        ee_pose_vec_graph[i, :] = [
            ee_pose2d.x(), ee_pose2d.y(),
            ee_pose2d.theta()
        ]

    # gt poses
    obj_pose_vec_gt = data['obj_poses_gt'][0:num_poses]
    ee_pose_vec_gt = data['ee_poses_gt'][0:num_poses]

    # add to dataframe
    data_dict = {
        'opt/graph/obj_poses2d': [obj_pose_vec_graph.tolist()],
        'opt/graph/ee_poses2d': [ee_pose_vec_graph.tolist()],
        'opt/gt/obj_poses2d': [obj_pose_vec_gt],
        'opt/gt/ee_poses2d': [ee_pose_vec_gt]
    }

    data_row = pd.DataFrame(data_dict, index=[tstep], dtype=object)
    data_row.index.name = 'tstep'
    dataframe = pd.concat([dataframe, data_row])

    return dataframe
Example #13
    def __init__(self, odom_global, odom_relative, visual_global,
                 visual_relative):
        self.odom_global = odom_global
        self.odom_relative = odom_relative
        self.visual_global = visual_global
        self.visual_relative = visual_relative

        # shorthand symbols
        self.X = lambda i: int(gtsam.symbol(ord('x'), i))

        # Create an empty nonlinear factor graph
        self.graph = gtsam.NonlinearFactorGraph()
Example #14
    def test_VisualISAMExample(self):

        # Create a factor
        poseKey1 = symbol('x', 1)
        poseKey2 = symbol('x', 2)
        trueRotation = Rot3.RzRyRx(0.15, 0.15, -0.20)
        trueTranslation = Point3(+0.5, -1.0, +1.0)
        trueDirection = Unit3(trueTranslation)
        E = EssentialMatrix(trueRotation, trueDirection)
        model = gtsam.noiseModel.Isotropic.Sigma(5, 0.25)
        factor = EssentialMatrixConstraint(poseKey1, poseKey2, E, model)

        #  Create a linearization point at the zero-error point
        pose1 = Pose3(Rot3.RzRyRx(0.00, -0.15, 0.30), Point3(-4.0, 7.0, -10.0))
        pose2 = Pose3(
            Rot3.RzRyRx(0.179693265735950, 0.002945368776519,
                        0.102274823253840),
            Point3(-3.37493895, 6.14660244, -8.93650986))

        expected = np.zeros((5, 1))
        actual = factor.evaluateError(pose1, pose2)
        self.gtsamAssertEquals(actual, expected, 1e-8)
Example #15
    def trajectory_estimator(self, features, visible_map):
        """ Estimate current pose with matched features through GTSAM and update the trajectory
        Parameters:
            features: A Feature Object. Stores all matched Superpoint features.
            visible_map: A Map Object. Stores all matched Landmark points.
            pose: gtsam.pose3 Object. The pose at the last state from the atrium map trajectory.
        Returns:
            current_pose: gtsam.pose3 Object. The current estimated pose.

        Use the input matched features as the projection factors of the graph.
        Use the input visible_map (matched landmark points of the map) as the landmark priors of the graph.
        Use the last pose in the trajectory as the initial estimate for the current pose.
        """
        assert len(self.atrium_map.trajectory) != 0, "Trajectory is empty."
        pose = self.atrium_map.trajectory[-1]

        # Initialize factor graph
        graph = gtsam.NonlinearFactorGraph()
        initialEstimate = gtsam.Values()

        # Add projection factors with matched feature points for all measurements
        measurementNoiseSigma = 1.0
        measurementNoise = gtsam.noiseModel_Isotropic.Sigma(
            2, measurementNoiseSigma)
        for i, feature in enumerate(features.key_point_list):
            graph.add(gtsam.GenericProjectionFactorCal3_S2(
                feature, measurementNoise,
                X(0), P(i), self.calibration))

        # Create initial estimate for the pose
        # Because the robot moves slowly, we can use the previous pose as an initial estimate of the current pose.
        initialEstimate.insert(X(0), pose)

        # Create priors for visual
        # Because the map is known, we use the landmarks from the visible map with nearly zero error as priors.
        pointPriorNoise = gtsam.noiseModel_Isotropic.Sigma(3, 0.01)
        for i, landmark in enumerate(visible_map.landmark_list):
            
            graph.add(gtsam.PriorFactorPoint3(P(i),
                                              landmark, pointPriorNoise))
            initialEstimate.insert(P(i), landmark)

        # Optimization
        optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initialEstimate)
        result = optimizer.optimize()

        # Add current pose to the trajectory
        current_pose = result.atPose3(symbol(ord('x'), 0))
        self.atrium_map.append_trajectory(current_pose)

        return current_pose
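
Note: trajectory_estimator relies on X and P key helpers that are not shown in this snippet. A plausible definition, consistent with the symbol(ord('x'), 0) lookup above and with Examples #22 and #27 on this page, would be:

def X(i):
    """Create key for pose i."""
    return symbol(ord('x'), i)

def P(j):
    """Create key for landmark j."""
    return symbol(ord('p'), j)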
Example #16
    def sim3_transform_map(self, result, img_pose_id_list, landmark_id_list):
        # Similarity transform
        s_poses = []
        s_points = []
        _sim3 = sim3.Similarity3()

        theta = np.radians(30)
        wRc = gtsam.Rot3(
            np.array([[-math.sin(theta), 0,
                       math.cos(theta)],
                      [-math.cos(theta), 0, -math.sin(theta)], [0, 1, 0]]))

        pose_pairs = []
        for i, idx in enumerate(img_pose_id_list):
            pose_i = result.atPose3(symbol(ord('x'), idx))
            s_poses.append(pose_i)
            if i < 2:
                d_pose = gtsam.Pose3(
                    wRc,
                    gtsam.Point3(-math.sin(theta) * 1.58 * idx,
                                 -math.cos(theta) * 1.58 * idx, 1.2))
                pose_pairs.append([s_poses[i], d_pose])

        for idx in landmark_id_list:
            point_j = result.atPoint3(symbol(ord('p'), idx))
            s_points.append(point_j)

        _sim3.align_pose(pose_pairs)
        print('R', _sim3._R)
        print('t', _sim3._t)
        print('s', _sim3._s)

        s_map = (s_poses, s_points)
        d_map = _sim3.map_transform(s_map)

        return _sim3, d_map
Example #17
def log_step_nav2d(tstep, logger, data, optimizer):

    num_poses = tstep + 1

    pose_vec_graph = np.zeros((num_poses, 3))
    pose_vec_gt = np.asarray(data['poses_gt'][0:num_poses])
    poses_graph = optimizer.calculateEstimate()

    for i in range(0, num_poses):
        key = gtsam.symbol(ord('x'), i)
        pose2d = poses_graph.atPose2(key)
        pose_vec_graph[i, :] = [pose2d.x(), pose2d.y(), pose2d.theta()]

    logger.log_step("graph/poses2d", pose_vec_graph, tstep)
    logger.log_step("gt/poses2d", pose_vec_gt, tstep)

    return logger
Example #18
    def __init__(self, odom_global, odom_relative, visual_global,
                 visual_relative):
        self.odom_global = self.point_to_pose(odom_global)
        self.odom_relative = self.point_to_pose(odom_relative)
        self.visual_global = self.point_to_pose(visual_global)
        self.visual_relative = self.point_to_pose(visual_relative)

        # print(odom_global)
        # self.draw_trajectories([self.odom_global, self.visual_global], ['b', 'r'], 2)
        # exit()

        # print(np.asarray(visual_global))
        # exit()
        # print(odom_relative)
        # print(visual_global)
        # print(visual_relative)

        # shorthand symbols:
        self.X = lambda i: int(gtsam.symbol(ord('x'), i))
Example #19
def step(data, isam, result, truth, currPoseIndex):
    '''
    Do one iSAM update step.
    @param[in] data: measurement data (odometry and visual measurements and their noiseModels)
    @param[in/out] isam: current isam object, will be updated
    @param[in/out] result: current result object, will be updated
    @param[in] truth: ground truth data, used to initialize new variables
    @param[in] currPoseIndex: index of the current pose
    '''
    # iSAM expects us to give it a new set of factors
    # along with initial estimates for any new variables introduced.
    newFactors = gtsam.NonlinearFactorGraph()
    initialEstimates = gtsam.Values()

    # Add odometry
    prevPoseIndex = currPoseIndex - 1
    odometry = data.odometry[prevPoseIndex]
    newFactors.add(
        gtsam.BetweenFactorPose3(symbol('x', prevPoseIndex),
                                 symbol('x', currPoseIndex), odometry,
                                 data.noiseModels.odometry))

    # Add visual measurement factors and initializations as necessary
    for k in range(len(data.Z[currPoseIndex])):
        zij = data.Z[currPoseIndex][k]
        j = data.J[currPoseIndex][k]
        jj = symbol('l', j)
        newFactors.add(
            gtsam.GenericProjectionFactorCal3_S2(zij,
                                                 data.noiseModels.measurement,
                                                 symbol('x', currPoseIndex),
                                                 jj, data.K))
        # TODO: initialize with something other than truth
        if not result.exists(jj) and not initialEstimates.exists(jj):
            lmInit = truth.points[j]
            initialEstimates.insert(jj, lmInit)

    # Initial estimates for the new pose.
    prevPose = result.atPose3(symbol('x', prevPoseIndex))
    initialEstimates.insert(symbol('x', currPoseIndex),
                            prevPose.compose(odometry))

    # Update ISAM
    # figure(1)tic
    isam.update(newFactors, initialEstimates)
    # t=toc plot(frame_i,t,'r.') tic
    newResult = isam.calculateEstimate()
    # t=toc plot(frame_i,t,'g.')

    return isam, newResult
Example #20
def log_step_df_nav2d(tstep, dataframe, data, optimizer):

    num_poses = tstep + 1

    pose_vec_graph = np.zeros((num_poses, 3))
    pose_vec_gt = np.asarray(data['poses_gt'][0:num_poses])
    poses_graph = optimizer.calculateEstimate()

    for i in range(0, num_poses):
        key = gtsam.symbol(ord('x'), i)
        pose2d = poses_graph.atPose2(key)
        pose_vec_graph[i, :] = [pose2d.x(), pose2d.y(), pose2d.theta()]
    
    # add to dataframe
    data_dict = {'opt/graph/poses2d': [pose_vec_graph.tolist()], 'opt/gt/poses2d': [pose_vec_gt.tolist()]}
    data_row = pd.DataFrame(data_dict, index=[tstep], dtype=object)
    data_row.index.name = 'tstep'

    dataframe = pd.concat([dataframe, data_row])

    return dataframe
Example #21
def symbol(name: str, index: int) -> int:
    """ helper for creating a symbol without explicitly casting 'name' from str to int """
    return gtsam.symbol(ord(name), index)
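
For instance, with this helper a pose key and a landmark key can be created without the explicit ord() cast (illustrative usage):

x0 = symbol('x', 0)   # same key as gtsam.symbol(ord('x'), 0)
l1 = symbol('l', 1)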
Example #22
def P(j):  # pylint: disable=invalid-name
    """Create key for landmark j."""
    return symbol(ord('p'), j)
Example #23
def X(i):  # pylint: disable=invalid-name
    """Create key for pose i."""
    return symbol(ord('x'), i)
Example #24
def symbol(letter, index):
    return int(gtsam.symbol(ord(letter), index))
Example #25
class System(object):
    """
    Python front-end to build graph/estimate from dataset. 
    """
    X = lambda i: int(gtsam.symbol(ord('x'), i))
    Q = lambda i: int(gtsam.symbol(ord('q'), i))

    @staticmethod
    def run(sequence, config):

        # build graph / estimate
        graph, initial_estimate = System.build_graph(sequence, config)

        # draw initial system
        plotting = MPLDrawing('initial_problem')
        plotting.plot_system(graph, initial_estimate)

        # optimize using c++ back-end
        estimate = System.optimize(graph, initial_estimate, sequence.calibration)

        # draw estimation
        plotting = MPLDrawing('final_solution')
        plotting.plot_system(graph, estimate)

        # extract quadrics / trajectory 
        estimated_trajectory = Trajectory.from_values(estimate)
        estimated_quadrics = Quadrics.from_values(estimate)

        # evaluate results
        initial_ATE_H = Evaluation.evaluate_trajectory(Trajectory.from_values(initial_estimate), sequence.true_trajectory, horn=True)[0]
        estimate_ATE_H = Evaluation.evaluate_trajectory(estimated_trajectory, sequence.true_trajectory, horn=True)[0]
        initial_ATE = Evaluation.evaluate_trajectory(Trajectory.from_values(initial_estimate), sequence.true_trajectory, horn=False)[0]
        estimate_ATE = Evaluation.evaluate_trajectory(estimated_trajectory, sequence.true_trajectory, horn=False)[0]
        print('Initial ATE w/ horn alignment: {}'.format(initial_ATE_H))
        print('Final ATE w/ horn alignment: {}'.format(estimate_ATE_H))
        print('Initial ATE w/ weak alignment: {}'.format(initial_ATE))
        print('Final ATE w/ weak alignment: {}'.format(estimate_ATE))

        # # plot results
        trajectories = [Trajectory.from_values(initial_estimate), estimated_trajectory, sequence.true_trajectory]
        maps = [Quadrics.from_values(initial_estimate), estimated_quadrics, sequence.true_quadrics]
        colors = ['r', 'm', 'g']; names = ['initial_estimate', 'final_estimate', 'ground_truth']
        plotting.plot_result(trajectories, maps, colors, names)
           
    
    @staticmethod
    def optimize(graph, initial_estimate, calibration):

        # create optimizer parameters
        params = gtsam.LevenbergMarquardtParams()
        params.setVerbosityLM("SUMMARY")    # SILENT = 0, SUMMARY, TERMINATION, LAMBDA, TRYLAMBDA, TRYCONFIG, DAMPED, TRYDELTA : VALUES, ERROR 
        params.setMaxIterations(100)
        params.setlambdaInitial(1e-5)
        params.setlambdaUpperBound(1e10)
        params.setlambdaLowerBound(1e-8)
        params.setRelativeErrorTol(1e-5)
        params.setAbsoluteErrorTol(1e-5)
  
        # create optimizer
        optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initial_estimate, params)

        # run optimizer
        print('starting optimization')
        estimate = optimizer.optimize()
        print('optimization finished')

        return estimate


    @staticmethod
    def build_graph(sequence, config):
        """
        Adds noise to sequence variables / measurements. 
        Returns graph, initial_estimate
        """

        # create empty graph / estimate
        graph = gtsam.NonlinearFactorGraph()
        initial_estimate = gtsam.Values()

        # declare noise models
        prior_noise = gtsam.noiseModel_Diagonal.Sigmas(np.array([float(config['base']['PRIOR_SIGMA'])]*6, dtype=float))
        odometry_noise = gtsam.noiseModel_Diagonal.Sigmas(np.array([float(config['base']['ODOM_SIGMA'])]*6, dtype=float))
        bbox_noise = gtsam.noiseModel_Diagonal.Sigmas(np.array([float(config['base']['BOX_SIGMA'])]*4, dtype=float))

        # get noisy odometry / boxes 
        true_odometry = sequence.true_trajectory.as_odometry()
        noisy_odometry = true_odometry.add_noise(mu=0.0, sd=float(config['base']['ODOM_NOISE']))
        noisy_boxes = sequence.true_boxes.add_noise(mu=0.0, sd=float(config['base']['BOX_NOISE']))

        # initialize trajectory
        # TODO: ensure aligned in same reference frame
        initial_trajectory = noisy_odometry.as_trajectory(sequence.true_trajectory.data()[0])
        # initial_trajectory = noisy_odometry.as_trajectory()

        # initialize quadrics
        # NOTE: careful initializing with true quadrics and noise traj as it may not make sense
        if config['base']['Initialization'] == 'SVD':
            initial_quadrics = System.initialize_quadrics(initial_trajectory, noisy_boxes, sequence.calibration)
        elif config['base']['Initialization'] == 'Dataset':
            initial_quadrics = sequence.true_quadrics

        # add prior pose
        prior_factor = gtsam.PriorFactorPose3(System.X(0), initial_trajectory.at(0), prior_noise)
        graph.add(prior_factor)

        # add odometry measurements
        for (start_key, end_key), rpose in noisy_odometry.items():
            odometry_factor = gtsam.BetweenFactorPose3(System.X(start_key), System.X(end_key), rpose, odometry_noise)
            graph.add(odometry_factor)

        # add initial pose estimates
        for pose_key, pose in initial_trajectory.items():
            initial_estimate.insert(System.X(pose_key), pose)

        # add valid box measurements
        valid_objects = []
        initialized_quadrics = initial_quadrics.keys()
        for object_key in np.unique(noisy_boxes.object_keys()):

            # add if quadric initialized
            if object_key in initialized_quadrics:
                
                # get all views of quadric
                object_boxes = noisy_boxes.at_object(object_key)

                # add if enough views
                if len(object_boxes) > 3:

                    # add measurements
                    valid_objects.append(object_key)
                    for (pose_key, t), box in object_boxes.items():
                        bbf = quadricslam.BoundingBoxFactor(box, sequence.calibration, System.X(pose_key), System.Q(object_key), bbox_noise)
                        bbf.addToGraph(graph)

        # add initial landmark estimates
        for object_key, quadric in initial_quadrics.items():

            # add if seen > 3 times
            if (object_key in valid_objects):
                quadric.addToValues(initial_estimate, System.Q(object_key))

        return graph, initial_estimate

    @staticmethod
    def initialize_quadrics(trajectory, boxes, calibration):
        """ Uses SVD to initialize quadrics from measurements """
        quadrics = Quadrics()

        # loop through object keys
        for object_key in np.unique(boxes.object_keys()):

            # get all detections at object key
            object_boxes = boxes.at_object(object_key)

            # get the poses associated with each detection 
            pose_keys = object_boxes.pose_keys()
            poses = trajectory.at_keys(pose_keys)

            # ensure quadric seen from > 3 views
            if len(np.unique(pose_keys)) < 3:
                continue

            # initialize quadric from views using SVD
            quadric_matrix = System.quadric_SVD(poses, object_boxes, calibration)

            # constrain generic dual quadric to be ellipsoidal 
            quadric = quadricslam.ConstrainedDualQuadric.constrain(quadric_matrix)

            # check quadric is okay
            if (System.is_okay(quadric, poses, calibration)):
                quadrics.add(quadric, object_key)

        return quadrics

    @staticmethod
    def quadric_SVD(poses, object_boxes, calibration):
        """ calculates quadric_matrix using SVD """

        # iterate through box/pose data
        planes = []
        for box, pose in zip(object_boxes.data(), poses.data()):

            # calculate boxes lines
            lines = box.lines()

            # convert Vector3Vector to list
            lines = [lines.at(i) for i in range(lines.size())]

            # calculate projection matrix
            P = quadricslam.QuadricCamera.transformToImage(pose, calibration).transpose()

            # project lines to planes
            planes += [P @ line for line in lines]

        # create A matrix
        A = np.asarray([np.array([p[0]**2, 2*(p[0]*p[1]), 2*(p[0]*p[2]), 2*(p[0]*p[3]),
                                  p[1]**2, 2*(p[1]*p[2]), 2*(p[1]*p[3]),
                                  p[2]**2, 2*(p[2]*p[3]),
                                  p[3]**2]) for p in planes])

        # solve SVD for Aq = 0, which should be equal to p'Qp = 0
        _,_,V = np.linalg.svd(A, full_matrices=True)
        q = V.T[:, -1]

        # construct quadric
        dual_quadric = np.array([[q[0], q[1], q[2], q[3]],
                                [q[1], q[4], q[5], q[6]],
                                [q[2], q[5], q[7], q[8]],
                                [q[3], q[6], q[8], q[9]]])

        return dual_quadric

    @staticmethod
    def is_okay(quadric, poses, calibration):
        """
        Checks quadric is valid:
            quadric constrained correctly
            parallax > threshold
            reprojections valid in each frame
                quadric in front of camera : positive depth
                camera outside quadric
                conic is an ellipse 
            ensure views provide enough DOF (due to edges / out of frame)
        """
        for pose in poses:

            # quadric must have positive depth
            if quadric.isBehind(pose):
                return False

            # camera pose must be outside quadric 
            if quadric.contains(pose):
                return False

            # conic must be valid and elliptical 
            conic = quadricslam.QuadricCamera.project(quadric, pose, calibration)
            if conic.isDegenerate():
                return False
            if not conic.isEllipse():
                return False
                
        return True
Example #26
keys.push_back(2)
print('size: ', keys.size())
print(keys.at(0))
print(keys.at(1))

noise = gtsam.noiseModel_Isotropic.Precision(2, 3.0)
noise.print_('noise:')
print('noise print:', noise)
f = gtsam.JacobianFactor(7, np.ones([2, 2]), model=noise, b=np.ones(2))
print('JacobianFactor(7):\n', f)
print("A = ", f.getA())
print("b = ", f.getb())

f = gtsam.JacobianFactor(np.ones(2))
f.print_('jacobian b_in:')

print("JacobianFactor initialized with b_in:", f)

diag = gtsam.noiseModel_Diagonal.Sigmas(np.array([1., 2., 3.]))
fv = gtsam.PriorFactorVector(1, np.array([4., 5., 6.]), diag)
print("priorfactorvector: ", fv)

print("base noise: ", fv.get_noiseModel())
print("casted to gaussian2: ", gtsam.dynamic_cast_noiseModel_Diagonal_noiseModel_Base(
    fv.get_noiseModel()))

X = gtsam.symbol(65, 19)
print(X)
print(gtsam.symbolChr(X))
print(gtsam.symbolIndex(X))
Example #27
def X(x):
    return gtsam.symbol(ord('x'), x)
Example #28
def L(l):
    return gtsam.symbol(ord('l'), l)
Example #29
import sys
import numpy as np
import matplotlib.pyplot as plt

# import gtsam and extension
import gtsam
import quadricslam

if __name__ == '__main__':

    # declare noise estimates
    ODOM_SIGMA = 0.01
    BOX_SIGMA = 3

    # define shortcuts for symbols
    X = lambda i: int(gtsam.symbol(ord('x'), i))
    Q = lambda i: int(gtsam.symbol(ord('q'), i))

    # create empty graph / estimate
    graph = gtsam.NonlinearFactorGraph()
    initial_estimate = gtsam.Values()

    # define calibration
    calibration = gtsam.Cal3_S2(525.0, 525.0, 0.0, 160.0, 120.0)

    # define noise models
    prior_noise = gtsam.noiseModel_Diagonal.Sigmas(
        np.array([1e-1] * 6, dtype=float))
    odometry_noise = gtsam.noiseModel_Diagonal.Sigmas(
        np.array([ODOM_SIGMA] * 3 + [ODOM_SIGMA] * 3, dtype=float))
    bbox_noise = gtsam.noiseModel_Diagonal.Sigmas(
Example #30
def IMU_example():
    """Run iSAM 2 example with IMU factor."""

    # Start with a camera on x-axis looking at origin
    radius = 30
    up = gtsam.Point3(0, 0, 1)
    target = gtsam.Point3(0, 0, 0)
    position = gtsam.Point3(radius, 0, 0)
    camera = gtsam.SimpleCamera.Lookat(position, target, up, gtsam.Cal3_S2())
    pose_0 = camera.pose()

    # Create the set of ground-truth landmarks and poses
    angular_velocity = math.radians(180)  # rad/sec
    delta_t = 1.0/18  # makes for 10 degrees per step

    angular_velocity_vector = vector3(0, -angular_velocity, 0)
    linear_velocity_vector = vector3(radius * angular_velocity, 0, 0)
    scenario = gtsam.ConstantTwistScenario(
        angular_velocity_vector, linear_velocity_vector, pose_0)

    # Create a factor graph
    newgraph = gtsam.NonlinearFactorGraph()

    # Create (incremental) ISAM2 solver
    isam = gtsam.ISAM2()

    # Create the initial estimate to the solution
    # Intentionally initialize the variables off from the ground truth
    initialEstimate = gtsam.Values()

    # Add a prior on pose x0. This indirectly specifies where the origin is.
    # 30cm std on x,y,z 0.1 rad on roll,pitch,yaw
    noise = gtsam.noiseModel_Diagonal.Sigmas(
        np.array([0.3, 0.3, 0.3, 0.1, 0.1, 0.1]))
    newgraph.push_back(gtsam.PriorFactorPose3(X(0), pose_0, noise))

    # Add imu priors
    biasKey = gtsam.symbol(ord('b'), 0)
    biasnoise = gtsam.noiseModel_Isotropic.Sigma(6, 0.1)
    biasprior = gtsam.PriorFactorConstantBias(biasKey, gtsam.imuBias_ConstantBias(),
                                              biasnoise)
    newgraph.push_back(biasprior)
    initialEstimate.insert(biasKey, gtsam.imuBias_ConstantBias())
    velnoise = gtsam.noiseModel_Isotropic.Sigma(3, 0.1)

    # Calculate with correct initial velocity
    n_velocity = vector3(0, angular_velocity * radius, 0)
    velprior = gtsam.PriorFactorVector(V(0), n_velocity, velnoise)
    newgraph.push_back(velprior)
    initialEstimate.insert(V(0), n_velocity)

    accum = gtsam.PreintegratedImuMeasurements(PARAMS)

    # Simulate poses and imu measurements, adding them to the factor graph
    for i in range(80):
        t = i * delta_t  # simulation time
        if i == 0:  # First time add two poses
            pose_1 = scenario.pose(delta_t)
            initialEstimate.insert(X(0), pose_0.compose(DELTA))
            initialEstimate.insert(X(1), pose_1.compose(DELTA))
        elif i >= 2:  # Add more poses as necessary
            pose_i = scenario.pose(t)
            initialEstimate.insert(X(i), pose_i.compose(DELTA))

        if i > 0:
            # Add Bias variables periodically
            if i % 5 == 0:
                biasKey += 1
                factor = gtsam.BetweenFactorConstantBias(
                    biasKey - 1, biasKey, gtsam.imuBias_ConstantBias(), BIAS_COVARIANCE)
                newgraph.add(factor)
                initialEstimate.insert(biasKey, gtsam.imuBias_ConstantBias())

            # Predict acceleration and gyro measurements in (actual) body frame
            nRb = scenario.rotation(t).matrix()
            bRn = np.transpose(nRb)
            measuredAcc = scenario.acceleration_b(t) - np.dot(bRn, n_gravity)
            measuredOmega = scenario.omega_b(t)
            accum.integrateMeasurement(measuredAcc, measuredOmega, delta_t)

            # Add Imu Factor
            imufac = gtsam.ImuFactor(
                X(i - 1), V(i - 1), X(i), V(i), biasKey, accum)
            newgraph.add(imufac)

            # insert new velocity, which is wrong
            initialEstimate.insert(V(i), n_velocity)
            accum.resetIntegration()

        # Incremental solution
        isam.update(newgraph, initialEstimate)
        result = isam.calculateEstimate()
        ISAM2_plot(result)

        # reset
        newgraph = gtsam.NonlinearFactorGraph()
        initialEstimate.clear()