Example #1
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)
            the jacobian of h wrt. eta.
        """

        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        numM = m.shape[1]

        Rot = rotmat2d(-x[2])

        delta_m = m - x[:2, None]
        zc = delta_m - Rot @ self.sensor_offset[:, None]

        zpred = self.h(eta).reshape(-1, 2).T
        zr = zpred[0]

        Rpihalf = rotmat2d(np.pi / 2)

        # In what follows you can be clever and avoid making this for all the landmarks you _know_
        # you will not detect (the maximum range should be available from the data).
        # But keep it simple to begin with.

        # Allocate H and set submatrices as memory views into H
        # You may or may not want to do this like this
        H = np.zeros(
            (2 * numM, 3 + 2 * numM))  # TODO, see eq (11.15), (11.16), (11.17)
        Hx = H[:, :3]  # slice view, setting elements of Hx will set H as well
        Hm = H[:, 3:]  # slice view, setting elements of Hm will set H as well

        # proposed way is to go through landmarks one by one
        jac_z_cb = -np.eye(
            2, 3)  # preallocate and update this for some speed gain if looping
        for i in range(numM):  # But this whole loop can be vectorized
            ind = 2 * i  # starting position of the ith landmark into H
            inds = slice(ind,
                         ind + 2)  # the inds slice for the ith landmark into H
            jac_z_cb[:, 2] = -Rpihalf @ delta_m[:, i]

            Hx[ind] = (zc[:, i] / zr[i]) @ jac_z_cb
            Hx[ind + 1] = (zc[:, i] / zr[i]**2) @ Rpihalf.T @ jac_z_cb

            Hm[inds, inds] = -Hx[inds, :2]

        assert (
            H.shape == (2 * numM, 3 + 2 * numM)
        ), "EKFSLAM.H: Wrong shape on calculated H"  # TODO: You can set some assertions here to make sure that some of the structure in H is correct
        return H
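All of the snippets on this page are methods of an EKFSLAM class and lean on a couple of helpers that are never shown, most notably rotmat2d and utils.wrapToPi. As a point of reference, here is a minimal standalone sketch of what these helpers are assumed to look like (the convention implied by the examples is that rotmat2d(psi) rotates body coordinates into world coordinates, so rotmat2d(-psi) rotates world into body):

import numpy as np

def rotmat2d(angle: float) -> np.ndarray:
    # 2D rotation matrix; rotmat2d(psi) maps body to world (assumed convention)
    c, s = np.cos(angle), np.sin(angle)
    return np.array([[c, -s],
                     [s, c]])

def wrapToPi(angle: float) -> float:
    # wrap an angle into [-pi, pi); the examples call this as utils.wrapToPi
    return (angle + np.pi) % (2 * np.pi) - np.pi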
Example #2
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)
            the jacobian of h wrt. eta.
        """

        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        numM = m.shape[1]

        Rot = rotmat2d(x[2])
        Rpihalf = rotmat2d(np.pi / 2)

        # Relative position of landmark to robot in world frame:
        delta_m = m - x[:2].reshape((2, 1))

        zc = (
            delta_m.T - Rot @ self.sensor_offset
        ).T  # (2, #measurements), each measured position in cartesian coordinates like
        # [x coordinates;
        #  y coordinates]

        H = np.zeros((2 * numM, 3 + 2 * numM))

        z = np.zeros((numM, 1))
        ones = np.ones((numM, 1))
        Hx_tops = -np.concatenate(
            ((1 / la.norm(zc, axis=0) * delta_m).T, z), axis=1)
        Hx_bottoms = np.concatenate(
            ((1 / la.norm(zc, axis=0)**2 * delta_m).T @ Rpihalf, -ones),
            axis=1)
        Hx = np.concatenate((Hx_tops, Hx_bottoms), axis=1)
        Hx = Hx.reshape(-1, 3)

        Hm = -Hx[:, :2]
        Hm = Hm.reshape(numM, -1, 2)

        H[:, :3] = Hx
        H[:, 3:] = la.block_diag(*Hm)

        return H
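The H implementations on this page differ in small ways, so a numerical cross-check against h is a cheap way to validate any of them. Below is a standalone sketch (not part of the original examples) that compares slam.H(eta) with a central-difference approximation of slam.h, where slam is any object exposing h and H as above:

import numpy as np

def check_H_against_h(slam, eta, eps=1e-6):
    # compare the analytic measurement Jacobian with central differences of h
    H_analytic = slam.H(eta)
    H_numeric = np.zeros_like(H_analytic)
    for i in range(eta.size):
        d = np.zeros_like(eta, dtype=float)
        d[i] = eps
        H_numeric[:, i] = (slam.h(eta + d) - slam.h(eta - d)) / (2 * eps)
    # note: bearing rows can differ by 2*pi if a bearing wraps at +-pi
    return np.abs(H_analytic - H_numeric).max()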
Example #3
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)
            the jacobian of h wrt. eta.
        """
        # extract states and map
        x = eta[:3]
        # reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T
        numM = m.shape[1]
        Rot = rotmat2d(-x[2])

        delta_m = m - x[:2, None]
        zc = delta_m - Rot @ self.sensor_offset[:, None]
        zpred = self.h(eta).reshape(-1, 2).T  # predicted measurements
        zr = zpred[0]  # ranges

        # Allocate H and set submatrices as memory views into H
        H = np.zeros((2 * numM, 3 + 2 * numM))
        Hx = H[:, :3]
        Hm = H[:, 3:]

        Rpihalf = rotmat2d(np.pi / 2)
        jac_z_cb = -np.eye(2, 3)
        #jac_z_cb[:, 2] = -Rpihalf @ delta_m # [-I_2 -Rph(m^i - rho_k)]
        for i in range(numM):  # But this whole loop can be vectorized
            """if zr[i] > self.max_range:
                Hx[ind] = zc[:, i]# @ jac_z_cb / zr[i]
                Hx[ind + 1] = zc[:, i]# @ Rpihalf.T @ jac_z_cb / zr[i]**2
                Hm[inds, inds] = -Hx[inds, :2]
                continue"""
            ind = 2 * i  # starting position of the ith landmark into H
            inds = slice(ind,
                         ind + 2)  # the inds slice for the ith landmark into H

            jac_z_cb[:, 2] = -Rpihalf @ delta_m[:, i]  # > do this before loop
            Hx[ind] = zc[:, i] @ jac_z_cb / zr[i]
            Hx[ind + 1] = zc[:, i] @ Rpihalf.T @ jac_z_cb / zr[i]**2
            Hm[inds, inds] = -Hx[inds, :2]

        assert (H.shape == (
            2 * numM, 3 + 2 * numM)), "EKFSLAM.H: Wrong shape on calculated H"
        return H
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.
        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.
        Returns
        -------
        np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)
            the jacobian of h wrt. eta.
        """

        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        numM = m.shape[1]

        Rot = rotmat2d(-x[2])

        delta_m = m - x[:2, None]
        zc = delta_m - Rot @ self.sensor_offset[:, None]

        zpred = self.h(eta).reshape(-1, 2).T
        zr = zpred[0]

        Rpihalf = rotmat2d(np.pi / 2)

        # Allocate H and set submatrices as memory views into H
        H = np.zeros(
            (2 * numM, 3 + 2 * numM))  # TODO, see eq (11.15), (11.16), (11.17)
        Hx = H[:, :3]  # slice view, setting elements of Hx will set H as well
        Hm = H[:, 3:]  # slice view, setting elements of Hm will set H as well

        jac_z_cb = -np.eye(2, 3)
        for i in range(numM):
            ind = 2 * i  # starting position of the ith landmark into H
            inds = slice(ind,
                         ind + 2)  # the inds slice for the ith landmark into H
            jac_z_cb[:, 2] = -Rpihalf @ delta_m[:, i]

            Hx[ind] = (zc[:, i] / zr[i]) @ jac_z_cb
            Hx[ind + 1] = (zc[:, i] / zr[i]**2) @ Rpihalf.T @ jac_z_cb

            Hm[inds, inds] = -Hx[inds, :2]

        assert (
            H.shape == (2 * numM, 3 + 2 * numM)
        ), "EKFSLAM.H: Wrong shape on calculated H"  # TODO: You can set some assertions here to make sure that some of the structure in H is correct
        return H
Example #5
    def h(self, eta: np.ndarray) -> np.ndarray:
        """Predict all the landmark positions in sensor frame.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks,)
            The landmarks in the sensor frame.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        Rot = rotmat2d(-x[2])

        # None as index adds an axis with size 1 at that position.
        # Numpy broadcasts size 1 dimensions to any size when needed
        # TODO, relative position of landmark to sensor on robot in world frame
        # mi − ρk − R(ψk)L. Where ρk = x[:2], mi = m, R(ψk) = Rot, L = self.sensor_offset
        delta_m = m - x[:2].reshape(
            2, 1
        ) - rotmat2d(x[2]) @ (self.sensor_offset).reshape(
            2, 1
        )  # TODO, relative position of landmark to sensor on robot in world frame

        # TODO, predicted measurements in cartesian coordinates, beware sensor offset for VP
        zpredcart = np.array([Rot @ delta_mi for delta_mi in delta_m.T])

        # TODO, ranges
        zpred_r = np.array([la.norm(mi) for mi in delta_m.T])
        # TODO, bearings
        zpred_theta = np.array([np.arctan2(mi[1], mi[0]) for mi in zpredcart])
        # TODO, the two arrays above stacked on top of each other vertically like
        zpred = np.vstack([zpred_r, zpred_theta])
        # [ranges;
        #  bearings]
        # into shape (2, #lmrk)

        zpred = zpred.T.ravel(
        )  # stack measurements along one dimension, [range1 bearing1 range2 bearing2 ...]

        assert (zpred.ndim == 1 and zpred.shape[0]
                == eta.shape[0] - 3), "SLAM.h: Wrong shape on zpred"
        return zpred
Example #6
    def h(self, eta: np.ndarray) -> np.ndarray:
        """Predict all the landmark positions in sensor frame.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks,)
            The landmarks in the sensor frame.
        """
        #print('h')
        # extract states and map
        x = eta[:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T
        Rot = rotmat2d(-x[2])

        # relative position of landmark to sensor on robot in world frame
        delta_m = m - x[:2, None] - Rot.T @ self.sensor_offset[:, None]
        # predicted measurements in cartesian coordinates
        zpredcart = Rot @ delta_m
        zpred_r = la.norm(zpredcart, axis=0)  # ranges
        zpred_theta = np.arctan2(zpredcart[1], zpredcart[0])  # bearings

        zpred = np.stack((zpred_r, zpred_theta))
        zpred = zpred.T.ravel()  # stack measurements along one dimension

        assert (zpred.ndim == 1 and zpred.shape[0]
                == eta.shape[0] - 3), "SLAM.h: Wrong shape on zpred"
        return zpred
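For intuition about the returned ordering [range1, bearing1, range2, bearing2, ...], here is a small hand-checkable case (a standalone sketch with zero heading and zero sensor offset, so the world and body frames coincide):

import numpy as np

x = np.array([0.0, 0.0, 0.0])             # robot at the origin, heading zero
m = np.array([[1.0, 1.0], [2.0, 0.0]]).T  # landmarks (1, 1) and (2, 0) as columns

delta_m = m - x[:2, None]                 # no sensor offset in this toy case
zpred_r = np.linalg.norm(delta_m, axis=0)
zpred_theta = np.arctan2(delta_m[1], delta_m[0])
zpred = np.stack((zpred_r, zpred_theta)).T.ravel()
# zpred is approximately [1.414, 0.785, 2.0, 0.0] = [r1, b1, r2, b2]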
Example #7
    def h(self, eta: np.ndarray) -> np.ndarray:
        """Predict all the landmark positions in sensor frame.
        Parameters
         eta : (3 + 2 * #landmarks,) The robot state and landmarks stacked.
        Returns
         np.ndarray, shape=(2 * #landmarks,) The landmarks in the sensor frame.
        """
        x = eta[:3]
        rho = x[:2]

        m = eta[3:].reshape((-1, 2)).T
        R = rotmat2d(-x[2])  # world->body
        # R.T: body->world

        # relative position of landmark to sensor on robot (world)
        delta_m = m - rho.reshape(2, 1)  # No sensor offset here

        # predicted measurements in cartesian coordinates
        zpredcart = R @ delta_m - self.sensor_offset.reshape(2, 1)

        # Converting cartesian body -> polar body
        zpred_r = np.apply_along_axis(np.linalg.norm, axis=0, arr=zpredcart)
        zpred_theta = utils.wrapToPi(np.arctan2(zpredcart[1], zpredcart[0]))
        zpred = np.vstack((zpred_r, zpred_theta))

        # stack measurements along one dimension, [range1 bearing1 range2 bearing2 ...]
        zpred = zpred.T.ravel()

        assert (zpred.ndim == 1 and zpred.shape[0]
                == eta.shape[0] - 3), "SLAM.h: Wrong shape on zpred"
        return zpred
Example #8
    def h(self, eta: np.ndarray) -> np.ndarray:
        """Predict all the landmark positions in sensor frame.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks,)
            The landmarks in the sensor frame.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        Rot = rotmat2d(-x[2])

        # None as index adds an axis with size 1 at that position.
        # Numpy broadcasts size 1 dimensions to any size when needed
        delta_m = m - (x[:2, None] + Rot.T @ self.sensor_offset[:, None])
        zpredcart = Rot @ delta_m
        zpred_r = np.linalg.norm(zpredcart, axis=0)
        z_pred_theta = np.arctan2(zpredcart[1], zpredcart[0])
        zpred = np.vstack((zpred_r, z_pred_theta))

        zpred = zpred.T.ravel(
        )  # stack measurements along one dimension, [range1 bearing1 range2 bearing2 ...]

        assert (zpred.ndim == 1 and zpred.shape[0]
                == eta.shape[0] - 3), "SLAM.h: Wrong shape on zpred"
        return zpred
Example #9
    def f(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:
        """Add the odometry u to the robot state x.

        Parameters
        ----------
        x : np.ndarray, shape=(3,)
            the robot state
        u : np.ndarray, shape=(3,)
            the odometry

        Returns
        -------
        np.ndarray, shape = (3,)
            the predicted state
        """
        """
        psi_prev = x[2]
        psi_prev = utils.wrapToPi(psi_prev)
        x_pred = np.zeros((3,))
        x_pred[0] = x[0] + u[0]*np.cos(psi_prev)-u[1]*np.sin(psi_prev) # TODO, eq (11.7). Should wrap heading angle between (-pi, pi), see utils.wrapToPi
        x_pred[1] = x[1] + u[0]*np.sin(psi_prev) + u[1]*np.cos(psi_prev)
        x_pred[2] += u[2]
        x_pred[2] = utils.wrapToPi(x_pred[2])
        """

        x_pred = np.hstack(
            [x[0:2] + rotmat2d(x[2]) @ u[:2],
             utils.wrapToPi(x[2] + u[2])])

        assert x_pred.shape == (3, ), "EKFSLAM.f: wrong shape for xpred"
        return x_pred
Example #10
    def f(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:
        """Add the odometry u to the robot state x.

        Parameters
        ----------
        x : np.ndarray, shape=(3,)
            the robot state
        u : np.ndarray, shape=(3,)
            the odometry

        Returns
        -------
        np.ndarray, shape = (3,)
            the predicted state
        """

        # xpred = np.zeros(3,)
        # xpred[0] = x[0] + u[0]*np.cos(x[2]) - u[1]*np.sin(x[2])
        # xpred[1] = x[1] + u[0]*np.sin(x[2]) + u[1]*np.cos(x[2])
        # xpred[2] = utils.wrapToPi(x[2] + u[2]) #eq (11.7). Should wrap heading angle between (-pi, pi), see utils.wrapToPi
        pos = x[:2] + rotmat2d(x[2]) @ u[:2]
        yaw_angle = utils.wrapToPi(x[2] + u[2])

        xpred = np.array([*pos, yaw_angle])
        assert xpred.shape == (3, ), "EKFSLAM.f: wrong shape for xpred"
        return xpred
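A quick numeric sanity check of f (a standalone sketch, reusing the rotmat2d and wrapToPi sketches from further up): with heading pi/2 the body x-axis points along the world y-axis, so driving one meter forward should move the robot from (0, 0) to (0, 1) without changing the heading.

import numpy as np

x = np.array([0.0, 0.0, np.pi / 2])   # pose: x, y, heading
u = np.array([1.0, 0.0, 0.0])         # odometry: forward, sideways, heading change

pos = x[:2] + rotmat2d(x[2]) @ u[:2]  # rotate the body-frame step into the world frame
yaw = wrapToPi(x[2] + u[2])
xpred = np.array([*pos, yaw])
# xpred is approximately [0.0, 1.0, 1.5708]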
Example #11
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.
        Parameters:
         eta : np.ndarray, shape=(3 + 2 * #landmarks,) The robot state and landmarks stacked.
        Returns:
         np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks) the jacobian of h wrt. eta.
        """

        x = eta[:3]
        m = eta[3:].reshape(
            (-1,
             2)).T  ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        numM = m.shape[1]
        R = rotmat2d(x[2])  # body->world
        Rpihalf = rotmat2d(np.pi / 2)

        # relative position of landmark to robot in world frame. m - rho that appears in (11.15) and (11.16)
        delta_m = m - x[:2].reshape(2, 1)  # (world)
        zc = delta_m - R @ self.sensor_offset.reshape(
            2, 1)  # measurements, cartesian (world)
        zr = np.apply_along_axis(np.linalg.norm, axis=0, arr=zc)

        Hx_top = np.concatenate(
            (-(1 / la.norm(zc, axis=0) * zc).T, np.zeros((numM, 1))), axis=1)
        Hx_btm = np.concatenate(
            ((1 / (la.norm(zc, axis=0)**2) * zc).T @ Rpihalf, -np.ones(
                (numM, 1))),
            axis=1)
        Hx = np.concatenate((Hx_top, Hx_btm), axis=1).reshape(-1, 3)
        Hm = -Hx[:, :2].reshape(numM, -1, 2)

        H = np.zeros((2 * numM, 3 + 2 * numM))
        H[:, :3] = Hx
        H[:, 3:] = la.block_diag(*Hm)

        #assert (H.shape[0] == 2*numM and H.shape[1] ==  3+2*numM), "SLAM.H: Wrong shape of H"
        #assert (Hx.shape[0] == 2*numM and Hx.shape[1] == 3), "SLAM.H: Wrong shape of Hx"
        return H
Example #12
    def h(self, eta: np.ndarray) -> np.ndarray:
        """Predict all the landmark positions in sensor frame.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks,)
            The landmarks in the sensor frame.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T
        Rot = rotmat2d(-x[2])
        # Rot_neg = rotmat2d(-x[2]) # sverre: WORLD -> BODY
        # Rot_pos = rotmat2d(x[2]) # sverre: BODY -> WORLD

        # None as index adds an axis with size 1 at that position.
        # Numpy broadcasts size 1 dimensions to any size when needed
        # delta_m = np.array(list(map(lambda l: l - x[:2] - Rot.T @ self.sensor_offset, m.T))) # TODO, relative position of landmark to sensor on robot in world frame

        # zpredcart = np.array(list(map(lambda d_m: Rot @ d_m, delta_m))) # TODO, predicted measurements in cartesian coordinates, beware sensor offset for VP

        # zpred_r = np.array(list(map(lambda rel_pos: la.norm(rel_pos), delta_m))) # TODO, ranges

        # zpred_theta = np.array(list(map(lambda rel_pos: np.arctan2(rel_pos[0], rel_pos[1]), zpredcart))) # TODO, bearings

        # zpred = np.stack((zpred_r, zpred_theta)) # TODO, the two arrays above stacked on top of each other vertically like
        # [ranges;
        #  bearings]
        # into shape (2, #lmrk)

        delta_m = m - (x[:2, None] + Rot.T @ self.sensor_offset[:, None])
        zpredcart = Rot @ delta_m
        zpred_r = np.linalg.norm(zpredcart, axis=0)
        z_pred_theta = np.arctan2(zpredcart[1], zpredcart[0])
        zpred = np.vstack((zpred_r, z_pred_theta))

        zpred = zpred.T.ravel(
        )  # stack measurements along one dimension, [range1 bearing1 range2 bearing2 ...]

        # assert (
        #     zpred.ndim == 1 and zpred.shape[0] == eta.shape[0] - 3
        # ), "SLAM.h: Wrong shape on zpred"
        return zpred
Example #13
    def h(self, eta: np.ndarray) -> np.ndarray:
        """Predict all the landmark positions in sensor frame.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks,)
            The landmarks in the sensor frame.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T
        M = m.shape[1]

        Rot = rotmat2d(-x[2])

        # None as index adds an axis with size 1 at that position.
        # Numpy broadcasts size 1 dimensions to any size when needed
        delta_m = (m.T - x[:2]).T
        # m = (2, 1000)
        # x = (2,)

        zpredcart = Rot @ delta_m - self.sensor_offset.reshape((2, 1))
        # (2x2) @ (2,1000)

        zpred_r = la.norm(zpredcart, axis=0)
        zpred_theta = np.arctan2(zpredcart[1], zpredcart[0])
        zpred = np.vstack([zpred_r, zpred_theta])
        assert len(zpred_r) == M
        assert len(zpred_theta) == M
        # vertically like
        # [ranges;
        #  bearings]
        # into shape (2, #lmrk)

        zpred = zpred.T.ravel(
        )  # stack measurements along one dimension, [range1 bearing1 range2 bearing2 ...]

        assert (zpred.ndim == 1 and zpred.shape[0]
                == eta.shape[0] - 3), "SLAM.h: Wrong shape on zpred"
        return zpred
Example #14
    def h(self, eta: np.ndarray) -> np.ndarray:
        """Predict all the landmark positions in sensor frame.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks,)
            The landmarks in the sensor frame.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        Rot = rotmat2d(-x[2])

        # None as index adds an axis with size 1 at that position.
        # Numpy broadcasts size 1 dimensions to any size when needed

        # TODO, relative position of landmark to sensor on robot in world frame
        delta_m = (m.T - x[:2])

        # TODO, predicted measurements in cartesian coordinates, beware sensor offset for VP
        zpredcart = delta_m - Rot.T @ self.sensor_offset
        z_body = Rot @ zpredcart.T

        zpred_r = la.norm(zpredcart, axis=1)  # TODO, ranges
        zpred_theta = np.arctan2(z_body[1], z_body[0])  # TODO, bearings
        zpred = np.stack(
            (zpred_r, zpred_theta)
        )  # TODO, the two arrays above stacked on top of each other vertically like
        # [ranges;
        #  bearings]
        # into shape (2, #lmrk)

        zpred = zpred.T.ravel(
        )  # stack measurements along one dimension, [range1 bearing1 range2 bearing2 ...]

        assert (zpred.ndim == 1 and zpred.shape[0]
                == eta.shape[0] - 3), "SLAM.h: Wrong shape on zpred"
        return zpred
Example #15
    def Fu(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:
        """Calculate the Jacobian of f with respect to u.

        Parameters
        ----------
        x : np.ndarray, shape=(3,)
            the robot state
        u : np.ndarray, shape=(3,)
            the odometry

        Returns
        -------
        np.ndarray
            The Jacobian of f wrt. u.
        """
        Fu = block_diag(rotmat2d(x[2]), 1)

        assert Fu.shape == (3, 3), "EKFSLAM.Fu: wrong shape"
        return Fu
Example #16
    def Fx(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:
        """Calculate the Jacobian of f with respect to x.

        Parameters
        ----------
        x : np.ndarray, shape=(3,)
            the robot state
        u : np.ndarray, shape=(3,)
            the odometry

        Returns
        -------
        np.ndarray
            The Jacobian of f wrt. x.
        """

        psi = x[2]

        Fx = np.eye(3)  # TODO, eq (11.13)
        Fx[0:2, 2] = rotmat2d(psi + np.pi / 2) @ u[:2]
        assert Fx.shape == (3, 3), "EKFSLAM.Fx: wrong shape"
        return Fx
    def f(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:
        """Add the odometry u to the robot state x.

        Parameters
        ----------
        x : np.ndarray, shape=(3,)
            the robot state
        u : np.ndarray, shape=(3,)
            the odometry

        Returns
        -------
        np.ndarray, shape = (3,)
            the predicted state
        """

        pos = x[:2] + rotmat2d(x[2]) @ u[:2]
        yaw_angle = utils.wrapToPi(x[2] + u[2]) # Wrap heading angle between (-pi, pi)

        xpred = np.array([*pos, yaw_angle]) 

        assert xpred.shape == (3,), "EKFSLAM.f: wrong shape for xpred"
        return xpred
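Fx and Fu only cover the pose and odometry; in the EKF-SLAM time update the landmark part of the state is static. The following is a hedged sketch (not shown in the examples on this page) of how these Jacobians would typically enter the covariance prediction for the pose block, assuming a slam object with f, Fx and Fu as above and a 3x3 odometry covariance Q:

import numpy as np

def predict_pose(slam, x, P_pose, u, Q):
    # propagate the pose and its 3x3 covariance block through one odometry step;
    # landmark blocks and pose-landmark cross terms are handled elsewhere
    x_pred = slam.f(x, u)
    Fx = slam.Fx(x, u)
    Fu = slam.Fu(x, u)
    P_pred = Fx @ P_pose @ Fx.T + Fu @ Q @ Fu.T
    return x_pred, P_pred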
Example #18
    def Fu(self, x: np.ndarray, u: np.ndarray) -> np.ndarray:
        """Calculate the Jacobian of f with respect to u.

        Parameters
        ----------
        x : np.ndarray, shape=(3,)
            the robot state
        u : np.ndarray, shape=(3,)
            the odometry

        Returns
        -------
        np.ndarray
            The Jacobian of f wrt. u.
        """

        psi = x[2]

        Fu = np.eye(3)  # TODO, eq (11.14)
        Fu[0:2, 0:2] = rotmat2d(psi)

        assert Fu.shape == (3, 3), "EKFSLAM.Fu: wrong shape"
        return Fu
Example #19
    def add_landmarks(self, eta: np.ndarray, P: np.ndarray,
                      z: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Calculate new landmarks, their covariances and add them to the state.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2*#landmarks,)
            the robot state and map concatenated
        P : np.ndarray, shape=(3 + 2*#landmarks,)*2
            the covariance of eta
        z : np.ndarray, shape=(2 * #newlandmarks,)
            A set of measurements to create landmarks for

        Returns
        -------
        Tuple[np.ndarray, np.ndarray], shapes=(3 + 2*(#landmarks + #newlandmarks),), (3 + 2*(#landmarks + #newlandmarks),)*2
            eta with new landmarks appended, and its covariance
        """
        n = P.shape[0]
        assert z.ndim == 1, "SLAM.add_landmarks: z must be a 1d array"

        numLmk = z.shape[0] // 2

        lmnew = np.empty_like(z)

        Gx = np.empty((numLmk * 2, 3))
        Rall = np.zeros((numLmk * 2, numLmk * 2))

        sensor_offset_world = rotmat2d(
            eta[2]
        ) @ self.sensor_offset  # For transforming landmark position into world frame
        sensor_offset_world_der = rotmat2d(
            eta[2] + np.pi / 2) @ self.sensor_offset  # Used in Gx

        rot = rotmat2d(z[1::2] + eta[2])
        #new_rot = rot.swapaxes(0,2).swapaxes(1,2).ravel().reshape((18,2))

        vecZ = np.array([-np.sin(z[1::2] + eta[2]),
                         np.cos(z[1::2] + eta[2])]).T
        Gx[:, :2] = np.tile(np.eye(2), (numLmk, 1))
        Gx[:, 2] = (z[0::2].reshape(-1, 1) * vecZ +
                    sensor_offset_world_der).ravel()

        lmnew = ((z[0::2] * rot[:, 0]).T + eta[0:2] +
                 sensor_offset_world).ravel()

        right = np.insert(z[0::2], np.arange(0, numLmk, 1), 0)
        left = np.tile([1, 0], numLmk)
        np.concatenate((left, right)).reshape(-1, 2, order="F")

        for j in range(numLmk):
            ind = 2 * j
            inds = slice(ind, ind + 2)
            zj = z[inds]

            Gz = rot[:, :, j] @ np.diag([1, zj[0]])  # TODO

            Rall[
                inds,
                inds] = Gz @ self.R @ Gz.T  # TODO, Gz * R * Gz^T, transform measurement covariance from polar to cartesian coordinates

        assert len(lmnew) % 2 == 0, "SLAM.add_landmark: lmnew not even length"
        etaadded = np.hstack(
            (eta, lmnew))  # TODO, append new landmarks to state vector

        n_R = Rall.shape[0]
        Padded = np.zeros((n + n_R, n + n_R))

        Padded[:n, :n] = P
        Padded[n:, n:] = Gx @ P[:3, :3] @ Gx.T + Rall
        Padded[n:, :n] = Gx @ P[:3, :]
        Padded[:n, n:] = Padded[n:, :n].T

        assert (
            etaadded.shape * 2 == Padded.shape
        ), "EKFSLAM.add_landmarks: calculated eta and P has wrong shape"
        assert np.allclose(
            Padded, Padded.T), "EKFSLAM.add_landmarks: Padded not symmetric"
        assert np.all(np.linalg.eigvals(Padded) >= 0
                      ), "EKFSLAM.add_landmarks: Padded not PSD"

        return etaadded, Padded
Example #20
    def add_landmarks(self, eta: np.ndarray, P: np.ndarray,
                      z: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Calculate new landmarks, their covariances and add them to the state.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2*#landmarks,)
            the robot state and map concatenated
        P : np.ndarray, shape=(3 + 2*#landmarks,)*2
            the covariance of eta
        z : np.ndarray, shape=(2 * #newlandmarks,)
            A set of measurements to create landmarks for

        Returns
        -------
        Tuple[np.ndarray, np.ndarray], shapes=(3 + 2*(#landmarks + #newlandmarks),), (3 + 2*(#landmarks + #newlandmarks),)*2
            eta with new landmarks appended, and its covariance
        """
        #print('add_landmark')
        assert z.ndim == 1, "SLAM.add_landmarks: z must be a 1d array"

        n = P.shape[0]
        numLmk = z.shape[0] // 2
        lmnew = np.empty_like(z)
        Gx = np.empty((numLmk * 2, 3))
        Rall = np.zeros((numLmk * 2, numLmk * 2))
        I2 = np.eye(2)  # Preallocate, used for Gx

        # For transforming landmark position into world frame
        sensor_offset_world = rotmat2d(eta[2]) @ self.sensor_offset
        sensor_offset_world_der = rotmat2d(eta[2] + np.pi /
                                           2) @ self.sensor_offset  # For Gx

        for j in range(numLmk):
            ind = 2 * j
            inds = slice(ind, ind + 2)

            zj = z[inds]
            rot = rotmat2d(zj[1] + eta[2])  # bearing + heading
            # calculate position of new landmark in world frame
            lmnew[inds] = zj[0] * rot[:, 0] + eta[:2] + sensor_offset_world

            Gx[inds, :2] = I2
            Gx[inds, 2] = zj[0] * rot[:, 1] + sensor_offset_world_der
            Gz = rot @ np.diag([1, zj[0]])
            Rall[inds,
                 inds] = Gz @ self.R @ Gz.T  # pol2cart on measurement cov

        assert len(lmnew) % 2 == 0, "SLAM.add_landmark: lmnew not even length"
        # append new landmarks to state vector
        #etaadded = np.append(eta, lmnew)
        etaadded = np.array([*eta, *lmnew])
        Padded = la.block_diag(P, Gx @ P[:3, :3] @ Gx.T + Rall)
        Padded[n:, :n] = Gx @ P[:3, :]
        Padded[:n, n:] = Padded[n:, :n].T

        assert (
            etaadded.shape * 2 == Padded.shape
        ), "EKFSLAM.add_landmarks: calculated eta and P has wrong shape"
        assert np.allclose(
            Padded, Padded.T), "EKFSLAM.add_landmarks: Padded not symmetric"
        assert np.all(np.linalg.eigvals(Padded) >= 0
                      ), "EKFSLAM.add_landmarks: Padded not PSD"

        return etaadded, Padded
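The Gz = rot @ diag(1, r) factor above is the Jacobian of the polar-to-Cartesian conversion, so Gz @ R @ Gz.T maps the range/bearing noise into a landmark position covariance. A standalone numeric sketch with made-up values:

import numpy as np

r, b, psi = 5.0, 0.3, 0.1                 # made-up range, bearing and robot heading
R_polar = np.diag([0.1 ** 2, 0.01 ** 2])  # made-up (range, bearing) measurement covariance

a = b + psi
rot = np.array([[np.cos(a), -np.sin(a)],
                [np.sin(a), np.cos(a)]])
Gz = rot @ np.diag([1.0, r])              # d(landmark position) / d(range, bearing)
R_cart = Gz @ R_polar @ Gz.T              # landmark position covariance in world axes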
Example #21
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)
            the jacobian of h wrt. eta.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        numM = m.shape[1]

        Rot = rotmat2d(x[2])
        Rpihalf = rotmat2d(np.pi / 2)

        delta_m = m - x[:2].reshape(
            (2, 1)
        )  # TODO, relative position of landmark to robot in world frame. m - rho that appears in (11.15) and (11.16)

        zc = (
            delta_m.T - Rot @ self.sensor_offset
        ).T  # TODO, (2, #measurements), each measured position in cartesian coordinates like
        # [x coordinates;
        #  y coordinates]

        # NOTE: Calculate zpred here for some speed increase probably
        zpred = self.h(
            eta)  # TODO (2, #measurements), predicted measurements, like
        zpred = np.reshape(zpred, (2, zpred.shape[0] // 2), "F")  # [ranges;
        #  bearings]
        #zr = zpred[0, :]    # TODO, ranges

        # Allocate H and set submatrices as memory views into H
        # You may or may not want to do this like this
        H = np.zeros(
            (2 * numM, 3 + 2 * numM))  # TODO, see eq (11.15), (11.16), (11.17)

        z = np.zeros((numM, 1))
        ones = np.ones((numM, 1))
        Hx_tops = -np.concatenate(
            ((1 / la.norm(zc, axis=0) * delta_m).T, z), axis=1)  # Correct
        Hx_bottoms = np.concatenate(
            ((1 / la.norm(zc, axis=0)**2 * delta_m).T @ Rpihalf, -ones),
            axis=1)  # Correct
        Hx = np.concatenate((Hx_tops, Hx_bottoms), axis=1)
        Hx = Hx.reshape(-1, 3)

        Hm = -Hx[:, :2]
        Hm = Hm.reshape(numM, -1, 2)

        H[:, :3] = Hx
        H[:, 3:] = la.block_diag(*Hm)

        # TODO: You can set some assertions here to make sure that some of the structure in H is correct

        return H
Example #22
        num_asso = np.count_nonzero(a[mk] > -1)

        if num_asso > 0:
            NISnorm[mk] = NIS[mk] / (2 * num_asso)
            CInorm[mk] = np.array(chi2.interval(confidence_prob, 2 * num_asso)) / (2 * num_asso )
        else:
            NISnorm[mk] = 1
            CInorm[mk].fill(1)

        xupd[mk] = eta[:3]

        if doPlot:
            sh_lmk.set_offsets(eta[3:].reshape(-1, 2))
            if len(z) > 0:
                zinmap = (
                    rotmat2d(eta[2])
                    @ (
                        z[:, 0] * np.array([np.cos(z[:, 1]), np.sin(z[:, 1])])
                        + slam.sensor_offset[:, None]
                    )
                    + eta[0:2, None]
                )
                sh_Z.set_offsets(zinmap.T)
            lh_pose.set_data(*xupd[mk_first:mk, :2].T)

            ax.set(
                xlim=[-200, 200],
                ylim=[-200, 200],
                title=f"step {k}, laser scan {mk}, landmarks {len(eta[3:])//2},\nmeasurements {z.shape[0]}, num new = {np.sum(a[mk] == -1)}",
            )
            plt.draw()
        num_asso = np.count_nonzero(a[mk] > -1)

        if num_asso > 0:
            NISnorm[mk] = NIS[mk] / (2 * num_asso)
            CInorm[mk] = np.array(chi2.interval(confidence_prob,
                                                2 * num_asso)) / (2 * num_asso)
        else:
            NISnorm[mk] = 1
            CInorm[mk].fill(1)

        xupd[mk] = eta[:3]

        if doPlot:
            sh_lmk.set_offsets(eta[3:].reshape(-1, 2))
            if len(z) > 0:
                zinmap = (rotmat2d(eta[2]) @ (z[:, 0] * np.array(
                    [np.cos(z[:, 1]), np.sin(z[:, 1])]) +
                                              slam.sensor_offset[:, None]) +
                          eta[0:2, None])
                sh_Z.set_offsets(zinmap.T)
            lh_pose.set_data(*xupd[mk_first:mk, :2].T)

            ax.set(
                xlim=[-200, 200],
                ylim=[-200, 200],
                title=
                f"step {k}, laser scan {mk}, landmarks {len(eta[3:])//2},\nmeasurements {z.shape[0]}, num new = {np.sum(a[mk] == -1)}",
            )
            plt.draw()
            plt.pause(0.00001)
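Both snippets above normalize the NIS and its chi-square interval by the degrees of freedom (two per associated landmark) so that scans with different numbers of associations are comparable in one plot. A tiny standalone illustration of the interval computation, with a made-up association count:

import numpy as np
from scipy.stats import chi2

confidence_prob = 0.95
num_asso = 3                 # made-up number of associated landmarks in one scan
dof = 2 * num_asso
CI_norm = np.array(chi2.interval(confidence_prob, dof)) / dof
# a consistent filter keeps NIS / dof inside [CI_norm[0], CI_norm[1]] about 95% of the time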
Example #24
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.
        Parameters:
         eta : np.ndarray, shape=(3 + 2 * #landmarks,) The robot state and landmarks stacked.
        Returns:
         np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks) the jacobian of h wrt. eta.
        """

        x = eta[:3]
        m = eta[3:].reshape(
            (-1,
             2)).T  ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        numM = m.shape[1]
        R = rotmat2d(x[2])  # body->world

        # relative position of landmark to robot in world frame. m - rho that appears in (11.15) and (11.16)
        delta_m = m - x[:2].reshape(2, 1)  # (world)
        zc = delta_m - R @ self.sensor_offset.reshape(
            2, 1)  # measurements, cartesian coord (world frame)

        zpred = self.h(eta)
        zr = np.array([zpred[i] for i in range(0, len(zpred), 2)])  # ranges

        Rpihalf = rotmat2d(np.pi / 2)

        # In what follows you can be clever and avoid making this for all the landmarks you _know_
        # you will not detect (the maximum range should be available from the data).
        # But keep it simple to begin with.

        # Allocate H and set submatrices as memory views into H
        H = np.zeros(
            (2 * numM, 3 + 2 * numM))  # see eq (11.15), (11.16), (11.17)
        Hx = H[:, :
               3]  # slice view, setting elements of THIS will set H as well
        Hm = H[:, 3:]

        # proposed way is to go through landmarks one by one
        jac_z_cb = -np.eye(
            2, 3)  # preallocate and update this for some speed gain if looping
        for j in range(numM):  # But this whole loop can be vectorized
            ind = 2 * j  # starting position of the jth landmark into H
            inds = slice(ind,
                         ind + 2)  # the inds slice for the jth landmark into H

            zc_j = zc[:, j].reshape(2, 1)
            temp = np.concatenate(
                (-np.eye(2), -Rpihalf @ delta_m[:, j].reshape(2, 1)), axis=1)
            Hx_top = (zc_j.T / zr[j]) @ temp
            Hx_btm = (zc_j.T @ Rpihalf.T / zr[j]**2) @ temp

            Hx[inds] = np.concatenate((Hx_top, Hx_btm), axis=0)

            #TODO : avoid making this for all the landmarks you _know_
            # you will not detect (the maximum range should be available from the data).
            Hm[inds, inds] = np.vstack(
                (zc_j.T / zr[j], zc_j.T @ Rpihalf / (zr[j]**2)))

            #Hm[inds, inds]  = np.vstack(delta_m[:,j].T * 1 / la.norm(delta_m[:,j]),
            #                            delta_m[:,j].T @ Rpihalf * 1 / (la.norm(delta_m[:,j])**2) )

        #set some assertions here to make sure that some of the structure in H is correct
        assert (H.shape[0] == 2 * numM
                and H.shape[1] == 3 + 2 * numM), "SLAM.H: Wrong shape of H"
        assert (Hx.shape[0] == 2 * numM
                and Hx.shape[1] == 3), "SLAM.H: Wrong shape of Hx"
        return H
Example #25
    def add_landmarks(self, eta: np.ndarray, P: np.ndarray,
                      z: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Calculate new landmarks, their covariances and add them to the state.
        Parameters
         eta : np.ndarray, shape=(3 + 2*#landmarks,)   : the robot state and map concatenated
         P : np.ndarray, shape=(3 + 2*#landmarks,)*2   : the covariance of eta
         z : np.ndarray, shape=(2 * #newlandmarks,)     : A set of measurements to create landmarks for
        Returns
         Tuple[np.ndarray, np.ndarray], shapes=(3 + 2*(#landmarks + #newlandmarks),), (3 + 2*(#landmarks + #newlandmarks),)*2
            eta with new landmarks appended, and its covariance
        """
        n = P.shape[0]
        assert z.ndim == 1, "SLAM.add_landmarks: z must be a 1d array"

        numLmk = z.shape[0] // 2
        lmnew = np.empty_like(z)

        Gx = np.empty((numLmk * 2, 3))
        Rall = np.zeros((numLmk * 2, numLmk * 2))

        I2 = np.eye(2)  # Preallocate, used for Gx
        sensor_offset_world = rotmat2d(
            eta[2]
        ) @ self.sensor_offset  # For transforming landmark position into world frame
        sensor_offset_world_der = rotmat2d(
            eta[2] + np.pi / 2) @ self.sensor_offset  # Used in Gx

        for j in range(numLmk):
            ind = 2 * j
            inds = slice(ind, ind + 2)
            zj = z[inds]

            # calculate pos of new landmark in world frame
            zj_x, zj_y = utils.polar2cartesian(zj[0], zj[1])
            lmnew[inds] = rotmat2d(eta[2]) @ np.array([zj_x, zj_y]).reshape(
                2, ) - sensor_offset_world

            Gx[inds, :2] = I2
            Gx[inds, 2] = zj[0] * np.array([
                -np.sin(zj[1] + eta[2]),
                np.cos(zj[1] + eta[2])
            ]).reshape(2, ) + sensor_offset_world_der
            Gz = rotmat2d(zj[1] + eta[2]) @ np.diag((1, zj[0]))
            # Gz * R * Gz^T, transform measurement covariance from polar to cartesian coordinates
            Rall[inds, inds] = Gz @ self.R @ Gz.T

        assert len(lmnew) % 2 == 0, "SLAM.add_landmark: lmnew not even length"

        etaadded = np.append(
            eta, lmnew)  # [eta; G() ] : append new landmarks to state vector

        low_r = Gx @ P[:3, :3] @ Gx.T + Rall  # + Gz @ self.R @ Gz.T
        P_added = la.block_diag(P, low_r)  # block diagonal of P_new
        P_added[:n, n:] = P[:, :3] @ Gx.T  # top right corner of P_new
        P_added[n:, :n] = P_added[:n, n:].T  # transpose of above

        assert (
            etaadded.shape * 2 == P_added.shape
        ), "EKFSLAM.add_landmarks: calculated eta and P has wrong shape"
        assert np.allclose(
            P_added, P_added.T), "EKFSLAM.add_landmarks: P_added not symmetric"
        assert np.all(np.linalg.eigvals(P_added) >= 0
                      ), "EKFSLAM.add_landmarks: P_added not PSD"

        return etaadded, P_added
Example #26
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)
            the jacobian of h wrt. eta.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T

        numM = m.shape[1]

        Rot = rotmat2d(x[2])

        delta_m = (m.T - x[:2]).T

        zc = delta_m - (Rot @ self.sensor_offset).reshape((2, 1))
        # (2, #measurements), each measured position in cartesian coordinates like
        # [x coordinates;
        #  y coordinates]

        zpred = self.h(eta).reshape((-1, 2)).T

        # (2, #measurements), predicted measurements, like
        # [ranges;
        #  bearings]
        zr = zpred[0]

        Rpihalf = rotmat2d(np.pi / 2)

        # In what follows you can be clever and avoid making this for all the landmarks you _know_
        # you will not detect (the maximum range should be available from the data).
        # But keep it simple to begin with.

        # Allocate H and set submatrices as memory views into H
        # You may or may not want to do this like this
        H = np.zeros(
            (2 * numM, 3 + 2 * numM))  # TODO, see eq (11.15), (11.16), (11.17)
        Hx = H[:, :3]  # slice view, setting elements of Hx will set H as well
        Hm = H[:, 3:]  # slice view, setting elements of Hm will set H as well

        # proposed way is to go through landmarks one by one
        jac_z_cb = -np.eye(
            2, 3)  # preallocate and update this for some speed gain if looping
        for i in range(numM):  # But this whole loop can be vectorized
            ind = 2 * i  # starting position of the ith landmark into H
            inds = slice(ind,
                         ind + 2)  # the inds slice for the ith landmark into H

            zc_norm = la.norm(zc[:, i])
            zc_unit = zc[:, i] / zc_norm
            jac_z_cb[:, 2] = -Rpihalf @ delta_m[:, i]
            # TODO: Set H or Hx and Hm here
            Hx[inds] = np.vstack([zc_unit.T, zc_unit.T @ Rpihalf.T / zc_norm
                                  ]) @ jac_z_cb

            Hm[inds,
               inds] = np.vstack([zc_unit.T, zc_unit.T / zc_norm @ Rpihalf.T])

        # TODO: You can set some assertions here to make sure that some of the structure in H is correct
        return H
Example #27
            CI[mk] = chi2.interval(1-alpha, 2 * num_asso)

            total_num_asso += num_asso
            NISes[mk] = NIS[mk]
        else:
            NISnorm[mk] = 1
            CInorm[mk].fill(1)
            CI[mk].fill(1)

        xupd[mk] = eta[:3]

        if doPlot:
            sh_lmk.set_offsets(eta[3:].reshape(-1, 2))
            if len(z) > 0:
                zinmap = (
                    rotmat2d(eta[2])
                    @ (
                        z[:, 0] * np.array([np.cos(z[:, 1]), np.sin(z[:, 1])])
                        + slam.sensor_offset[:, None]
                    )
                    + eta[0:2, None]
                )
                sh_Z.set_offsets(zinmap.T)
            lh_pose.set_data(*xupd[mk_first:mk, :2].T)

            ax.set(
                xlim=[-200, 200],
                ylim=[-200, 200],
                title=f"step {k}, laser scan {mk}, landmarks {len(eta[3:])//2},\nmeasurements {z.shape[0]}, num new = {np.sum(a[mk] == -1)}",
            )
            plt.draw()
Example #28
    def add_landmarks(self, eta: np.ndarray, P: np.ndarray,
                      z: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Calculate new landmarks, their covariances and add them to the state.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2*#landmarks,)
            the robot state and map concatenated
        P : np.ndarray, shape=(3 + 2*#landmarks,)*2
            the covariance of eta
        z : np.ndarray, shape=(2 * #newlandmarks,)
            A set of measurements to create landmarks for

        Returns
        -------
        Tuple[np.ndarray, np.ndarray], shapes=(3 + 2*(#landmarks + #newlandmarks),), (3 + 2*(#landmarks + #newlandmarks),)*2
            eta with new landmarks appended, and its covariance
        """
        n = P.shape[0]
        assert z.ndim == 1, "SLAM.add_landmarks: z must be a 1d array"

        numLmk = z.shape[0] // 2

        lmnew = np.empty_like(z)

        x = eta[:3]

        Gx = np.empty((numLmk * 2, 3))
        Rall = np.zeros((numLmk * 2, numLmk * 2))

        I2 = np.eye(2)  # Preallocate, used for Gx
        sensor_offset_world = rotmat2d(
            eta[2]
        ) @ self.sensor_offset  # For transforming landmark position into world frame
        sensor_offset_world_der = rotmat2d(
            eta[2] + np.pi / 2) @ self.sensor_offset  # Used in Gx

        for j in range(numLmk):
            ind = 2 * j
            inds = slice(ind, ind + 2)
            zj = z[inds]
            zr, zb = zj

            rot = rotmat2d(zb + x[2])
            zc = zr * np.array([np.cos(zb + x[2]), np.sin(zb + x[2])])

            lmnew[inds] = x[:2] + zc + sensor_offset_world
            # TODO, calculate position of new landmark in world frame

            Gx[inds, :2] = I2
            Gx[inds, 2] = zr * np.array([
                -np.sin(zb + x[2]), np.cos(zb + x[2])
            ]) + sensor_offset_world_der

            Gz = rot @ np.diag([1, zr])

            Rall[inds, inds] = Gz @ self.R @ Gz.T
            # TODO, Gz * R * Gz^T, transform measurement covariance from polar to cartesian coordinates

        assert len(lmnew) % 2 == 0, "SLAM.add_landmark: lmnew not even length"
        etaadded = np.array([*eta, *lmnew])
        Padded = block_diag(P, Gx @ P[:3, :3] @ Gx.T + Rall)
        # TODO, block diagonal of P_new, see problem text in 1g) in graded assignment 3
        Padded[:n, n:] = P[:, :3] @ Gx.T
        Padded[n:, :n] = Padded[:n, n:].T

        assert (
            etaadded.shape * 2 == Padded.shape
        ), "EKFSLAM.add_landmarks: calculated eta and P has wrong shape"
        assert np.allclose(
            Padded, Padded.T), "EKFSLAM.add_landmarks: Padded not symmetric"
        assert np.all(np.linalg.eigvals(Padded) >= 0
                      ), "EKFSLAM.add_landmarks: Padded not PSD"

        return etaadded, Padded
Example #29
    def add_landmarks(self, eta: np.ndarray, P: np.ndarray,
                      z: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Calculate new landmarks, their covariances and add them to the state.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2*#landmarks,)
            the robot state and map concatenated
        P : np.ndarray, shape=(3 + 2*#landmarks,)*2
            the covariance of eta
        z : np.ndarray, shape=(2 * #newlandmarks,)
            A set of measurements to create landmarks for

        Returns
        -------
        Tuple[np.ndarray, np.ndarray], shapes=(3 + 2*(#landmarks + #newlandmarks),), (3 + 2*(#landmarks + #newlandmarks),)*2
            eta with new landmarks appended, and its covariance
        """
        n = P.shape[0]
        assert z.ndim == 1, "SLAM.add_landmarks: z must be a 1d array"

        numLmk = z.shape[0] // 2

        lmnew = np.empty_like(z)

        Gx = np.empty((numLmk * 2, 3))
        Rall = np.zeros((numLmk * 2, numLmk * 2))

        I2 = np.eye(2)  # Preallocate, used for Gx
        sensor_offset_world = rotmat2d(
            eta[2]
        ) @ self.sensor_offset  # For transforming landmark position into world frame
        sensor_offset_world_der = rotmat2d(
            eta[2] + np.pi / 2) @ self.sensor_offset  # Used in Gx

        for j in range(numLmk):
            ind = 2 * j
            inds = slice(ind, ind + 2)
            zj = z[inds]

            zj_cart = zj[0] * np.array([np.cos(zj[1]), np.sin(zj[1])])

            rot = rotmat2d(zj[1] + eta[2])  # TODO, rotmat in Gz
            lmnew[inds] = eta[:2] + rotmat2d(
                eta[2]
            ) @ zj_cart + sensor_offset_world  # TODO, calculate position of new landmark in world frame

            Gx[inds, :2] = I2  # TODO
            Gx[inds, 2] = zj[0] * rot[:, 1] + sensor_offset_world_der  # TODO

            Gz = rot @ np.diag([1, zj[0]])  # TODO

            Rall[
                inds,
                inds] = Gz @ self.R @ Gz.T  # TODO, Gz * R * Gz^T, transform measurement covariance from polar to cartesian coordinates

        assert len(lmnew) % 2 == 0, "SLAM.add_landmark: lmnew not even length"
        # TODO, append new landmarks to state vector
        etaadded = np.append(eta, lmnew)
        # TODO, block diagonal of P_new, see problem text in 1g) in graded assignment 3
        Padded = la.block_diag(
            P, Gx @ P[:3, :3] @ Gx.T +
            Rall)  #? No transpose on the last Gx here, cf. the assignment text
        # TODO, top right corner of P_new
        Padded[:n,
               n:] = P[:, :3] @ Gx.T  #? Changed the index here to get the top right corner
        # TODO, transpose of above. Should yield the same as the calculation, but this enforces symmetry and should be cheaper
        Padded[n:, :n] = Padded[:n, n:].T

        assert (
            etaadded.shape * 2 == Padded.shape
        ), "EKFSLAM.add_landmarks: calculated eta and P has wrong shape"
        assert np.allclose(
            Padded, Padded.T), "EKFSLAM.add_landmarks: Padded not symmetric"
        assert np.all(np.linalg.eigvals(Padded) >= 0
                      ), "EKFSLAM.add_landmarks: Padded not PSD"

        return etaadded, Padded
Example #30
    def H(self, eta: np.ndarray) -> np.ndarray:
        """Calculate the jacobian of h.

        Parameters
        ----------
        eta : np.ndarray, shape=(3 + 2 * #landmarks,)
            The robot state and landmarks stacked.

        Returns
        -------
        np.ndarray, shape=(2 * #landmarks, 3 + 2 * #landmarks)
            the jacobian of h wrt. eta.
        """
        # extract states and map
        x = eta[0:3]
        ## reshape map (2, #landmarks), m[:, j] is the jth landmark
        m = eta[3:].reshape((-1, 2)).T  #? Copy-pasted from h(.)

        numM = m.shape[1]

        Rot = rotmat2d(x[2])

        # TODO, relative position of landmark to robot in world frame. m - rho that appears in (11.15) and (11.16)
        delta_m = m - x[:2].reshape(2, 1)

        # TODO, (2, #measurements), each measured position in cartesian coordinates like
        zc = delta_m - Rot @ self.sensor_offset.reshape((2, 1))
        # zc = [Rot @ mi for mi in delta_m.T]
        # [x coordinates;
        #  y coordinates]

        # TODO (2, #measurements), predicted measurements, like
        zpred = self.h(eta)
        # [ranges;
        #  bearings]
        # TODO, ranges
        zr = zpred[::2]

        Rpihalf = rotmat2d(np.pi / 2)

        # In what follows you can be clever and avoid making this for all the landmarks you _know_
        # you will not detect (the maximum range should be available from the data).
        # But keep it simple to begin with.

        # Allocate H and set submatrices as memory views into H
        # You may or may not want to do this like this
        H = np.zeros(
            (2 * numM, 3 + 2 * numM))  # TODO, see eq (11.15), (11.16), (11.17)
        Hx = H[:, :3]  # slice view, setting elements of Hx will set H as well
        Hm = H[:, 3:]  # slice view, setting elements of Hm will set H as well

        # proposed way is to go through landmarks one by one
        jac_z_cb = -np.eye(
            2, 3)  # preallocate and update this for some speed gain if looping
        I2 = np.eye(2)
        for i in range(numM):  # But this whole loop can be vectorized
            ind = 2 * i  # starting position of the ith landmark into H
            inds = slice(ind,
                         ind + 2)  # the inds slice for the ith landmark into H

            jac_z_cb[:, 2] = -Rpihalf @ delta_m[:, i]
            Hx[inds, :][0, :] = (zc[:, i].T / zr[i]) @ jac_z_cb
            Hx[inds, :][1, :] = (zc[:, i].T @ Rpihalf.T /
                                 (zr[i]**2)) @ jac_z_cb
            Hm[inds, inds] = -Hx[inds, :2]

        # TODO: You can set some assertions here to make sure that some of the structure in H is correct
        return H