Example No. 1
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            # HINT: Call tb.transform_line_to_scanner_frame() for the j'th map line.
            # HINT: The first 3 columns of Hx should be populated using the same approach as in EkfLocalization.compute_predicted_measurements().
            # HINT: The first two map lines (j=0,1) are fixed so the Jacobian of h wrt the alpha and r for those lines is just 0.
            # HINT: For the other map lines (j >= 2), write out h in terms of alpha and r to get the Jacobian Hx (written out after this example).
            h, Hx[:, :3] = tb.transform_line_to_scanner_frame(
                np.array([alpha, r]), self.x[:3], self.tf_base_to_camera)

            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                Hx[:, idx_j:idx_j + 2] = np.eye(2)
                x_b_w, y_b_w, theta_b_w = self.x[:3]
                x_c_b, y_c_b, theta_c_b = self.tf_base_to_camera
                x_cam = x_c_b * np.cos(theta_b_w) - y_c_b * np.sin(
                    theta_b_w) + x_b_w
                y_cam = x_c_b * np.sin(theta_b_w) + y_c_b * np.cos(
                    theta_b_w) + y_b_w
                pos_angle = np.arctan2(y_cam, x_cam)
                Hx[1, idx_j] = -np.sin(pos_angle - alpha) * np.sqrt(x_cam**2 +
                                                                    y_cam**2)
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list

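For reference, here is the measurement model that all of these solutions implement, written out once so the per-line Jacobian block filled in by the `if j >= 2:` branch is explicit. The notation assumes the robot pose (x, y, theta) and base-to-camera offset (x_cb, y_cb, theta_cb) used throughout these examples; it restates what the code above computes rather than introducing anything new.

\begin{aligned}
x_{\mathrm{cam}} &= x + x_{cb}\cos\theta - y_{cb}\sin\theta, \qquad
y_{\mathrm{cam}} = y + x_{cb}\sin\theta + y_{cb}\cos\theta, \qquad
\theta_{\mathrm{cam}} = \theta + \theta_{cb}, \\[4pt]
h(\alpha_j, r_j) &=
\begin{bmatrix}
\alpha_j - \theta_{\mathrm{cam}} \\
r_j - x_{\mathrm{cam}}\cos\alpha_j - y_{\mathrm{cam}}\sin\alpha_j
\end{bmatrix}
=
\begin{bmatrix}
\alpha_j - \theta_{\mathrm{cam}} \\
r_j - d\cos(\alpha_j - \varphi)
\end{bmatrix},
\quad d = \sqrt{x_{\mathrm{cam}}^2 + y_{\mathrm{cam}}^2}, \;
\varphi = \operatorname{atan2}(y_{\mathrm{cam}}, x_{\mathrm{cam}}), \\[4pt]
\frac{\partial h}{\partial(\alpha_j, r_j)} &=
\begin{bmatrix}
1 & 0 \\
d\sin(\alpha_j - \varphi) & 1
\end{bmatrix}
\quad (j \ge 2;\ \text{this block stays } 0 \text{ for the fixed lines } j = 0, 1).
\end{aligned}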
Example No. 2
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # Transform the j'th map line into the scanner frame; this gives h
            # and the Jacobian wrt the robot pose (first 3 columns of Hx).
            h, Hx[:, :3] = tb.transform_line_to_scanner_frame(
                np.array([alpha, r]), self.x[:3], self.tf_base_to_camera)

            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                # dh/d(alpha, r): h[1] = r - d*cos(alpha - phi) where (d, phi)
                # locate the camera in the world frame, so dh[1]/dalpha = d*sin(alpha - phi).
                x_b, y_b, th_b = self.x[:3]
                x_cb, y_cb, _ = self.tf_base_to_camera
                x_cam = x_cb * np.cos(th_b) - y_cb * np.sin(th_b) + x_b
                y_cam = x_cb * np.sin(th_b) + y_cb * np.cos(th_b) + y_b
                Hx[:, idx_j:idx_j + 2] = np.eye(2)
                Hx[1, idx_j] = np.hypot(x_cam, y_cam) * np.sin(
                    alpha - np.arctan2(y_cam, x_cam))
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 3
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            # HINT: Call tb.transform_line_to_scanner_frame() for the j'th map line.
            # HINT: The first 3 columns of Hx should be populated using the same approach as in EkfLocalization.compute_predicted_measurements().
            # HINT: The first two map lines (j=0,1) are fixed so the Jacobian of h wrt the alpha and r for those lines is just 0.
            # HINT: For the other map lines (j >= 2), write out h in terms of alpha and r to get the Jacobian Hx.

            h, Hx[:, 0:3] = tb.transform_line_to_scanner_frame(
                np.array([alpha, r]), self.x[0:3], self.tf_base_to_camera)

            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                #                Hx[:,idx_j:idx_j+2] = np.eye(2)  # FIX ME!

                rot_b_to_w = np.array(
                    [[np.cos(self.x[2]), -np.sin(self.x[2]), self.x[0]],
                     [np.sin(self.x[2]),
                      np.cos(self.x[2]), self.x[1]], [0, 0, 1]])

                x_cam_b = self.tf_base_to_camera[0]
                y_cam_b = self.tf_base_to_camera[1]
                th_cam_b = self.tf_base_to_camera[2]

                cam_b = np.array([x_cam_b, y_cam_b, 1])
                cam_w = np.matmul(rot_b_to_w, cam_b)
                cam_w[2] = th_cam_b + self.x[2]

                #                alpha_cam = alpha - cam_w[2]

                alpha_l = alpha - np.arctan2(cam_w[1], cam_w[0])
                #                r_cam = r - np.sqrt(cam_w[0]**2 + cam_w[1]**2) * np.cos(alpha_l)

                H11 = 1.0
                H12 = 0.0
                H21 = np.sqrt(cam_w[0]**2 + cam_w[1]**2) * np.sin(alpha_l)
                H22 = 1.0

                Hx[:, idx_j:idx_j + 2] = np.array([[H11, H12], [H21, H22]])

            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 4
    def compute_predicted_measurements(self):
        """
        Given a single map line in the world frame, outputs the line parameters
        in the scanner frame so it can be associated with the lines extracted
        from the scanner measurements.

        Input:
            None
        Output:
            hs: np.array[M,2,J] - J line parameters in the scanner (camera) frame for M particles.
        """
        ########## Code starts here ##########
        # TODO: Compute hs.
        # Hint: We don't need Jacobians for particle filtering.
        # Hint: Simple solution: using a for loop, for each particle and each
        #       map line, transform to the scanner frame using
        #       tb.transform_line_to_scanner_frame() and tb.normalize_line_parameters().
        # Hint: To maximize speed, try to compute the predicted measurements
        #       without looping over the map lines. You can implement vectorized
        #       versions of the turtlebot_model functions directly here. This
        #       results in a ~10x speedup.
        # Hint: The faster solution does not call tb.transform_line_to_scanner_frame()
        #       or tb.normalize_line_parameters(), but reimplements these steps
        #       vectorized (see the sketch after this example).
        J = self.map_lines.shape[1]
        hs = np.zeros((self.M, 2, J))

        for i in range(self.M):
            for j in range(J):
                alpha, r = self.map_lines[:, j]
                h = tb.transform_line_to_scanner_frame(
                    np.array([alpha, r]), self.xs[i, 0:3],
                    self.tf_base_to_camera, compute_jacobian=False)
                h = tb.normalize_line_parameters(h)
                hs[i, :, j] = h
        ########## Code ends here ##########

        return hs
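The hints in this example mention a vectorized variant that avoids the per-line loop. The following is a minimal sketch of that idea, assuming self.xs has shape (M, 3), self.map_lines has shape (2, J), and self.tf_base_to_camera is (x_cb, y_cb, th_cb); the normalization step reproduces the convention assumed in these examples (r kept non-negative, alpha wrapped to [-pi, pi]) instead of calling tb.normalize_line_parameters().

import numpy as np

def compute_predicted_measurements_vectorized(xs, map_lines, tf_base_to_camera):
    """Sketch of the vectorized hint: returns hs with shape (M, 2, J), no loop over lines."""
    x, y, th = xs[:, 0], xs[:, 1], xs[:, 2]              # particle poses, each (M,)
    x_cb, y_cb, th_cb = tf_base_to_camera
    alpha, r = map_lines[0], map_lines[1]                # map lines, each (J,)

    # Camera pose in the world frame, one per particle.
    x_cam = x + x_cb * np.cos(th) - y_cb * np.sin(th)    # (M,)
    y_cam = y + x_cb * np.sin(th) + y_cb * np.cos(th)    # (M,)
    th_cam = th + th_cb                                  # (M,)

    # Broadcast particles (M, 1) against lines (1, J).
    alpha_c = alpha[None, :] - th_cam[:, None]
    r_c = (r[None, :] - x_cam[:, None] * np.cos(alpha)[None, :]
                      - y_cam[:, None] * np.sin(alpha)[None, :])

    # Normalization, assumed to mirror tb.normalize_line_parameters():
    # flip lines with negative range and wrap alpha to [-pi, pi].
    alpha_c = np.where(r_c < 0, alpha_c + np.pi, alpha_c)
    r_c = np.abs(r_c)
    alpha_c = (alpha_c + np.pi) % (2.0 * np.pi) - np.pi

    return np.stack([alpha_c, r_c], axis=1)              # (M, 2, J)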
Example No. 5
    def compute_predicted_measurements(self):
        """
        Given a single map line in the world frame, outputs the line parameters
        in the scanner frame so it can be associated with the lines extracted
        from the scanner measurements.

        Input:
            None
        Outputs:
                 hs: np.array[2,J]  - J line parameters in the scanner (camera) frame.
            Hx_list: [np.array[2,3]] - list of Jacobians of h with respect to the belief mean self.x.
        """
        hs = np.zeros_like(self.map_lines)
        Hx_list = []
        for j in range(self.map_lines.shape[1]):
            ########## Code starts here ##########
            # TODO: Compute h, Hx using tb.transform_line_to_scanner_frame().
            h, Hx = tb.transform_line_to_scanner_frame(self.map_lines[:, j],
                                                       self.x,
                                                       self.tf_base_to_camera)
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
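Every solution above and below funnels its result through tb.normalize_line_parameters(), whose source is not reproduced here. The snippet below is only a plausible reconstruction inferred from how it is called (with or without a Jacobian), assuming the usual convention: keep r non-negative, flipping the line and the corresponding Jacobian row when it is not, and wrap alpha into [-pi, pi]. The real turtlebot_model implementation may differ in detail.

import numpy as np

def normalize_line_parameters_sketch(h, Hx=None):
    """Hypothetical stand-in for tb.normalize_line_parameters(); details may differ."""
    alpha, r = h
    if r < 0:
        # A negative range means the line normal points the wrong way: rotate
        # alpha by pi, negate r, and negate the range row of the Jacobian.
        alpha += np.pi
        r = -r
        if Hx is not None:
            Hx[1, :] *= -1
    # Wrap the angle into [-pi, pi].
    alpha = (alpha + np.pi) % (2 * np.pi) - np.pi
    h = np.array([alpha, r])
    return h if Hx is None else (h, Hx)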
Example No. 6
    def compute_predicted_measurements(self):
        """
        Given the best estimate of the map features (stored in the augmented
        state vector) and the current robot pose (also in the augmented state
        vector), return hs, the map features expressed in the scanner (camera)
        frame, and Hx_list, the Jacobians of those predicted measurements with
        respect to the augmented state vector.
        """
        # number of map features
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        # loop through all the features
        for j in range(J):
            idx_j = 3 + 2 * j
            # extract best-estimate line parameters from augmented state vector
            alpha, r = self.x[idx_j:idx_j + 2]
            # compute h and Hx
            Hx = np.zeros((2, self.x.size))
            h, Hx_robot = tb.transform_line_to_scanner_frame(
                (alpha, r), self.x[0:3], self.tf_base_to_camera)
            Hx[:, :3] = Hx_robot
            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                # dh/d(alpha, r): h[1] = r - d*cos(alpha - phi) where (d, phi)
                # locate the camera in the world frame, so dh[1]/dalpha = d*sin(alpha - phi).
                x_b, y_b, th_b = self.x[:3]
                x_cb, y_cb, _ = self.tf_base_to_camera
                x_cam = x_cb * np.cos(th_b) - y_cb * np.sin(th_b) + x_b
                y_cam = x_cb * np.sin(th_b) + y_cb * np.cos(th_b) + y_b
                Hx[:, idx_j:idx_j + 2] = np.eye(2)
                Hx[1, idx_j] = np.hypot(x_cam, y_cam) * np.sin(
                    alpha - np.arctan2(y_cam, x_cam))
            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 7
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []

        # Pose of the camera in the world frame: rotate the base-to-camera
        # offset by the robot heading and translate by the robot position.
        th = self.x[2]
        R_w_b = np.array([
            [np.cos(th), -np.sin(th), 0],
            [np.sin(th),  np.cos(th), 0],
            [0, 0, 1]
        ])
        x_cam, y_cam, th_cam = np.dot(R_w_b, self.tf_base_to_camera) + self.x[:3]
        #alpha_in_cam = alpha - th_cam
        norm_cam = np.linalg.norm((x_cam, y_cam))
        #r_in_cam = r - norm_cam * np.cos(alpha - np.arctan2(y_cam, x_cam))
        #h = (alpha_in_cam, r_in_cam)

        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            # HINT: Call tb.transform_line_to_scanner_frame() for the j'th map line.
            # HINT: The first 3 columns of Hx should be populated using the same approach as in EkfLocalization.compute_predicted_measurements().
            # HINT: The first two map lines (j=0,1) are fixed so the Jacobian of h wrt the alpha and r for those lines is just 0.
            # HINT: For the other map lines (j >= 2), write out h in terms of alpha and r to get the Jacobian Hx.
            line = (alpha, r)

            h, Hx[:, :3] = tb.transform_line_to_scanner_frame(
                line, self.x[:3], self.tf_base_to_camera)
            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                #Hx[:,idx_j:idx_j+2] = np.eye(2)  # FIX ME!
                Hx[:, idx_j:idx_j + 2] = np.array(
                    [[1, 0],
                     [norm_cam * np.sin(alpha - np.arctan2(y_cam, x_cam)), 1]])
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 8
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            # HINT: Call tb.transform_line_to_scanner_frame() for the j'th map line.
            # HINT: The first 3 columns of Hx should be populated using the same approach as in EkfLocalization.compute_predicted_measurements().
            # HINT: The first two map lines (j=0,1) are fixed so the Jacobian of h wrt the alpha and r for those lines is just 0.
            # HINT: For the other map lines (j >= 2), write out h in terms of alpha and r to get the Jacobian Hx.
            h, HxTemp = tb.transform_line_to_scanner_frame(
                self.x[idx_j:idx_j + 2], self.x[0:3], self.tf_base_to_camera,
                True)
            Hx[:, 0:3] = HxTemp
            Hx[:, 3:5] = np.zeros((2, 2))
            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                theta_r = self.x[2]

                R = np.array([[np.cos(theta_r), -np.sin(theta_r), 0.0],
                              [np.sin(theta_r),
                               np.cos(theta_r), 0.0], [0.0, 0.0, 1.0]])
                x_cam_world = self.x[0:3] + np.dot(R, self.tf_base_to_camera)
                x_cam, y_cam, theta_cam = x_cam_world
                """
                x_cam = self.tf_base_to_camera[0]
                y_cam = self.tf_base_to_camera[1]
                """
                Hx[:, idx_j:idx_j + 2] = np.array(
                    [[1.0, 0.0],
                     [x_cam * np.sin(alpha) - y_cam * np.cos(alpha), 1.0]])

            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 9
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            # HINT: Call tb.transform_line_to_scanner_frame() for the j'th map line.
            # HINT: The first 3 columns of Hx should be populated using the same approach as in EkfLocalization.compute_predicted_measurements().
            # HINT: The first two map lines (j=0,1) are fixed so the Jacobian of h wrt the alpha and r for those lines is just 0.
            # HINT: For the other map lines (j >= 2), write out h in terms of alpha and r to get the Jacobian Hx.
            line = np.array([alpha, r])
            h, Hx[:, :3] = tb.transform_line_to_scanner_frame(
                line, self.x[:3], self.tf_base_to_camera)

            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                Hx[:, idx_j:idx_j + 2] = np.eye(2)

                x_base, y_base, th_base = self.x[:3]
                Rz = np.array([[np.cos(th_base), -np.sin(th_base)],
                               [np.sin(th_base),
                                np.cos(th_base)]])
                x_cam_rotated, y_cam_rotated = np.dot(
                    Rz, self.tf_base_to_camera[:2])
                x_cam = x_cam_rotated + x_base
                y_cam = y_cam_rotated + y_base
                cam_coord_norm = np.linalg.norm(np.array([x_cam, y_cam]))
                Hx[1, idx_j] = -cam_coord_norm * np.sin(
                    np.arctan2(y_cam, x_cam) - alpha)
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 10
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            # HINT: Call tb.transform_line_to_scanner_frame() for the j'th map line.
            # HINT: The first 3 columns of Hx should be populated using the same approach as in EkfLocalization.compute_predicted_measurements().
            # HINT: The first two map lines (j=0,1) are fixed so the Jacobian of h wrt the alpha and r for those lines is just 0.
            # HINT: For the other map lines (j >= 2), write out h in terms of alpha and r to get the Jacobian Hx.

            h, Hx_temp = tb.transform_line_to_scanner_frame(
                np.array([alpha, r]), self.x[0:3], self.tf_base_to_camera)
            Hx[:, 0:3] = Hx_temp
            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                da_da = 1.
                da_dr = 0.
                x_off, y_off, th_off = self.tf_base_to_camera
                x, y, th = self.x[0:3]
                # The base-to-camera offset is rotated into the world frame by
                # the robot heading th (not by the camera yaw offset th_off).
                dr_da = np.sin(alpha) * (
                    x + np.cos(th) * x_off -
                    np.sin(th) * y_off) - np.cos(alpha) * (
                        y + np.sin(th) * x_off + np.cos(th) * y_off)
                dr_dr = 1.
                Hx[:, idx_j:idx_j + 2] = np.array([[da_da, da_dr],
                                                   [dr_da, dr_dr]])
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 11
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            x, y, th = self.x[:3]
            x_cam, y_cam, th_cam = self.tf_base_to_camera
            # compute pose of camera in world frame
            x_cam_w = x_cam * np.cos(th) - y_cam * np.sin(th) + x
            y_cam_w = x_cam * np.sin(th) + y_cam * np.cos(th) + y

            h = np.array([
                alpha - th - th_cam,
                r - x_cam_w * np.cos(alpha) - y_cam_w * np.sin(alpha)
            ])

            dh_dth = (x_cam * np.sin(th) + y_cam * np.cos(th)) * np.cos(alpha) - \
                     (x_cam * np.cos(th) - y_cam * np.sin(th)) * np.sin(alpha)
            Hx[:, :3] = np.array([[0, 0, -1],
                                  [-np.cos(alpha), -np.sin(alpha), dh_dth]])

            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                Hx[:, idx_j:idx_j + 2] = np.eye(2)  # FIX ME!
                Hx[1,
                   idx_j] = x_cam_w * np.sin(alpha) - y_cam_w * np.cos(alpha)
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 12
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            h = np.zeros(2)
            h[0] = alpha - self.x[2] - self.tf_base_to_camera[2]
            h[1] = r - self.x[0] * np.cos(alpha) - self.x[1] * np.sin(
                alpha) - self.tf_base_to_camera[0] * np.cos(
                    alpha - self.x[2]) - self.tf_base_to_camera[1] * np.sin(
                        alpha - self.x[2])

            Hx[0, 2] = -1
            Hx[1, 0] = -np.cos(alpha)
            Hx[1, 1] = -np.sin(alpha)
            Hx[1, 2] = - self.tf_base_to_camera[0] * \
                np.sin(
                    alpha - self.x[2]) + self.tf_base_to_camera[1] * np.cos(alpha - self.x[2])
            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                Hx[:, idx_j:idx_j + 2] = np.eye(2)  # FIX ME!
                Hx[1, idx_j] = self.x[0] * np.sin(alpha) - self.x[1] * np.cos(
                    alpha) + self.tf_base_to_camera[0] * np.sin(
                        alpha - self.x[2]
                    ) - self.tf_base_to_camera[1] * np.cos(alpha - self.x[2])
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 13
    def compute_predicted_measurements(self):
        """
        Given a single map line in the world frame, outputs the line parameters
        in the scanner frame so it can be associated with the lines extracted
        from the scanner measurements.

        Input:
            None
        Output:
            hs: np.array[M,2,J] - J line parameters in the scanner (camera) frame for M particles.
        """
        J = self.map_lines.shape[1]
        hs_all = np.empty((self.M, 2, J))
        for index, particle in enumerate(self.xs):
            hs = np.empty_like(self.map_lines)
            for j, line in enumerate(self.map_lines.T):
                h = tb.transform_line_to_scanner_frame(line, particle, self.tf_base_to_camera, False)
                h = tb.normalize_line_parameters(h)
                hs[:,j] = h
            hs_all[index] = hs
        return hs_all
Example No. 14
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        x_b, y_b, th_b = self.x[:3]
        x_cb, y_cb, th_cb = self.tf_base_to_camera
        x_c = np.cos(th_b) * x_cb - np.sin(th_b) * y_cb + x_b
        y_c = np.sin(th_b) * x_cb + np.cos(th_b) * y_cb + y_b
        d_c = np.linalg.norm([x_c, y_c])
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            h, Hxp = tb.transform_line_to_scanner_frame(
                self.x[idx_j:idx_j + 2], self.x[:3], self.tf_base_to_camera)
            Hx[:, :3] = Hxp
            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                Hx[:, idx_j:idx_j + 2] = np.array(
                    [[1, 0], [d_c * np.sin(alpha - np.arctan2(y_c, x_c)), 1]])
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 15
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.
            # HINT: Call tb.transform_line_to_scanner_frame() for the j'th map line.
            # HINT: The first 3 columns of Hx should be populated using the same approach as in EkfLocalization.compute_predicted_measurements().
            # HINT: The first two map lines (j=0,1) are fixed so the Jacobian of h wrt the alpha and r for those lines is just 0.
            # HINT: For the other map lines (j >= 2), write out h in terms of alpha and r to get the Jacobian Hx.
            h, Hx_x = tb.transform_line_to_scanner_frame(
                np.array([alpha, r]),
                self.x,
                self.tf_base_to_camera,
                compute_jacobian=True)
            Hx[:, 0:3] = Hx_x
            x = self.x[0:3]
            line = np.array([alpha, r])
            tf_base_to_camera = self.tf_base_to_camera
            # Rotation matrix from the world frame to the base frame
            c, s = np.cos(x[2]), np.sin(x[2])
            R_world2base = np.array([[c, -s], [s, c]])

            # Translation vector from the world frame to the base frame
            t_world2base = x[0:2]
            t_world2base = np.asarray(t_world2base)
            t_world2base = np.reshape(t_world2base, (2, 1))

            # Rotation matrix from the base frame to the camera frame
            c, s = np.cos(tf_base_to_camera[2]), np.sin(tf_base_to_camera[2])
            R_base2cam = np.array([[c, -s], [s, c]])

            # Translation vector from the base frame to the camera frame
            t_base2cam = tf_base_to_camera[0:2]
            t_base2cam = np.asarray(t_base2cam)
            t_base2cam = np.reshape(t_base2cam, (2, 1))
            # Pose of the camera in the world frame
            # pos_cam = np.dot(np.linalg.inv(R_world2base), t_base2cam) + t_world2base
            pos_cam = np.matmul((R_world2base), t_base2cam) + t_world2base

            theta_cam = x[2] + tf_base_to_camera[2]

            alpha_cam = line[0] - theta_cam
            d = np.linalg.norm(pos_cam, 2)
            theta_world2cam = np.arctan2(pos_cam[1], pos_cam[0])

            # print(theta_world2cam)

            r_cam = line[1] - d * np.cos(line[0] - theta_world2cam)
            Hx_j = np.eye(2)
            Hx_j[1, 0] = d * np.sin(alpha - theta_world2cam)
            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                Hx[:, idx_j:idx_j + 2] = Hx_j  # FIX ME!
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 16
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        X = self.x[:3]
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.

            # Exactly the same as tb.transform_line_to_scanner_frame()
            alp = alpha
            x, y, th = X
            xcam_R, ycam_R, thcam_R = self.tf_base_to_camera  # Camera pose. in Robot frame.
            C_Rh = np.array(
                [xcam_R, ycam_R,
                 1])  # Camera pose in homogeneous coords. Robot frame.

            RW_T = np.array([[np.cos(th), -np.sin(th), x],
                             [np.sin(th), np.cos(th), y], [0, 0, 1]])

            Cam_h = np.matmul(
                RW_T, C_Rh)  # Camera in homogeneous coords. World frame.
            xcam, ycam, hcam = Cam_h
            xcam, ycam = xcam / hcam, ycam / hcam  # Camera in World frame coords.

            alp_C = alp - th - thcam_R
            r_C = r - xcam * np.cos(alp) - ycam * np.sin(alp)
            h = np.array([alp_C, r_C])

            dalpC_dxR = 0
            dalpC_dyR = 0
            dalpC_dthR = -1

            drC_dxW = -np.cos(alp)
            drC_dyW = -np.sin(alp)
            drC_dthW = (-np.cos(alp) *
                        (-xcam_R * np.sin(th) - ycam_R * np.cos(th)) -
                        np.sin(alp) *
                        (xcam_R * np.cos(th) - ycam_R * np.sin(th)))

            # Hx is bigger now
            Hx[0:2, 0:3] = np.array([[dalpC_dxR, dalpC_dyR, dalpC_dthR],
                                     [drC_dxW, drC_dyW, drC_dthW]])
            # So the columns of Hx go dx, dy, dth, dalp1, dr1, dalp2, dr2, ...

            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them. i.e. Entries all zero.
            # Otherwise:

            # dalpC_dalpC = 1
            # dalpC_drC   = 0
            drC_dalpC = xcam * np.sin(alp) - ycam * np.cos(alp)
            # drC_drC     = 1

            if j >= 2:
                Hx[:, idx_j:idx_j + 2] = np.eye(2)
                Hx[1, idx_j] = drC_dalpC
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
Example No. 17
    def compute_predicted_measurements(self):
        """
        Adapt this method from EkfLocalization.compute_predicted_measurements().
        """
        J = (self.x.size - 3) // 2
        hs = np.zeros((2, J))
        Hx_list = []
        for j in range(J):
            idx_j = 3 + 2 * j
            alpha, r = self.x[idx_j:idx_j + 2]

            Hx = np.zeros((2, self.x.size))

            ########## Code starts here ##########
            # TODO: Compute h, Hx.

            x = self.x[0:3]

            rotation = np.array([
                [np.cos(x[2]), -np.sin(x[2])],
                [np.sin(x[2]), np.cos(x[2])]
            ])

            base_to_camera_xy = np.array([
                [self.tf_base_to_camera[0]],
                [self.tf_base_to_camera[1]]
            ])

            base_to_camera_xy_prime = np.matmul(rotation, base_to_camera_xy)

            world_to_camera = base_to_camera_xy_prime + np.array([[x[0]], [x[1]]])

            r_c = r - np.cos(alpha) * world_to_camera[0, 0] - np.sin(alpha) * world_to_camera[1, 0]
            alpha_c = alpha - self.tf_base_to_camera[2] - x[2]

            h = np.array([alpha_c, r_c])

            method = 2
            if method == 1:
                q = (r*np.cos(alpha) - x[0])**2 + (r*np.sin(alpha) - x[1])**2

                a = (x[0] - r*np.cos(alpha))/np.sqrt(q)
                b = (x[1] - r*np.sin(alpha))/np.sqrt(q)
                c = -b/np.sqrt(q)
                d = a/np.sqrt(q)

                Hx[0, 0] = a
                Hx[0, 1] = b
                Hx[1, 0] = c
                Hx[1, 1] = d
                Hx[1, 2] = -1
            elif method == 2:
                del_r_del_th = self.tf_base_to_camera[0] * np.cos(alpha) * np.sin(x[2]) + self.tf_base_to_camera[1] * \
                    np.cos(alpha) * np.cos(x[2]) - self.tf_base_to_camera[0] * np.sin(alpha) * np.cos(x[2]) + \
                    self.tf_base_to_camera[1] * np.sin(alpha) * np.sin(x[2])

                del_r_del_alpha = np.sin(alpha) * (
                    self.tf_base_to_camera[0] * np.cos(x[2]) - self.tf_base_to_camera[1] * np.sin(x[2]) + x[0]
                ) - np.cos(alpha) * (
                    self.tf_base_to_camera[0] * np.sin(x[2]) + self.tf_base_to_camera[1] * np.cos(x[2]) + x[1]
                )

                Hx[0, 2] = -1
            # Hx[0, idx_j] = 1

                Hx[1, 0] = -np.cos(alpha)
                Hx[1, 1] = -np.sin(alpha)
                Hx[1, 2] = del_r_del_th
            else:
                print("WRONG METHOD NUMBER")
                return -1

            # Hx[1, idx_j] = del_r_del_alpha
            # Hx[1, idx_j + 1] = 1

            # First two map lines are assumed fixed so we don't want to propagate
            # any measurement correction to them.
            if j >= 2:
                if method == 1:
                    Hx[:, idx_j:idx_j + 2] = np.array([
                        [-a, -b],
                        [-c, -d]
                    ])  # FIX ME!
                else:
                    Hx[:, idx_j:idx_j + 2] = np.eye(2)
                    Hx[1, idx_j] = del_r_del_alpha      # FIXED!!
            ########## Code ends here ##########

            h, Hx = tb.normalize_line_parameters(h, Hx)
            hs[:, j] = h
            Hx_list.append(Hx)

        return hs, Hx_list
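Most of the variation across these examples lies in the hand-derived dh/d(alpha, r) block, so a numerical sanity check is worth keeping nearby. The snippet below compares the analytic [[1, 0], [d*sin(alpha - phi), 1]] block against central finite differences of the measurement model written out after Example No. 1; the h_line helper, the sample pose, and the offsets are illustrative only and not part of the assignment code.

import numpy as np

def h_line(alpha, r, pose, tf_base_to_camera):
    """Map line (alpha, r) expressed in the scanner frame (same model as above)."""
    x, y, th = pose
    x_cb, y_cb, th_cb = tf_base_to_camera
    x_cam = x + x_cb * np.cos(th) - y_cb * np.sin(th)
    y_cam = y + x_cb * np.sin(th) + y_cb * np.cos(th)
    return np.array([alpha - th - th_cb,
                     r - x_cam * np.cos(alpha) - y_cam * np.sin(alpha)])

pose = np.array([0.5, -0.2, 0.3])                 # illustrative robot pose
tf_base_to_camera = np.array([0.1, 0.05, 0.0])    # illustrative camera offset
alpha, r = 0.8, 1.5                               # illustrative map line

# Analytic block: dh/d(alpha, r) = [[1, 0], [d*sin(alpha - phi), 1]].
x_cam = pose[0] + tf_base_to_camera[0] * np.cos(pose[2]) - tf_base_to_camera[1] * np.sin(pose[2])
y_cam = pose[1] + tf_base_to_camera[0] * np.sin(pose[2]) + tf_base_to_camera[1] * np.cos(pose[2])
d, phi = np.hypot(x_cam, y_cam), np.arctan2(y_cam, x_cam)
H_analytic = np.array([[1.0, 0.0], [d * np.sin(alpha - phi), 1.0]])

# Central finite differences wrt alpha (first column) and r (second column).
eps = 1e-6
H_numeric = np.column_stack([
    (h_line(alpha + eps, r, pose, tf_base_to_camera) -
     h_line(alpha - eps, r, pose, tf_base_to_camera)) / (2 * eps),
    (h_line(alpha, r + eps, pose, tf_base_to_camera) -
     h_line(alpha, r - eps, pose, tf_base_to_camera)) / (2 * eps),
])
assert np.allclose(H_analytic, H_numeric, atol=1e-5)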