def tfPolarLine(self, tf, line):
        '''
        Transforms a polar line in the robot frame to a polar line in the
        world frame
        '''
        # Decompose transformation
        x_x = tf[0]
        x_y = tf[1]
        x_ang = tf[2]

        # Compute the new phi
        phi = angle_wrap(line[1] + x_ang)

        rho_ = line[0] + x_x * np.cos(phi) + x_y * np.sin(phi)
        sign = 1
        if rho_ < 0:
            rho_ = -rho_
            phi = angle_wrap(phi + pi)
            sign = -1

        # Allocate jacobians
        H_tf = np.zeros((2, 3))
        H_line = np.eye(2)

        # TODO: Evaluate jacobian with respect to the transformation
        # (partials taken at the possibly flipped phi computed above)
        H_tf = np.array([[np.cos(phi), np.sin(phi),
                          -x_x * np.sin(phi) + x_y * np.cos(phi)],
                         [0, 0, 1]])

        # TODO: Evaluate jacobian with respect to the line
        # (the range row carries the sign flip applied to rho)
        H_line = np.array([[sign, -x_x * np.sin(phi) + x_y * np.cos(phi)],
                           [0, 1]])

        return np.array([rho_, phi]), H_tf, H_line
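
A minimal standalone sketch of the rho/phi update used above, handy as a sanity check (numpy only; tf_polar_line and _wrap are illustrative names, not part of the original project):

import numpy as np

def _wrap(a):
    # Wrap an angle to (-pi, pi]; stands in for the project's angle_wrap
    return np.arctan2(np.sin(a), np.cos(a))

def tf_polar_line(tf, line):
    # Same rho/phi update as tfPolarLine above, without the jacobians
    phi = _wrap(line[1] + tf[2])
    rho = line[0] + tf[0] * np.cos(phi) + tf[1] * np.sin(phi)
    if rho < 0:
        rho, phi = -rho, _wrap(phi + np.pi)
    return np.array([rho, phi])

print(tf_polar_line([0, 0, 0], [2.0, 0.3]))        # identity transform: [2.0, 0.3]
print(tf_polar_line([0, 0, 0.5], [2.0, 0.3]))      # pure rotation shifts phi: [2.0, 0.8]
print(tf_polar_line([1.0, 0.0, 0.0], [2.0, 0.0]))  # translation along the normal: [3.0, 0.0]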
def merge(lines, dist_thres, ang_thres):
    '''
    Merge similar lines according to the given thresholds.
    '''
    # No data received
    if lines is None:
        return np.array([])
        
    # Check and merge similar consecutive lines
    i = 0
    while i < lines.shape[0] - 1:
        
        # Line angles
        ang1 = math.atan2((lines[i,3]-lines[i,1]),(lines[i,2]-lines[i,0]))
        ang2 = math.atan2((lines[i+1,3]-lines[i+1,1]),(lines[i+1,2]-lines[i+1,0]))
        
        # Below thresholds?
        angdiff = abs(angle_wrap(ang1 - ang2))
        disdiff = math.sqrt(math.pow(lines[i+1,1]-lines[i,3],2) + math.pow(lines[i+1,0]-lines[i,2],2))
        if angdiff < ang_thres and disdiff < dist_thres:
            
            # Joined line
            lines[i,:] = np.array([lines[i,0], lines[i,1], lines[i+1,2], lines[i+1,3]])
            
            # Delete unnecessary line
            lines = np.delete(lines, i+1, 0)
            
        # Nothing to merge
        else:
            i += 1
    print "============================================================"
    print lines.shape
    return lines
Example #3
def merge(lines, dist_thres, ang_thres):
    '''
    Merge similar lines according to the given thresholds.
    '''
    # No data received
    if lines is None:
        return np.array([])
        
    # Check and merge similar consecutive lines
    i = 0
    while i < lines.shape[0] - 1:
        
        # Line angles
        ang1 = math.atan2(lines[i,3] - lines[i,1], lines[i,2] - lines[i,0]) 
        ang2 = math.atan2(lines[i+1,3] - lines[i+1,1], lines[i+1,2] - lines[i+1,0])  
        
        # Below thresholds?
        angdiff = abs(angle_wrap(ang1-ang2))
        disdiff = math.sqrt(math.pow(lines[i,2]-lines[i+1,0],2) + math.pow(lines[i,3]-lines[i+1,1],2))
        if angdiff < ang_thres and disdiff < dist_thres:
            
            # Joined line
            lines[i,:] = np.array([lines[i,0], lines[i,1], lines[i+1,2], lines[i+1,3]])
            
            # Delete unnecessary line
            lines = np.delete(lines, i+1,0)
            
        # Nothing to merge
        else:
            i += 1
            
    return lines
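
An illustrative call (made-up data, assuming merge and its numpy/math/angle_wrap dependencies are already imported): two almost collinear, almost touching segments are joined, while a perpendicular one is kept separate.

lines = np.array([[0.0, 0.0, 1.0, 0.0],     # segment along the x axis
                  [1.05, 0.02, 2.0, 0.03],  # nearly continues the first one
                  [2.0, 0.0, 2.0, 1.0]])    # perpendicular segment, kept apart
print(merge(lines, dist_thres=0.2, ang_thres=0.1))
# Expected: the first two rows joined into [0, 0, 2, 0.03]; the last row unchanged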
Example #4
    def get_polar_line(self, line, odom):
        '''
        Transforms a line from [x1 y1 x2 y2] from the world frame to the
        vehicle frame using odometry [x y ang].
        Returns [range theta]
        '''
        # Line points
        x1 = line[0]
        y1 = line[1]
        x2 = line[2]
        y2 = line[3]

        # Compute line (a, b, c) and range
        line = np.array([y1 - y2, x2 - x1, x1 * y2 - x2 * y1])
        pt = np.array([odom[0], odom[1], 1])
        dist = np.dot(pt, line) / np.linalg.norm(line[:2])

        # Compute angle
        if dist < 0:
            ang = np.arctan2(line[1], line[0])
        else:
            ang = np.arctan2(-line[1], -line[0])

        # Return in the vehicle frame
        return np.array([np.abs(dist), angle_wrap(ang - odom[2])])
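
An illustrative call on an instance (here named node, purely hypothetically): a wall along the world y axis at x = 4, seen from a robot at (1, 0) with zero heading, should appear 3 m ahead at theta = 0.

line = [4.0, -1.0, 4.0, 1.0]   # [x1 y1 x2 y2] in the world frame
odom = [1.0, 0.0, 0.0]         # robot pose [x y ang]
print(node.get_polar_line(line, odom))   # approximately [3.0, 0.0]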
Example #5
    def read_position(self, msg):
        '''
        read_position copies the position received in the message (msg) to the
        internal class variables self.position_x, self.position_y and
        self.position_theta.

        Tip: in order to convert from a quaternion to Euler angles, take a look
        at tf.transformations.
        '''

        #Find index of mobile_base
        mobile_base_idx = -1
        for i in range(len(msg.name)):
            if msg.name[i] == 'mobile_base':
                mobile_base_idx = i
                break

        #Update position_x and position_y
        self.position_x = msg.pose[mobile_base_idx].position.x
        self.position_y = msg.pose[mobile_base_idx].position.y

        #Get euler angles from quaternion
        (ang_x, ang_y, ang_z) = euler_from_quaternion([
            msg.pose[mobile_base_idx].orientation.x,
            msg.pose[mobile_base_idx].orientation.y,
            msg.pose[mobile_base_idx].orientation.z,
            msg.pose[mobile_base_idx].orientation.w
        ])
        #Update theta with euler_z
        self.position_theta = angle_wrap(ang_z)
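
The quaternion-to-yaw step can be checked in isolation: a pure 90 degree yaw corresponds to the quaternion (0, 0, sin(pi/4), cos(pi/4)). A small check, assuming a ROS 1 environment where tf.transformations is importable:

import numpy as np
from tf.transformations import euler_from_quaternion

q = [0.0, 0.0, np.sin(np.pi / 4), np.cos(np.pi / 4)]   # pure 90 degree yaw
roll, pitch, yaw = euler_from_quaternion(q)
print(roll, pitch, yaw)   # ~0.0 0.0 1.5708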
Example #6
    def predict(self, odom):
        '''
        Moves particles with the given odometry.
        odom: incremental odometry [delta_x delta_y delta_yaw] in the vehicle frame
        '''
        #Check if we have moved from previous reading.
        if odom[0] == 0 and odom[1] == 0 and odom[2] == 0:
            self.moving = False
        else:
            # TODO: code here!!
            # Add Gaussian noise to odometry measures
            xy_noise = np.random.randn(2, self.num) / 100
            ang_noise = np.random.randn(self.num) / 100

            # Increment particle positions in correct frame
            cos_theta = np.cos(self.p_ang)
            sin_theta = np.sin(self.p_ang)
            dx = odom[0] * cos_theta - odom[1] * sin_theta
            dy = odom[0] * sin_theta + odom[1] * cos_theta
            self.p_xy += np.vstack((dx, dy)) + xy_noise

            # Increment angle
            self.p_ang += odom[2] + ang_noise
            self.p_ang = angle_wrap(self.p_ang)

            #Update flag for resampling
            self.moving = True
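
The frame handling above can be checked on its own: the odometry increment, expressed in the vehicle frame, is rotated by the particle heading before being added to the world-frame position. A quick numeric check with made-up values (numpy only):

import numpy as np

theta = np.pi / 2                 # particle heading: facing +y in the world
odom = np.array([1.0, 0.0, 0.0])  # move 1 m straight ahead (vehicle frame)
dx = odom[0] * np.cos(theta) - odom[1] * np.sin(theta)
dy = odom[0] * np.sin(theta) + odom[1] * np.cos(theta)
print(dx, dy)   # ~0.0 1.0 -> the particle moves along world +y, as expected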
Example #7
    def predict(self, odom):
        '''
        Moves particles with the given odometry.
        odom: incremental odometry [delta_x delta_y delta_yaw] in the vehicle frame
        '''
        #Check if we have moved from previous reading.
        if odom[0] == 0 and odom[1] == 0 and odom[2] == 0:
            self.moving = False
        else:
            # TODO: code here!!
            # Add Gaussian noise to odometry measures
            delta_x_noisy = odom[0] + self.odom_lin_sigma * np.random.randn(
                self.num)
            delta_y_noisy = odom[1] + self.odom_lin_sigma * np.random.randn(
                self.num)
            delta_yaw_noisy = odom[2] + self.odom_ang_sigma * np.random.randn(
                self.num)

            # Transform from vehicle frame to world frame
            delta_x_noisy_w = delta_x_noisy * np.cos(
                -self.p_ang) + delta_y_noisy * np.sin(-self.p_ang)
            delta_y_noisy_w = -delta_x_noisy * np.sin(
                -self.p_ang) + delta_y_noisy * np.cos(-self.p_ang)

            # Increment particle positions in correct frame
            self.p_xy += np.vstack((delta_x_noisy_w, delta_y_noisy_w))

            # Increment angle
            self.p_ang += delta_yaw_noisy
            self.p_ang = angle_wrap(self.p_ang)

            #Update flag for resampling
            self.moving = True
Example #8
    def predict(self, odom):
        '''
        Moves particles with the given odometry.
        odom: incremental odometry [delta_x delta_y delta_yaw] in the vehicle frame
        '''
        #Check if we have moved from previous reading.
        if odom[0] == 0 and odom[1] == 0 and odom[2] == 0:
            self.moving = False
        else:
            for i in range(self.num):
                self.p_xy[
                    0, i] += (odom[0] + np.random.randn() *
                              self.odom_lin_sigma) * np.cos(self.p_ang[i]) - (
                                  odom[1] + np.random.randn() *
                                  self.odom_lin_sigma) * np.sin(self.p_ang[i])
                self.p_xy[
                    1, i] += (odom[0] + np.random.randn() *
                              self.odom_lin_sigma) * np.sin(self.p_ang[i]) + (
                                  odom[1] + np.random.randn() *
                                  self.odom_lin_sigma) * np.cos(self.p_ang[i])

            self.p_ang += odom[2] * np.ones(self.num) + np.random.randn(
                self.num) * self.odom_ang_sigma
            self.p_ang = angle_wrap(self.p_ang)

            #Update flag for resampling
            self.moving = True
Example #9
    def predict(self, odom):
        '''
        Moves particles with the given odometry.
        odom: incremental odometry [delta_x delta_y delta_yaw] in the vehicle frame
        '''
        #Check if we have moved from previous reading.
        if odom[0] == 0 and odom[1] == 0 and odom[2] == 0:
            self.moving = False
        else:
            # TODO: code here!!
            # Add Gaussian noise to odometry measures
            odomx = odom[0] * np.cos(self.p_ang) - odom[1] * np.sin(self.p_ang)
            odomy = odom[0] * np.sin(self.p_ang) + odom[1] * np.cos(self.p_ang)

            odomx += self.odom_lin_sigma * np.random.randn(self.p_xy.shape[1])
            odomy += self.odom_lin_sigma * np.random.randn(self.p_xy.shape[1])
            odomt = odom[2] + self.odom_ang_sigma * np.random.randn(
                self.p_ang.shape[0])

            # Increment particle positions in correct frame
            self.p_xy[0] += odomx
            self.p_xy[1] += odomy

            # Increment angle
            self.p_ang += odomt
            self.p_ang = angle_wrap(self.p_ang)

            #Update flag for resampling
            self.moving = True
Example #10
 def dist_to_goal_ang(self):
     '''
     dist_to_goal_ang computes the orientation distance to the active
     goal
     '''
     return np.abs(
         angle_wrap(self.theta[self.active_goal] - self.position_theta))
Example #11
    def compute_velocity(self):
        '''
        compute_velocity computes the velocity which will be published.
        
        TODO implement!
        '''
        dx = self.x[self.active_goal] - self.position_x  # x distance between turtle and target
        dy = self.y[self.active_goal] - self.position_y  # y distance between turtle and target
        d = np.linalg.norm([dx, dy])  # total distance between turtle and target

        if d > self.goal_th_xy / 2:  # Using threshold/2 to avoid entering and leaving goal radius
            theta_target = np.arctan2(dy, dx)  # angle to target
            dt = angle_wrap(theta_target -
                            self.position_theta)  # angle difference

            # For derivative controller
            d_deriv = d - self.d_prev
            self.d_prev = d
            dt_deriv = dt - self.dt_prev
            self.dt_prev = dt

            # Calculate control signals
            linv = (self.kp_v * d + self.kd_v * d_deriv) * np.cos(dt / 2)
            self.vmsg.linear.x = np.min([linv / np.sqrt(np.abs(linv)), 0.2])
            angv = self.kp_w * dt + self.kd_w * dt_deriv
            self.vmsg.angular.z = angv / np.sqrt(np.abs(angv))

        elif not self.has_arrived_ang():
            dt = angle_wrap(self.theta[self.active_goal] -
                            self.position_theta)  # angle difference

            # For derivative controller
            dt_deriv = dt - self.dt_prev
            self.dt_prev = dt

            # Calculate control signals
            self.vmsg.linear.x = 0
            angv = self.kp_w * dt + self.kd_w * dt_deriv
            self.vmsg.angular.z = angv / np.sqrt(np.abs(angv))
Example #12
    def weight(self, lines):
        '''
        Look for the lines seen from the robot and compare them to the given map.
        Lines expressed as [x1 y1 x2 y2].
        '''
        # TODO: code here!!
        # Constant values for all weightings
        val_rng = 1.0 / (self.meas_rng_noise * np.sqrt(2 * np.pi))
        val_ang = 1.0 / (self.meas_ang_noise * np.sqrt(2 * np.pi))

        tuning_factor = 15

        # Loop over particles
        for i in range(self.num):
            map_polar = np.zeros((self.map.shape[0],2))

            # Transform map lines to local frame and to [range theta]
            for j in range(self.map.shape[0]):
                map_polar[j,:] = get_polar_line(self.map[j,:],[self.p_xy[0,i],self.p_xy[1,i],self.p_ang[i]])

            # Transform measured lines to [range theta] and weight them
            for j in range(lines.shape[0]):
                if ( (lines[j,0]-lines[j,2])**2 + (lines[j,1]-lines[j,3])**2 ) > 0:
                    local_map_polar = get_polar_line(lines[j,:])
                    w = 0
                    index = -1
                    # Weight them
                    for k in range(map_polar.shape[0]):
                        w_r = np.exp(-(map_polar[k,0]-local_map_polar[0])**2/(tuning_factor*2*self.odom_lin_sigma**2)) * val_rng
                        w_a = np.exp(-angle_wrap(map_polar[k,1]-local_map_polar[1])**2/(tuning_factor*2*self.odom_ang_sigma**2)) * val_ang
                        # w = np.maximum(w,w_r*w_a)
                        if w_r*w_a > w:
                            w = w_r * w_a
                            index = k

                    # self.p_wei[i] *= w
                 
                    # OPTIONAL question
                    # make sure segments correspond, if not put weight to zero
                    len_line_map = np.linalg.norm([(self.map[index,0]-self.map[index,2]), (self.map[index,1]-self.map[index,3])])
                    len_line_odom = np.linalg.norm([(lines[j,0]-lines[j,2]), (lines[j,1]-lines[j,3])])

                    # Check if measured line is larger than best fitting map line
                    # and set p_wei to 0 if it is (best match is invalid)
                    if len_line_odom > 1.1*len_line_map: # 10% error margin
                        # not 0 to avoid problems with turning all values to 0
                        self.p_wei[i] *= 0.00000000000000001
                    else:
                        self.p_wei[i] *= w
                

            
        # Normalize weights
        self.p_wei /= np.sum(self.p_wei)
        # TODO: Compute the effective number of particles
        self.n_eff = 1.0/np.sum(self.p_wei**2)
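
The effective sample size computed above is a standard resampling diagnostic: it equals the number of particles when all weights are equal and collapses toward 1 when one particle dominates. A two-case check (numpy only, made-up weights):

import numpy as np

w = np.ones(100) / 100.0
print(1.0 / np.sum(w**2))                  # 100.0: every particle is useful

w = np.array([0.97] + [0.03 / 99] * 99)
print(1.0 / np.sum(w**2))                  # ~1.06: resampling is clearly needed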
Example #13
    def compute_velocity(self):
        '''
        compute_velocity computes the velocity which will be published.

        '''
        self.vmsg.angular.z = math.atan2(
            self.y[self.active_goal] - self.position_y,
            self.x[self.active_goal] - self.position_x) - self.position_theta
        self.vmsg.angular.z = angle_wrap(self.vmsg.angular.z)
        if abs(self.vmsg.angular.z) > math.pi / 12:
            # In order not to enter an infinite loop of rotations
            self.vmsg.linear.x = 0.3
        else:
            self.vmsg.linear.x = 0.5 * math.sqrt(
                pow(self.position_x - self.x[self.active_goal], 2) +
                pow(self.position_y - self.y[self.active_goal], 2))
        if (self.has_arrived_xy()):
            self.vmsg.linear.x = 0
            self.vmsg.angular.z = angle_wrap(self.theta[self.active_goal] -
                                             self.position_theta)
Example #14
    def tfPolarLine(self, tf, line):
        '''
        Transforms a polar line in the robot frame to a polar line in the
        world frame
        '''
        # Decompose transformation
        x_x = tf[0]
        x_y = tf[1]
        x_ang = tf[2]

        # Compute the new phi
        phi = angle_wrap(line[1] + x_ang)

        # Auxiliary computations
        sqrt2 = x_x**2 + x_y**2
        sqrt = np.sqrt(sqrt2)
        atan2 = np.arctan2(x_y, x_x)
        sin = np.sin(atan2 - phi)
        cos = np.cos(atan2 - phi)

        # Compute the new rho
        rho = line[0] + sqrt * cos
        if rho < 0:
            rho = -rho
            phi = angle_wrap(phi + pi)

        # Allocate jacobians
        H_tf = np.zeros((2, 3))
        H_line = np.eye(2)

        # Note: phi already includes x_ang, so the partials are evaluated at phi
        a = np.cos(phi)
        b = np.sin(phi)
        c = -x_x * np.sin(phi) + x_y * np.cos(phi)

        # TODO: Evaluate jacobian with respect to the transformation
        H_tf = np.mat([[a, b, c], [0, 0, 1]])

        # TODO: Evaluate jacobian with respect to the line
        H_line = np.mat([[1, c], [0, 1]])

        return np.array([rho, phi]), H_tf, H_line
Example #15
    def odom_callback(self, msg):
        '''
        Publishes a tf based on the odometry of the robot and calculates the incremental odometry as seen from the vehicle frame.
        '''
        # Lock thread
        self.lock.acquire()

        # Save time
        self.time = msg.header.stamp

        # Translation
        trans = (msg.pose.pose.position.x, msg.pose.pose.position.y,
                 msg.pose.pose.position.z)

        # Rotation
        rot = (msg.pose.pose.orientation.x, msg.pose.pose.orientation.y,
               msg.pose.pose.orientation.z, msg.pose.pose.orientation.w)

        # Publish transform
        self.tfBroad.sendTransform(translation=trans,
                                   rotation=rot,
                                   time=msg.header.stamp,
                                   child='/base_footprint',
                                   parent='/world')

        # Incremental odometry
        if self.last_odom is not None:

            # Increment computation
            delta_x = msg.pose.pose.position.x - self.last_odom.pose.pose.position.x
            delta_y = msg.pose.pose.position.y - self.last_odom.pose.pose.position.y
            yaw = yaw_from_quaternion(msg.pose.pose.orientation)
            lyaw = yaw_from_quaternion(self.last_odom.pose.pose.orientation)

            #            self.uk = np.array([delta_x,delta_y, angle_wrap(yaw)])
            # Odometry seen from vehicle frame
            self.uk = np.array([
                delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw),
                -delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw),
                angle_wrap(yaw - lyaw)
            ])

            # Flag available
            self.cur_odom = msg
            self.new_odom = True

        # Save initial odometry for increment
        else:
            self.last_odom = msg

        # Unlock thread
        self.lock.release()
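
The incremental odometry above is the world-frame displacement rotated back into the previous vehicle frame (a rotation by -lyaw). A standalone check with made-up values (numpy only):

import numpy as np

lyaw = np.pi / 2              # previous heading: facing world +y
delta_x, delta_y = 0.0, 1.0   # the robot advanced 1 m along world +y
uk = np.array([delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw),
               -delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw)])
print(uk)   # ~[1.0, 0.0]: seen from the vehicle, the motion was straight ahead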
Example #16
 def odom_callback(self, msg):
     '''
     Publishes a tf based on the odometry of the robot and calculates the incremental odometry as seen from the vehicle frame.
     '''
     # Lock thread
     self.lock.acquire()
     
     # Save time
     self.time = msg.header.stamp
     
     # Translation
     trans = (msg.pose.pose.position.x, 
              msg.pose.pose.position.y, 
              msg.pose.pose.position.z)
     
     # Rotation
     rot = (msg.pose.pose.orientation.x,
            msg.pose.pose.orientation.y,
            msg.pose.pose.orientation.z,
            msg.pose.pose.orientation.w)
     
     # Publish transform
     self.tfBroad.sendTransform(translation = trans,
                                rotation = rot, 
                                time = msg.header.stamp,
                                child = '/base_footprint',
                                parent = '/world')
                                
     # Incremental odometry
     if self.last_odom is not None:
         
         # Increment computation
         delta_x = msg.pose.pose.position.x - self.last_odom.pose.pose.position.x
         delta_y = msg.pose.pose.position.y - self.last_odom.pose.pose.position.y
         yaw = yaw_from_quaternion(msg.pose.pose.orientation)
         lyaw = yaw_from_quaternion(self.last_odom.pose.pose.orientation)
         # Odometry seen from vehicle frame
         self.uk = np.array([delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw),
                            -delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw),
                             angle_wrap(yaw - lyaw)])
                             
         # Flag available
         self.cur_odom = msg
         self.new_odom = True
     
     # Save initial odometry for increment
     else:
         self.last_odom = msg
     
     # Unlock thread
     self.lock.release()
Example #17
    def odom_callback(self, msg):
        '''
        Calculates the incremental odometry as seen from the vehicle frame.
        '''
        # Check if first data
        if self.last_odom is None:

            # Save
            self.last_odom = msg

            # Translation
            self.trans = (msg.pose.pose.position.x, msg.pose.pose.position.y,
                          msg.pose.pose.position.z)

            # Rotation
            self.rot = (msg.pose.pose.orientation.x,
                        msg.pose.pose.orientation.y,
                        msg.pose.pose.orientation.z,
                        msg.pose.pose.orientation.w)

        # Calculate and publish increment
        else:

            # Increment computation
            delta_x = msg.pose.pose.position.x - self.last_odom.pose.pose.position.x
            delta_y = msg.pose.pose.position.y - self.last_odom.pose.pose.position.y
            yaw = yaw_from_quaternion(msg.pose.pose.orientation)
            lyaw = yaw_from_quaternion(self.last_odom.pose.pose.orientation)

            # Publish
            odom = IncrementalOdometry2D()
            odom.header.stamp = msg.header.stamp
            odom.header.frame_id = '/estimated_position'
            odom.delta_x = +delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw)
            odom.delta_y = -delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw)
            odom.delta_a = angle_wrap(yaw - lyaw)
            self.pub_odom.publish(odom)

            # For next loop
            self.last_odom = msg

        # World transform
        self.tf_broad.sendTransform(translation=self.trans,
                                    rotation=self.rot,
                                    time=msg.header.stamp,
                                    parent='/odom',
                                    child='/world')
Example #18
def merge(lines, dist_thres, ang_thres):
    '''
    Merge similar lines according to the given thresholds.
    '''
    # No data received
    if lines is None:  # or lines.shape[1] < 4:
        return np.array([])

    # if lines.shape[0]<2:
    #     return lines

    # Check and merge similar consecutive lines
    i = 0
    while i < lines.shape[0] - 1:

        # Line angles
        ang1 = np.arctan2(lines[i][0] - lines[i][2], lines[i][1] - lines[i][3])
        ang2 = np.arctan2(lines[i + 1][0] - lines[i + 1][2],
                          lines[i + 1][1] - lines[i + 1][3])

        # Below thresholds?
        angdiff = np.abs(angle_wrap(ang1 - ang2))
        disdiff = np.linalg.norm([(lines[i][2] - lines[i + 1][0]),
                                  (lines[i][3] - lines[i + 1][1])])
        if angdiff < ang_thres and disdiff < dist_thres:

            # Joined line
            lines[i, :] = np.array(
                [[lines[i][0], lines[i][1], lines[i + 1][2], lines[i + 1][3]]])

            # Delete unnecessary line
            lines = np.delete(lines, i + 1, 0)

        # Nothing to merge
        else:
            i += 1

    return lines
Example #19
    def compute_velocity(self):
        '''
        compute_velocity computes the velocity which will be published.
       
        '''
        self.vmsg = Twist()
        #Compute the angle from turtlebot to the goal
        angle_to_goal = math.atan2(self.y[self.active_goal]-self.position_y,self.x[self.active_goal]-self.position_x)
        #Compute the angle difference between turtlebot orientation and the angle from turtlebot to the goal
        ang_dif = angle_wrap(self.position_theta[2] - angle_to_goal)

        #If turtlebot has not reach the goal position
        if not self.has_arrived_xy():
            self.vmsg.angular.z = -0.2*ang_dif
            dis = math.sqrt(math.pow(self.y[self.active_goal] - self.position_y,2) + math.pow(self.x[self.active_goal] - self.position_x,2))
            if dis > 0.5:
                self.vmsg.linear.x =  0.1*dis
            else:
                self.vmsg.linear.x = 0.1
            print ("distance to current goal:%f" %dis)
        else:
                self.vmsg.angular.z = 0.6 # When arriving the position, rotate in order to meet the angle restriction     
                print ("rotating")
Example #20
    def predict(self, uk):
        """
        Implement the prediction equations of the EKF.

        Saves the new robot state and uncertainty.

        Input:
          uk : numpy array [shape (3,) with (dx, dy, dtheta)]
        """
        # TODO: Program this function
        ################################################################
        # Check website for numpy help:
        #         http://wiki.scipy.org/NumPy_for_Matlab_Users
        # 1. Update self.xk and self.Pk using uk and self.Qk
        #        can use comp() from funtions.py

        # Calculate new state vector components
        c = np.cos(self.xk[2])
        s = np.sin(self.xk[2])
        xr = self.xk[0] + uk[0] * c - uk[1] * s
        yr = self.xk[1] + uk[0] * s + uk[1] * c
        theta = funcs.angle_wrap(self.xk[2] + uk[2])
        self.xk = np.array([xr, yr, theta])  # Should this be here?

        # Calculate the current Ak matrix
        A13 = -uk[0] * s - uk[1] * c
        A23 = uk[0] * c - uk[1] * s
        Ak = np.array([[1, 0, A13], [0, 1, A23], [0, 0, 1]])
        # print(Ak)

        # Calculate Wk
        Wk = np.array([[c, -s, 0], [s, c, 0], [0, 0, 1]])

        # Calculate Pk'
        self.Pk = np.dot(Ak, np.dot(self.Pk, Ak.T)) + np.dot(
            Wk, np.dot(self.Qk, Wk.T))
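
A quick standalone check of the analytical Jacobian Ak against central finite differences of the motion model (numpy only; motion and the test values are illustrative, not part of the original exercise):

import numpy as np

def motion(x, u):
    # Same motion model as in predict(): compound the pose x with increment u
    c, s = np.cos(x[2]), np.sin(x[2])
    return np.array([x[0] + u[0] * c - u[1] * s,
                     x[1] + u[0] * s + u[1] * c,
                     x[2] + u[2]])

x = np.array([1.0, 2.0, 0.5])
u = np.array([0.3, 0.1, 0.05])

c, s = np.cos(x[2]), np.sin(x[2])
Ak = np.array([[1, 0, -u[0] * s - u[1] * c],
               [0, 1,  u[0] * c - u[1] * s],
               [0, 0, 1]])

eps = 1e-6
num = np.zeros((3, 3))
for j in range(3):
    d = np.zeros(3)
    d[j] = eps
    num[:, j] = (motion(x + d, u) - motion(x - d, u)) / (2 * eps)

print(np.allclose(Ak, num, atol=1e-6))   # True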
Example #21
def merge(lines, dist_thres, ang_thres):
    '''
    Merge similar lines according to the given thresholds.
    '''
    # No data received
    if lines is None:
        return np.array([])

    # Check and merge similar lines
    i = 0
    while i < lines.shape[0] - 1:

        # Line angles
        ang1 = np.arctan2(lines[i, 3] - lines[i, 1], lines[i, 2] - lines[i, 0])
        ang2 = np.arctan2(lines[i + 1, 3] - lines[i + 1, 1],
                          lines[i + 1, 2] - lines[i + 1, 0])

        # Below thresholds
        ang = abs(angle_wrap(ang1 - ang2))
        dis = np.sqrt( (lines[i,2] - lines[i+1,0])**2 + \
                       (lines[i,3] - lines[i+1,1])**2 )
        if ang < ang_thres and dis < dist_thres:

            # Joined line
            lines[i, :] = [
                lines[i, 0], lines[i, 1], lines[i + 1, 2], lines[i + 1, 3]
            ]

            # Delete unnecessary line
            lines = np.delete(lines, i + 1, axis=0)

        # Nothing to merge
        else:
            i += 1

    return lines
Example #22
    def compute_velocity(self):
        '''
        compute_velocity computes the velocity which will be published.
        
        TODO implement!
        '''
        self.vmsg = Twist()

        if self.dist_to_goal_xy() > self.goal_th_xy:
            # Compute delta_angle
            angle_to_goal = math.atan2(
                self.y[self.active_goal] - self.position_y,
                self.x[self.active_goal] - self.position_x)
            delta_angle_to_goal = angle_wrap(self.position_theta -
                                             angle_to_goal)

            # Assign angular velocity as 2*delta_angle
            self.vmsg.angular.z = -delta_angle_to_goal

            # If delta_angle is small (we are facing the goal, so we can move)
            if np.abs(delta_angle_to_goal) < 0.05:
                self.vmsg.linear.x = min(0.5, 2 * self.dist_to_goal_xy())
        else:
            self.vmsg.angular.z = -self.dist_to_goal_ang()
Example #23
 def dist_to_goal_ang(self):
     '''
     dist_to_goal_ang computes the orientation distance to the active
     goal
     '''
     return np.abs(angle_wrap(self.theta[self.active_goal]-self.position_theta[2]))
Example #24
File: node.py Project: raabid236/PR
    def odom_callback(self, msg):
        '''
        Publishes a tf based on the odometry of the robot and calculates the incremental odometry as seen from the vehicle frame.
        '''
        # Save time
        self.time = msg.header.stamp

        # Translation
        trans = (msg.pose.pose.position.x, msg.pose.pose.position.y,
                 msg.pose.pose.position.z)

        # Rotation
        rot = (msg.pose.pose.orientation.x, msg.pose.pose.orientation.y,
               msg.pose.pose.orientation.z, msg.pose.pose.orientation.w)

        # Publish transform
        self.tfBroad.sendTransform(translation=self.robot2sensor,
                                   rotation=(0, 0, 0, 1),
                                   time=msg.header.stamp,
                                   child='/sensor',
                                   parent='/robot')
        self.tfBroad.sendTransform(translation=self.robot2sensor,
                                   rotation=(0, 0, 0, 1),
                                   time=msg.header.stamp,
                                   child='/camera_depth_frame',
                                   parent='/base_footprint')
        self.tfBroad.sendTransform(translation=trans,
                                   rotation=rot,
                                   time=msg.header.stamp,
                                   child='/base_footprint',
                                   parent='/odom')
        self.tfBroad.sendTransform(
            translation=(self.x0[0], self.x0[1], 0),
            rotation=tf.transformations.quaternion_from_euler(
                0, 0, self.x0[2]),
            time=msg.header.stamp,
            child='/odom',
            parent='/world')

        # Incremental odometry
        if self.last_odom is not None and not self.new_odom:

            # Increment computation
            delta_x = msg.pose.pose.position.x - self.last_odom.pose.pose.position.x
            delta_y = msg.pose.pose.position.y - self.last_odom.pose.pose.position.y
            yaw = yaw_from_quaternion(msg.pose.pose.orientation)
            lyaw = yaw_from_quaternion(self.last_odom.pose.pose.orientation)

            # Odometry seen from vehicle frame
            self.uk = np.array([
                delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw),
                -delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw),
                angle_wrap(yaw - lyaw)
            ])

            # Flag
            self.new_odom = True

            # For next loop
            self.last_odom = msg
        if self.last_odom is None:
            self.last_odom = msg
Example #25
    def cbk_odom(self, msg):
        """Publish tf and calculate incremental odometry."""
        # Save time
        self.odomtime = msg.header.stamp
        self.odom = msg

        # Translation
        trans = (msg.pose.pose.position.x,
                 msg.pose.pose.position.y,
                 msg.pose.pose.position.z)

        # Rotation
        rot = (msg.pose.pose.orientation.x,
               msg.pose.pose.orientation.y,
               msg.pose.pose.orientation.z,
               msg.pose.pose.orientation.w)

        # Publish transform
        self.tfBroad.sendTransform(translation=self.robot2sensor,
                                   rotation=(0, 0, 0, 1),
                                   time=msg.header.stamp,
                                   child='sensor',
                                   parent='robot')
        self.tfBroad.sendTransform(translation=self.robot2sensor,
                                   rotation=(0, 0, 0, 1),
                                   time=msg.header.stamp,
                                   child='camera_depth_frame',
                                   parent='base_footprint')
        self.tfBroad.sendTransform(translation=trans,
                                   rotation=rot,
                                   time=msg.header.stamp,
                                   child='base_footprint',
                                   parent='world')
        self.tfBroad.sendTransform(translation=(0, 0, 0),
                                   rotation=(0, 0, 0, 1),
                                   time=msg.header.stamp,
                                   child='odom',
                                   parent='world')

        # Incremental odometry
        if self.last_odom is not None:

            # Increment computation
            delta_x = msg.pose.pose.position.x - \
                self.last_odom.pose.pose.position.x
            delta_y = msg.pose.pose.position.y - \
                self.last_odom.pose.pose.position.y
            yaw = funcs.yaw_from_quaternion(msg.pose.pose.orientation)
            lyaw = funcs.yaw_from_quaternion(
                self.last_odom.pose.pose.orientation)

            # Odometry seen from vehicle frame
            self.uk = np.array([delta_x*np.cos(lyaw) + delta_y*np.sin(lyaw),
                               -delta_x*np.sin(lyaw) + delta_y*np.cos(lyaw),
                                funcs.angle_wrap(yaw - lyaw)])

            # Flag available
            self.new_odom = True

        # Save initial odometry for increment
        else:
            self.last_odom = msg
Example #26
 def odom_callback(self, msg):
     '''
     Publishes a tf based on the odometry of the robot and calculates the incremental odometry as seen from the vehicle frame.
     '''
     # Save time
     self.time = msg.header.stamp
     
     # Translation
     trans = (msg.pose.pose.position.x, 
              msg.pose.pose.position.y, 
              msg.pose.pose.position.z)
     
     # Rotation
     rot = (msg.pose.pose.orientation.x,
            msg.pose.pose.orientation.y,
            msg.pose.pose.orientation.z,
            msg.pose.pose.orientation.w)
            
     # Publish transform
     self.tfBroad.sendTransform(translation = self.robot2sensor,
                                rotation = (0, 0, 0, 1), 
                                time = msg.header.stamp,
                                child = '/sensor',
                                parent = '/robot')
     self.tfBroad.sendTransform(translation = self.robot2sensor,
                                rotation = (0, 0, 0, 1), 
                                time = msg.header.stamp,
                                child = '/camera_depth_frame',
                                parent = '/base_footprint')
     self.tfBroad.sendTransform(translation = trans,
                                rotation = rot, 
                                time = msg.header.stamp,
                                child = '/base_footprint',
                                parent = '/odom')
     self.tfBroad.sendTransform(translation = (self.x0[0],self.x0[1],0),
                                rotation = tf.transformations.quaternion_from_euler(0,0,self.x0[2]), 
                                time = msg.header.stamp,
                                child = '/odom',
                                parent = '/world')
                                
     # Incremental odometry
     if self.last_odom is not None and not self.new_odom:
         
         # Increment computation
         delta_x = msg.pose.pose.position.x - self.last_odom.pose.pose.position.x
         delta_y = msg.pose.pose.position.y - self.last_odom.pose.pose.position.y
         yaw = yaw_from_quaternion(msg.pose.pose.orientation)
         lyaw = yaw_from_quaternion(self.last_odom.pose.pose.orientation)
         
         # Odometry seen from vehicle frame
         self.uk = np.array([delta_x * np.cos(lyaw) + delta_y * np.sin(lyaw),
                           - delta_x * np.sin(lyaw) + delta_y * np.cos(lyaw),
                             angle_wrap(yaw - lyaw)])
         
         # Flag
         self.new_odom = True
     
         # For next loop
         self.last_odom = msg
     if self.last_odom is None:
         self.last_odom = msg