Example #1
File: fastSLAM.py  Project: lackley/slam
import math

import utils

# `sigma_observation` (the 2x2 measurement-noise covariance) is assumed to be
# a module-level constant in fastSLAM.py
def update_landmark(particle, landmark, measurement):
    """ update the mean and covariance of a landmark

    uses the Extended Kalman Filter (EKF) to update the existing
    landmark's mean (x, y) and covariance according to the new measurement

    args:
        particle: the particle to update
        landmark: the old landmark to update, ((x, y), covariance)
        measurement: the new measurement of `landmark`,
            in the form of (distance, bearing)

    returns:
        None; the landmark entry in particle.landmarks is replaced in place
    """
    robot_x, robot_y, robot_theta = particle.robot_position
    distance, bearing = measurement

    landmark_pos, old_covariance = landmark
    landmark_x, landmark_y = landmark_pos
    x_dif = landmark_x - robot_x
    y_dif = landmark_y - robot_y
    # calculate the predicted measurement from the landmark and robot positions
    m_r = math.sqrt(x_dif * x_dif + y_dif * y_dif)
    m_phi = math.atan2(y_dif, x_dif) - robot_theta  # bearing relative to robot heading
    predicted_measurement = (m_r, m_phi)

    jacobian = utils.calculate_jacobian((robot_x, robot_y), (landmark_x, landmark_y))
    Q = utils.compute_measurement_covariance(jacobian, old_covariance, sigma_observation)
    K = utils.compute_kalman_gain(jacobian, old_covariance, Q)

    new_landmark = utils.compute_new_landmark(measurement, predicted_measurement, K, landmark_pos)

    new_cov = utils.compute_new_covariance(K, jacobian, old_covariance)

    # replace the old landmark entry with the updated mean and covariance
    particle.landmarks.remove(landmark)
    particle.landmarks.append((new_landmark, new_cov))
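
The `utils` helpers called above are not shown in these excerpts. As a reference, here is a minimal sketch of what they might look like for a range-bearing EKF landmark update; the names and signatures match the calls above, but the bodies are assumptions based on the standard FastSLAM update equations, not the project's actual utils module.

import numpy as np

# NOTE: assumed implementations, sketched from the standard EKF equations;
# the real utils module in lackley/slam may differ.

def calculate_jacobian(robot_pos, landmark_pos):
    """Jacobian H of the (range, bearing) measurement w.r.t. the landmark (x, y)."""
    dx = landmark_pos[0] - robot_pos[0]
    dy = landmark_pos[1] - robot_pos[1]
    q = dx * dx + dy * dy          # squared range
    r = np.sqrt(q)                 # range
    return np.array([[dx / r,  dy / r],
                     [-dy / q, dx / q]])

def compute_measurement_covariance(jacobian, old_covariance, sigma_observation):
    """Innovation covariance Q = H @ Sigma @ H.T + R."""
    return jacobian @ np.asarray(old_covariance) @ jacobian.T + sigma_observation

def compute_kalman_gain(jacobian, old_covariance, Q):
    """Kalman gain K = Sigma @ H.T @ inv(Q)."""
    return np.asarray(old_covariance) @ jacobian.T @ np.linalg.inv(Q)

def compute_new_landmark(measurement, predicted_measurement, K, landmark_pos):
    """Updated landmark mean: mu + K @ (z - z_hat), with the bearing residual wrapped."""
    innovation = np.array(measurement, dtype=float) - np.array(predicted_measurement)
    innovation[1] = (innovation[1] + np.pi) % (2 * np.pi) - np.pi  # wrap to [-pi, pi)
    return tuple(np.asarray(landmark_pos, dtype=float) + K @ innovation)

def compute_new_covariance(K, jacobian, old_covariance):
    """Updated landmark covariance: (I - K @ H) @ Sigma."""
    return (np.eye(2) - K @ jacobian) @ np.asarray(old_covariance)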
Example #2
File: fastSLAM.py  Project: lackley/slam
import math

import numpy as np

import utils

# `sigma_observation`, `prob_threshold`, and add_landmark() are assumed to be
# defined at module level in fastSLAM.py
def update_map(particle, measurements):
    """ associates new measurements with old landmarks

    given a list of measurements to landmarks, determine whether each
    measurement corresponds to a new landmark or to a re-observed one.
    If it is new, call add_landmark(); if it matches an existing
    landmark, call update_landmark() and update the particle's weight.

    data association is traditionally done by maximizing the likelihood
    of the observation over the candidate correspondences

    args:
        particle: the particle to perform the data association on
        measurements: a list of (distance, bearing) where a landmark was observed

    returns:
        None; the particle's landmarks and weight are updated in place
    """
    # retrieve the newly sampled robot pos
    robot_x, robot_y, robot_theta = particle.robot_position

    # some measurements were [], so keep only the non-empty ones
    valid_measurements = [m for m in measurements if len(m) > 0]

    weight_as_log_sum = 0
    # loop through non-empty measurements to find the best landmark and best probability
    for measurement in valid_measurements:
        best_landmark = None
        best_prob = 0
        for landmark in particle.landmarks:
            landmark_pos, old_covariance = landmark
            landmark_x, landmark_y = landmark_pos
            x_dif = landmark_x - robot_x
            y_dif = landmark_y - robot_y
            # predicted (range, bearing) measurement of this landmark
            m_r = math.sqrt(x_dif * x_dif + y_dif * y_dif)
            m_phi = math.atan2(y_dif, x_dif) - robot_theta
            predicted_measurement = (m_r, m_phi)

            jacobian = utils.calculate_jacobian((robot_x, robot_y), (landmark_x, landmark_y))
            Q = utils.compute_measurement_covariance(jacobian, old_covariance, sigma_observation)
            likelihood = utils.calculate_likelihood_probability(measurement, predicted_measurement, Q)
            if likelihood > best_prob:
                best_prob = likelihood
                best_landmark = landmark
        # if the landmark is likely to be a new landmark then add it
        if best_prob < prob_threshold:
            add_landmark(particle, measurement)
            weight_as_log_sum += np.log(prob_threshold)
        # otherwise if it is likely the same as an observed landmark, update it
        else:
            update_landmark(particle, best_landmark, measurement)
            weight_as_log_sum += np.log(best_prob)

    # update the particle's weight, stored as a log-sum; e^weight is taken later
    particle.weight = weight_as_log_sum
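
`calculate_likelihood_probability` is likewise not shown. Assuming it evaluates the Gaussian density of the measurement innovation under the innovation covariance Q, a minimal sketch could be:

import numpy as np

def calculate_likelihood_probability(measurement, predicted_measurement, Q):
    """Gaussian likelihood of the (range, bearing) innovation under covariance Q
    (assumed form; the project's actual implementation may differ)."""
    innovation = np.array(measurement, dtype=float) - np.array(predicted_measurement)
    innovation[1] = (innovation[1] + np.pi) % (2 * np.pi) - np.pi  # wrap bearing residual
    norm = 1.0 / (2 * np.pi * np.sqrt(np.linalg.det(Q)))  # 2D Gaussian normalizer
    return norm * float(np.exp(-0.5 * innovation @ np.linalg.inv(Q) @ innovation))

Because update_map stores the weight as a sum of log-probabilities, a later resampling step would exponentiate and normalize these weights across particles before drawing the next generation, which is what the "will take e^weight later" comment refers to.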