Code example #1
File: fastSLAM.py  Project: lackley/slam
import math

import numpy as np

import utils  # the project's helper module (Jacobians, EKF updates, likelihoods)

# sigma_observation (the 2x2 measurement-noise covariance) and prob_threshold
# are module-level parameters defined elsewhere in fastSLAM.py

def add_landmark(particle, measurement):
    """ adds a newly observed landmark to a particle

    if a measurement has not been matched to an existing landmark,
    add it to the particle's list with the appropriate
    mean (x, y) and covariance (sigma)

    args:
        particle: the particle to add the new landmark to
        measurement: the (distance, bearing) measurement of the new landmark

    returns:
        None
    """
    robot_x, robot_y, robot_theta = particle.robot_position
    distance, bearing = measurement

    # use trig to find the landmark's position
    landmark_x = distance * math.cos(bearing + robot_theta) + robot_x
    landmark_y = distance * math.sin(bearing + robot_theta) + robot_y

    jacobian = utils.calculate_jacobian((robot_x, robot_y), (landmark_x, landmark_y))
    init_cov = utils.compute_initial_covariance(jacobian, sigma_observation)

    # initialize particle.landmarks if necessary
    # and add the position and covariance
    if len(particle.landmarks) == 0:
        particle.landmarks = [((landmark_x, landmark_y), init_cov)]
    else:
        particle.landmarks.append(((landmark_x, landmark_y), init_cov))

    return None
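The utils helpers called here are not part of this listing. Under the standard FastSLAM range-bearing measurement model, calculate_jacobian would linearize the measurement with respect to the landmark position, and compute_initial_covariance would map the measurement noise back into landmark space. A minimal sketch, assuming that model (the bodies below are a plausible reconstruction, not the project's confirmed code):

import numpy as np

def calculate_jacobian(robot_pos, landmark_pos):
    # 2x2 Jacobian of the range-bearing measurement with respect to the
    # landmark position: d(r, phi) / d(mx, my)
    dx = landmark_pos[0] - robot_pos[0]
    dy = landmark_pos[1] - robot_pos[1]
    q = dx * dx + dy * dy          # squared range
    r = np.sqrt(q)                 # range
    return np.array([[ dx / r, dy / r],
                     [-dy / q, dx / q]])

def compute_initial_covariance(jacobian, sigma_observation):
    # invert the linearized measurement model to map the measurement
    # noise R into landmark space: Sigma_0 = H^-1 R (H^-1)^T
    H_inv = np.linalg.inv(jacobian)
    return H_inv @ sigma_observation @ H_inv.T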
Code example #2
File: fastSLAM.py  Project: lackley/slam
def update_landmark(particle, landmark, measurement):
    """ updates the mean and covariance of a landmark

    uses the Extended Kalman Filter (EKF) to update the existing
    landmark's mean (x, y) and covariance according to the new measurement

    args:
        particle: the particle to update
        landmark: the existing landmark to update, as ((x, y), covariance)
        measurement: the new measurement of `landmark`,
            in the form of (distance, bearing)

    returns:
        None
    """
    robot_x, robot_y, robot_theta = particle.robot_position
    distance, bearing = measurement

    landmark_pos, old_covariance = landmark
    landmark_x, landmark_y = landmark_pos
    x_dif = landmark_x - robot_x
    y_dif = landmark_y - robot_y
    # predict the measurement from the landmark and robot positions;
    # subtract robot_theta so the bearing is in the robot frame, matching
    # the (distance, bearing) convention used everywhere else in this file
    m_r = math.sqrt(x_dif * x_dif + y_dif * y_dif)
    m_phi = math.atan2(y_dif, x_dif) - robot_theta
    predicted_measurement = (m_r, m_phi)

    jacobian = utils.calculate_jacobian((robot_x, robot_y), (landmark_x, landmark_y))
    Q = utils.compute_measurement_covariance(jacobian, old_covariance, sigma_observation)
    K = utils.compute_kalman_gain(jacobian, old_covariance, Q)

    new_landmark = utils.compute_new_landmark(measurement, predicted_measurement, K, landmark_pos)

    new_cov = utils.compute_new_covariance(K, jacobian, old_covariance)

    # replace the old landmark entry with the updated mean and covariance
    particle.landmarks.remove(landmark)
    particle.landmarks.append((new_landmark, new_cov))
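The EKF helpers referenced above are likewise defined in utils and not shown. Assuming the textbook FastSLAM 1.0 landmark update, they reduce to the usual innovation-covariance, gain, mean, and covariance equations; a hedged sketch (signatures taken from the calls above, bodies assumed):

import numpy as np

def compute_measurement_covariance(jacobian, old_covariance, sigma_observation):
    # innovation covariance: Q = H Sigma H^T + R
    return jacobian @ old_covariance @ jacobian.T + sigma_observation

def compute_kalman_gain(jacobian, old_covariance, Q):
    # K = Sigma H^T Q^-1
    return old_covariance @ jacobian.T @ np.linalg.inv(Q)

def compute_new_landmark(measurement, predicted_measurement, K, landmark_pos):
    # mu' = mu + K (z - z_hat), wrapping the bearing innovation to [-pi, pi]
    dr = measurement[0] - predicted_measurement[0]
    dphi = (measurement[1] - predicted_measurement[1] + np.pi) % (2 * np.pi) - np.pi
    updated = np.asarray(landmark_pos) + K @ np.array([dr, dphi])
    return tuple(updated)

def compute_new_covariance(K, jacobian, old_covariance):
    # Sigma' = (I - K H) Sigma
    return (np.eye(2) - K @ jacobian) @ old_covariance

Wrapping the bearing innovation matters: without it, a landmark observed near a bearing of plus or minus pi can produce a huge spurious innovation and drag the landmark estimate far off.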
Code example #3
File: fastSLAM.py  Project: lackley/slam
def update_map(particle, measurements):
    """ associates new measurements with old landmarks

    given a list of measurements to landmarks, determine whether each
    measurement belongs to a new landmark or a re-observed one. If it is
    new, call add_landmark(); if it matches an existing landmark, call
    update_landmark() and update the particle's weight.

    data association is traditionally done by maximizing the likelihood
    of the observation under each candidate correspondence

    args:
        particle: the particle to perform the data association on
        measurements: a list of (distance, bearing) observations of landmarks

    returns:
        None; the particle's landmarks and weight are updated in place
    """
    # retrieve the newly sampled robot pos
    robot_x, robot_y, robot_theta = particle.robot_position

    # some measurements may be empty ([]), so keep only the valid ones
    valid_measurements = [m for m in measurements if len(m) > 0]

    weight_as_log_sum = 0
    # loop through non-empty measurements to find the best landmark and best probability
    for measurement in valid_measurements:
        best_landmark = None
        best_prob = 0
        for landmark in particle.landmarks:
            landmark_pos, old_covariance = landmark
            landmark_x, landmark_y = landmark_pos
            x_dif = landmark_x - robot_x
            y_dif = landmark_y - robot_y
            # predict the measurement we would expect for this landmark
            m_r = math.sqrt(x_dif * x_dif + y_dif * y_dif)
            m_phi = math.atan2(y_dif, x_dif) - robot_theta
            predicted_measurement = (m_r, m_phi)

            jacobian = utils.calculate_jacobian((robot_x, robot_y), (landmark_x, landmark_y))
            Q = utils.compute_measurement_covariance(jacobian, old_covariance, sigma_observation)
            likelihood = utils.calculate_likelihood_probability(measurement, predicted_measurement, Q)
            if likelihood > best_prob:
                best_prob = likelihood
                best_landmark = landmark
        # if no existing landmark explains the measurement well, add a new one
        if best_prob < prob_threshold:
            add_landmark(particle, measurement)
            weight_as_log_sum += np.log(prob_threshold)
        # otherwise if it is likely the same as an observed landmark, update it
        else:
            update_landmark(particle, best_landmark, measurement)
            weight_as_log_sum += np.log(best_prob)

    # update the particle's weight, stored as a log-probability (e^weight is taken later)
    particle.weight = weight_as_log_sum
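utils.calculate_likelihood_probability is also not shown in this listing. If, as is standard for this data-association step, the likelihood is a 2D Gaussian evaluated at the measurement innovation with covariance Q, a minimal sketch under that assumption would be:

import numpy as np

def calculate_likelihood_probability(measurement, predicted_measurement, Q):
    # evaluate the Gaussian density N(z; z_hat, Q) at the innovation,
    # wrapping the bearing component to [-pi, pi] before weighting by Q^-1
    dr = measurement[0] - predicted_measurement[0]
    dphi = (measurement[1] - predicted_measurement[1] + np.pi) % (2 * np.pi) - np.pi
    innovation = np.array([dr, dphi])
    norm = 1.0 / (2.0 * np.pi * np.sqrt(np.linalg.det(Q)))
    return norm * np.exp(-0.5 * innovation @ np.linalg.inv(Q) @ innovation)

Note that under this form, best_prob in update_map is a Gaussian density, not a probability in [0, 1], so prob_threshold must be chosen on that same density scale.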