def draw_world(est_pose, particles, world):
    """Visualization. This function draws the robot's position in the world."""
    offset = 100

    world[:] = CWHITE  # Clear background to white

    # Find largest weight
    max_weight = 0
    for particle in particles:
        max_weight = max(max_weight, particle.getWeight())

    # Draw particles
    for particle in particles:
        x = int(particle.getX()) + offset
        y = int(particle.getY()) + offset
        colour = jet(particle.getWeight() / max_weight)
        cv2.circle(world, (x, y), 2, colour, 2)
        b = (int(particle.getX() + 15.0 * np.cos(particle.getTheta())) + offset,
             int(particle.getY() - 15.0 * np.sin(particle.getTheta())) + offset)
        cv2.line(world, (x, y), b, colour, 2)

    # Draw landmarks
    lm0 = (landmarks[0][0] + offset, landmarks[0][1] + offset)
    lm1 = (landmarks[1][0] + offset, landmarks[1][1] + offset)
    cv2.circle(world, lm0, 5, CRED, 2)
    cv2.circle(world, lm1, 5, CGREEN, 2)

    # Draw estimated robot pose
    a = (int(est_pose.getX()) + offset, int(est_pose.getY()) + offset)
    b = (int(est_pose.getX() + 15.0 * np.cos(est_pose.getTheta())) + offset,
         int(est_pose.getY() - 15.0 * np.sin(est_pose.getTheta())) + offset)
    cv2.circle(world, a, 5, CMAGENTA, 2)
    cv2.line(world, a, b, CMAGENTA, 2)
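
# The jet colour map used above is not shown in this section. A minimal sketch,
# assuming it maps a normalised weight in [0, 1] to a BGR tuple; the body below
# uses OpenCV's built-in jet colormap and is an assumption, not the original helper.
def jet(x):
    v = np.uint8([[int(np.clip(x, 0.0, 1.0) * 255)]])        # 1x1 8-bit image holding the weight
    b, g, r = cv2.applyColorMap(v, cv2.COLORMAP_JET)[0, 0]   # look up the jet colour
    return (int(b), int(g), int(r))                          # BGR tuple as expected by cv2.circle/line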
def sample_resample(particles, resample_num, landmark_location, measured_angle, measured_distance):
    print(landmark_location)

    # Adding uncertainty to the particles
    # particle.add_uncertainty_von_mises(particles, 0, 0.02)
    particle.add_uncertainty(particles, 10, 0.08)

    # Calculating weights for angles
    particleAngles = [particle.getTheta() for particle in particles]
    e_l = [np.array([landmark_location[0] - particle.getX(),
                     landmark_location[1] - particle.getY()]) /
           np.linalg.norm([particle.getX() - landmark_location[0],
                           particle.getY() - landmark_location[1]])
           for particle in particles]
    e_theta = [np.array((np.cos(angle), np.sin(angle))) for angle in particleAngles]
    phi = [np.arccos(np.dot(x, y)) for x, y in zip(e_l, e_theta)]
    e_hat_theta = [(-np.sin(angle), np.cos(angle)) for angle in particleAngles]
    particle_angle_diff_temp = [np.dot(x, y) for x, y in zip(e_l, e_hat_theta)]
    particle_angle_diff = [np.sign(x) * y for x, y in zip(particle_angle_diff_temp, phi)]
    angleWeights = calcWeights(particle_angle_diff, measured_angle, 0.08)
    # print("angle weights after: {}".format(angleWeights))

    # Weights for distance
    particleLengths = [np.linalg.norm([particle.getX() - landmark_location[0],
                                       particle.getY() - landmark_location[1]])
                       for particle in particles]
    # Var should be roughly 50
    lengthWeights = calcWeights(particleLengths, measured_distance, 20)
    # print("length weights after: {}".format(lengthWeights))

    # Combining the weight types
    '''
    normWeights = []
    for i in range(resample_num):
        normWeights.append(angleWeights[i] * lengthWeights[i])
    '''
    normWeights = [x * y for x, y in zip(angleWeights, lengthWeights)]
    normWeights = np.array(normWeights) / np.sum(normWeights)

    # Setting the weight for each particle
    for i in range(resample_num):
        particles[i].setWeight(normWeights[i])

    # Resampling
    # Chooses resample_num random particles from the current particles based on the calculated weights.
    temp = np.random.choice(particles, resample_num, True, normWeights)
    retVal = []
    for elem in temp:
        retVal.append(copy.deepcopy(elem))
    return retVal
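
# calcWeights is called above but not defined in this section. A minimal sketch of
# what it is assumed to do, given the calls calcWeights(values, measurement, sigma):
# evaluate a Gaussian likelihood of the measurement against each particle's
# predicted value. The body is an assumption, not the original implementation;
# numpy is assumed to be imported as np, as in the surrounding code.
def calcWeights(predicted_values, measurement, sigma):
    predicted = np.asarray(predicted_values, dtype=float)
    # Gaussian density of the measurement given each prediction (the constant
    # factor cancels when the weights are renormalised in sample_resample)
    return (1.0 / np.sqrt(2.0 * np.pi * sigma**2)) * \
        np.exp(-(measurement - predicted)**2 / (2.0 * sigma**2))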
def draw_world(est_pose, particles, world):
    """Visualization. This function draws the robot's position in the world coordinate system."""

    # Fix the origin of the coordinate system
    offsetX = 100
    offsetY = 250

    # Constant needed for transforming from world coordinates to screen coordinates (flip the y-axis)
    ymax = world.shape[0]

    world[:] = CWHITE  # Clear background to white

    # Find largest weight
    max_weight = 0
    for particle in particles:
        max_weight = max(max_weight, particle.getWeight())

    # Draw particles
    for particle in particles:
        x = int(particle.getX() + offsetX)
        y = ymax - (int(particle.getY() + offsetY))
        colour = jet(particle.getWeight() / max_weight)
        cv2.circle(world, (x, y), 2, colour, 2)
        b = (int(particle.getX() + 15.0 * np.cos(particle.getTheta())) + offsetX,
             ymax - (int(particle.getY() + 15.0 * np.sin(particle.getTheta())) + offsetY))
        cv2.line(world, (x, y), b, colour, 2)

    # Draw landmarks
    lm1 = (int(landmarks[0][0]), int(ymax - landmarks[0][1]))
    lm2 = (int(landmarks[1][0]), int(ymax - landmarks[1][1]))
    lm3 = (int(landmarks[2][0]), int(ymax - landmarks[2][1]))
    lm4 = (int(landmarks[3][0]), int(ymax - landmarks[3][1]))
    cv2.circle(world, lm1, 5, CRED, 2)
    cv2.circle(world, lm2, 5, CGREEN, 2)
    cv2.circle(world, lm3, 5, CGREEN, 2)
    cv2.circle(world, lm4, 5, CRED, 2)

    # Draw estimated robot pose
    a = (int(est_pose.getX()) + offsetX, ymax - (int(est_pose.getY()) + offsetY))
    b = (int(est_pose.getX() + 15.0 * np.cos(est_pose.getTheta())) + offsetX,
         ymax - (int(est_pose.getY() + 15.0 * np.sin(est_pose.getTheta())) + offsetY))
    cv2.circle(world, a, 5, CMAGENTA, 2)
    cv2.line(world, a, b, CMAGENTA, 2)
def draw_world(est_pose, particles, world):
    """Visualization. This function draws the robot's position in the world."""
    offset = 100

    world[:] = CWHITE  # Clear background to white

    # Find largest weight
    max_weight = 0
    for particle in particles:
        max_weight = max(max_weight, particle.getWeight())

    # Draw particles
    for particle in particles:
        x = int(particle.getX()) + offset
        y = int(particle.getY()) + offset
        colour = jet(particle.getWeight() / max_weight)
        cv2.circle(world, (x, y), 2, colour, 2)

    # Draw landmarks
    lm0 = (landmarks[0][0] + offset, landmarks[0][1] + offset)
    lm1 = (landmarks[1][0] + offset, landmarks[1][1] + offset)
    cv2.circle(world, lm0, 5, CRED, 2)
    cv2.circle(world, lm1, 5, CGREEN, 2)

    # Draw estimated robot pose
    a = (int(est_pose.getX()) + offset, int(est_pose.getY()) + offset)
    b = (int(est_pose.getX() + 15.0 * np.cos(est_pose.getTheta())) + offset,
         int(est_pose.getY() + 15.0 * np.sin(est_pose.getTheta())) + offset)
    cv2.circle(world, a, 5, CMAGENTA, 2)
    cv2.line(world, a, b, CMAGENTA, 2)
def where_to_go(particle, goal):
    """Takes a particle (which should be our best estimate of our actual position)
    and the place we want to be (goal = [x, y]).
    Returns a list of the length the robot needs to drive, the direction it
    needs to turn and the degrees it needs to turn."""
    ang = vector_angle([particle.getX(), particle.getY()], goal)
    if ang <= 180:
        turn_dir = 'right'
        turn_deg = ang
    else:
        turn_dir = 'left'
        turn_deg = 180 - ang
    length = np.sqrt(np.power((particle.getX() - goal[0]), 2) +
                     np.power((particle.getY() - goal[1]), 2))
    return [length, turn_dir, turn_deg]
def where_to_go(particle, goal):
    """Takes a particle (which should be our best estimate of our actual position)
    and the place we want to be (goal = [x, y]).
    Returns a list of the length the robot needs to drive, the direction it
    needs to turn and the degrees it needs to turn."""
    ang = vector_angle([particle.getX(), particle.getY()], goal)
    if ang <= 180:
        turn_dir = 'right'
        turn_deg = ang
    else:
        turn_dir = 'left'
        turn_deg = 180 - ang
    length = np.sqrt(np.power((particle.getX() - goal[0]), 2) +
                     np.power((particle.getY() - goal[1]), 2))
    print 'REPORT FROM: where_to_go'
    print 'est_particle: ' + str([particle.getX(), particle.getY()])
    print 'goal: ' + str(goal)
    print 'estimated course: dist=' + str(length) + ' dir=' + turn_dir + \
        ' turn degree=' + str(turn_deg)
    return [length, turn_dir, turn_deg]
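
# vector_angle is used by where_to_go but not defined in this section. A minimal
# sketch, assuming it returns the angle in degrees in [0, 360) of the vector from
# pos to goal measured counter-clockwise from the positive x-axis; the real helper
# may instead measure the angle relative to the robot's current heading.
def vector_angle(pos, goal):
    angle = np.degrees(np.arctan2(goal[1] - pos[1], goal[0] - pos[0]))
    return angle % 360.0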
def draw_world(est_pose, particles, world):
    """Visualization. This function draws the robot's position in the world."""
    offset = 100

    world[:] = CWHITE  # Clear background to white

    # Draw particles
    for particle in particles:
        x = int(particle.getX()) + offset
        y = int(particle.getY()) + offset
        cv2.circle(world, (x, y), 2, CBLUE, 2)
        b = (int(particle.getX() + 15.0 * np.cos(particle.getTheta())) + offset,
             int(particle.getY() - 15.0 * np.sin(particle.getTheta())) + offset)
        cv2.line(world, (x, y), b, CBLUE, 2)

    # Draw landmarks
    lm0 = (landmarks[0][0] + offset, landmarks[0][1] + offset)
    lm1 = (landmarks[1][0] + offset, landmarks[1][1] + offset)
    lm2 = (landmarks[2][0] + offset, landmarks[2][1] + offset)
    lm3 = (landmarks[3][0] + offset, landmarks[3][1] + offset)
    cv2.circle(world, lm0, 5, CRED, 2)
    cv2.circle(world, lm1, 5, CGREEN, 2)
    cv2.circle(world, lm2, 5, CGREEN, 2)
    cv2.circle(world, lm3, 5, CRED, 2)

    # Draw estimated robot pose
    a = (int(est_pose.getX()) + offset, int(est_pose.getY()) + offset)
    b = (int(est_pose.getX() + 15.0 * np.cos(est_pose.getTheta())) + offset,
         int(est_pose.getY() - 15.0 * np.sin(est_pose.getTheta())) + offset)
    cv2.circle(world, a, 5, CMAGENTA, 2)
    cv2.line(world, a, b, CMAGENTA, 2)
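
# A minimal usage sketch for the draw_world variants above, assuming the colour
# constants (CWHITE, CBLUE, CRED, CGREEN, CMAGENTA) are BGR tuples and that
# est_pose and particles come from the particle filter; the window name and
# buffer size below are arbitrary choices, not taken from the original code.
# world = np.zeros((500, 500, 3), dtype=np.uint8)  # screen buffer
# cv2.namedWindow("World view")
# draw_world(est_pose, particles, world)
# cv2.imshow("World view", world)
# cv2.waitKey(1)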
            angleWeight = measured_angle / particle.getTheta()
        else:
            posWeight = calculated_distance / measured_distance
            angleWeight = particle.getTheta() / measured_angle
        particle.setWeight(posWeight * angleWeight)
    """

    for particle in particles:
        posWeight = 0
        angleWeight = 0
        if objectType == 'horizontal':
            calculated_distance = math.sqrt(particle.getX()**2 + particle.getY()**2)
            posWeight = (1 / math.sqrt(2 * math.pi * sigma_distance**2)) * \
                math.exp(-(measured_distance - calculated_distance)**2 / (2 * sigma_distance**2))
            angleWeight = (1 / math.sqrt(2 * math.pi * sigma_theta**2)) * \
                math.exp(-(measured_angle - particle.getTheta())**2 / (2 * sigma_theta**2))
        elif objectType == 'vertical':
            calculated_distance = math.sqrt((particle.getX() - 300)**2 + particle.getY()**2)
            posWeight = (1 / math.sqrt(2 * math.pi * sigma_distance**2)) * \
                math.exp(-(measured_distance - calculated_distance)**2 / (2 * sigma_distance**2))
            angleWeight = (1 / math.sqrt(2 * math.pi * sigma_theta**2)) * \
                math.exp(-(measured_angle - particle.getTheta())**2 / (2 * sigma_theta**2))
        # Combine the two Gaussian likelihoods into the particle weight
        particle.setWeight(posWeight * angleWeight)

    # Resampling
    # XXX: You do this

    # Draw detected pattern
    cam.draw_object(colour)
else:
    # No observation - reset weights to uniform distribution
    for p in particles:
def particle_landmark_vector(mark, particle):
    # Vector from the particle to the given landmark (y negated to match the
    # flipped screen/world y-axis used elsewhere).
    (mark_x, mark_y) = LANDMARK_COORDINATES[mark]
    x = mark_x - particle.getX()
    y = -1.0 * (mark_y - particle.getY())
    return [x, y]
def particle_landmark_vector(mark, particle):
    (mark_x, mark_y) = landmarks[mark]
    x = mark_x - particle.getX()
    y = -1.0 * (mark_y - particle.getY())
    return [x, y]
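
# A small companion sketch: the Euclidean distance a measurement model would
# typically derive from the vector above. particle_landmark_distance is a
# hypothetical name, not part of the original code.
def particle_landmark_distance(mark, particle):
    vx, vy = particle_landmark_vector(mark, particle)
    return np.sqrt(vx**2 + vy**2)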
            if delta.getDistance() < 75:  # this may have to be less
                print "We've visited ", target.getIdentifier()
                goals.pop()
                target = None
            else:
                # TODO: turn delta.getTheta()
                # TODO: drive forward delta.getDistance()

                # Read odometry, see how far we have moved, and update particles.
                # Or use motor controls to update particles
                for particle in particles:
                    s = sin(delta.getTheta())
                    c = cos(delta.getTheta())
                    x = particle.getX()
                    y = particle.getY()
                    particle.setX((x * c - y * s) - delta.getX())
                    particle.setY((x * s + y * c) - delta.getY())

        # Fetch next frame
        colour, distorted = cam.get_colour()

        if waittime < time.time():
            if nextmove == "S":
                stop()
            else:
                # Detect objects
                objectType, measured_distance, measured_angle, colourProb = cam.get_object(colour)

                if objectType != 'none':
                    print "Object type = ", objectType