def init_filter(self):
    # New robot's position
    self.robot = pf.robot()
    self.robot.set_noise(pf.bearing_noise, pf.steering_noise, pf.distance_noise)
    # Make particles
    self.particles = list(itertools.islice(self.particle_stream, self.N))
def registerInitialState(self, gameState):
    CaptureAgent.registerInitialState(self, gameState)
    enemies = [gameState.getAgentState(i) for i in self.getOpponents(gameState)]
    self.particlefilter = ParticleFilter.ApproximateAgent(gameState, enemies)
def lidar_sense():
    """Function returns landmark data"""
    timestamp, scan = lidar.get_intens()
    # distances to landmarks (up to 3)!
    ans = []
    for i in pf.get_beacons(scan):
        ans.append(i[1] + 40)
    return ans
def make_mov(parameters, particles):
    # x and y are divided by 1000 before being sent to the STM driver
    pm = [parameters[0] / 1000., parameters[1] / 1000., parameters[2], parameters[3]]
    stm_driver('go_to_global_point', pm)
    stamp = time.time()
    while not stm_driver('is_point_was_reached'):
        time.sleep(0.3)
        if (time.time() - stamp) > 30:
            return False
    particles = pf.particles_move(particles, parameters[:-1])
    lidar_data = lidar_sense()
    particles = pf.particles_sense(particles, lidar_data)
    main_robot = pf.calculate_main(particles)
    send_message(str(main_robot))
    return True
def initParticles(center, N):
    global p_filter
    global bounds
    global var
    print 'initialize', N, 'particles'
    p_filter = ParticleFilter(N, center, var, bounds=bounds)
    drawParticles()
    drawRect(center)
    plt.draw()
def movements():
    stm = multiprocessing.Process(target=stmDriver.stm_loop,
                                  args=(input_command_queue, reply_to_fsm_queue))
    stm.start()
    init_vk()

    parameters = [0, 0, 0]
    particles = [pf.Robot() for i in range(pf.particle_number)]
    main_robot = pf.calculate_main(particles)
    send_message(str(main_robot))
    stm_driver('set_coordinates_without_movement', parameters)

    ax = plt.subplot(111)
    plot = ax.plot([], [], 'ro')[0]
    plt.axis([0, 2000, 0, 3000])
    plt.show()
    cord = [0, 0]
    plot.set_data(cord[0], cord[1])

    # make movement
    parameters = [1000, 0, 0, 4]
    make_mov(parameters, particles)
    cord = [main_robot.x, main_robot.y]
    plot.set_data(cord[0], cord[1])
    print main_robot
def display_error(self):
    '''Set the error Labels'''
    ghost = pf.get_position(self.particles)
    error_x, error_y, error_orientation = compute_error(self.robot, ghost)
    if error_x < pf.tolerance_xy and error_y < pf.tolerance_xy:
        fg = 'blue'
    else:
        fg = 'red'
    error_xy = math.sqrt(error_x**2 + error_y**2)
    self.posErrorStr.set('Pos: {e:.2f}'.format(e=error_xy))
    self.posError.configure(fg=fg)
    self.angleErrorStr.set('Angle: {e:.2f}'.format(e=error_orientation))
    fg = 'blue' if error_orientation < pf.tolerance_orientation else 'red'
    self.angleError.configure(fg=fg)
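# compute_error is referenced above but not defined in this excerpt. A minimal
# sketch of what such a helper could look like -- an assumption, not the
# project's actual code -- comparing the real robot to the estimated "ghost"
# pose (x, y, orientation) returned by pf.get_position:
import math

def compute_error(robot, ghost):
    """Hypothetical helper: absolute x/y offsets and wrapped orientation error."""
    error_x = abs(robot.x - ghost[0])
    error_y = abs(robot.y - ghost[1])
    # Wrap the angular difference into [0, pi]
    d_theta = abs(robot.orientation - ghost[2]) % (2 * math.pi)
    error_orientation = min(d_theta, 2 * math.pi - d_theta)
    return error_x, error_y, error_orientation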
def draw_all(self, p, realRob, path, displayRealRobot, displayGhost):
    self.configure(bg='ivory', bd=2, relief=tk.SUNKEN)
    self.delete(tk.ALL)
    self.particles = p
    self.plot_particles()
    if displayGhost:
        ghost = pf.get_position(self.particles)
        self.plot_robot(ghost[0], ghost[1], ghost[2], self.robot_radius, self.ghost_color)
    self.plot_landmarks(reverse_xy(pf.landmarks), self.landmarks_radius, self.landmarks_color)
    if displayRealRobot:
        self.plot_robot(realRob.x, realRob.y, realRob.orientation, self.robot_radius, self.robot_color)
    if path:
        self.plot_landmarks(path, self.path_radius, self.path_color)
            self._auto_run()
        else:
            self._manual_run()


if __name__ == '__main__':
    print 'Testing FilterManager...'
    origin_pos = np.array([200, 0, np.pi / 2])
    origin_cov = np.eye(3) * 3  # Initial Covariance
    origin_cov[2, 2] = 0.02

    # Need to call this or gtk will never release locks
    gobject.threads_init()

    fm = FilterManager()

    run_kf = False
    run_pf = False
    #run_kf = True
    run_pf = True

    if run_kf:
        fm.add_filter(KalmanFilter.KalmanFilter(origin_pos, origin_cov))
    if run_pf:
        fm.add_filter(ParticleFilter.ParticleFilter(origin_pos, origin_cov))

    tg = TestGUI(fm.get_draw())
    #tt = TestThread(fm, tg, auto=True)
    tt = TestThread(fm, tg, auto=False)
    tt.start()
    tg.mainloop()
    tt.quit = True
for i in List_plugged_motors:
    print("Port ", i[0], " for ", i[1], " side motor\n")

# Necessary for the instantiation of the Robot, empty for the moment
List_plugged_sensors = [[2, "SENSOR_ULTRASONIC", 2]]

# Create the instance of the robot, initialize the interface
Simple_robot = Robot(List_plugged_motors, List_plugged_sensors)

# Enable its motors and sensors
Simple_robot.motor_init()
Simple_robot.sensor_init()

size_step = 10
turn_side = 'L'
PF = ParticleFilter(100)


# Conversion from cm to pixels
def conversion(x):
    u = 60 + 16 * x
    return u


def draw_square(distance, left_or_right):
    line1 = (conversion(0), conversion(0), conversion(0), conversion(distance * 4))
    line2 = (conversion(0), conversion(distance * 4), conversion(distance * 4), conversion(distance * 4))
    line3 = (conversion(distance * 4), conversion(distance * 4), conversion(distance * 4), conversion(0))
    line4 = (conversion(distance * 4), conversion(0), conversion(0), conversion(0))
    print "drawLine:" + str(line1)
    print "drawLine:" + str(line2)
    print "drawLine:" + str(line3)
    print "drawLine:" + str(line4)
vstd = 0.9
hstd = 0.6

# Kalman Filter
xInit1 = 1
yInit1 = 1.3
xInit2 = 1
yInit2 = 0.7
initPosError1 = 0
initPosError2 = 0
sigmaSensor = 0.3
covarianceCoeff = 0.001

# Initialize Filters
if FILTER == 1:
    pf1 = ParticleFilter(500, 10, 10, anchors, sigma, vstd, hstd)
    pf2 = ParticleFilter(500, 10, 10, anchors, sigma, vstd, hstd)
elif FILTER == 2:
    kalman1 = KalmanFilter(xInit1, yInit1, initPosError1, sigmaSensor, covarianceCoeff)
    kalman2 = KalmanFilter(xInit2, yInit2, initPosError2, sigmaSensor, covarianceCoeff)


def loop():
    global dt1, dt2, i
    """
    if (i < 8):
        trueX = 1
        trueY = 1
        orientation = 0
fusion_range = 5
# source_strength = 2.2e6 * 100  # 100 u Curies in CPM
source_strength = 100
min_particle_strength = 0.5 * source_strength
max_particle_strength = 1.5 * source_strength

# Initialize world
source_locations = world_size * np.random.rand(num_sources, 2)
source_strengths = source_strength * np.ones(num_sources)
sensor_location = np.array([[1., 2.]])

particle_filter = ParticleFilter.ParticleFilter(min_particle_strength,
                                                max_particle_strength,
                                                num_sources,
                                                world_size=world_size,
                                                num_particles=num_particles,
                                                fusion_range=fusion_range)
# particle_filter.render(sensor_location, source_locations)
# plt.pause(1)

# Take a reading
for t in range(300):
    source_dist_sq = distance.cdist(sensor_location, source_locations, 'sqeuclidean')
    source_expected_intensity = source_strengths / (1 + source_dist_sq)
    # reading = np
    reading = np.round(np.sum(source_expected_intensity))
    particle_filter.step(reading, sensor_location)
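# ParticleFilter.step is not shown in this excerpt. For a count-valued reading
# like the one simulated above, one common measurement update weights each
# particle by a Poisson likelihood of the observed count under the particle's
# hypothesised source, reusing the same 1 / (1 + d^2) intensity model. The
# helper below is a generic, assumed sketch -- not the project's implementation.
import numpy as np
from scipy.spatial import distance
from scipy.stats import poisson

def poisson_particle_weights(reading, sensor_location, particle_xy, particle_strength):
    """particle_xy: (N, 2) hypothesised positions; particle_strength: (N,) strengths."""
    d_sq = distance.cdist(sensor_location, particle_xy, 'sqeuclidean')[0]  # (N,)
    expected = particle_strength / (1.0 + d_sq)   # expected count per particle
    w = poisson.pmf(int(reading), expected)       # likelihood of the observed count
    return w / (np.sum(w) + 1e-300)               # normalised weights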
# Necessary for the instantiation of the Robot, empty for the moment
List_plugged_sensors = [[2, "SENSOR_ULTRASONIC", 2]]

# Create the instance of the robot, initialize the interface
Robot = Robot(List_plugged_motors, List_plugged_sensors)

# Enable its motors and sensors
Robot.motor_init()
Robot.sensor_init()

# Coordinates possibilities
waypoints = [(84., 30.), (180., 30.), (180., 54.),
             (138., 54.), (138., 168.), (138., 54.)]

# Create a ParticleFilter
PF = ParticleFilter(100)

# Create a Map
mymap = Map()

'''
def conversion(x):
    u = 60 + 3*x
    return u

def conversiony(x):
    u = 700 - 3*x
    return u

line1 = (conversion(0), conversiony(0), conversion(0), conversiony(168))
line2 = (conversion(0), conversiony(168), conversion(84), conversiony(168))
line3 = (conversion(84), conversiony(126), conversion(84), conversiony(210))
# Convert the lidar scan to 4xN homogeneous coordinates and transform it with bTl
location = np.concatenate(
    [np.concatenate([xs, ys], axis=0), np.zeros((1, xs.shape[1]))], axis=0)
location = np.concatenate([location, np.ones((1, xs.shape[1]))], axis=0)  # 4*1081
location = np.dot(bTl, location)

# delta is relative pose change
if i == 0:
    delta = lidar_pose[0].reshape(3, 1)
else:
    delta = sum(lidar_pose[i - (step - 1):i + 1]).reshape(3, 1).astype(np.float)

# Particle Filter predict and update
particle = PF.predict(N, delta, particle)
particle, weight = PF.update(N, MAP, location, particle, weight,
                             x_im, y_im, x_range, y_range)

# Find the best particle and its trajectory
max_weight_idx = np.argmax(weight)
best_particle = particle[:, max_weight_idx]  # 3*1
trajectory = np.hstack((trajectory, best_particle[0:2].reshape(2, 1)))

# Map update
MAP = mapping.update(MAP, best_particle, location)

# Particle Filter resample when the effective sample size drops
Neff = 1 / np.dot(weight.reshape(1, N), weight.reshape(N, 1)).squeeze()
if Neff < 5:
    particle, weight = PF.resample(N, particle, weight)
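# PF.resample is invoked above when Neff falls below a threshold, but its body
# is not shown. A minimal low-variance (systematic) resampling sketch consistent
# with that usage -- an illustration assuming particle is 3xN and weight is a
# length-N array summing to 1, not the project's actual implementation:
import numpy as np

def systematic_resample(N, particle, weight):
    # One random offset, then N evenly spaced pointers into the weight CDF
    positions = (np.arange(N) + np.random.uniform()) / N
    cumulative = np.cumsum(weight)
    cumulative[-1] = 1.0                 # guard against floating-point round-off
    indexes = np.searchsorted(cumulative, positions)
    new_particle = particle[:, indexes]  # high-weight particles get duplicated
    new_weight = np.ones(N) / N          # weights reset to uniform
    return new_particle, new_weight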
def resample(self):
    weights = [p.getWeight() for p in self.particles]
    self.particles = pf.ParticleFilter(self.particles, weights)
# Information displayed, to be sure software motor configuration matches reality
print("The configuration states that plugged motors are :\n")
for i in List_plugged_motors:
    print("Port ", i[0], " for ", i[1], " side motor\n")

# Necessary for the instantiation of the Robot, empty for the moment
List_plugged_sensors = [[2, "SENSOR_ULTRASONIC", 2]]

# Create the instance of the robot, initialize the interface
Simple_robot = Robot(List_plugged_motors, List_plugged_sensors)

# Enable its motors and sensors
Simple_robot.motor_init()
Simple_robot.sensor_init()

# Create a ParticleFilter
PF = ParticleFilter(100)

# Start point coordinates in the arena, passed to the robot and to the particle filter
start_point = [84., 30.]
Simple_robot.set_x(start_point[0])
Simple_robot.set_y(start_point[1])
PF.particles = np.array([[start_point[0], start_point[1], 0]] * PF.num_particles)

# Lab spec states it should be 20 cm
step_size = 20

# Create a Map
mymap = Map()
def particle_stream():
    while True:
        r = pf.robot()
        r.set_noise(pf.bearing_noise, pf.steering_noise, pf.distance_noise)
        yield r
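# Usage sketch: init_filter at the top of this section consumes such a stream
# with itertools.islice; a standalone equivalent (N is an assumed particle count,
# not a value taken from the source):
import itertools

N = 500
particles = list(itertools.islice(particle_stream(), N))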
env = SingleBotLaser2Dgrid(bot_pos, bot_param, 'map_large.png')

# Initialize GridMap
# lo_occ, lo_free, lo_max, lo_min
map_param = [0.4, -0.4, 5.0, -5.0]
m = GridMap(map_param, gsize=1.0)

sensor_data = env.Sensor()
SensorMapping(m, env.bot_pos, env.bot_param, sensor_data)

img = Draw(env.img_map, 1, env.bot_pos, sensor_data, env.bot_param)
mimg = AdaptiveGetMap(m)
cv2.imshow('view', img)
cv2.imshow('map', mimg)

# Initialize Particle
pf = ParticleFilter(bot_pos.copy(), bot_param, copy.deepcopy(m), 10)

# Scan Matching Test
matching_m = GridMap(map_param, gsize=1.0)
SensorMapping(matching_m, env.bot_pos, env.bot_param, sensor_data)
matching_pos = np.array([150.0, 100.0, 0.0])

# Main Loop
while True:
    # Input Control
    action = -1
    k = cv2.waitKey(1)
    if k == ord('w'):
        action = 1
    if k == ord('s'):
        action = 2
'''
Created on Dec 18, 2014

@author: hcmi
'''
import numpy as np
import matplotlib.pyplot as plt
from ParticleFilter import *
from motionModels import *

if __name__ == '__main__':
    PARTICLE_NUM = 100
    pf = ParticleFilter(PARTICLE_NUM, 2, 4, motionModel, sensorModel2)

    pos = np.loadtxt('pos.txt')
    n_obs1 = np.loadtxt('n_obs1.txt')
    n_obs2 = np.loadtxt('n_obs2.txt')
    DATA_LEN = n_obs1.shape[0]

    fused_obs = np.hstack([n_obs1, n_obs2])
    print fused_obs.shape

    d = np.random.normal(loc=5, scale=1, size=DATA_LEN)
    #theta = np.random.uniform(np.pi/5 - np.pi/36, np.pi/5 + np.pi/36, DATA_LEN)
    theta = np.random.uniform(np.pi / 5 - np.pi / 10, np.pi / 5 + np.pi / 10, DATA_LEN)
class Localizer:
    '''Full localization solution, with various performance assessment functions.
    Mostly centered around terrain classification. Does not work properly yet.'''

    def __init__(self, map_file, camera_path, weights_file, frames, num_particles=1000):
        '''
        Initialize Localizer object.

        Args:
            map_file: path to file of the global map to use
            camera_path: path to camera images taken during the flight
            weights_file: path to weights file to use learning model with
            frames: total count of images taken during the flight
            num_particles: number of particles used by the particle filter
        '''
        self._set_processor(map_file=map_file, camera_path=camera_path, weights_file=weights_file)
        self._set_filter(x_range=(600, 4200), y_range=(600, 4200), z_range=(30, 150), N=num_particles)
        self._set_odometry(camera_path=camera_path, last_frame=frames)

    # ------------------------ OBJECT SETTERS/GETTERS ------------------------#
    def _set_processor(self, map_file, camera_path, weights_file):
        self.processor = DataProcessor(map_file, camera_path, weights_file)

    def get_processor(self):
        return self.processor

    def _set_filter(self, x_range, y_range, z_range, N):
        self.filter = ParticleFilter(x_range, y_range, z_range, N)

    def get_filter(self):
        return self.filter

    def _set_odometry(self, camera_path, last_frame):
        self.odometry = VisualOdometry(camera_path, last_frame)

    def get_odometry(self):
        return self.odometry
    # ------------------------------------------------------------------------#

    # -------------- PERFORMANCE ASSESSMENT AND RESULT PLOTTING --------------#
    def plot_trajectory(self, estimated_traj, true_traj, plot_title="Flight Trajectory"):
        '''
        Plot estimated and true trajectories (only considers x and y values).

        Args:
            estimated_traj: x,y-value pairs of trajectory estimated with particle filter
            true_traj: x,y-value pairs of trajectory declared in ground truth
            plot_title: title for the plot
        '''
        # initialize plot
        fig, ax = plt.subplots()
        ax.title.set_text(plot_title)

        # show map
        img = plt.imread(self.processor.map_file)
        ax.imshow(img)

        # plot trajectories
        ax.plot(estimated_traj[:, 0], estimated_traj[:, 1], 'r', label="Estimated Trajectory")
        ax.plot(true_traj[:, 0], true_traj[:, 1], label="True Trajectory")
        ax.legend()
        ax.set_xlabel("X-values (pixels)")
        ax.set_ylabel("Y-values (pixels)")
        plt.show()

    def collect_mean_data(self, true_traj, runs=5, measurement="sse"):
        '''
        Calculate mean errors and variances in trajectory between the runs.

        Args:
            true_traj: array with x,y-value pairs of true trajectory
            runs: number of times to run the particle filter
            measurement: particle weighting method, "sse" or "brief"

        Returns mean errors and variances.
        '''
        errors = []
        variances = []
        for i in range(runs):
            print("ITERATION:", i + 1)
            estimated_traj, variance_traj = self.process_flight(measurement=measurement)
            self.odometry.reset_frames()
            self.filter.reset_particles()
            error_xy = np.abs(estimated_traj - true_traj)
            error_traj = np.hypot(error_xy[:, 0], error_xy[:, 1])
            errors.append(error_traj)
            variances.append(variance_traj)
        mean_err = np.mean(np.array(errors), axis=0)
        mean_var = np.mean(np.array(variances), axis=0)
        return mean_err, mean_var

    def plot_errors(self, mean_err, plot_title="Mean Trajectory Errors"):
        '''
        Plot mean errors in trajectory.

        Args:
            mean_err: mean errors on each camera frame
            plot_title: title of the plot
        '''
        plt.title(plot_title)
        plt.plot(np.arange(1, mean_err.shape[0] + 1), mean_err)
        plt.xlabel("Iteration (Camera Frame Number)")
        plt.ylabel("Mean Trajectory Error (pixels, 1px ~ 0.25m)")
        plt.show()

    def plot_variances(self, mean_var, plot_title="Mean Position Variances"):
        '''
        Plot mean variances of position.
        Args:
            mean_var: mean variances in x and y coordinates on each camera frame
            plot_title: title of the plot
        '''
        plt.title(plot_title)
        plt.plot(np.arange(1, mean_var.shape[0] + 1), mean_var[:, 0], label="along x-axis")
        plt.plot(np.arange(1, mean_var.shape[0] + 1), mean_var[:, 1], label="along y-axis")
        plt.xlabel("Iteration (Camera Frame Number)")
        plt.ylabel("Mean Variance (pixels, 1px ~ 0.25m)")
        plt.legend()
        plt.show()
    # ------------------------------------------------------------------------#

    # ----------------------------- LOCALIZATION -----------------------------#
    def process_flight(self, measurement="brief"):
        print("Begin processing flight.")

        # initialize objects
        processor = self.get_processor()
        p_filter = self.get_filter()
        odometry = self.get_odometry()
        print("All objects initialized.")

        # initialize MCL particles
        p_filter.initialize_particles()
        particles = p_filter.particles
        print("MCL particles initialized.")

        mean_positions = []
        var_positions = []
        timer_start = time.time()

        # process all flight images
        for i in range(2, odometry.last_frame + 1):
            # get camera image
            frame = processor.get_camera_image(i)
            print("Frame {}:".format(i))

            # extract mean optical flow, update particle positions
            dx, dy = odometry.process_next_frame()
            p_filter.motion_update(dx, dy)
            print("Received motion update from odometry: {:.3f}, {:.3f}".format(dx, dy))
            print("Begin measuring particles.")

            # keep frame resize constant to conserve resources (downscale by ~half on each side)
            frame = processor.adjust_for_height(frame, drone_height=90)
            frame_prediction = processor.construct_prediction(frame)
            pred_shape = [frame_prediction.shape[0], frame_prediction.shape[1]]

            # weigh particles
            weights = []
            for particle in particles:
                x, y, z, angle = particle[0], particle[1], particle[2], particle[3]
                particle_prediction = processor.get_particle_area(pX=x, pY=y, angle=angle,
                                                                  w=pred_shape[1], h=pred_shape[0])
                if measurement == "sse":
                    weight = calc_weight_sse(frame_prediction, particle_prediction)
                else:
                    weight = calc_weight_brief(frame_prediction, particle_prediction)
                weights.append(weight)

            # update particle weights, resample
            p_filter.sensor_update(np.array(weights))
            p_filter.resample()

            pos_mean, pos_var = p_filter.estimate()
            mean_positions.append(pos_mean)
            var_positions.append(pos_var)
            print("Mean:", np.round(pos_mean, 3))
            print("Variance: {}\n".format(np.round(pos_var, 3)))

        print("\nTotal time: {:.3f} seconds.".format(time.time() - timer_start))
        trajectory = np.array(mean_positions)[:, :2]
        var_positions = np.array(var_positions)[:, :2]
        return trajectory, var_positions
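# A hypothetical usage sketch based on the constructor and methods above; the
# file paths, frame count, and ground-truth file are placeholders, not values
# taken from the source.
import numpy as np

localizer = Localizer(map_file="map.png",
                      camera_path="flight_images/",
                      weights_file="weights.h5",
                      frames=100,
                      num_particles=1000)

estimated_traj, variance_traj = localizer.process_flight(measurement="sse")
true_traj = np.loadtxt("ground_truth.txt")  # assumed format: one x,y pair per frame
localizer.plot_trajectory(estimated_traj, true_traj)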
world_name = "localization_maze" env_params = {"env_width": WIDTH, "env_height": HEIGHT} world_generator = WorldGenerator(WIDTH, HEIGHT, 20, world_name, scenario, collision) if use_human_controller: controller_func = HumanController # Game loop while True: world, robot = world_generator.create_world(random_robot=False) ### TODO initialize stuff for particle filter robo_pos = (robot.x, robot.y, robot.angle) grid_world = Grid((WIDTH, HEIGHT), 1) pf = ParticleFilter.ParticleFilter(deepcopy(list(robo_pos)), grid_world) # pf.particle_list[0].world.grid ### controller = controller_func(robot, scenario) env_params["world"] = world env_params["robot"] = robot env_params["robot_controller"] = controller env_params["scenario"] = scenario env_params["particle_filter"] = pf env_params["debug"] = debug game = MobileRobotGame(**env_params) game.init() game.run(args.snapshot, args.snapshot_dir)
'''
Created on Dec 18, 2014

@author: hcmi
'''
import numpy as np
import matplotlib.pyplot as plt
from ParticleFilter import *
from motionModels import *

if __name__ == '__main__':
    PARTICLE_NUM = 1000
    pf = ParticleFilter(PARTICLE_NUM, 2, 8, motionModel, sensorModel4)

    pos = np.loadtxt('pos.txt')
    n_obs1 = np.loadtxt('n_obs1.txt')
    n_obs2 = np.loadtxt('n_obs2.txt')
    n_obs3 = np.loadtxt('n_obs3.txt')
    n_obs4 = np.loadtxt('n_obs4.txt')
    DATA_LEN = n_obs1.shape[0]

    fused_obs = np.hstack([n_obs1, n_obs2, n_obs3, n_obs4])
    print fused_obs.shape

    d = np.random.normal(loc=5, scale=1, size=DATA_LEN)
    #theta = np.random.uniform(np.pi/5 - np.pi/36, np.pi/5 + np.pi/36, DATA_LEN)
# Information displayed, to be sure software motor configuration matches reality
print("The configuration states that plugged motors are :\n")
for i in List_plugged_motors:
    print("Port ", i[0], " for ", i[1], " side motor\n")

# Necessary for the instantiation of the Robot, empty for the moment
List_plugged_sensors = [[2, "SENSOR_ULTRASONIC", 2]]

# Create the instance of the robot, initialize the interface
Simple_robot = Robot(List_plugged_motors, List_plugged_sensors)

# Enable its motors and sensors
Simple_robot.motor_init()
Simple_robot.sensor_init()

PF = ParticleFilter(100)

while True:
    destination = input("Enter (Wx, Wy) coordinates (in meters): ")
    move = Simple_robot.navigateToWaypoint(destination[0], destination[1])
    print("alpha angle value is :", move[1])

    # Propagate particles through the rotation, then the straight-line segment
    PF.update_after_rotation_full(move[1], 0, 0.01)
    PF.update_after_straight_line_full(move[0], 0, 0.01)

    new_estimated_coordinates = PF.estimate_position_from_particles()
    print("Particles angle estimation is :", new_estimated_coordinates[2])
    Simple_robot.set_x(new_estimated_coordinates[0][0])
    Simple_robot.set_y(new_estimated_coordinates[1][0])
    Simple_robot.set_theta(new_estimated_coordinates[2][0])