Example #1
0
 def update(self, dt):
     """Advance the robot state by one timestep of length dt.

     Updates the dead-reckoned pose from the wheel speeds, records the
     trajectory, steps the particle filter, and refreshes the plot when
     plotting is enabled.
     """
     self.update_data()
     # Dead-reckon a new pose from the current wheel speeds and log it.
     pose = utils.update_pose(self.data.pose, self.data.wheel_speeds, dt)
     self.data.pose = pose
     self.data.x_positions.append(pose.x)
     self.data.y_positions.append(pose.y)
     self.data.theta = pose.theta
     # Let the particle filter track the same motion and take its best
     # estimate as the filtered pose.
     self.particle_filter.update(dt)
     self.ppose = self.particle_filter.most_likely()
     if self.plotting:
         self.fig.canvas.draw()
Example #2
0
    def update(self, dt):
        t0 = time.time()
        particles = self.sample_particles(len(self.particles))
        #Move sampled particles based on a motor control
        new_particles = [
            utils.update_pose(p, self.data.wheel_speeds, dt, noisy=True)
            for p in particles
            if p.x < 1469 and p.x > 0
            and p.y < 962 and p.y > 0]
        # odo_pose = self.data.pose
        # odo_pose.w = 1./len(new_particles)*10
        # new_particles.append(odo_pose)
        if len(new_particles) < self.n:
            shortage = self.n - len(new_particles)
            likliest_particle = self.most_likely()
            new_particles += (self.rand_gaussian_particles(likliest_particle,
                                 shortage, likliest_particle.w))
        eta = 0
        #Calculate the sensor probabilities (weights) for each particle
        #Use a pool of workers to utilize multiple cores
        exp_readings = self.pool.map(raycasting.exp_readings_for_pose_star, itertools.izip(new_particles, itertools.repeat(self.data.distance_thresholds)))
        if len(exp_readings) != len(new_particles):
            assert False, "Array of expected readings must have the same size as the array of new particles. exp_readings: %d new_particles: %d" % (len(exp_readings), len(new_particles))
        #Sum sensor probabilites (assumption is that they are independent)
        for i, p in enumerate(new_particles):
            weight = self.probability_sum(4, exp_readings[i])
            eta += weight
            p.w = weight

        #Normalize weights
        print 'eta: ', eta
        new_particles = self.normalize(eta, new_particles)

        while len(new_particles) > len(self.particles):
            new_particles.remove(self.least_likely(new_particles))

        self.particles = new_particles
        duration = time.time() - t0