Example #1
def train_filter(iterations, kernel, sensor_accuracy,
                 move_distance, do_print=True):
    track = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
    prior = np.array([1.] + [0]*12)
    posterior = prior[:]
    normalize(prior)

    robot = Train(len(track), kernel, sensor_accuracy)
    for i in range(iterations):
        # move the robot
        robot.move(distance=move_distance)

        # perform the prediction step
        prior = predict(posterior, move_distance, kernel)

        # and update the filter with the sensor reading
        m = robot.sense()
        likelihood = lh_hallway(track, m, sensor_accuracy)
        posterior = update(likelihood, prior)
        index = np.argmax(posterior)

        if do_print:
            print('''time {}: pos {}, sensed {}, '''
                  '''at position {}'''.format(
                    i, robot.pos, m, track[robot.pos]))

            print('''        estimated position is {}'''
                  ''' with confidence {:.4f}%:'''.format(
                  index, posterior[index]*100))

    # x and width are assumed here: bar positions and bar width for the plot
    # (defined at module level in the original source)
    x = np.arange(len(track))
    width = 0.35
    if iterations > 0:
        plt.bar(x - width/2, posterior, width, label='Filter')
        plt.bar(x + width/2, prior, width, label='Prediction')
    else:
        m = 0
        plt.bar(x, prior, width, label='Prediction')

    if do_print:
        print()
        print('final position is', robot.pos)
        index = np.argmax(posterior)
        print('''Estimated position is {} with '''
              '''confidence {:.4f}%:'''.format(
                index, posterior[index]*100))

    return m
Example #2
def predict_move_conv(self, pdf, offset, kernel):
    """Convolve the posterior pdf with the movement kernel, shifted by
    offset, using wrap-around (circular) indexing."""
    n = len(pdf)
    kN = len(kernel)
    width = int((kN - 1) / 2)
    prior = np.zeros(n)
    for i in range(n):
        for k in range(kN):
            pdfIdx = (i + width - k - offset) % n
            prior[i] += kernel[k] * pdf[pdfIdx]  # accumulate, not overwrite
    return normalize(prior)
def test_predictions():
    s = 0.

    for k in range(3, 22, 2):  # different kernel sizes
        for _ in range(1000):
            a = randn(100)
            kernel = normalize(randn(k))
            move = randint(1, 200)
            s += sum(predict(a, move, kernel) - _predict(a, move, kernel))

        # use abs() so positive and negative differences cannot cancel out
        assert abs(s) < 1.e-8, "sum of difference = {}".format(s)
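For comparison, a minimal sketch of the same wrap-around prediction step using filterpy's built-in discrete_bayes.predict, which the hand-rolled predict_move_conv above mirrors; the belief, offset, and kernel values below are illustrative and not taken from the project above.

import numpy as np
from filterpy.discrete_bayes import predict

belief = np.zeros(10)
belief[3] = 1.0                    # all probability mass at position 3

kernel = [0.1, 0.8, 0.1]           # P(undershoot), P(exact move), P(overshoot)
prior = predict(belief, offset=2, kernel=kernel)

print(prior)                       # mass lands mostly at index 5, spread to 4 and 6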
Example #5
def scaled_update(hall, belief, z, prob):
    scale_ = prob / (1. - prob)
    likelihood = np.ones(len(hall))
    print("su.likelihood:", likelihood)
    likelihood[hall == z] *= scale_
    print("su.likelihood:", likelihood)
    print("su.belief:", belief)
    newhyp = likelihood * belief
    print("su.lik*bel:", newhyp)
    n = normalize(newhyp)
    print("su.posterior:", n)
    return n
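A quick illustrative call of the scaled_update above, assuming it and filterpy.discrete_bayes.normalize (which it calls internally) are in scope; the hallway map and the 75% sensor accuracy are made-up values.

import numpy as np

hallway = np.array([1, 1, 0, 0, 0, 0, 0, 0, 1, 0])   # 1 = door, 0 = wall
belief = np.array([0.1] * 10)                        # uniform prior

posterior = scaled_update(hallway, belief, z=1, prob=0.75)
print(posterior)   # the three door positions end up with the highest probability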
Example #6
def train_filter(iterations, kernel, sensor_accuracy,
      move_distance, do_print=True):
  track = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
  prior = np.array([.9] + [0.01] * 9)
  posterior = prior[:]
  normalize(prior)

  robot = Train(len(track), kernel, sensor_accuracy)
  for i in range(iterations):
    # move the robot and
    robot.move(distance=move_distance)

    # perform prediction
    prior = predict(posterior, move_distance, kernel)

    #  and update the filter
    m = robot.sense()
    likelihood = lh_hallway(track, m, sensor_accuracy)
    posterior = update(likelihood, prior)
    index = np.argmax(posterior)

    if do_print:
      print('''time {}: pos {}, sensed {}, '''
          '''at position {}'''.format(
        i, robot.pos, m, track[robot.pos]))

      print('''        estimated position is {}'''
          ''' with confidence {:.4f}%:'''.format(
        index, posterior[index] * 100))

  bar_plot(posterior)
  if do_print:
    print()
    print('final position is', robot.pos)
    index = np.argmax(posterior)
    print('''Estimated position is {} with '''
        '''confidence {:.4f}%:'''.format(
      index, posterior[index] * 100))
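An illustrative invocation of this train_filter, assuming Train, predict, update, lh_hallway, normalize, and bar_plot from the accompanying module are already imported; the kernel and sensor accuracy below are arbitrary choices, and the printed trace varies from run to run because the simulated sensor is noisy.

# 10% chance of undershooting, 80% exact, 10% overshooting each move
train_filter(4, kernel=[.1, .8, .1], sensor_accuracy=.9,
             move_distance=4, do_print=True)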
Example #7
def scaled_update(hall, belief, z, prob):
    scale_ = prob / (1. - prob)
    belief[hall == z] *= scale_  # scale the positions that match the reading z
    normalize(belief)
Example #8
import numpy as np
from filterpy.discrete_bayes import normalize

hallway = np.array([1, 1, 0, 0, 0, 0, 0, 0, 1, 0])
# belief = np.array([1./3, 1./3, 0, 0, 0, 0, 0, 0, 1/3, 0])


def update_belief(hall, belief, z, correct_scale):
    # scale up every position whose map value matches the reading z
    for i, val in enumerate(hall):
        if val == z:
            belief[i] *= correct_scale


belief = np.array([0.1] * 10)
reading = 1  # 1 is 'door'
update_belief(hallway, belief, z=reading, correct_scale=3.)
# belief = belief / sum(belief)  # manual alternative to normalize()
normalize(belief)  # normalize so the values sum to 1 (i.e., 100%)

print('belief:', belief)
print('sum =', sum(belief))
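With correct_scale=3., the three door cells rise from 0.1 to 0.3 while the seven wall cells stay at 0.1, so the unnormalized belief sums to 1.6; after normalize each door holds 0.3/1.6 = 0.1875 and each wall 0.1/1.6 = 0.0625.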
Example #9
def update(likelihood, belief):
    return normalize(likelihood * belief)
Example #10
def update(likelihood, prior):
  return normalize(likelihood * prior)
Example #11
def scaled_update(hall, belief, z, z_prob): 
  scale = z_prob / (1. - z_prob)
  likelihood = np.ones(len(hall))
  likelihood[hall==z] *= scale
  return normalize(likelihood * belief)
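To show where a scaled update like the one above fits, here is a minimal sketch of a full predict/update cycle built on filterpy.discrete_bayes; the hallway map, motion kernel, reading sequence, and the helper lh_door are assumptions for illustration, with lh_door computing the same likelihood as the scaled_update above before it is multiplied into the belief.

import numpy as np
from filterpy.discrete_bayes import predict, update

hallway = np.array([1, 1, 0, 0, 0, 0, 0, 0, 1, 0])   # 1 = door, 0 = wall
posterior = np.array([0.1] * 10)                     # start with a uniform belief


def lh_door(hall, z, z_prob):
    # likelihood of each position given the reading z (door / no door)
    scale = z_prob / (1. - z_prob)
    likelihood = np.ones(len(hall))
    likelihood[hall == z] *= scale
    return likelihood


for z in [1, 1, 0]:                                  # made-up reading sequence
    prior = predict(posterior, offset=1, kernel=[.1, .8, .1])   # move one step
    posterior = update(lh_door(hallway, z, .75), prior)         # fold in reading
    print(np.argmax(posterior), posterior.max())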
Example #12
def scaled_update(hall, belief, z, z_prob):
  scale = z_prob / (1. - z_prob)
  belief[hall==z] *= scale
  normalize(belief)
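Note the design difference between the last two versions: the one in Example #11 builds a separate likelihood array and returns a new, normalized posterior while leaving belief untouched, whereas this one scales belief in place and then normalizes it, so the caller's array is modified.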