Example no. 1
def stop(wait):
    global nextmove
    global currentmove
    global est_pose
    global waittime

    # Send the stop command to the Arduino.
    # Calculate and apply the movement to the particles.
    msg = 's'
    serialRead.write(msg.encode('ascii'))
    runtime = time.time() - movestarttime

    waittime = time.time() + wait
    if runtime > 1.0:
        if currentmove == "F":
            if runtime < 1.2:
                distance = 0
            else:

                distance = forward_time_2_cm(runtime)

                for p in particles:
                    x = p.getX() + (np.cos(p.getTheta()) * distance)
                    y = p.getY() + ((-np.sin(p.getTheta())) * distance)
                    p.setX(x)
                    p.setY(y)
                add_uncertainty(particles, 5, 0.2)
        elif currentmove == "H":
            distance = cw_time_2_deg(runtime) * (np.pi / 180)
            for p in particles:
                theta = p.getTheta()
                p.setTheta(theta - distance)
            add_uncertainty(particles, 1, 0.2)
        elif currentmove == "L":
            distance = ccw_time_2_deg(runtime) * (np.pi / 180)
            for p in particles:
                theta = p.getTheta()
                p.setTheta(theta + distance)
            add_uncertainty(particles, 1, 0.2)

    currentmove = "0"
    est_pose = estimate_pose(particles)
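
The stop() routine above converts motor run time into distance and rotation with forward_time_2_cm, cw_time_2_deg and ccw_time_2_deg, none of which appear in these examples. A minimal sketch, assuming constant-speed motion at the calibration constants listed in Example no. 3 (translationInOneSecond = 100 cm/s, rotationInOneSecond = 0.79 rad/s); the real helpers may use a different calibration or a non-linear model.

import numpy as np

# Assumed calibration constants (taken from Example no. 3; adjust to the real robot).
TRANSLATION_CM_PER_SEC = 100.0
ROTATION_RAD_PER_SEC = 0.79

def forward_time_2_cm(runtime):
    # Hypothetical helper: seconds of forward driving -> centimetres covered,
    # assuming constant speed and no acceleration ramp.
    return TRANSLATION_CM_PER_SEC * runtime

def cw_time_2_deg(runtime):
    # Hypothetical helper: seconds of clockwise rotation -> degrees turned.
    return np.degrees(ROTATION_RAD_PER_SEC * runtime)

def ccw_time_2_deg(runtime):
    # Hypothetical helper: seconds of counter-clockwise rotation -> degrees turned.
    return np.degrees(ROTATION_RAD_PER_SEC * runtime)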
Example no. 2
def stop(wait):
    global nextmove
    global currentmove
    global est_pose
    global waittime

    # Send the stop command to the Arduino.
    # Calculate and apply the movement to the particles.
    msg = 's'
    serialRead.write(msg.encode('ascii'))
    runtime = time.time() - movestarttime

    waittime = time.time() + wait
    if runtime > 1.0:
        if currentmove == "F":
            if runtime < 1.2:
                distance = 0
            else:

                distance = forward_time_2_cm(runtime)

                for p in particles:
                    x = p.getX() + (np.cos(p.getTheta()) * distance)
                    y = p.getY() + ((-np.sin(p.getTheta())) * distance)
                    p.setX(x)
                    p.setY(y)
                add_uncertainty(particles, 5, 0.2)
        elif currentmove == "H":
            distance = cw_time_2_deg(runtime) * (np.pi/180)
            for p in particles:
                theta = p.getTheta()
                p.setTheta(theta - distance)
            add_uncertainty(particles, 1, 0.2)
        elif currentmove == "L":
            distance = ccw_time_2_deg(runtime) * (np.pi/180)
            for p in particles:
                theta = p.getTheta()
                p.setTheta(theta + distance)
            add_uncertainty(particles, 1, 0.2)
    
    currentmove = "0"
    est_pose = estimate_pose(particles)
Example no. 3
cv2.namedWindow(WIN_World)
cv2.moveWindow(WIN_World, 500, 50)

# Initialize particles
num_particles = 250
particles = []
for i in range(num_particles):
    # Random starting points. (x,y) \in [-1000, 1000]^2, theta \in [-pi, pi].
    p = par.Particle(2000.0 * np.random.ranf() - 1000,
                     2000.0 * np.random.ranf() - 1000,
                     2.0 * np.pi * np.random.ranf() - np.pi,
                     1.0 / num_particles)
    #p = particle.Particle(2000.0*np.random.ranf() - 1000, 2000.0*np.random.ranf() - 1000, np.pi+3.0*np.pi/4.0, 1.0/num_particles)
    particles.append(p)

est_pose = par.estimate_pose(particles)  # The estimate of the robot's current pose

# Driving parameters
velocity = 0.0  # cm/sec
angular_velocity = 0.0  # radians/sec

# Initialize the robot (XXX: You do this)
arlo = robot.Robot()
lastSeenLM = None
LMInSight = False
lastMeasuredAngle = 0
translationInOneSecond = 100
rotationInOneSecond = 0.79  # 1.13826
weightMean = 0
Example no. 4
def detect_objects():
    global goals
    global target
    global particles
    global landmark_x
    global landmark_y
    global est_pose

    # Detect objects
    if DEMO:
        objectType, measured_distance, measured_angle, colourProb = cam.get_object(
            colour)
    else:
        objectType = 'vertical'
        measured_distance = 100
        measured_angle = deg_2_rad(90.0)
        colourProb = (0, 0, 0)

    if objectType != 'none':
        """ FOR EXAM """
        for goal in goals:

            # if we find one of the goals
            if goal.match(objectType, colourProb):

                # calculate position of the goal
                x = np.cos(measured_angle) * measured_distance
                y = -np.sin(measured_angle) * measured_distance
                print measured_distance
                # set its position
                goal.setPosition(x, y)
                goal.setTheta(measured_angle)

                # announce that we're clever enough to identify which goal and where it is :)
                print "Found", goal.getIdentifier(), "at (", goal.getX(
                ), ",", goal.getY(), "), theta: ", goal.getTheta()

                landmark_x = goal.getUX()
                landmark_y = goal.getUY()

                # is it the one we want to visit?
                if goal == goals[0]:
                    target = goal

        # Compute particle weights
        sigma_D = 15
        sigma_A = 0.2
        for p in particles:
            X = p.getX()
            Y = p.getY()
            if (X - landmark_x) == 0 and (Y - landmark_y) == 0:
                p.setWeight(0)
            else:
                distance = np.sqrt((X - landmark_x) * (X - landmark_x) +
                                   (Y - landmark_y) * (Y - landmark_y))
                theta = p.getTheta()
                dx = np.cos(theta)
                dy = -np.sin(theta)
                lx = landmark_x - X
                ly = landmark_y - Y
                # Cosine of the angle between the particle's heading (a unit
                # vector) and the direction to the landmark, so normalise by
                # the landmark vector's length only.
                angle = (lx * dx + ly * dy) / np.sqrt(lx * lx + ly * ly)
                if angle < -1:
                    angle = -1
                elif angle > 1:
                    angle = 1
                D = gaussian_distribution(measured_distance, distance, sigma_D)
                A = gaussian_distribution(measured_angle, np.arccos(angle),
                                          sigma_A)
                p.setWeight(D * A)

        # Resampling
        particles = resample(particles, 100)

        # Draw detected pattern
        if DEMO:
            cam.draw_object(colour)
    else:
        # No observation - reset weights to uniform distribution
        for p in particles:
            p.setWeight(1.0 / num_particles)

    est_pose = estimate_pose(particles)  # The estimate of the robot's current pose
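
detect_objects() weights each particle with gaussian_distribution and then calls resample, neither of which is shown. A minimal sketch: gaussian_distribution is taken to be a 1-D normal density of the measurement around the particle's prediction, and resample is read as weight-proportional resampling with the second argument of resample(particles, 100) interpreted as a number of fresh random particles to inject, which is an assumption; the particle module alias (par) and the getWeight() accessor follow Example no. 3 and the setWeight() calls above.

import numpy as np
import particle as par  # assumed module providing the Particle class, as in Example no. 3

def gaussian_distribution(measured, expected, sigma):
    # Likelihood of a measurement given the value predicted by a particle,
    # modelled as a 1-D normal density (a standard choice; the original is not shown).
    return (1.0 / (np.sqrt(2.0 * np.pi) * sigma)) * \
        np.exp(-((measured - expected) ** 2) / (2.0 * sigma ** 2))

def resample(particles, n_random):
    # Weight-proportional resampling. The role of the second argument in the
    # original call resample(particles, 100) is not shown; here it is read as
    # the number of fresh uniformly random particles to mix in (an assumption).
    n = len(particles)
    weights = np.array([p.getWeight() for p in particles], dtype=float)
    if weights.sum() <= 0:
        weights = np.ones(n) / n
    else:
        weights /= weights.sum()
    idx = np.random.choice(n, size=max(n - n_random, 0), p=weights)
    new_particles = [par.Particle(particles[i].getX(), particles[i].getY(),
                                  particles[i].getTheta(), 1.0 / n) for i in idx]
    for _ in range(min(n_random, n)):
        new_particles.append(par.Particle(2000.0 * np.random.ranf() - 1000,
                                          2000.0 * np.random.ranf() - 1000,
                                          2.0 * np.pi * np.random.ranf() - np.pi,
                                          1.0 / n))
    return new_particles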
Example no. 5
def movecommand(command, wait):
    global nextmove
    global waittime
    global currentmove
    global movestarttime

    emergency = False
    # Sends the command to the Arduino and saves the current time and the command.
    if command == "F":
        msg = 'r1\n'
        serialRead.write(msg.encode('ascii'))
        front = serialRead.readline()
        print "infrared sensor read: ", front
        front_val = float(front)
        if front_val > 10.0 and front_val < 80:
            est_pose = estimate_pose(particles)
            a = np.cos(est_pose.getTheta()) * front_val + est_pose.getX()
            b = -np.sin(est_pose.getTheta()) * front_val + est_pose.getY()
            too_close = False
            for goal in goals:
                minx = minimum(a, goal.getUX())
                miny = minimum(b, goal.getUY())
                maxx = maximum(a, goal.getUX())
                maxy = maximum(b, goal.getUY())

                da = maxx - minx
                db = maxy - miny
                # Distance between the detected obstacle and this goal.
                dc = np.sqrt(da * da + db * db)
                if dc < 75:
                    too_close = True
                    break

            if not too_close:
                avoid.append(Landmark("box", (a, b), GREEN, "irrelevant"))
        if not checkpath(0.0, wait):
            notrouble = False
            if inside_bubble:
                msg = 'p 8 1.0'
            else:
                offroad = 0
                while not notrouble and offroad < 4:
                    # Sweep alternating offsets 1, -1, 2, -2, ... (with "< 0"
                    # the loop never leaves 0 and spins forever).
                    if offroad <= 0:
                        offroad = (-offroad) + 1
                    else:
                        offroad = -offroad
                    notrouble = checkpath(offroad, wait)
            if notrouble:
                if offroad < 0:
                    turn = "H"
                    turnlength = cw_deg_2_time(rad_2_deg(offroad))
                else:
                    turn = "L"
                    turnlength = ccw_deg_2_time(rad_2_deg(offroad))
                nextmove = [[turn, turnlength], ["F", wait]]
                emergency = True
            else:
                msg = 'p 8.0 1.0'
        msg = 'p 8 1.0'
    elif command == "L":
        msg = 'p 300.0 1.0'
    elif command == "H":
        msg = 'p -310.0 1.0'
    else:
        print "WARNING: unknown move command!"
    if not emergency:
        if DEMO:
            serialRead.write(msg.encode('ascii'))

        movestarttime = time.time()

        if len(nextmove) == 0:
            nextmove = [["S", 0.3]]
        else:
            nextmove.pop(0)
            nextmove.insert(0, ["S", 0.3])

        currentmove = command
        waittime = time.time() + wait
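
movecommand() also depends on rad_2_deg, cw_deg_2_time and ccw_deg_2_time, which are not included. A minimal sketch, assuming they are plain unit conversions and the inverses of the time-to-angle helpers sketched after Example no. 1 (so the same assumed 0.79 rad/s calibration applies).

import numpy as np

ROTATION_RAD_PER_SEC = 0.79  # assumed calibration, as in the earlier sketch

def rad_2_deg(angle):
    return np.degrees(angle)

def deg_2_rad(angle):
    return np.radians(angle)

def cw_deg_2_time(angle_deg):
    # Hypothetical inverse of cw_time_2_deg: degrees of clockwise turn -> seconds.
    return np.radians(angle_deg) / ROTATION_RAD_PER_SEC

def ccw_deg_2_time(angle_deg):
    # Hypothetical inverse of ccw_time_2_deg: degrees of counter-clockwise turn -> seconds.
    return np.radians(angle_deg) / ROTATION_RAD_PER_SEC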
Example no. 6
cv2.namedWindow(WIN_World)
cv2.moveWindow(WIN_World, 500, 50)


# Initialize particles
num_particles = 250
particles = []
for i in range(num_particles):
    # Random starting points. (x,y) \in [-1000, 1000]^2, theta \in [-pi, pi].
    p = par.Particle(2000.0 * np.random.ranf() - 1000,
                     2000.0 * np.random.ranf() - 1000,
                     2.0 * np.pi * np.random.ranf() - np.pi,
                     1.0 / num_particles)
    #p = particle.Particle(2000.0*np.random.ranf() - 1000, 2000.0*np.random.ranf() - 1000, np.pi+3.0*np.pi/4.0, 1.0/num_particles)
    particles.append(p)

# The estimate of the robot's current pose
est_pose = par.estimate_pose(particles)

# Driving parameters
velocity = 0.0  # cm/sec
angular_velocity = 0.0  # radians/sec

# Initialize the robot (XXX: You do this)
arlo = robot.Robot()


# Allocate space for world map
world = np.zeros((500, 500, 3), dtype=np.uint8)

# Draw map
draw_world(est_pose, particles, world)
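
draw_world(est_pose, particles, world) is called here and in Example no. 7 but not shown. A minimal sketch, assuming the usual rendering of particles as dots and the pose estimate as a circle with a heading line on the 500x500 OpenCV canvas allocated above; the 4 cm-per-pixel scale, the centre offset and the colours are assumptions.

import cv2
import numpy as np

def draw_world(est_pose, particles, world):
    # Hypothetical renderer: particles in red, estimated pose in blue.
    # World coordinates are shifted so (0, 0) lands mid-canvas (an assumed offset).
    offset_x, offset_y = 250, 250
    world[:] = 0  # clear the canvas
    for p in particles:
        px = int(p.getX() / 4.0) + offset_x   # assumed 4 cm per pixel scale
        py = int(p.getY() / 4.0) + offset_y
        if 0 <= px < world.shape[1] and 0 <= py < world.shape[0]:
            cv2.circle(world, (px, py), 1, (0, 0, 255), -1)
    ex = int(est_pose.getX() / 4.0) + offset_x
    ey = int(est_pose.getY() / 4.0) + offset_y
    hx = ex + int(10.0 * np.cos(est_pose.getTheta()))
    hy = ey - int(10.0 * np.sin(est_pose.getTheta()))  # minus-sin convention, as in stop()
    cv2.circle(world, (ex, ey), 5, (255, 0, 0), 2)
    cv2.line(world, (ex, ey), (hx, hy), (255, 0, 0), 2)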
Example no. 7
WIN_World = "World view";
cv2.namedWindow(WIN_World);
cv2.moveWindow(WIN_World, 500       , 50);



# Initialize particles
num_particles = 1000
particles = []
for i in range(num_particles):
    # Random starting points. (x,y) \in [-1000, 1000]^2, theta \in [-pi, pi].
    p = particle.Particle(2000.0*np.random.ranf() - 1000, 2000.0*np.random.ranf() - 1000, 2.0*np.pi*np.random.ranf() - np.pi, 1.0/num_particles)
    particles.append(p)

est_pose = particle.estimate_pose(particles)  # The estimate of the robot's current pose

# Driving parameters
velocity = 0.0  # cm/sec
angular_velocity = 0.0  # radians/sec

# Initialize the robot (XXX: You do this)

# Allocate space for world map
world = np.zeros((500,500,3), dtype=np.uint8)

# Draw map
draw_world(est_pose, particles, world)

print "Opening and initializing camera"
Example no. 8
def detect_objects():
    global goals
    global target
    global particles
    global landmark_x
    global landmark_y
    global est_pose

    # Detect objects
    if DEMO:
        objectType, measured_distance, measured_angle, colourProb = cam.get_object(colour)
    else:
        objectType = 'vertical'
        measured_distance = 100
        measured_angle = deg_2_rad(90.0)
        colourProb = (0,0,0)
    
    if objectType != 'none':
        """ FOR EXAM """
        for goal in goals:

            # if we find one of the goals
            if goal.match(objectType, colourProb):
                
                # calculate position of the goal
                x = np.cos(measured_angle) * measured_distance
                y = -np.sin(measured_angle) * measured_distance
                print measured_distance
                # set its position
                goal.setPosition(x, y)
                goal.setTheta(measured_angle)

                # announce that we're clever enough to identify which goal and where it is :)
                print "Found", goal.getIdentifier(), "at (", goal.getX(), ",", goal.getY(), "), theta: ", goal.getTheta()

                landmark_x = goal.getUX()
                landmark_y = goal.getUY()

                # is it the one we want to visit?
                if goal == goals[0]:
                    target = goal

        # Compute particle weights
        sigma_D = 15 
        sigma_A = 0.2
        for p in particles:
            X = p.getX()
            Y = p.getY()
            if (X - landmark_x) == 0 and (Y - landmark_y) == 0:
                p.setWeight(0)
            else:
                distance = np.sqrt((X - landmark_x) * (X - landmark_x) + (Y - landmark_y) * (Y - landmark_y))
                theta = p.getTheta()
                dx = np.cos(theta)
                dy = -np.sin(theta)
                lx = landmark_x - X
                ly = landmark_y - Y 
                # Cosine of the angle between the particle's heading (a unit
                # vector) and the direction to the landmark, so normalise by
                # the landmark vector's length only.
                angle = (lx * dx + ly * dy) / np.sqrt(lx * lx + ly * ly)
                if angle < -1:
                    angle = -1
                elif angle > 1:
                    angle = 1
                D = gaussian_distribution(measured_distance, distance, sigma_D)
                A = gaussian_distribution(measured_angle, np.arccos(angle), sigma_A)
                p.setWeight(D * A)

        # Resampling
        particles = resample(particles, 100)

        # Draw detected pattern
        if DEMO:
            cam.draw_object(colour)
    else:
        # No observation - reset weights to uniform distribution
        for p in particles:
            p.setWeight(1.0/num_particles)

    est_pose = estimate_pose(particles)  # The estimate of the robot's current pose
Example no. 9
def movecommand(command, wait):
    global nextmove
    global waittime
    global currentmove
    global movestarttime
    
    emergency = False
    # Sends the command to the Arduino and saves the current time and the command.
    if command == "F":
        msg = 'r1\n'
        serialRead.write(msg.encode('ascii'))
        front = serialRead.readline()
        print "infrared sensor read: ", front
        front_val = float(front)
        if front_val > 10.0 and front_val < 80:
            est_pose = estimate_pose(particles)
            a = np.cos(est_pose.getTheta()) * front_val + est_pose.getX()
            b = -np.sin(est_pose.getTheta()) * front_val + est_pose.getY()
            too_close = False
            for goal in goals:
                minx = minimum(a, goal.getUX()) 
                miny = minimum(b, goal.getUY())
                maxx = maximum(a, goal.getUX())
                maxy = maximum(b, goal.getUY())

                da = maxx - minx
                db = maxy - miny
                # Distance between the detected obstacle and this goal.
                dc = np.sqrt(da * da + db * db)
                if dc < 75:
                    too_close = True
                    break

            if not too_close:
                avoid.append(Landmark("box", (a, b), GREEN, "irrelevant"))
        if not checkpath(0.0, wait):
            notrouble = False
            if inside_bubble:
                msg = 'p 8 1.0'
            else:
                offroad = 0
                while not notrouble and offroad < 4:
                    # Sweep alternating offsets 1, -1, 2, -2, ... (with "< 0"
                    # the loop never leaves 0 and spins forever).
                    if offroad <= 0:
                        offroad = (-offroad) + 1
                    else:
                        offroad = -offroad
                    notrouble = checkpath(offroad, wait)
            if notrouble:
                if offroad < 0:
                    turn = "H"
                    turnlength = cw_deg_2_time(rad_2_deg(offroad))
                else:
                    turn = "L"
                    turnlength = ccw_deg_2_time(rad_2_deg(offroad))
                nextmove = [[turn, turnlength], ["F", wait]]
                emergency = True
            else:
                msg = 'p 8.0 1.0'
        msg = 'p 8 1.0'
    elif command == "L":
        msg = 'p 300.0 1.0'
    elif command == "H":
        msg = 'p -310.0 1.0'
    else:
        print "WARNING: unknown move command!"
    if not emergency: 
        if DEMO:
            serialRead.write(msg.encode('ascii'))

        movestarttime = time.time()

        if len(nextmove) == 0:
            nextmove = [["S", 0.3]]
        else:
            nextmove.pop(0)
            nextmove.insert(0, ["S", 0.3])

        currentmove = command
        waittime = time.time() + wait
Example no. 10
def update_particles(particles, cam, velocity, angular_velocity, world,
                     WIN_RF1, WIN_World):
    #print 'update: ' + str(angular_velocity)
    cv2.waitKey(4)
    num_particles = len(particles)
    for p in particles:
        # calculates new orientation
        curr_angle = add_to_angular_v2(np.degrees(p.getTheta()),
                                       angular_velocity)
        if velocity > 0.0:
            [x, y] = move_vector(p, velocity)
            particle.move_particle(p, x, y, curr_angle)
        else:
            particle.move_particle(p, 0.0, 0.0, curr_angle)
    if velocity != 0.0:
        particle.add_uncertainty(particles, 12, 5)
    if velocity == 0.0 and angular_velocity != 0.0:
        particle.add_uncertainty(particles, 0, 5)

    # Fetch next frame
    colour, distorted = cam.get_colour()

    # Detect objects
    objectType, measured_distance, measured_angle, colourProb = cam.get_object(
        colour)

    if objectType != 'none':
        obs_landmark = ret_landmark(colourProb, objectType)
        observed_obj = [
            objectType, measured_distance, measured_angle, obs_landmark
        ]

        list_of_particles = weight_particles(particles,
                                             np.degrees(measured_angle),
                                             measured_distance, obs_landmark)

        particles = resample_particles(list_of_particles[:, [0, 2]])

        particle.add_uncertainty(particles, 15, 5)

        cam.draw_object(colour)
    else:
        observed_obj = [None, None, None, None]
        for p in particles:
            p.setWeight(1.0 / num_particles)

        particle.add_uncertainty(particles, 10, 5)

    est_pose = particle.estimate_pose(particles)

    draw_world(est_pose, particles, world)
    #Show frame
    cv2.imshow(WIN_RF1, colour)
    #Show world
    cv2.imshow(WIN_World, world)

    return {
        'est_pos': est_pose,
        'obs_obj': observed_obj,
        'particles': particles
    }
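
update_particles() calls move_vector(p, velocity) and add_to_angular_v2(...), which are not included. A minimal sketch of move_vector, assuming velocity is the displacement (in cm) accumulated since the previous update and using the minus-sin y-convention seen in stop() above; add_to_angular_v2 is omitted because its degree/radian convention is ambiguous in these snippets.

import numpy as np

def move_vector(p, velocity):
    # Hypothetical helper: translate the particle's heading into an (x, y)
    # displacement, assuming `velocity` is already the distance covered since
    # the previous update (cm).
    theta = p.getTheta()
    dx = np.cos(theta) * velocity
    dy = -np.sin(theta) * velocity  # same sign convention as stop() in Example no. 1
    return [dx, dy]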
Example no. 11
def estimate_position(particles):
    return particle.estimate_pose(particles)
Example no. 12
def update_particles(particles, cam, velocity, angular_velocity, world,
                     WIN_RF1, WIN_World):
    raw_input()
    print 'update: ' + str(angular_velocity)
    cv2.waitKey(4)
    num_particles = len(particles)
    for p in particles:
        # calculates new orientation

        curr_angle = add_to_angular_v2(np.degrees(p.getTheta()), angular_velocity)
        print 'theta_rad: ' + str(p.getTheta())
        print 'theta_deg: ' + str(np.degrees(p.getTheta()))
        print 'cur_ang_deg: ' + str(np.degrees(curr_angle))
        if velocity > 0.0:
            [x, y] = move_vector(p, velocity)
            particle.move_particle(p, x, y, curr_angle)
        else:
            particle.move_particle(p, 0.0, 0.0, curr_angle)
            print 'cur_ang_rad: ' + str(curr_angle)
    if velocity != 0.0:
        particle.add_uncertainty(particles, 12, 15)
    if velocity == 0.0 and angular_velocity != 0.0:
        particle.add_uncertainty(particles, 0, 15)

    # Fetch next frame
    colour, distorted = cam.get_colour()

    # Detect objects
    objectType, measured_distance, measured_angle, colourProb = cam.get_object(
        colour)

    if objectType != 'none':
        obs_landmark = ret_landmark(colourProb, objectType)
        observed_obj = [objectType, measured_distance, measured_angle,
                        obs_landmark]


        list_of_particles = weight_particles(particles,
                                             np.degrees(measured_angle),
                                             measured_distance, obs_landmark)


        particles = []
        for count in range(0, num_particles):
            rando = np.random.uniform(0.0,1.0)  # np.random.normal(0.0, 1.0, 1)
            # dicto = {'i': 500,
            #          'n': 2}
            p = when_in_range(list_of_particles,
                              0,
                              num_particles,
                              rando)
            particles.append(
                particle.Particle(p.getX(), p.getY(), p.getTheta(),
                                  1.0 / num_particles))
        print 'list_of_particles: ' + str(list_of_particles)
        print 'particles: ' + str(particles)

        particle.add_uncertainty(particles, 5, 2)

        # new random particles added
        #for c in range(0, int(math.ceil(num_particles * 0.05))):
        #    p = particle.Particle(500.0 * np.random.ranf() - 100,
        #                          500.0 * np.random.ranf() - 100,
        #                          2.0 * np.pi * np.random.ranf() - np.pi, 0.0)

        #    particles.append(p)

        # Draw detected pattern
        cam.draw_object(colour)
    else:
        observed_obj = [None, None, None, None]
        # No observation - reset weights to uniform distribution
        for p in particles:
            p.setWeight(1.0 / num_particles)

        particle.add_uncertainty(particles, 5, 2)

    # est_pose = particle.estimate_pose(particles)  # The estimate of the robots current pose
    # return [est_pose, observed_obj]

    est_pose = particle.estimate_pose(
        particles)  # The estimate of the robots current pose

    print 'Updated pose: ' + str([est_pose.getX(), est_pose.getY()])
    draw_world(est_pose, particles, world)
    # Show frame
    cv2.imshow(WIN_RF1, colour)
    # Show world
    cv2.imshow(WIN_World, world)
    return {'est_pos': est_pose,
            'obs_obj': observed_obj,
            'particles': particles}
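
The resampling loop above draws rando uniformly from [0, 1) and asks when_in_range(list_of_particles, 0, num_particles, rando) for the matching particle. The helper is not shown; one consistent reading, assuming each row of list_of_particles holds (particle, weight, cumulative weight) as the [:, [0, 2]] slice in Example no. 10 suggests, is a binary search over the cumulative column.

def when_in_range(list_of_particles, low, high, rando):
    # Hypothetical helper: binary search for the first row whose cumulative
    # weight (column 2) is >= rando, and return that row's particle (column 0).
    # Assumes rows are sorted by cumulative weight, which the resampling needs.
    while low < high:
        mid = (low + high) // 2
        if list_of_particles[mid][2] < rando:
            low = mid + 1
        else:
            high = mid
    return list_of_particles[min(low, len(list_of_particles) - 1)][0]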
Example no. 13
def localize_self(particles, seen_landmarks, landmarks, cam, world, WIN_World, WIN_RF1, draw_fun, oneMeter = 2.0):
    deg_turned = 0.0
    converged = False
    box_location = (0,0)
    num_particles = len(particles)
    oneMeter_ret = oneMeter

    while deg_turned < 391 and not converged:

        print("Getting next frame")
        colour = cam.get_colour()
        saw_box, box_location, seen_landmarks, measured_angle, measured_distance = camfun.get_frame(cam, landmarks, seen_landmarks, box_location, colour)
        turn_angle = 30
        if saw_box:
            #for i in range(10):
            particles = sample.sample_resample(particles, num_particles, box_location, measured_angle, measured_distance)
            turn_angle = 45
            '''
            if len(seen_landmarks) <= 1:
                if measured_angle > 0:
                    smov.move_robot(oneMeter, particles, 'l', np.degrees(measured_angle))
                else:
                    smov.move_robot(oneMeter, particles, 'r', -np.degrees(measured_angle))
                remaining, stop_dir = smov.move_robot(oneMeter, particles, 'f', 50)
                prev_sens_dist = measured_distance
                motor_dist = 50 - (remaining * 100)
                _, _, _, _, sens_dist = camfun.get_frame(cam, landmarks, seen_landmarks, box_location, colour)
                real_dist = (prev_sens_dist - sens_dist)
                ratio = motor_dist / real_dist
                oneMeter_ret = oneMeter * ratio
                time.sleep(0.2)
                print("prev_dist, new_dist:", measured_distance, sens_dist)
                print("Prev oneMeter, new oneMeter:", oneMeter, oneMeter_ret)
                turn_angle = 45
            '''

        else:
            for p in particles:
                p.setWeight(1.0/num_particles)

        print("Localizing...")
        stdX = [elem.getX() for elem in particles]
        stdY = [elem.getY() for elem in particles]
        converged = np.std(stdX) < 80 and np.std(stdY) < 80 and len(seen_landmarks) > 1
        est_pose = particle.estimate_pose(particles)
        if not converged:
            smov.move_robot(oneMeter, particles, 'l', turn_angle)
            deg_turned += turn_angle

        draw_fun(est_pose, particles, world)

        # Show frame
        cv2.imshow(WIN_RF1, colour)

        # Show world
        cv2.imshow(WIN_World, world)

    if converged:
        print("Localization complete!")
        return particles, est_pose, oneMeter_ret
    else:
        print("Couldn't localize, trying new position")
        smov.move_robot(oneMeter, particles, 'f', 50)
        return localize_self(particles, [], landmarks, cam, world, WIN_World, WIN_RF1, draw_fun, oneMeter)
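
For completeness, a hedged usage sketch showing how localize_self could be wired to the initialization from Examples no. 3, 6 and 7; the window names, the cam object, the landmarks list and draw_world are taken from those snippets and assumed to be provided by the rest of the project.

import cv2
import numpy as np
import particle

WIN_RF1 = "Robot view"  # assumed window name
WIN_World = "World view"
cv2.namedWindow(WIN_World)
cv2.moveWindow(WIN_World, 500, 50)

num_particles = 1000
particles = [particle.Particle(2000.0 * np.random.ranf() - 1000,
                               2000.0 * np.random.ranf() - 1000,
                               2.0 * np.pi * np.random.ranf() - np.pi,
                               1.0 / num_particles)
             for _ in range(num_particles)]
world = np.zeros((500, 500, 3), dtype=np.uint8)

# `cam`, `landmarks` and `draw_world` are assumed to come from the rest of the project.
particles, est_pose, oneMeter = localize_self(particles, [], landmarks, cam,
                                              world, WIN_World, WIN_RF1,
                                              draw_world, oneMeter=2.0)
print "Estimated pose:", est_pose.getX(), est_pose.getY(), est_pose.getTheta()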