Example #1
def test_add():
    # Q1
    v = Vec2d(0, 100) + Vec2d(90, 100)
    assert_almost_equal(v.angle, 45, places=3)
    assert_almost_equal(v.mag, 141.4213, places=3)

    # Q2
    v = Vec2d(-90, 100) + Vec2d(0, 100)
    assert_almost_equal(v.angle, 315, places=3)
    assert_almost_equal(v.mag, 141.4213, places=3)

    # Q3
    v = Vec2d(-90, 100) + Vec2d(-180, 100)
    assert_almost_equal(v.angle, 225, places=3)
    assert_almost_equal(v.mag, 141.4213, places=3)

    # Q4
    v = Vec2d(90, 100) + Vec2d(-180, 100)
    assert_almost_equal(v.angle, 135, places=3)
    assert_almost_equal(v.mag, 141.4213, places=3)

    # Div by 0
    v = Vec2d(0, 100) + Vec2d(0, 100)
    assert_almost_equal(v.angle, 0, places=3)
    assert_almost_equal(v.mag, 200, places=3)
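
Taken together with Examples #13, #15 and #16 below, these assertions pin down the convention this Vec2d follows: it is constructed from an angle in degrees and a magnitude, normalizes angles into [0, 360), and exposes Cartesian x/y components. The following is a minimal illustrative sketch that satisfies those tests; it is a reconstruction for reference, not the project's actual util.Vec2d (the pygame-based examples further down use a different, point-based Vec2d).

import math


class Vec2d(object):
    """Illustrative polar-form vector: angle in degrees [0, 360), plus magnitude."""

    def __init__(self, angle, mag):
        self.angle = angle % 360
        self.mag = mag

    @property
    def x(self):
        # horizontal component, e.g. Vec2d(30, 100).x ~= 86.6025
        return self.mag * math.cos(math.radians(self.angle))

    @property
    def y(self):
        # vertical component, e.g. Vec2d(30, 100).y == 50
        return self.mag * math.sin(math.radians(self.angle))

    @classmethod
    def from_point(cls, x, y):
        # build from Cartesian coordinates, e.g. from_point(100, 100) -> Vec2d(45, 141.42...)
        return cls(math.degrees(math.atan2(y, x)), math.hypot(x, y))

    def __add__(self, other):
        # addition happens in Cartesian space, then converts back to polar form
        return Vec2d.from_point(self.x + other.x, self.y + other.y)

    def with_angle(self, angle):
        return Vec2d(angle, self.mag)

    def with_magnitude(self, mag):
        return Vec2d(self.angle, mag)
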
Example #2
    def __init__(self, generator: Generator, gravity: force):

        self._generator = generator
        self.gravity = Vec2d(gravity)
        self._blocks: Dict[int, List[Block]] = {}
        self._objects: Set[Object] = set()
        self._last_states: Dict[Tuple[pos, pos], State] = LimitedSizeDict(size_limit=100)
Example #3
 def update(self, dt):
     v = self.scene.player.velocity
     if is_pressed(pygame.K_LEFT):
         v.x = -200 * dt
         self.scene.player.data["look_direction"] = "left"
     elif is_pressed(pygame.K_RIGHT):
         v.x = 200 * dt
         self.scene.player.data["look_direction"] = "right"
     else:
         v.x = 0
         self.scene.player.data["foot_step"] = 0
     self.scene.player.velocity = v
     p = self.scene.player.pos
     r = self.scene.application.resolution
     self.scene.scroll = [p.x * 16 - r[0] / 2 + 16, p.y * 16 - r[1] / 2]
     self.scene.player.data["foot_step"] += v.x * self.s
     if -45 > self.scene.player.data["foot_step"]:
         self.s *= -1
         self.scene.player.data["foot_step"] = -45
     if 45 < self.scene.player.data["foot_step"]:
         self.s *= -1
         self.scene.player.data["foot_step"] = 45
     self.scene.player.data["head_rotation"] = (
         Vec2d(pygame.mouse.get_pos()) -
         (r[0] / 2 - 16, r[1] / 2)).rotation - 90
Example #4
def point_to_vector(p):
    [[x, y]] = p
    x = x * X_RES_MM
    y = (y * Y_RES_MM) + Y_OFFSET_MM
    # y*=-1
    # x*=-1
    v = Vec2d.from_point(x, y)
    return v.with_angle(v.angle+270)
Example #5
def calculate_gps_heading(loc, waypoint):
    """returns a vector, where the angle is the initial bearing and the magnitude
    is the distance between two GPS coordinates"""

    dist = dist_to_waypoint(loc, waypoint)
    angle = initial_bearing(
        (loc['lat'], loc['lon']), waypoint) - loc['gps_heading']

    return Vec2d(angle, dist)
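
A hypothetical call for reference, assuming loc is the GPS state dict this function expects (lat/lon and gps_heading in degrees) and the waypoint is a (lat, lon) pair; the coordinates are placeholders:

loc = {'lat': 42.0000, 'lon': -83.0000, 'gps_heading': 90.0}
waypoint = (42.0005, -83.0005)

heading = calculate_gps_heading(loc, waypoint)
# heading.angle: initial bearing to the waypoint, relative to the current gps_heading
# heading.mag:   distance to the waypoint, as returned by dist_to_waypoint
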
Example #6
 def __init__(self, pos, data=None):
     self.images = {}
     for n, p in self.image_paths.items():
         self.images[n] = pygame.image.load(p)
     self.pos: Vec2d = Vec2d(pos)
     self._data: Dict[str, Any] = None
     self._hash = None
     self._image = None
     self._rect = None
     self._draw_offset = None
     self.data = data or {}
Example #7
def find_ideal(heading, x, y):
    heading = to360(heading)
    x = y = 50
    if 0 <= heading <= 45 or 315 <= heading <= 360:
        a = 50
        angle = math.cos(math.radians(heading))
    elif 45 <= heading <= 135:
        a = 50
        angle = math.cos(math.radians(heading - 90))
    elif 135 <= heading <= 225:
        a = 50
        angle = math.cos(math.radians(heading - 180))
    else:
        a = 50
        angle = math.cos(math.radians(heading - 270))
    return Vec2d(heading, a / angle)
Example #8
 def __init__(self, pos, data=None):
     self.base_images = {}
     for n, p in self.image_paths.items():
         self.base_images[n] = pygame.image.load(p)
     self._velocity: Vec2d = Vec2d((0, 0))
     self.images: Dict[str, BodyPart] = {}
     for n, v in self.image_parts.items():
         self.images[n] = BodyPart(self.base_images[v[0]].subsurface(v[1]),
                                   v[2])
     self._data: Dict[str, Any] = None
     self._hash = None
     self._image = None
     self._rect = Rect(pos, self.size)
     self._draw_offset = None
     self.data = data or {}
     self.one_ground = False
     self.setup()
Example #9
def camera_processor():

    # open a video capture feed
    cam = cv2.VideoCapture(cam_name)

    #init ros & camera stuff
    # pub = rospy.Publisher(topics.CAMERA, String, queue_size=10)
    no_barrel_pub = rospy.Publisher(topics.CAMERA, String, queue_size=10)
    line_angle_pub = rospy.Publisher(topics.LINE_ANGLE, Int16, queue_size=0)
    global lidar
    lidar_obs = rx_subscribe(topics.LIDAR)

    Observable.combine_latest(lidar_obs, lambda n: (n)) \
        .subscribe(update_lidar)

    rospy.init_node('camera')
    rate = rospy.Rate(10)

    exposure_init = False

    rawWidth = 640
    rawHeight = 480
    #camera_info = CameraInfo(53,38,76,91,134)#ground level#half (134 inches out)
    camera_info = CameraInfo(53, 40, 76, 180, 217, croppedWidth,
                             croppedHeight)  #ground level# 3/4 out
    while not rospy.is_shutdown():

        #grab a frame
        ret_val, img = cam.read()

        # camera will set its own exposure after the first frame, regardless of mode
        if not exposure_init:
            update_exposure(cv2.getTrackbarPos('exposure', 'img_HSV'))
            update_auto_white(cv2.getTrackbarPos('auto_white', 'img_HSV'))
            exposure_init = True

        #record a video simultaneously while processing
        if ret_val:
            out.write(img)

        #for debugging
        # cv2.line(img,(640/2,0),(640/2,480),color=(255,0,0),thickness=2)
        # cv2.line(img,(0,int(480*.25)),(640,int(480*.25)),color=(255,0,0),thickness=2)

        #crop down to speed processing time
        #img = cv2.imread('test_im2.jpg')
        dim = (rawWidth, rawHeight)
        img = cv2.resize(img, dim, interpolation=cv2.INTER_AREA)
        cropRatio = float(croppedHeight) / float(rawHeight)
        crop_img = img[int(rawHeight * float(1 - cropRatio)):rawHeight,
                       0:rawWidth]  # crops off the top 25% of the image
        cv2.imshow("cropped", crop_img)

        #process the cropped image. returns a "birds eye" of the contours & binary image
        img_displayBirdsEye, contours = process_image(crop_img, camera_info)

        #raw
        contours = convert_to_cartesian(camera_info.map_width,
                                        camera_info.map_height, contours)
        #for filtered barrels
        vec2d_contour = contours_to_vectors(contours)  #replaces NAV
        filtered_contours = filter_barrel_lines(camera=vec2d_contour,
                                                angle_range=8,
                                                lidar_vecs=lidar,
                                                mag_cusion=300,
                                                barrel_cusion=5)

        #EXTEND THE LINES
        filtered_cartesian_contours = vectors_to_contours(filtered_contours)

        try:

            closest_filtered_contour = closest_contour(
                filtered_cartesian_contours)

            # print "CLOSESTCONTOUR: ",closest_filtered_contour

            x_range = 5000
            contour_lines = []
            interval = 40

            #just one
            line_angle, slope, intercept = calculate_line_angle(
                closest_filtered_contour)
            for x in range(x_range * -1, x_range):
                if x % interval == 0:
                    y = slope * x + intercept
                    v = Vec2d.from_point(x, y)
                    contour_lines.append(v)

        except TypeError:  #no camera data
            contour_lines = []
            line_angle = 0

        #build the camera message with the contours and binary image
        # local_map_msg = CameraMsg(contours=contours, camera_info=camera_info)
        # filtered_map_msg=CameraMsg(contours=contour_lines,camera_info=camera_info)#1 polyfit contour
        c = []
        for cs in filtered_cartesian_contours:
            for v in cs:
                c.append(v)
        filtered_map_msg = CameraMsg(
            contours=c, camera_info=camera_info)  #all raw contours

        #make bytestream and pass if off to ros
        # local_map_msg_string = local_map_msg.pickleMe()
        filtered_map_msg_string = filtered_map_msg.pickleMe()

        #rospy.loginfo(local_map_msg_string)
        # pub.publish(local_map_msg_string)
        no_barrel_pub.publish(filtered_map_msg_string)
        line_angle_pub.publish(line_angle)

        if cv2.waitKey(1) == 27:
            break
        rate.sleep()
    cv2.destroyAllWindows()
Example #10
from nav_msgs.msg import OccupancyGrid
from sensor_msgs.msg import PointCloud
from std_msgs.msg import Header, String

import topics
from map import MapUpdate, Map
from util import Vec2d
from util.rosutil import unpickle

MAP_SIZE_PIXELS = 101
MAP_SIZE_METERS = 5

MIN_SAMPLES = 50
MAX_DIST_MM = 10000

MAXED = [Vec2d(a, MAX_DIST_MM) for a in xrange(360)]


class MapPublisher:
    def __init__(self, map, map_topic, tf_frame=None):
        self.map = map
        self.map_pub = rospy.Publisher(map_topic, String, queue_size=1)
        self.tf_frame = tf_frame
        if tf_frame is not None:
            self.br = tf.TransformBroadcaster()

        # debug
        self.rviz_pub = rospy.Publisher(map_topic + '_rviz',
                                        OccupancyGrid,
                                        queue_size=1)
        self.point_cloud_pub = rospy.Publisher(map_topic + '_point_cloud',
                                                PointCloud,
                                                queue_size=1)
Example #11
#!/usr/bin/env python
import pickle

import rospy
from std_msgs.msg import String

import topics
from util import Vec2d

if __name__ == '__main__':
    rospy.init_node('fake_lidar')
    pub = rospy.Publisher(topics.LIDAR, String, queue_size=10)
    rate = rospy.Rate(10)

    msg = pickle.dumps([Vec2d(x, 1000) for x in xrange(-45, 45)])

    while not rospy.is_shutdown():
        pub.publish(msg)
        rate.sleep()
Example #12
def create_vector((quality, angle, dist)):
    d = Vec2d(angle, dist)
    return Vec2d.from_point(-d.x, d.y)  # lidar is backwards, invert x
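
Because of the Python 2 tuple-unpacking signature above, each raw scan sample is passed as a single (quality, angle, dist) tuple. A hypothetical conversion of a scan:

# placeholder scan: (quality, angle in degrees, distance in mm) per sample
scan = [(15, 0, 1200), (15, 90, 800), (15, 180, 2500)]
vectors = [create_vector(sample) for sample in scan]
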
Example #13
def test_x():
    v = Vec2d(30, 100)
    assert_almost_equal(v.x, 86.6025, places=3)
Example #14
def test_from_point(x, y, expected):
    actual = Vec2d.from_point(x, y)
    assert_equal((actual.angle, actual.mag), (expected.angle, expected.mag))
Example #15
def test_y():
    v = Vec2d(30, 100)
    assert_almost_equal(v.y, 50, places=3)
Example #16
@parameterized([(0, 0), (90, 90), (180, 180), (270, -90), (360, 0), (450, 90),
                (-90, -90), (-180, 180), (-270, 90), (-360, 0), (-450, -90)])
def test_to180(angle, expected):
    assert_equal(to180(angle), expected)


@parameterized([(0, 0), (90, 90), (180, 180), (270, 270), (360, 0), (450, 90),
                (-90, 270), (-180, 180), (-270, 90), (-360, 0), (-450, 270)])
def test_to360(angle, expected):
    assert_equal(to360(angle), expected)


@parameterized([
    # y
    (0, 100, Vec2d(90, 100)),
    (0, -100, Vec2d(270, 100)),
    # x
    (100, 0, Vec2d(0, 100)),
    (-100, 0, Vec2d(180, 100)),
    # both
    (100, 100, Vec2d(45, math.sqrt(20000))),
    (100, -100, Vec2d(315, math.sqrt(20000))),
    (-100, 100, Vec2d(135, math.sqrt(20000))),
    (-100, -100, Vec2d(225, math.sqrt(20000)))
])
def test_from_point(x, y, expected):
    actual = Vec2d.from_point(x, y)
    assert_equal((actual.angle, actual.mag), (expected.angle, expected.mag))
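
The expected values in these parameterized cases fully determine the two angle helpers: to360 wraps any angle into [0, 360) and to180 wraps it into (-180, 180]. A minimal sketch consistent with the cases above (not necessarily the project's implementation):

def to360(angle):
    # wrap into [0, 360), e.g. to360(-90) == 270, to360(450) == 90
    return angle % 360


def to180(angle):
    # wrap into (-180, 180], e.g. to180(270) == -90, to180(-180) == 180
    angle = to360(angle)
    return angle - 360 if angle > 180 else angle
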
Example #17
def update_control((gps, costmap, pose, line_angle, state)):
    """figures out what we need to do based on the current state and map"""
    map_pose = costmap.transform

    transform = pose.transform.translation
    map_transform = map_pose.transform.translation
    diff = transform.x - map_transform.x, transform.y - map_transform.y
    diff = Vec2d.from_point(diff[0], diff[1])
    map_rotation = get_rotation(map_pose.transform)
    rotation = get_rotation(pose.transform)
    diff = diff.with_angle(diff.angle + map_rotation)
    diff_rotation = -rotation + map_rotation

    path = generate_path(costmap.costmap_bytes, diff_rotation,
                         (diff.x, diff.y))

    if path is None:
        path = []
    path_debug.publish(
        Path(header=Header(frame_id='map'),
             poses=[
                 PoseStamped(
                     header=Header(frame_id='map'),
                     pose=Pose(position=Point(x=x_to_m(p[0]), y=y_to_m(p[1]))))
                 for p in path
             ]))
    print state
    # calculate theta_dot based on the current state
    if state['state'] == LINE_FOLLOWING or \
            state['state'] == TRACKING_THIRD_WAYPOINT:
        offset = 10
        if len(path) < offset + 1:
            goal = Vec2d(0, ATTRACTOR_THRESHOLD_MM)  # always drive forward
        else:
            point = path[offset]
            goal = Vec2d.from_point(x_to_m(point[0] + 0.5),
                                    y_to_m(point[1] + 0.5))
            goal = goal.with_magnitude(ATTRACTOR_THRESHOLD_MM)
        rotation = -line_angle.data  # rotate to follow lines
        if abs(rotation) < 10:
            rotation = 0

        rotation /= 1.0

    else:
        # FIXME
        goal = calculate_gps_heading(gps, WAYPOINTS[state['tracking'] -
                                                    1])  # track the waypoint

        rotation = to180(goal.angle)
        goal = goal.with_angle(
            0
        )  # don't need to crab for GPS waypoint, steering will handle that
        # state_debug.publish(str(goal))

    # calculate translation based on obstacles
    repulsors = extract_repulsors((diff.x, diff.y), costmap.map_bytes)
    potential = compute_potential(repulsors, goal)
    obstacle_debug.publish(
        PointCloud(header=Header(frame_id=topics.ODOMETRY_FRAME),
                   points=[
                       Point32(x=v.x / 1000.0, y=v.y / 1000.0)
                       for v in repulsors
                   ]))
    translation = to180(potential.angle)

    # rospy.loginfo('translation = %s, rotation = %s, speed = %s', translation, rotation, INITIAL_SPEED)

    # don't rotate if bender needs to translate away from a line
    # if state['state'] == LINE_FOLLOWING:
    #     translation_threshhold = 60
    #     rotation_throttle = 0
    #     if np.absolute(translation) > translation_threshhold:
    #         rotation = rotation * rotation_throttle

    if state['state'] == CLIMBING_UP:
        speed = RAMP_SPEED
        rotation = gps['roll']
        rotation *= -10
        translation = 0
    elif state['state'] == CLIMBING_DOWN:
        speed = SLOW_SPEED
        rotation = gps['roll']
        rotation *= 10
        translation = 0
    else:
        obstacles = extract_repulsors((diff.x, diff.y), costmap.lidar_bytes)
        obstacles = [o for o in obstacles if abs(to180(o.angle)) < 45]
        # compare magnitudes, not Vec2d objects, against the distance cutoff
        min_dist = min(o.mag for o in obstacles) if obstacles else DIST_CUTOFF
        if min_dist < DIST_CUTOFF:
            speed = SLOW_SPEED
        else:
            speed = FAST_SPEED

    update_drivetrain(translation, rotation, speed)

    # rviz debug
    q = quaternion_from_euler(0, 0, math.radians(translation))
    heading_debug.publish(
        PoseStamped(header=Header(frame_id='map'),
                    pose=Pose(position=Point(x=diff.x, y=diff.y),
                              orientation=Quaternion(q[0], q[1], q[2], q[3]))))
Example #18
 def pos(self) -> Vec2d:
     return Vec2d(self._rect.center)
Example #19
    f = 0.5 * A_FACTOR * mag**2  # quadratic
    return attractor.with_magnitude(f)


def calc_repulsive_force(repulsor, position, weight):
    repulsor -= position
    if repulsor.mag <= 1000:
        f = 0.5 * R_FACTOR * (REPULSOR_THRESHOLD_MM -
                              repulsor.mag)**2  # quadratic
    else:
        f = 0  # out of range

    return repulsor.with_magnitude(f * weight)


zero = Vec2d(0, 0)


def sum_repulsors(vecs, position, cluster_mm, weight):
    if len(vecs) == 0:
        return zero

    clusters = partition(vecs, cluster_mm)
    return sum([calc_repulsive_force(r, position, weight) for r in clusters])


def compute_potential(repulsors, goal, position=zero):
    a = calc_attractive_force(goal, position)
    r = sum_repulsors(repulsors, zero, cluster_mm=150, weight=2)
    print a, r
    return a - r
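
A hypothetical call for reference, assuming the angle/magnitude Vec2d convention used throughout and the module constants (A_FACTOR, R_FACTOR, REPULSOR_THRESHOLD_MM) defined earlier in this file: the goal attracts, nearby obstacles repel, and the returned vector is the net potential-field heading.

# placeholder scenario: goal 2 m straight ahead, one obstacle 0.8 m away at 30 degrees
goal = Vec2d(0, 2000)
repulsors = [Vec2d(30, 800)]

potential = compute_potential(repulsors, goal)
print potential.angle, potential.mag
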
Example #20
 def velocity(self, value):
     self._velocity = Vec2d(value)