def turn2(d, r, init_time, init_yaw, current_time, current_yaw):
    status = True
    msg = "Turned successfully"
    if init_yaw is None:
        rospy.logerr(
            "Initial yaw is None, the startup was not correctly done!")
        msg = "Initial yaw is None, the startup was not correctly done!"
        return False, msg
    else:
        orient = Orientation()
        correct_yaw = orient.getCorrectedYaw(init_time, init_yaw, current_time,
                                             current_yaw)
        print " Current_yaw ->" + str(correct_yaw)
        if d == 'NORTH':
            pass
        elif d == 'SOUTH':
            init_yaw = init_yaw + radians(180)
        elif d == 'EAST':
            init_yaw = init_yaw + radians(90)
        elif d == 'WEST':
            init_yaw = init_yaw - radians(90)
        else:
            rospy.logerr("Direction is not correct")
            msg = "Direction is not correct"
            return False, msg

        print "Direction_yaw ->" + str(init_yaw)
        if correct_yaw > init_yaw:
            angle = correct_yaw - init_yaw
            return turn(degrees(angle), -1)
        else:
            angle = init_yaw - correct_yaw
            return turn(degrees(angle), 1)
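
The yaw difference computed above is not wrapped, so a target on the other side of the ±180° boundary makes the robot turn the long way around. A minimal normalization sketch follows; normalize_angle and turn_towards are hypothetical helpers, reusing the same turn(angle_in_degrees, ±1) call as turn2 above:

from math import pi, degrees

def normalize_angle(angle):
    # Wrap an angle in radians into (-pi, pi].
    while angle <= -pi:
        angle += 2 * pi
    while angle > pi:
        angle -= 2 * pi
    return angle

def turn_towards(correct_yaw, target_yaw):
    # Shortest rotation from the corrected yaw to the target yaw,
    # keeping the same sign convention as the turn() calls in turn2.
    delta = normalize_angle(target_yaw - correct_yaw)
    direction = 1 if delta >= 0 else -1
    return turn(degrees(abs(delta)), direction)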
Example #2
    def __init__(self):
        # initializing variables
        self.distance_x = 0
        self.distance_y = 0
        self.center_H = 0
        self.alpha = 0
        self.beta = 0
        self.worldTranslation = [0, 0, 0]
        self.distance_x_array = []
        self.distance_y_array = []
        self.alpha_array = []
        self.beta_array = []
        self.markerFound = False

        # marker coordinates
        self.coordenadas = []
        self.coordenadas.append(
            np.array([[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]]))
        self.coordenadas.append(
            np.array([[0, 0, 18.5], [0, 0, 0], [0, 18.5, 0], [0, 18.5, 18.5]]))
        self.coordenadas.append(
            np.array([[0, 50, 18.5], [0, 50, 0], [0, 68.5, 0], [0, 68.5,
                                                                18.5]]))
        self.coordenadas.append(
            np.array([[0, 100, 18.5], [0, 100, 0], [0, 118.5, 0],
                      [0, 118.5, 18.5]]))
        self.coordenadas.append(
            np.array([[50, 150, 18.5], [50, 150, 0], [68.5, 150, 0],
                      [68.5, 150, 18.5]]))
        self.coordenadas.append(
            np.array([[100, 150, 18.5], [100, 150, 0], [118.5, 150, 0],
                      [118.5, 150, 18.5]]))
        self.coordenadas.append(
            np.array([[150, 150, 18.5], [150, 150, 0], [168.5, 150, 0],
                      [168.5, 150, 18.5]]))
        self.coordenadas.append(
            np.array([[200, 118.5, 18.5], [200, 118.5, 0], [200, 100, 0],
                      [200, 100, 18.5]]))
        self.coordenadas.append(
            np.array([[200, 68.5, 18.5], [200, 68.5, 0], [200, 50, 0],
                      [200, 50, 18.5]]))
        self.coordenadas.append(
            np.array([[200, 18.5, 18.5], [200, 18.5, 0], [200, 0, 0],
                      [200, 0, 18.5]]))
        self.coordenadas.append(
            np.array([[0, 0, 18.5], [0, 0, 0], [0, 18.5, 0], [0, 18.5, 18.5]]))

        #self.template = cv2.imread('data/7x7-10.jpg', 0)
        #self.finder = Finder(self.template)
        self.arucoFinder = ArucoFinder()
        self.orientation = Orientation()

        # camera parameters, obtained from camera calibration
        with np.load('camera_array_fisheye.npz') as X:
            self.mtx, self.dist, _, _ = [
                X[i] for i in ('mtx', 'dist', 'rvecs', 'tvecs')
            ]
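
The loaded mtx and dist are the fisheye intrinsic matrix and distortion coefficients from the calibration file. A typical use is a PnP solve of detected marker corners against the 3-D corner sets stored in self.coordenadas; the helper below is a hypothetical sketch of that step, not a method of the original class:

import cv2
import numpy as np

def estimate_marker_pose(object_corners, image_corners, mtx, dist):
    # object_corners: 4x3 world coordinates of one marker (one entry of coordenadas)
    # image_corners:  4x2 pixel coordinates of the same corners in the current frame
    ok, rvec, tvec = cv2.solvePnP(np.float32(object_corners),
                                  np.float32(image_corners), mtx, dist)
    if not ok:
        return None
    R, _ = cv2.Rodrigues(rvec)      # rotation vector -> 3x3 rotation matrix
    # Camera position expressed in the marker/world frame.
    return (-R.T.dot(tvec)).ravel()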
Example #3
    def test_matrix_init(self):
        random_m = ttf.random_rotation_matrix()
        ornt = Orientation(random_m[:3, :3])
        euler = ornt.euler
        quat = ornt.quaternion
        matrix = ornt.matrix3

        self.assertTrue(np.allclose(euler,
                                    ttf.euler_from_matrix(random_m)))
        self.assertTrue(np.allclose(quat,
                                    ttf.quaternion_from_matrix(random_m)))
        self.assertTrue(np.allclose(matrix, random_m[:3, :3]))
Example #4
    def test_euler_init(self):
        random_e = ttf.euler_from_quaternion(ttf.random_quaternion())
        ornt = Orientation(random_e)
        euler = ornt.euler
        quat = ornt.quaternion
        matrix = ornt.matrix3

        self.assertTrue(np.allclose(euler, random_e))
        self.assertTrue(np.allclose(
            matrix, ttf.euler_matrix(*random_e)[:3, :3]))
        self.assertTrue(np.allclose(
            quat, ttf.quaternion_from_euler(*random_e)))
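
    # The two tests above cover matrix and Euler initialization. A quaternion
    # check in the same style might look like the sketch below; it assumes
    # Orientation also accepts a 4-element quaternion, which the examples here
    # do not show.
    def test_quaternion_init(self):
        random_q = ttf.random_quaternion()
        ornt = Orientation(random_q)   # assumption: a 4-vector is treated as a quaternion
        self.assertTrue(np.allclose(ornt.quaternion, random_q))
        self.assertTrue(np.allclose(ornt.matrix3,
                                    ttf.quaternion_matrix(random_q)[:3, :3]))
        self.assertTrue(np.allclose(ornt.euler,
                                    ttf.euler_from_quaternion(random_q)))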
Example #5
def main():
    global m
    global ot
    global vp
    global ss
    global count_down
    v = None
    count_start = None

    broad_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    broad_sock.bind(('', PORT_TO_BROADCAST))
    data = None
    addr = None
    print 'Wait for broadcast message.'
    while True:
        data, addr = broad_sock.recvfrom(4096)
        if Check_Identity(data) is True:
            break
    broad_sock.close()
    host = addr[0]
    print 'Get broadcast message from host:', host

    ss = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ss.connect((host, PORT_FROM_VIDEO))

    sr = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sr.bind(('', PORT_TO_VIDEO))
    sr.listen(1)
    md, addr = sr.accept()

    print 'Start to instantiate classes.'
    while True:
        try:
            m = Motor() if not m else m
            ot = Orientation() if not ot else ot
            if not count_start:
                count_start = time.time()
            v = VFFmpeg(host) if not v else v
            if m and ot and v:
                #            if m and ot:
                break
        except Exception, e:
            print '[FATAL ERROR]', e
            exit(-1)
Example #6
    def __init__(self):
        self.distance_x = 0
        self.distance_y = 0
        self.alpha = 0
        self.beta = 0
        self.center = 0
        self.markerFound = False
        self.coordenadas = np.array([[0, 30, 0], [0, 0, 0], [21, 0, 0],
                                     [21, 30, 0]])
        self.worldTranslation = [0, 0, 0]

        self.template = cv2.imread('data/board.jpg', 0)
        self.finder = ArucoFinder()
        self.orientation = Orientation()

        with np.load('camera_array_fisheye.npz') as X:
            self.mtx, self.dist, _, _ = [
                X[i] for i in ('mtx', 'dist', 'rvecs', 'tvecs')
            ]
Example #7
    def update(self, car, time):
        ori = Orientation(car.physics.rotation)
        pos = Vec3(car.physics.location) + Vec3(z=12) - 30 * ori.forward
        if len(self.points) == 0:
            # Initial point
            point = TrailPoint(pos, time)
            self.points.append(point)
        else:
            # Add points
            prev = self.points[-1]
            diff = pos - prev.pos
            if diff.longer_than(self.segment_size):
                point = TrailPoint(pos, time)
                self.points.append(point)

        # Remove points
        earliest = self.points[0]
        if earliest.time + self.duration < time:
            self.points = self.points[1:]
        self.points = self.points[-MAX_TRAIL_LENGTH:]
Example #8
    def generate_fractures(self, min_distance, min_radius, max_radius):
        fractures = []

        for i in range(self.frac_type.n_fractures):
            x = uniform(2 * min_distance, 1 - 2 * min_distance)
            y = uniform(2 * min_distance, 1 - 2 * min_distance)
            z = uniform(2 * min_distance, 1 - 2 * min_distance)

            tpl = TPL(self.frac_type.kappa, self.frac_type.r_min,
                      self.frac_type.r_max, self.frac_type.r_0)
            r = tpl.rnd_number()

            orient = Orientation(self.frac_type.trend, self.frac_type.plunge,
                                 self.frac_type.k)
            axis, angle = orient.compute_axis_angle()

            fd = FractureData(x, y, z, r, axis[0], axis[1], axis[2], angle,
                              i * 100)

            fractures.append(fd)

        return fractures
Example #9
#!/usr/bin/python
# coding:UTF-8
# Author: David
# Email: [email protected]
# Created: 2016-04-08 01:27
# Last modified: 2016-04-08 06:05
# Filename: find_relative_ypr.py
# Description:

from orientation import Orientation
import time

bias_set = False
ot = Orientation()

start = time.time()
try:
    while True:
        now = time.time()
        print '[%5.2f]' % (now - start),
        if now - start > 20 and not bias_set:
            ot.bias[0] = ot.get_base_ypr()[0]
            ot.bias[1] = ot.get_ypr()[0]
            bias_set = True
        print ot.get_base_ypr(), ot.get_ypr(), ot.get_orientation(False)
except KeyboardInterrupt:
    pass
finally:
    ot.exit()
Example #10
import threading
from ultrasonic import Ultrasonic
from orientation import Orientation
from driver import Driver
from visual import Visual
from stream import Stream
import logging

ROBOT_NAME = 'Robot'
logging.basicConfig(level=logging.INFO, format='%(levelname)s | %(threadName)s | %(message)s')

if __name__ == "__main__":
    ultrasonic = Ultrasonic()
    orientation = Orientation()
    driver = Driver()
    visual = Visual()
    streaming = Stream()
    ultrasonic_thread = threading.Thread(name='Ultrasonic', target=ultrasonic.start, daemon=False)
    orientation_thread = threading.Thread(name='Orientation', target=orientation.start, daemon=False)
    driver_thread = threading.Thread(name='Driver', target=driver.start, daemon=False)
    streaming_thread = threading.Thread(name='Streaming', target=streaming.start, daemon=False)
    visual_thread = threading.Thread(name='Visual', target=visual.start, daemon=False)
    ultrasonic_thread.start()
    orientation_thread.start()
    streaming_thread.start()
    visual_thread.start()
    driver_thread.start()
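    # If the main thread should block until the workers finish, joining them is
    # a small addition (not in the original script); non-daemon threads already
    # keep the process alive either way.
    for worker in (ultrasonic_thread, orientation_thread, driver_thread,
                   streaming_thread, visual_thread):
        worker.join()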
Example #11
    print("Exiting")
    exit()


# Attach the Ctrl+C signal interrupt
signal.signal(signal.SIGINT, ctrlC)

# Setup encoder
encoder = Encoder()
encoder.initEncoders()

# Setup motor control
motorControl = MotorControl(encoder)

orientation = Orientation(encoder, motorControl)

tof = TOF()

pid = PID(0.5, -6, 6)

try:
    with open('calibratedSpeeds.json') as json_file:
        motorControl.speedMap = json.load(json_file)

except IOError:
    input("You must calibrate speeds first. Press enter to continue")
    motorControl.calibrateSpeeds()

while True:
    print("\n(1) Calibrate speeds")
Example #12
    def do_collisions(self, script, packet):
        ball_pos = Vec3(packet.game_ball.physics.location)
        for i in range(len(self.points) - 2):
            seg_start = self.points[i].pos
            seg_end = self.points[i + 1].pos
            seg = seg_end - seg_start

            # Ball
            if not IGNORE_BALL_COLLISION:
                ball_pos_from_seg_pov = ball_pos - seg_start
                t = (ball_pos_from_seg_pov.dot(seg) / seg.dot(seg))
                ball_proj_seg = seg * t
                seg_ball = (ball_pos_from_seg_pov - ball_proj_seg)
                if 0 <= t <= 1 and not seg_ball.longer_than(100):
                    # Collision
                    seg_ball_u = seg_ball.unit()
                    vel = Vec3(packet.game_ball.physics.velocity)
                    refl_vel = vel - 1.9 * vel.dot(seg_ball_u) * seg_ball_u
                    ball_pos_moved = seg_start + ball_proj_seg + seg_ball_u * 101
                    script.set_game_state(
                        GameState(ball=BallState(physics=Physics(
                            location=ball_pos_moved.to_desired_vec(),
                            velocity=refl_vel.to_desired_vec()))))
                    script.particle_burst(
                        packet.game_info.seconds_elapsed,
                        seg_start + ball_proj_seg + seg_ball_u * 10,
                        seg_ball_u, int(1 + abs(vel.dot(seg_ball_u) / 300)**3),
                        self.team)
                    hit_strength = abs(seg_ball_u.dot(vel))
                    script.sounds.ball_hit(hit_strength)

            # Cars
            for car_index in range(packet.num_cars):
                car = packet.game_cars[car_index]
                if car.is_demolished \
                        or (car.is_bot and IGNORE_BOT_COLLISION) \
                        or (not car.is_bot and IGNORE_HUMAN_COLLISION):
                    continue
                car_ori = Orientation(car.physics.rotation)
                car_pos = Vec3(car.physics.location)
                car_pos_from_seg_pov = car_pos - seg_start
                t = (car_pos_from_seg_pov.dot(seg) / seg.dot(seg))
                car_proj_seg = seg * t
                seg_car = (car_pos_from_seg_pov - car_proj_seg)
                # seg_car_local = relative_location(Vec3(), car_ori, seg_car)
                if 0 <= t <= 1 and not seg_car.longer_than(85):
                    # Collision
                    seg_car_u = seg_car.unit()
                    vel = Vec3(car.physics.velocity)
                    refl_vel = vel - 1.5 * vel.dot(seg_car_u) * seg_car_u
                    car_pos_moved = seg_start + car_proj_seg + seg_car_u * 86
                    script.set_game_state(
                        GameState(
                            cars={
                                car_index:
                                CarState(physics=Physics(
                                    location=car_pos_moved.to_desired_vec(),
                                    velocity=refl_vel.to_desired_vec()))
                            }))
                    script.particle_burst(
                        packet.game_info.seconds_elapsed,
                        seg_start + car_proj_seg + seg_car_u * 13, seg_car_u,
                        int(1 + abs(vel.dot(seg_car_u) / 300)**3), self.team)
                    hit_strength = abs(seg_car_u.dot(vel))
                    script.sounds.car_hit(hit_strength)
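
Both reflections above are instances of the standard formula v' = v - (1 + e)(v·n)n with unit normal n and restitution e (0.9 for the ball, 0.5 for cars). A standalone numeric check of that formula, using plain numpy in place of the Vec3 class:

import numpy as np

def reflect(vel, normal, restitution):
    # normal must be a unit vector pointing from the trail segment towards the object
    return vel - (1.0 + restitution) * np.dot(vel, normal) * normal

vel = np.array([500.0, -200.0, 0.0])
n = np.array([0.0, 1.0, 0.0])
print(reflect(vel, n, 0.9))   # [500. 180. 0.] -- the normal component flips and shrinks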