Example #1
 def __init__(self, robot_model: RobotModel, address: str = "127.0.0.1"):
     """
     Create an HTTP robot controller.
     :param robot_model: model of the robot (see the RobotModel enum for reference).
     :param address: controller address, either a URL or an IP address. Defaults to "127.0.0.1" (localhost).
     """
     self._movement_url = f"http://{address}:{robot_model.get_movement_controller_port()}"
     try:
         self._gripper_url = f"http://{address}:{robot_model.get_gripper_controller_port()}"
     except ValueError:
         # This robot model has no gripper
         self._gripper_url = None
     self._camera_url = f"http://{address}:{robot_model.get_camera_port()}"
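A minimal sketch of how this constructor might be exercised, assuming a hypothetical RobotModel enum whose port getters raise ValueError when the hardware is absent; the enum value, port numbers, and controller class name below are all invented for illustration:

from enum import Enum


class RobotModel(Enum):
    GENERIC_ARM = "generic_arm"  # hypothetical stand-in value

    def get_movement_controller_port(self):
        return 8000  # assumed movement controller port

    def get_gripper_controller_port(self):
        # A model without a gripper signals it by raising, which the
        # constructor's except ValueError branch handles
        raise ValueError("no gripper on this model")

    def get_camera_port(self):
        return 8080  # assumed camera port


controller = HttpRobotController(RobotModel.GENERIC_ARM)  # class name assumed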
Example #2
 def _start_streaming(self):
     # Start the streaming task for the model selected in the combobox
     value = self.combobox.get()
     robot_model = RobotModel(value)
     # Stop any pipeline that is already running before replacing it
     if self.pipeline:
         self.pipeline.stop()
     self.pipeline = ImagePipeline(address=self.ip_field.get(),
                                   robot_model=robot_model,
                                   adq_rate=Application.fps)
     self.pipeline.start()
     # Schedule the first frame load on the Tk event loop
     self.task_id = self.image_canvas.after(ms=0, func=self._load_image)
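The _load_image callback is not shown here; a minimal sketch of what it might look like, assuming a hypothetical get_frame() method on ImagePipeline that returns a Tk-compatible image, or None when no frame is ready:

 def _load_image(self):
     # Poll the pipeline for the latest frame (get_frame is an assumed
     # ImagePipeline method) and draw it on the canvas
     frame = self.pipeline.get_frame()
     if frame is not None:
         self._photo = frame  # keep a reference so Tk does not GC it
         self.image_canvas.create_image(0, 0, image=self._photo, anchor="nw")
     # Reschedule at the configured frame rate
     delay_ms = int(1000 / Application.fps)
     self.task_id = self.image_canvas.after(ms=delay_ms, func=self._load_image)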
Example #3
def simulated_control(control, plot):
    from RobotModel import RobotModel
    # State is a project-local class (import not shown)
    model = RobotModel(State(-10, -2, 0, 0))  # initial robot state
    dt = 1.0  # simulation step in seconds
    t = 0.0
    state = model.get_state()
    plot.update(state)
    # Step the model until the controller signals completion
    while not control.end(t, state):
        t += dt
        state = model.update(dt)
        plot.update(state)
        # Apply the controller's new commands, if it produced any
        speed, omega = control.update(t, state)
        if speed is not None and omega is not None:
            model.set_speed_omega(speed, omega)
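The control object's interface can be inferred from the loop: end(t, state) decides when the simulation stops, and update(t, state) returns a (speed, omega) pair, or a pair of Nones to leave the current commands unchanged. A minimal, hypothetical controller compatible with this loop:

class ConstantSpeedControl:
    # Drive straight at a fixed speed and stop after `duration` seconds
    def __init__(self, speed=1.0, omega=0.0, duration=20.0):
        self.speed, self.omega, self.duration = speed, omega, duration

    def end(self, t, state):
        return t >= self.duration

    def update(self, t, state):
        return self.speed, self.omega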
Example #4
from math import atan, cos, sin, sqrt
from math import pi as PI

import pyglet

# RobotModel and PsuedoRayCast are project-local classes (imports not shown)


class Visualizer:
    def __init__(self, boxes, error):
        self.boxes = boxes
        self.scale = 10  # scales the generated field to create more detail
        self.robit = RobotModel("type", error, 13 * self.scale,
                                15 * self.scale)
        self.window = pyglet.window.Window(width=96 * self.scale,
                                           height=72 * self.scale)
        self.PsuedoRayCast = PsuedoRayCast(self.window, self.boxes, self.scale)
        self.Θ1 = atan(self.robit.length / self.robit.height)
        self.d = sqrt((self.robit.length / 2)**2 + (self.robit.height / 2)**2)

        # sensor geometry: (distance from the robot's center to the mount
        # point, angle of the mount point relative to the heading, ray
        # direction offset from the heading, label)
        self.sensors = [
            (sqrt((self.robit.length / 2)**2 + (1 * self.robit.height / 4)**2),
             atan((2 * self.robit.length) / (self.robit.height)), PI / 2,
             "Right Top"),  # right top
            (sqrt((self.robit.length / 2)**2 + (1 * self.robit.height / 4)**2),
             -atan((2 * self.robit.length) / (self.robit.height)), -PI / 2,
             "Left Top"),  # Left top
            (sqrt((self.robit.length / 2)**2 + (1 * self.robit.height / 4)**2),
             PI + atan((2 * self.robit.length) / (self.robit.height)), -PI / 2,
             "Left bottom"),  # Left bottom
            (sqrt((self.robit.length / 2)**2 + (1 * self.robit.height / 4)**2),
             -PI + -atan((2 * self.robit.length) / (self.robit.height)),
             PI / 2, "Right Bottom"),  # right bottom
            (sqrt((self.robit.length / 3)**2 + (1 * self.robit.height / 2)**2),
             atan((self.robit.length / 3) / (self.robit.height / 2)), 0,
             "Front Right"),
            (sqrt((self.robit.length / 3)**2 + (1 * self.robit.height / 2)**2),
             -atan((self.robit.length / 3) / (self.robit.height / 2)), 0,
             "Front Left"),
        ]
        self.setXYAbsolute(0, 462)
        self.setΘAbsolute(PI / 2)

    """
  (g, h)     |Θ    (a,b)
    _________|________
    |        |      /|
    |        |     / |                  
    |      r ---> /  |
    |        |   /   |
    |        |Θ1/    |
    |        | /     |
    |        |/      |
    |      (x, y)    |
    |                |
    |                |
    |                |
    |                |
    |________________|
  (e, f)            (c, d)
    """

    def drawBoxAtAngle(self, x, y, r, Θ, Θ1):
        # simply draws a box at an angle according to the diagram above
        global main_batch
        # use all four corners to generate the box
        (a, b) = self.getPointOnBox(x, y, r, Θ, Θ1, 1)
        (c, d) = self.getPointOnBox(x, y, r, Θ, Θ1, 2)
        (e, f) = self.getPointOnBox(x, y, r, Θ, Θ1, 3)
        (g, h) = self.getPointOnBox(x, y, r, Θ, Θ1, 4)
        main_batch.add(
            8,
            pyglet.gl.GL_LINES,
            None,
            ("v2f", (
                e, f, c, d,  # (e, f) -> (c, d)
                e, f, g, h,  # (e, f) -> (g, h)
                g, h, a, b,  # (g, h) -> (a, b)
                c, d, a, b,  # (c, d) -> (a, b)
            )))

    """
    Relative x and y: where the box is centered
    d: distance to one corner
    Θ: the orientation of the box
    Θ1: the angle the crossline makes with the square's relative perpendicular y axis see above diagram
    quadrent: The quadrent
    """

    def getPointOnBox(self, x, y, d, Θ, Θ1, quadrant):
        # gets one corner of the box, per the docstring above
        if quadrant == 1:
            return ((d * sin(Θ + Θ1) + x), (d * cos(Θ + Θ1) + y))
        if quadrant == 2:
            return ((d * sin(Θ - Θ1 + PI) + x), (d * cos(Θ - Θ1 + PI) + y))
        if quadrant == 3:
            return ((d * sin(Θ + Θ1 + PI) + x), (d * cos(Θ + Θ1 + PI) + y))
        if quadrant == 4:
            return ((d * sin(Θ - Θ1) + x), (d * cos(Θ - Θ1) + y))

    def drawSquare(self, a, b, c, d):
        # draws an axis-aligned rectangle with opposite corners (a, b) and (c, d)
        global main_batch
        main_batch.add(8, pyglet.gl.GL_LINES, None, (
            'v2f',
            (a, b, c, b,  # (a, b) -> (c, b)
             a, b, a, d,  # (a, b) -> (a, d)
             c, b, c, d,  # (c, b) -> (c, d)
             a, d, c, d),  # (a, d) -> (c, d)
        ))

    def drawAllBoxes(self):
        if (not self.boxes):
            return
        print(len(self.boxes))
        for box in self.boxes:
            self.drawSquare(box[0][0] * self.scale, box[0][1] * self.scale,
                            box[1][0] * self.scale, box[1][1] * self.scale)

    def redraw(self):
        print("redraw")
        self.window.clear()
        global main_batch
        main_batch = pyglet.graphics.Batch()
        self.drawAllBoxes()
        self.drawBoxAtAngle(self.robit.x, self.robit.y, self.d,
                            self.robit.theta, self.Θ1)
        if (__name__ == '__main__'):
            self.runAllSensors()
        main_batch.draw()

    def getSensorData(self):
        data = []
        for sensor in self.sensors:
            (a, b) = self.getSensorPoint(sensor)
            distance = self.PsuedoRayCast.rayCast(a, b,
                                                  self.robit.theta + sensor[2])
            data.append((distance / self.scale, sensor[3]))
        return data

    def runAllSensors(self):
        for sensor in self.sensors:
            (a, b) = self.getSensorPoint(sensor)
            distance = self.PsuedoRayCast.rayCast(a, b,
                                                  self.robit.theta + sensor[2])
            if (distance):
                print(sensor[3] + " ", distance / self.scale, "in")
            else:
                print(sensor[3] + " ?in")

    def getSensorPoint(self, sensor):
        # converts a sensor's polar mount position to field coordinates
        return (self.robit.x + sensor[0] * sin(sensor[1] + self.robit.theta),
                self.robit.y + sensor[0] * cos(sensor[1] + self.robit.theta))

    def setXYAbsolute(self, x, y):
        self.robit.x = x
        self.robit.y = y
        self.redraw()

    def setΘAbsolute(self, Θ):
        self.robit.theta = Θ
        self.redraw()

    def setXYRelative(self, x, y):
        self.robit.x = self.robit.x + x
        self.robit.y = self.robit.y + y
        self.redraw()

    def setΘRelative(self, Θ):
        self.robit.theta = self.robit.theta + Θ
        self.redraw()

    def moveForward(self, x):
        self.robit.moveDistance(x)
        self.redraw()

    def radToDeg(self, Θ):
        return (180 / PI) * Θ

    def degToRad(self, Θ):
        return (PI / 180) * Θ

    def getFrontSensorPoint(self):
        # point half the robot's height ahead of center, along the heading
        return (sin(self.robit.theta) * (self.robit.height / 2) + self.robit.x,
                cos(self.robit.theta) * (self.robit.height / 2) + self.robit.y)

    def getFrontSensorData(self):
        return self.getFrontSensorPoint()
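As a quick sanity check of getPointOnBox's corner formula (the dimensions below are invented, matching 13 x 15 at scale 10): with Θ = 0 the box is axis-aligned, so quadrant 1 should land at (+length/2, +height/2) relative to the center:

from math import atan, cos, sin, sqrt

length, height = 130, 150
Θ1 = atan(length / height)
d = sqrt((length / 2)**2 + (height / 2)**2)
print(d * sin(0 + Θ1), d * cos(0 + Θ1))  # ≈ 65.0 75.0, i.e. (length/2, height/2)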
Example #5
import sys

import pygame
from pygame.locals import *

from RobotModel import RobotModel
from RobotView import RobotView
from RadarView import RadarView
from RobotController import RobotController

pygame.init()
fpsClock = pygame.time.Clock()
surface = pygame.display.set_mode((640, 480))

model = RobotModel((640 - 32) / 2, (480 - 32) / 2, .2)
view = RobotView('robotframes.png')
radarView = RadarView('radar.png', 'blip.png')
controller = RobotController()

while True:
    # Handle window events; exit cleanly on close
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            sys.exit()

    # Advance the model by the elapsed frame time in seconds
    controller.update(fpsClock.get_time() / 1000.0, model)

    surface.fill((0, 0, 0))
    radarView.draw(surface, model)
    view.draw(surface, model)

    pygame.display.update()
    fpsClock.tick(30)
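RobotController is not shown in this example; a hypothetical sketch compatible with the update(dt, model) call above, where scaling the movement by dt keeps the robot's speed frame-rate independent (the model.x / model.y attributes and the speed constant are assumptions):

import pygame


class RobotController:
    SPEED = 120.0  # assumed speed in pixels per second

    def update(self, dt, model):
        # Poll the arrow keys and move the model by speed * elapsed seconds
        keys = pygame.key.get_pressed()
        if keys[pygame.K_LEFT]:
            model.x -= self.SPEED * dt
        if keys[pygame.K_RIGHT]:
            model.x += self.SPEED * dt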
Example #6
def get_urdf(model=None):
    # Prompt for a file name if none was supplied
    if model is None:
        model = input('Enter URDF file name in data/urdf/: ')
    return RobotModel(model)
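Hypothetical usage (the file name is a placeholder):

robot = get_urdf('my_robot.urdf')  # explicit file name
robot = get_urdf()  # prompts for a file name interactively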
Example #7
import os

import numpy as np
import pandas as pd
import torch
import torch.nn.functional as F
from torch.utils.data import Dataset

# RobotModel is a project-local class (import not shown)


class KNData(Dataset):
    """
    Dataloader class for train/test data import. Extends the torch Dataset class.
    :var self.input_: Loaded input torch tensor for joint coordinates
    :type self.input_: torch tensor - dims = [(batch) x (joint coordinates) x 3]
    :var self.output: Loaded output torch tensor for validation position
    :type self.output: torch tensor - dims = [(batch) x (joint positions)]
    """
    def __init__(self, dtype, robot_path, joint_coord, joint_states):
        """
        Default constructor. Constructs batched, loadable train and test tensors.
        :param dtype: Specifies if real or synthetic data is to be used
        :type dtype: string
        :param robot_path: file path to robot URDF
        :type robot_path: string
        :param joint_coord: file path to joint coordinates
        :type joint_coord: string
        :param joint_states: file path to joint angles
        :type joint_states: string
        """
        self.robot = RobotModel(robot_path)
        self.limits = self.robot.phys_limits()
        self.labels = self.limits.index
        if dtype == 'synth':
            self._files = self._parse_synth(joint_coord)
            self._synth_input(joint_coord, self._files)
            self._synth_output(joint_states, self._files)
        else:
            input_ = np.load(joint_coord)
            self.input_ = F.normalize(
                torch.tensor(input_.reshape((input_.shape[0], -1)),
                             dtype=torch.float32))
            self.output = F.normalize(
                torch.tensor(self._parse_json(joint_states),
                             dtype=torch.float32))

    @staticmethod
    def _parse_json(joint_states):
        """
        Reads JSON files to extract joint angles from the real-data training set. Merges all JSONs at the
        location into a single array.
        :param joint_states: location of the JSON directory
        :type joint_states: string
        :return: compiled array of joint angles
        :rtype: numpy array - dims = [(batch) x (angles - T)]
        """
        json_f = [J for J in os.listdir(joint_states) if J.endswith('.json')]
        dfs = []
        for j in json_f:
            js = pd.read_json(f'{joint_states}/{j}')
            dfs.append(
                np.array(
                    [A['angle'] for A in js[js.columns[0]][0]['joint_angles']],
                    dtype=float))
        return np.array(dfs)[:, :5]

    @staticmethod
    def _parse_synth(path):
        """
        Constructs a list of all synthetic data files listed in the training directory's paths file.
        :param path: Location of paths.txt
        :type path: string
        :return: all file locations serialized in paths.txt
        :rtype: list of strings
        """
        files = []
        with open(f'{path}paths.txt') as f:
            for line in f:
                files.append(line.rstrip('\n'))
        return files

    def _synth_input(self, path, files):
        """
        Handles reading of synthetic input data. Normalizes the input tensor to [-1, 1].
        :param path: file path location of the synthetic data
        :type path: string
        :param files: all serialized files in path
        :type files: list of string
        """
        features = np.empty((0, 15))
        for file in files:
            train_set = np.load(f'{path}coords/{file}.npy')
            train_set = train_set.reshape((train_set.shape[0], -1))
            features = np.concatenate((features, train_set), axis=0)
        self.input_ = F.normalize(torch.tensor(features, dtype=torch.float32))

    def _synth_output(self, path, files):
        """
        Handles reading of synthetic output data. Normalizes the angles to fit in [-1, 1].
        :param path: file location of the output validation angles
        :type path: string
        :param files: serialized list of all files in path
        :type files: list of string
        """
        features = np.empty((0, 6))
        for file in files:
            train_set = np.load(f'{path}angles/{file}.npy')
            features = np.concatenate((features, train_set), axis=0)
        self.output = F.normalize(
            torch.tensor(features[:, :5], dtype=torch.float32))

    def __len__(self):
        """
        Mandatory override function in superclass. Returns length of loaded dataset.
        :return: entry size of dataset
        :rtype: long
        """
        return len(self.output)

    def __getitem__(self, index):
        """
        Mandatory override function in superclass. Retrieves dataset entry from tensors.
        :param index: batch location identifier
        :type index: long
        :return: input, output tensor for given entry
        :rtype: tuple of two torch.tensors - dims = ([1 x (joints) x 3], [1 x (joints - T)])
        """
        return self.input_[index], self.output[index]
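A hypothetical usage sketch: because KNData extends torch's Dataset, it can be wrapped in a DataLoader for batched training (the constructor paths below are placeholders):

from torch.utils.data import DataLoader

dataset = KNData('synth', 'data/urdf/robot.urdf', 'data/synth/', 'data/synth/')
loader = DataLoader(dataset, batch_size=32, shuffle=True)
for coords, angles in loader:
    pass  # training step goes here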