    def __init__(self):
        # Get the vehicle name, which comes in as HOSTNAME
        self.vehicle = os.getenv('HOSTNAME')

        # Get the hostname to connect to the server
        host = os.getenv("DUCKIETOWN_SERVER", "localhost")
        
        # Create ZMQ connection with Server
        self.sim = RemoteRobot(host, silent=False)
        
        # Subscribes to the output of the lane_controller_node and IK node
        self.action_sub = rospy.Subscriber('/{}/lane_controller_node/car_cmd'.format(
            self.vehicle), Twist2DStamped, self._action_cb)
        self.ik_action_sub = rospy.Subscriber('/{}/wheels_driver_node/wheels_cmd'.format(
            self.vehicle), WheelsCmdStamped, self._ik_action_cb)
        
        # Placeholder for the action
        self.action = np.array([0, 0])

        # Publishes onto the corrected image topic
        # since the image coming out of the simulator is currently rectified
        self.cam_pub = rospy.Publisher('/{}/corrected_image/compressed'.format(
            self.vehicle), CompressedImage, queue_size=10)
        
        # Publisher for camera info - needed for the ground_projection
        self.cam_info_pub = rospy.Publisher('/{}/camera_node/camera_info'.format(
            self.vehicle), CameraInfo, queue_size=1)

        # Initializes the node
        rospy.init_node('ROSAgent')

        # 10Hz ROS Cycle - TODO: What is this number?
        self.r = rospy.Rate(10)
    def __init__(
            self,
            host,
            gain=-1.0,
            trim=0.0,
            radius=0.0318,
            k=27.0,
            limit=1.0
    ):
        RemoteRobot.__init__(self, host=host, silent=True)

        self.action_space = spaces.Box(
            low=np.array([-1, -1]),
            high=np.array([1, 1]),
            dtype=np.float32
        )

        # Should be adjusted so that the effective speed of the robot is 0.2 m/s
        self.gain = gain

        # Directional trim adjustment
        self.trim = trim

        # Wheel radius
        self.radius = radius

        # Motor constant
        self.k = k

        # Wheel velocity limit
        self.limit = limit
    def __init__(self):
        print("Hello!")
        host = os.getenv("DUCKIETOWN_SERVER", "localhost")
        # Create ZMQ connection
        self.sim = RemoteRobot(host, silent=False)
        self.rosmaster = os.getenv('HOSTNAME')
        self.action_sub = rospy.Subscriber(
            '/{}/lane_controller_node/car_cmd'.format(self.rosmaster),
            Twist2DStamped, self._action_cb)

        self.cam_pub = rospy.Publisher('/{}/corrected_image/compressed'.format(
            self.rosmaster),
                                       CompressedImage,
                                       queue_size=10)
        self.action = np.array([0, 0])

        self.cam_info_pub = rospy.Publisher(
            '/{}/camera_node/camera_info'.format(self.rosmaster),
            CameraInfo,
            queue_size=1)

        rospy.init_node('RemoteRobotRos')
        rospy.set_param('{}/verbose'.format(os.getenv('HOSTNAME')), "true")

        self.r = rospy.Rate(10)
Example #4
    def __init__(self, silent=False):
        # Produce the occasional print
        self.silent = silent

        # In the docker container this will be set to point to the
        # hostname of the `gym-duckietown-server` container, but in
        # the local test environment this will just map to localhost
        host = os.getenv("DUCKIETOWN_SERVER", "localhost")

        self.action_sub = rospy.Subscriber('/cmd', String, self._action_cb)
        self.reset_sub = rospy.Subscriber('/2', Bool, self._reset_cb)

        self.cam_pub = rospy.Publisher('/img/compressed',
                                       CompressedImage,
                                       queue_size=10)
        self.action_debug_pub = rospy.Publisher('/1', String, queue_size=10)
        self.action = [0., 0.]

        # Create ZMQ connection
        self.sim = RemoteRobot(host, silent=self.silent)

        # Tuple of velocity and steering angle, each in the range
        # [-1, 1] for full speed ahead, full speed backward, full
        # left turn, and full right turn respectively
        self.action_space = spaces.Box(low=-1,
                                       high=1,
                                       shape=(2, ),
                                       dtype=np.float32)

        # We observe an RGB image with pixels in [0, 255]
        # Note: the pixels are in uint8 format because this is more compact
        # than float32 if sent over the network or stored in a dataset
        self.observation_space = spaces.Box(low=0,
                                            high=255,
                                            shape=(CAMERA_HEIGHT, CAMERA_WIDTH,
                                                   3),
                                            dtype=np.uint8)

        # Create a black image buffer for the last observation
        self.last_obs = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3), np.uint8)

        self.reward_range = (-1000, 1000)

        self._windows_exists = False

        # Initialize the state
        self.seed()
        # FIXME: I'm quite sure this has to be called by the agent by gym convention
        self.reset()

        rospy.init_node('RemoteRobotRos')
    def step(self, action, with_observation=True):
        vel, angle = action

        # Distance between the wheels
        baseline = 0.13

        # assuming same motor constants k for both motors
        k_r = self.k
        k_l = self.k

        # adjusting k by gain and trim
        k_r_inv = (self.gain + self.trim) / k_r
        k_l_inv = (self.gain - self.trim) / k_l

        omega_r = (vel + 0.5 * angle * baseline) / self.radius
        omega_l = (vel - 0.5 * angle * baseline) / self.radius

        # conversion from motor rotation rate to duty cycle
        u_r = omega_r * k_r_inv
        u_l = omega_l * k_l_inv

        # limiting output to limit, which is 1.0 for the duckiebot
        u_r_limited = max(min(u_r, self.limit), -self.limit)
        u_l_limited = max(min(u_l, self.limit), -self.limit)

        vels = np.array([u_l_limited, u_r_limited])

        return RemoteRobot.step(self, vels)
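The step() method above implements a differential-drive inverse kinematics conversion from a (velocity, steering angle) command into left/right wheel duty cycles. Below is a minimal standalone sketch of the same conversion; the function name and the parameter values used here (e.g. gain=1.0 rather than the constructor's -1.0 default) are illustrative only, not part of the duckietown API.

import numpy as np

def car_cmd_to_wheel_duty(vel, angle, gain=1.0, trim=0.0,
                          radius=0.0318, k=27.0, limit=1.0, baseline=0.13):
    # Per-side motor constants adjusted by gain and trim
    k_r_inv = (gain + trim) / k
    k_l_inv = (gain - trim) / k

    # Wheel angular velocities from forward speed and turning rate
    omega_r = (vel + 0.5 * angle * baseline) / radius
    omega_l = (vel - 0.5 * angle * baseline) / radius

    # Convert rotation rates to duty cycles and clip to the robot's limit
    u_r = np.clip(omega_r * k_r_inv, -limit, limit)
    u_l = np.clip(omega_l * k_l_inv, -limit, limit)
    return np.array([u_l, u_r])

print(car_cmd_to_wheel_duty(0.2, 0.0))  # straight ahead -> equal duty cycles
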
    def __init__(self, debug=False):
        # Produce graphical output
        self.debug = debug  # CURRENTLY UNUSED

        # In the docker container this will be set to point to the
        # hostname of the `gym-duckietown-server` container, but in
        # the local test environment this will just map to localhost
        host = os.getenv("DUCKIETOWN_SERVER", "localhost")

        # Create ZMQ connection
        self.sim = RemoteRobot(host)

        # Tuple of velocity and steering angle, each in the range
        # [-1, 1] for full speed ahead, full speed backward, full
        # left turn, and full right turn respectively
        self.action_space = spaces.Box(
            low=-1,
            high=1,
            shape=(2,),
            dtype=np.float32
        )

        # We observe an RGB image with pixels in [0, 255]
        # Note: the pixels are in uint8 format because this is more compact
        # than float32 if sent over the network or stored in a dataset
        self.observation_space = spaces.Box(
            low=0,
            high=255,
            shape=(CAMERA_HEIGHT, CAMERA_WIDTH, 3),
            dtype=np.uint8
        )

        # Create a black image buffer for the last observation
        self.last_obs = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3), np.uint8)

        self.reward_range = (-1000, 1000)

        self._windows_exists = False

        self.sim_ready = False

        # Initialize the state
        self.seed()
        self.reset()  # FIXME: I'm quite sure this has to be called by the agent by gym convention
class ROSBridge(object):
    def __init__(self):
        print("Hello!")
        host = os.getenv("DUCKIETOWN_SERVER", "localhost")
        # Create ZMQ connection
        self.sim = RemoteRobot(host, silent=False)
        self.rosmaster = os.getenv('HOSTNAME')
        self.action_sub = rospy.Subscriber(
            '/{}/lane_controller_node/car_cmd'.format(self.rosmaster),
            Twist2DStamped, self._action_cb)

        self.cam_pub = rospy.Publisher('/{}/corrected_image/compressed'.format(
            self.rosmaster),
                                       CompressedImage,
                                       queue_size=10)
        self.action = np.array([0, 0])

        self.cam_info_pub = rospy.Publisher(
            '/{}/camera_node/camera_info'.format(self.rosmaster),
            CameraInfo,
            queue_size=1)

        rospy.init_node('RemoteRobotRos')
        rospy.set_param('{}/verbose'.format(os.getenv('HOSTNAME')), "true")

        self.r = rospy.Rate(10)

    def _action_cb(self, msg):
        v = msg.v
        omega = msg.omega
        self.action = np.array([v, omega])

    def _publish_info(self):
        self.cam_info_pub.publish(CameraInfo())

    def _publish_img(self, obs):
        img_msg = CompressedImage()

        time = rospy.get_rostime()
        img_msg.header.stamp.secs = time.secs
        img_msg.header.stamp.nsecs = time.nsecs

        img_msg.format = "jpeg"
        contig = cv2.cvtColor(np.ascontiguousarray(obs), cv2.COLOR_BGR2RGB)
        img_msg.data = np.array(cv2.imencode('.jpg', contig)[1]).tobytes()

        self.cam_pub.publish(img_msg)

    def spin(self):
        while not rospy.is_shutdown():
            img, r, d, _ = self.sim.step(self.action)
            self._publish_img(img)
            self._publish_info()
            self.r.sleep()
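A minimal entry-point sketch for running the bridge above; the __main__ guard and the assumption that HOSTNAME, DUCKIETOWN_SERVER, and a reachable roscore are configured are mine, not part of the original listing.

if __name__ == '__main__':
    # Instantiate the bridge (connects to the simulator and sets up ROS I/O)
    bridge = ROSBridge()
    # Step the simulator with the last received car_cmd at 10 Hz
    bridge.spin()
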
Example #8
class SimpleSimAgentEnv(gym.Env):
    """
    Remote client for the simple Duckietown road simulator.
    Draws a road with turns using OpenGL, and simulates
    basic differential-drive dynamics.
    """

    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second': 30  # TODO: do we need this on the client?
    }

    def __init__(self, silent=False):
        # Produce the occasional print
        self.silent = silent

        # In the docker container this will be set to point to the
        # hostname of the `gym-duckietown-server` container, but in
        # the local test environment this will just map to localhost
        host = os.getenv("DUCKIETOWN_SERVER", "localhost")

        # Create ZMQ connection
        self.sim = RemoteRobot(host, silent=self.silent)

        # Tuple of velocity and steering angle, each in the range
        # [-1, 1] for full speed ahead, full speed backward, full
        # left turn, and full right turn respectively
        self.action_space = spaces.Box(
            low=-1,
            high=1,
            shape=(2,),
            dtype=np.float32
        )

        # We observe an RGB image with pixels in [0, 255]
        # Note: the pixels are in uint8 format because this is more compact
        # than float32 if sent over the network or stored in a dataset
        self.observation_space = spaces.Box(
            low=0,
            high=255,
            shape=(CAMERA_HEIGHT, CAMERA_WIDTH, 3),
            dtype=np.uint8
        )

        # Create a black image buffer for the last observation
        self.last_obs = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3), np.uint8)

        self.reward_range = (-1000, 1000)

        self._windows_exists = False

        # Initialize the state
        self.seed()
        #self.reset()  # FIXME: I'm quite sure this has to be called by the agent by gym convention

    def reset(self):
        """
        Reset the simulation at the start of a new episode
        This also randomizes many environment parameters (domain randomization)
        """

        return self.sim.reset()

    def close(self):
        """
        Doesn't do anything,
        but should be used to end the simulation by gym convention
        """

        pass

    def seed(self, seed=None):
        # TODO: for now this function doesn't do anything.
        # The seed is on the side of the server. Therefore we
        # must transmit the seed to the server. But what if the
        # server is already running and seeded?

        # self.np_random, _ = seeding.np_random(seed)
        return [seed]

    def step(self, action):
        """ Steps the simulation. Run action and get observation and reward.

        :param action: tuple|list|ndarray of exactly 2 floating point values
        each in range [-1,1] indicating the forward/backward speed and the
        steering angle respectively.

        :return: tuple of observation (image as ndarray), reward (float
        scalar), done (bool), misc (empty dict)
        """
        assert len(action) == 2
        action = np.array(action)


        obs, rew, done, misc = self.sim.step(action, with_observation=True)

        return obs, rew, done, misc

    def _create_window(self):
        """ Create a new matplotlib window if none exists to render
        observations.

        :return:
        """


        ## The following bit is important because duckietown_slimremote
        ## natively uses the Agg backend to prevent errors on headless
        ## servers, so if we want to use matplotlib we have to switch
        ## the backend.

        import matplotlib
        from matplotlib import pyplot as plt
        gui_env = ['TKAgg', 'GTKAgg', 'Qt4Agg', 'WXAgg']
        for gui in gui_env:
            try:
                plt.switch_backend(gui)
                break
            except:
                continue

        if not self.silent:
            print("Using Matplotlib backend:", matplotlib.get_backend())

        ## actually create the render window
        plt.ion()
        img = np.zeros((CAMERA_HEIGHT, CAMERA_WIDTH, 3))
        self._plt_img = plt.imshow(img, interpolation='none', animated=True, label="Duckiebot Camera")
        self._plt_ax = plt.gca()

    def _draw_window(self, obs):
        """ Update the matplotlib observation window.

        :param obs:
        :return:
        """
        if obs is not None:
            self._plt_img.set_data(obs)
            self._plt_ax.plot([0])
            plt.pause(0.001)  # I found this necessary - otherwise no visible img

    def render(self, mode='human', close=False):
        """ Either create a matplotlib window of the last observation
        or return the last observation as ndarray.

        :param mode: string, possible values are "human" for rendering the
        observations in a matplotlib window and "rgb_array" to return the
        last observation as ndarray.
        :param close:
        :return:
        """
        obs, _, _, _ = self.sim.observe()
        if mode == "rgb_array":
            return obs
        else:
            if not self._windows_exists:
                self._create_window()
                self._windows_exists = True
            self._draw_window(obs)
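A short rollout sketch for the environment above. Note that reset() is intentionally not called in __init__ (see the commented-out FIXME), so the agent is expected to call it; the action values and episode length below are arbitrary, and DUCKIETOWN_SERVER must point at a running gym-duckietown-server.

env = SimpleSimAgentEnv(silent=True)
obs = env.reset()
for _ in range(100):
    # Drive forward with no steering; both action components are in [-1, 1]
    obs, rew, done, misc = env.step([0.2, 0.0])
    env.render()  # opens a matplotlib window with the latest camera frame
    if done:
        obs = env.reset()
env.close()
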
    def render_obs(self):
        return RemoteRobot.observe(self)[0]
class ROSAgent(object):
    def __init__(self):
        # Get the vehicle name, which comes in as HOSTNAME
        self.vehicle = os.getenv('HOSTNAME')

        # Get the hostname to connect to the server
        host = os.getenv("DUCKIETOWN_SERVER", "localhost")
        
        # Create ZMQ connection with Server
        self.sim = RemoteRobot(host, silent=False)
        
        # Subscribes to the output of the lane_controller_node and IK node
        self.action_sub = rospy.Subscriber('/{}/lane_controller_node/car_cmd'.format(
            self.vehicle), Twist2DStamped, self._action_cb)
        self.ik_action_sub = rospy.Subscriber('/{}/wheels_driver_node/wheels_cmd'.format(
            self.vehicle), WheelsCmdStamped, self._ik_action_cb)
        
        # Placeholder for the action
        self.action = np.array([0, 0])

        # Publishes onto the corrected image topic
        # since the image coming out of the simulator is currently rectified
        self.cam_pub = rospy.Publisher('/{}/corrected_image/compressed'.format(
            self.vehicle), CompressedImage, queue_size=10)
        
        # Publisher for camera info - needed for the ground_projection
        self.cam_info_pub = rospy.Publisher('/{}/camera_node/camera_info'.format(
            self.vehicle), CameraInfo, queue_size=1)

        # Initializes the node
        rospy.init_node('ROSAgent')

        # 10Hz ROS Cycle - TODO: What is this number?
        self.r = rospy.Rate(10)

    def _action_cb(self, msg):
        """
        Callback for the car command published by the lane_controller_node.
        Currently it only unpacks the message; the action forwarded to the
        simulator is taken from the wheels command (see _ik_action_cb).
        """
        v = msg.v
        omega = msg.omega

    def _ik_action_cb(self, msg):
        """
        Callback to listen to the last wheel command (the inverse kinematics
        output published on the wheels_driver_node/wheels_cmd topic).
        Stores it and sustains the same action until a new message is published.
        """
        vl = msg.vel_left
        vr = msg.vel_right
        self.action = np.array([vl, vr])
    
    def _publish_info(self):
        """
        Publishes a default CameraInfo - TODO: fix once distortion is applied in the simulator
        """
        self.cam_info_pub.publish(CameraInfo())      

    def _publish_img(self, obs):
        """
        Publishes the image to the compressed_image topic, which triggers the lane following loop
        """
        img_msg = CompressedImage()

        time = rospy.get_rostime()
        img_msg.header.stamp.secs = time.secs
        img_msg.header.stamp.nsecs = time.nsecs

        img_msg.format = "jpeg"
        contig = cv2.cvtColor(np.ascontiguousarray(obs), cv2.COLOR_BGR2RGB)
        img_msg.data = np.array(cv2.imencode('.jpg', contig)[1]).tobytes()

        self.cam_pub.publish(img_msg)

    def spin(self):
        """
        Main loop
        Steps the sim with the last action at a rate of 10 Hz
        """
        while not rospy.is_shutdown():
            img, r, d, _ = self.sim.step(self.action)
            self._publish_img(img)
            self._publish_info()
            self.r.sleep()
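As with the bridge, a hypothetical entry point for the ROSAgent above; it assumes HOSTNAME names the vehicle, DUCKIETOWN_SERVER points at the gym-duckietown-server, and a roscore is running.

if __name__ == '__main__':
    agent = ROSAgent()
    agent.spin()  # forwards wheel commands to the sim and publishes camera frames
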
Example #11
from duckietown_slimremote.pc.robot import RemoteRobot

robot = RemoteRobot(
    "10.204.6.223")  # currently in the lab, can't use zeroconf/avahi

# for i in range(5):
#     robot.step(action=[0, 0, 1, 0, 0])
#     time.sleep(0.5)
#     robot.step(action=[0, 0, 0, 1, 0])
#     time.sleep(0.5)
#     robot.step(action=[0, 0, 0, 0, 1])
#     time.sleep(0.5)

robot.step([0] * 5)
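As a hedged follow-up, the latest camera frame could be read back with observe(), assuming the same API as in the classes above (the first element of the returned tuple is the image).

img = robot.observe()[0]
print(img.shape)  # expected to be an RGB array, e.g. (height, width, 3)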