Example #1
import asyncio

import pygazebo
from pygazebo.msg import gz_string_pb2


async def main_loop():
    # Connect to the local Gazebo master and advertise a string topic.
    manager = await pygazebo.connect(('localhost', 11345))
    publisher = await manager.advertise('/gazebo/default/test',
                                        'gazebo.msgs.GzString')

    message = gz_string_pb2.GzString(data='test!')
    while True:
        await publisher.publish(message)
        await asyncio.sleep(1)
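
The function above only defines the publish loop; a minimal way to drive it (a sketch, assuming Python 3.7+ and a Gazebo master listening on the default port) is:

if __name__ == '__main__':
    try:
        # asyncio.run() creates the event loop, runs the coroutine, and
        # cleans up; Ctrl-C stops the publisher.
        asyncio.run(main_loop())
    except KeyboardInterrupt:
        pass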
Example #2
    async def create_snapshot(self, pause_when_saving=True):
        """
        Creates a snapshot of the world in the output directory.
        This pauses the world.
        :return: the folder of the snapshot
        """
        if not self.output_directory:
            logger.warning("No output directory - no snapshot will be created.")
            return None

        # Pause the world
        if pause_when_saving:
            await self.pause(True)

        # Obtain a copy of the current world SDF from Gazebo and write it to
        # file
        response = await self.request_handler.do_gazebo_request(
            request="world_sdf"
        )
        if response.response == "error":
            logger.warning("WARNING: requesting world state resulted in "
                           "error. Snapshot failed.")
            await self.pause(False)
            return None

        try:
            snapshot_folder = os.path.join(self.output_directory, str(self.last_time))
            os.makedirs(snapshot_folder)

            msg = gz_string_pb2.GzString()
            msg.ParseFromString(response.serialized_data)
            with open(os.path.join(snapshot_folder, 'snapshot.sdf'), 'wb') as f:
                f.write(msg.data.encode())

            # Get the snapshot data and pickle to file
            data = self.get_snapshot_data()

            # Pickling deep object graphs can exceed the default
            # recursion limit, so raise it
            sys.setrecursionlimit(10000)
            with open(os.path.join(snapshot_folder, 'snapshot.pickle'), 'wb') as f:
                pickle.dump(data, f, protocol=-1)

            # # WHAT IS THIS?
            # # Flush statistic files and copy them
            # self.poses_file.flush()
            # self.robots_file.flush()
            # shutil.copy(self.poses_filename, self.poses_filename+'.snapshot')
            # shutil.copy(self.robots_filename, self.robots_filename+'.snapshot')
        finally:
            if pause_when_saving:
                await self.pause(False)

        return snapshot_folder
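
The recursion-limit bump above exists because pickle walks the object graph recursively. A self-contained sketch of the failure mode (independent of the snapshot code, using a hypothetical nested structure):

import pickle
import sys

# Build a deeply nested structure; pickling it recurses once per level.
node = None
for _ in range(2000):
    node = {'next': node}

try:
    pickle.dumps(node)  # exceeds the default ~1000-frame recursion limit
except RecursionError:
    sys.setrecursionlimit(10000)
    data = pickle.dumps(node)  # succeeds once the limit is raised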
Example #3
    def test_send(self, manager):
        loop = asyncio.get_event_loop()

        read_future = asyncio.Future()
        manager.server.read_packet(lambda data: read_future.set_result(data))
        publisher_future = manager.manager.advertise('mytopic2', 'msgtype')
        publisher = loop.run_until_complete(publisher_future)

        # Now pretend we are a remote host who wants to subscribe to
        # this topic.
        pipe = Pipe()
        manager.serve_future.set_result((pipe.endpointa, None))

        loop.run_until_complete(read_future)

        subscribe = subscribe_pb2.Subscribe()
        subscribe.topic = 'mytopic2'
        subscribe.msg_type = 'msgtype'
        subscribe.host = 'localhost'
        subscribe.port = 54321

        write_future = asyncio.Future()
        pipe.endpointb.write_packet('sub', subscribe,
                                    lambda: write_future.set_result(None))
        loop.run_until_complete(publisher.wait_for_listener())

        read_data1 = asyncio.Future()
        # At this point, anything we "publish" should end up being
        # written to this pipe.
        pipe.endpointb.read_frame(lambda data: read_data1.set_result(data))

        sample_message = gz_string_pb2.GzString()
        sample_message.data = 'testdata'
        publish_future = publisher.publish(sample_message)

        loop.run_until_complete(read_data1)
        data_frame = read_data1.result()
        assert data_frame == sample_message.SerializeToString()

        assert loop.run_until_complete(publish_future) is None

        # Test sending a very large message; it should be split so that
        # no individual write is too large.
        read_data2 = asyncio.Future()
        pipe.endpointb.read_frame(lambda data: read_data2.set_result(data))
        sample_message.data = ' ' * 20000
        publisher.publish(sample_message)

        loop.run_until_complete(read_data2)
        data_frame = read_data2.result()
        assert data_frame == sample_message.SerializeToString()
Example #4
    def init_sequence(self):
        self.write_packet('version_init',
                          gz_string_pb2.GzString(data='gazebo 2.2 simversion'))

        # 'topic_namepaces_init' (sic) is intentional; it matches the
        # packet name Gazebo uses on the wire.
        self.write_packet('topic_namepaces_init',
                          gz_string_v_pb2.GzString_V(data=['a', 'b']))

        self.write_packet(
            'publishers_init',
            publishers_pb2.Publishers(publisher=[
                publish_pb2.Publish(topic='inittopic1',
                                    msg_type='msgs.Fake',
                                    host='myhost',
                                    port=1234),
            ]))
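
These init packets are ordinary protobuf messages; a round-trip through the wire encoding (a sketch, assuming the pygazebo-generated publish_pb2/publishers_pb2 modules are importable) looks like:

from pygazebo.msg import publish_pb2, publishers_pb2

original = publishers_pb2.Publishers(publisher=[
    publish_pb2.Publish(topic='inittopic1', msg_type='msgs.Fake',
                        host='myhost', port=1234),
])

wire = original.SerializeToString()    # bytes as sent by write_packet
decoded = publishers_pb2.Publishers()
decoded.ParseFromString(wire)          # what the peer reconstructs
assert decoded.publisher[0].topic == 'inittopic1'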
Example #5
    async def create_snapshot(self):
        """
        Creates a snapshot of the world in the output directory.
        This pauses the world.
        :return: True if the snapshot succeeded, False otherwise
        """
        if not self.output_directory:
            logger.warning(
                "No output directory - no snapshot will be created.")
            return False

        # Pause the world
        await self.pause()

        # Obtain a copy of the current world SDF from Gazebo and write it to
        # file
        response = await self.request_handler.do_gazebo_request(
            request="world_sdf")
        if response.response == "error":
            logger.warning("WARNING: requesting world state resulted in "
                           "error. Snapshot failed.")
            return False

        msg = gz_string_pb2.GzString()
        msg.ParseFromString(response.serialized_data)
        with open(self.world_snapshot_filename, 'wb') as f:
            f.write(msg.data.encode())

        # Get the snapshot data and pickle to file
        data = await self.get_snapshot_data()

        # Pickling deep object graphs can exceed the default recursion
        # limit, so raise it
        sys.setrecursionlimit(10000)
        with open(self.snapshot_filename, 'wb') as f:
            pickle.dump(data, f, protocol=-1)

        # Flush statistic files and copy them
        self.poses_file.flush()
        self.robots_file.flush()
        shutil.copy(self.poses_filename, self.poses_filename + '.snapshot')
        shutil.copy(self.robots_filename, self.robots_filename + '.snapshot')
        return True
Example #6
    def test_send(self, manager):
        eventlet.spawn(manager.server.read_packet)
        publisher = manager.manager.advertise('mytopic2', 'msgtype')

        # Now pretend we are a remote host who wants to subscribe to
        # this topic.
        pipe = Pipe()
        eventlet.spawn_n(manager.serve_handle, pipe.endpointa, None)

        subscribe = subscribe_pb2.Subscribe()
        subscribe.topic = 'mytopic2'
        subscribe.msg_type = 'msgtype'
        subscribe.host = 'localhost'
        subscribe.port = 54321

        pipe.endpointb.write_packet('sub', subscribe)

        # At this point, anything we "publish" should end up being
        # written to this pipe.
        read_data1 = eventlet.spawn(pipe.endpointb.read_frame)

        sample_message = gz_string_pb2.GzString()
        sample_message.data = 'testdata'
        publisher.publish(sample_message)

        data_frame = read_data1.wait()
        assert data_frame == sample_message.SerializeToString()

        # Test sending a very large message; it should be split so that
        # no individual write is too large.
        read_data2 = eventlet.spawn(pipe.endpointb.read_frame)
        sample_message.data = ' ' * 100000
        publisher.publish(sample_message)

        data_frame = read_data2.wait()
        assert data_frame == sample_message.SerializeToString()
Example #7
    def init_sequence(self, callback):
        self.write_packet('version_init',
                          gz_string_pb2.GzString(data='gazebo 2.2 simversion'),
                          lambda: self._init_sequence1(callback))