def setup(self):
    """Build a throttled stream plus a heap to send over it."""
    # A low rate keeps sends in flight long enough to observe async state.
    slow_config = spead2.send.StreamConfig(rate=5e6)
    self.stream = UdpStream(spead2.ThreadPool(), 'localhost', 8888, slow_config)
    size = 256 * 1024
    self.ig = spead2.send.ItemGroup()
    self.ig.add_item(0x1000, 'test', 'Test item', shape=(size,), dtype=np.uint8)
    self.ig['test'].value = np.zeros((size,), np.uint8)
    self.heap = self.ig.get_heap()
def test_send_error(self):
    """A failure during sending must surface through the returned future."""
    # A packet size far larger than any plausible MTU guarantees that the
    # send fails.
    oversized = spead2.send.StreamConfig(max_packet_size=100000)
    stream = UdpStream(spead2.ThreadPool(), "localhost", 8888,
                       oversized, buffer_size=0)
    future = stream.async_send_heap(self.heap)
    loop = asyncio.get_event_loop()
    loop.run_until_complete(self._test_send_error(future))
class TestUdpStream:
    """Async-send behaviour of a rate-limited UDP stream."""

    def setup(self):
        # A low rate keeps heaps in flight long enough to observe async state.
        slow_config = spead2.send.StreamConfig(rate=5e6)
        self.stream = UdpStream(spead2.ThreadPool(), 'localhost', 8888, slow_config)
        self.ig = spead2.send.ItemGroup()
        self.ig.add_item(0x1000, 'test', 'Test item',
                         shape=(256 * 1024,), dtype=np.uint8)
        self.ig['test'].value = np.zeros((256 * 1024,), np.uint8)
        self.heap = self.ig.get_heap()

    async def _test_async_flush(self):
        # There must be heaps in flight before the flush, none afterwards.
        assert_greater(self.stream._active, 0)
        await self.stream.async_flush()
        assert_equal(self.stream._active, 0)

    def test_async_flush(self):
        for _ in range(3):
            asyncio.ensure_future(self.stream.async_send_heap(self.heap))
        # ensure_future only queues the sends; the checks themselves have to
        # execute inside the event loop.
        asyncio.get_event_loop().run_until_complete(self._test_async_flush())

    def test_async_flush_fail(self):
        """Test async_flush in the case that the last heap sent failed.

        This is arranged by filling up the queue slots first.
        """
        for _ in range(5):
            asyncio.ensure_future(self.stream.async_send_heap(self.heap))
        # ensure_future only queues the sends; the checks themselves have to
        # execute inside the event loop.
        asyncio.get_event_loop().run_until_complete(self._test_async_flush())

    async def _test_send_error(self, future):
        with assert_raises(IOError):
            await future

    def test_send_error(self):
        """An error in sending must be reported through the future."""
        # A packet size far larger than any plausible MTU guarantees that
        # the send fails.
        oversized = spead2.send.StreamConfig(max_packet_size=100000)
        stream = UdpStream(spead2.ThreadPool(), "localhost", 8888,
                           oversized, buffer_size=0)
        future = stream.async_send_heap(self.heap)
        asyncio.get_event_loop().run_until_complete(self._test_send_error(future))
async def test_send_error(self):
    """Sending failures must propagate out of async_send_heap."""
    # A packet size far larger than any plausible MTU guarantees that the
    # OS rejects the datagram.
    config = spead2.send.StreamConfig(max_packet_size=100000)
    stream = UdpStream(spead2.ThreadPool(), [("localhost", 8888)],
                       config, buffer_size=0)
    with pytest.raises(IOError):
        await stream.async_send_heap(self.heap)
class TestUdpStream:
    """Async-send behaviour of a rate-limited UDP stream."""

    def setup(self):
        # A low rate keeps heaps in flight long enough to observe async state.
        slow_config = spead2.send.StreamConfig(rate=5e6)
        self.stream = UdpStream(spead2.ThreadPool(), [('localhost', 8888)], slow_config)
        self.ig = spead2.send.ItemGroup()
        self.ig.add_item(0x1000, 'test', 'Test item',
                         shape=(256 * 1024, ), dtype=np.uint8)
        self.ig['test'].value = np.zeros((256 * 1024, ), np.uint8)
        self.heap = self.ig.get_heap()

    async def _test_async_flush(self):
        # There must be heaps in flight before the flush, none afterwards.
        assert self.stream._active > 0
        await self.stream.async_flush()
        assert self.stream._active == 0

    async def test_async_flush(self):
        for _ in range(3):
            asyncio.ensure_future(self.stream.async_send_heap(self.heap))
        await self._test_async_flush()

    async def test_async_flush_fail(self):
        """Test async_flush in the case that the last heap sent failed.

        This is arranged by filling up the queue slots first.
        """
        for _ in range(5):
            asyncio.ensure_future(self.stream.async_send_heap(self.heap))
        await self._test_async_flush()

    async def test_send_error(self):
        """An error in sending must be reported through the future."""
        # A packet size far larger than any plausible MTU guarantees that
        # the send fails.
        config = spead2.send.StreamConfig(max_packet_size=100000)
        stream = UdpStream(spead2.ThreadPool(), [("localhost", 8888)],
                           config, buffer_size=0)
        with pytest.raises(IOError):
            await stream.async_send_heap(self.heap)

    async def test_async_send_heap_refcount(self):
        """async_send_heap must release the reference to the heap."""
        weak = weakref.ref(self.heap)
        future = self.stream.async_send_heap(weak())
        self.heap = None
        await future
        gc.collect()
        assert weak() is None

    async def test_async_send_heaps_refcount(self):
        """async_send_heaps must release the reference to the heap."""
        weak = weakref.ref(self.heap)
        future = self.stream.async_send_heaps(
            [spead2.send.HeapReference(weak())],
            spead2.send.GroupMode.ROUND_ROBIN)
        self.heap = None
        await future
        gc.collect()
        assert weak() is None
class TestUdpStream:
    """Async-send behaviour of a rate-limited UDP stream."""

    def setup(self):
        # A low rate keeps heaps in flight long enough to observe async state.
        slow_config = spead2.send.StreamConfig(rate=5e6)
        self.stream = UdpStream(spead2.ThreadPool(), [('localhost', 8888)], slow_config)
        self.ig = spead2.send.ItemGroup()
        self.ig.add_item(0x1000, 'test', 'Test item',
                         shape=(256 * 1024,), dtype=np.uint8)
        self.ig['test'].value = np.zeros((256 * 1024,), np.uint8)
        self.heap = self.ig.get_heap()

    async def _test_async_flush(self):
        # There must be heaps in flight before the flush, none afterwards.
        assert self.stream._active > 0
        await self.stream.async_flush()
        assert self.stream._active == 0

    async def test_async_flush(self):
        for _ in range(3):
            asyncio.ensure_future(self.stream.async_send_heap(self.heap))
        await self._test_async_flush()

    async def test_async_flush_fail(self):
        """Test async_flush in the case that the last heap sent failed.

        This is arranged by filling up the queue slots first.
        """
        for _ in range(5):
            asyncio.ensure_future(self.stream.async_send_heap(self.heap))
        await self._test_async_flush()

    async def test_send_error(self):
        """An error in sending must be reported through the future."""
        # A packet size far larger than any plausible MTU guarantees that
        # the send fails.
        config = spead2.send.StreamConfig(max_packet_size=100000)
        stream = UdpStream(
            spead2.ThreadPool(), [("localhost", 8888)], config, buffer_size=0)
        with pytest.raises(IOError):
            await stream.async_send_heap(self.heap)

    async def test_async_send_heap_refcount(self):
        """async_send_heap must release the reference to the heap."""
        weak = weakref.ref(self.heap)
        future = self.stream.async_send_heap(weak())
        self.heap = None
        await future
        gc.collect()
        assert weak() is None

    async def test_async_send_heaps_refcount(self):
        """async_send_heaps must release the reference to the heap."""
        weak = weakref.ref(self.heap)
        future = self.stream.async_send_heaps(
            [spead2.send.HeapReference(weak())],
            spead2.send.GroupMode.ROUND_ROBIN)
        self.heap = None
        await future
        for _ in range(5):
            # Try extra hard to make PyPy release things
            gc.collect()
        assert weak() is None

    async def test_cancel(self, caplog):
        """Cancelling the future must work gracefully."""
        with caplog.at_level(logging.ERROR):
            future = self.stream.async_send_heap(self.heap)
            future.cancel()
            with pytest.raises(asyncio.CancelledError):
                await future
            # Send another heap to ensure that process_callbacks has time to run.
            await self.stream.async_send_heap(self.heap)
        # An exception in process_callbacks doesn't propagate anywhere we can
        # easily access it, but it does cause the event loop to log an error.
        assert not caplog.records