def test_msg(self):
    """Round-trip a single pickleable object through the queue."""
    queue = Queue(max_size_bytes=1000)
    sample = dict(a=42, b=33, c=(1, 2, 3), d=[1, 2, 3], e='123', f=b'kkk')
    queue.put_nowait(sample)
    received = queue.get_nowait()
    log.debug('Got object %r', received)
    self.assertEqual(sample, received)
def test_queue_usage(self):
    """Exercise the basic Queue API: put/get, Full/Empty exceptions, get_many.

    Serves as executable documentation of the queue's public interface.
    """
    q = Queue(1000 * 1000)  # specify the size of the circular buffer in the ctor

    # any pickle-able Python object can be added to the queue
    py_obj = dict(a=42, b=33, c=(1, 2, 3), d=[1, 2, 3], e='123', f=b'kkk')
    q.put(py_obj)
    assert q.qsize() == 1

    retrieved = q.get()
    assert q.empty()
    assert py_obj == retrieved

    # fill the queue until it overflows; Full is the expected overflow signal
    for _ in range(100):
        try:
            q.put(py_obj, timeout=0.1)
        except Full:
            log.debug('Queue is full!')

    num_received = 0
    while num_received < 100:
        # get multiple messages at once, returns a list of messages for better performance in many-to-few scenarios
        # get_many does not guarantee that all max_messages_to_get will be received on the first call, in fact
        # no such guarantee can be made in multiprocessing systems.
        # get_many() will retrieve as many messages as there are available AND can fit in the pre-allocated memory
        # buffer. The size of the buffer is increased gradually to match demand.
        messages = q.get_many(max_messages_to_get=100)
        num_received += len(messages)

    try:
        q.get(timeout=0.1)
        # BUG FIX: the original used `assert True`, which can never fail, so the
        # test would silently pass even if Empty was NOT raised. `assert False`
        # makes the test fail if get() on an empty queue returns instead of raising.
        assert False, 'This won\'t be called'
    except Empty:
        log.debug('Queue is empty')
def test_queue_empty(self):
    """empty() is True for a fresh queue and False once a message is enqueued."""
    queue = Queue(max_size_bytes=1000)
    self.assertTrue(queue.empty())

    payload = dict(a=42, b=33, c=(1, 2, 3), d=[1, 2, 3], e='123', f=b'kkk')
    queue.put_nowait(payload)
    is_empty_after_put = queue.empty()
    self.assertFalse(is_empty_after_put)
def __init__(self, player_id, make_env_func, env_config, use_multiprocessing=False, reset_on_init=True):
    """Create the worker and immediately start it as a process or a thread.

    :param player_id: identifier of the player this worker serves
    :param make_env_func: factory callable that builds the environment
    :param env_config: configuration passed to the environment
    :param use_multiprocessing: run self.start in a separate process if True,
        otherwise in a daemon-less thread of the current process
    :param reset_on_init: whether the env should be reset when the worker starts
    """
    self.player_id = player_id
    self.make_env_func = make_env_func
    self.env_config = env_config
    self.reset_on_init = reset_on_init

    # one queue for incoming tasks, one for outgoing results
    self.task_queue = Queue()
    self.result_queue = Queue()

    if use_multiprocessing:
        worker = Process(target=self.start, daemon=False)
    else:
        worker = threading.Thread(target=self.start)

    self.process = worker
    self.process.start()
def test_queue_full(self):
    """full() becomes True exactly when put_nowait starts raising Full."""
    queue = Queue(max_size_bytes=60)
    self.assertFalse(queue.full())

    payload = (1, 2)
    while True:
        try:
            queue.put_nowait(payload)
        except Full:
            # overflow reached: the queue must now report itself as full
            self.assertTrue(queue.full())
            break
def test_spawn_ctx(self):
    """Two producers and one consumer share the queue under the 'spawn' start method."""
    ctx = multiprocessing.get_context('spawn')
    data_q = Queue(1000 * 1000)

    workers = [ctx.Process(target=spawn_producer, args=(data_q,)) for _ in range(2)]
    workers.append(ctx.Process(target=spawn_consumer, args=(data_q,)))

    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
def __init__(self, cfg):
    """Initialize shared state for worker processes and periodic FPS reporting.

    :param cfg: configuration object forwarded to the base class
    """
    super().__init__(cfg)
    # worker processes spawned later; empty until then
    self.processes = []
    # shared boolean flag used to signal workers to shut down
    self.terminate = RawValue(ctypes.c_bool, False)
    # event that gates workers until the run actually starts
    self.start_event = multiprocessing.Event()
    self.start_event.clear()
    # workers push periodic stats through this queue
    self.report_queue = Queue()
    self.report_every_sec = 1.0
    self.last_report = 0
    # window lengths (seconds) over which FPS averages are computed
    self.avg_stats_intervals = (1, 10, 60, 300, 600)
    # bounded history sized to the longest averaging window
    self.fps_stats = deque([], maxlen=max(self.avg_stats_intervals))
def test_queue_size(self):
    """qsize() reflects enqueued messages and returns to 0 after draining."""
    q = Queue(max_size_bytes=1000)
    py_obj_1 = dict(a=10, b=20)
    py_obj_2 = dict(a=30, b=40)
    q.put_nowait(py_obj_1)
    q.put_nowait(py_obj_2)

    q_size_bef = q.qsize()
    log.debug('Queue size after put - %d', q_size_bef)

    num_messages = 0
    want_to_read = 2
    while num_messages < want_to_read:
        msgs = q.get_many()
        # consistency fix: use the module logger like the rest of these tests
        # instead of a bare print()
        log.debug('Read messages: %r', msgs)
        num_messages += len(msgs)

    # idiom fix: isinstance check instead of comparing type objects with ==
    self.assertIsInstance(q_size_bef, int)

    q_size_af = q.qsize()
    log.debug('Queue size after get - %d', q_size_af)
    self.assertEqual(q_size_af, 0)
def test_msg_many(self):
    """put_many/get_many round-trip, plus a whole list sent as one message."""
    queue = Queue(max_size_bytes=100000)
    payloads = [
        dict(a=42, b=33, c=(1, 2, 3), d=[1, 2, 3], e='123', f=b'kkk')
        for _ in range(5)
    ]

    # batch round-trip: keep draining until everything sent has been read back
    queue.put_many_nowait(payloads)
    received = queue.get_many_nowait()
    while not queue.empty():
        received.extend(queue.get_many_nowait())
    log.debug('Got object %r', received)
    self.assertEqual(payloads, received)

    # the same list also travels intact as a single message
    queue.put_nowait(payloads)
    self.assertEqual(payloads, queue.get_nowait())
def test_spawn_ctx(self):
    """'spawn' context test that primes the queue before forking workers."""
    ctx = multiprocessing.get_context('spawn')
    data_q = Queue(1000 * 1000)
    procs = [ctx.Process(target=spawn_producer, args=(data_q,)) for _ in range(2)]
    procs.append(ctx.Process(target=spawn_consumer, args=(data_q,)))

    # add data to the queue and read some of it back to make sure all buffers are initialized before
    # the new process is spawned (such that we need to pickle everything)
    for _ in range(10):  # loop index was unused; renamed to _
        data_q.put(self.test_spawn_ctx.__name__)

    msgs = data_q.get_many(max_messages_to_get=2)
    # consistency fix: use the module logger like the rest of these tests
    # instead of a bare print()
    log.debug('Primed messages: %r', msgs)

    for p in procs:
        p.start()
    for p in procs:
        p.join()
def test_multiproc(self):
    """Many producer processes and a few consumers hammering one shared queue."""
    shared_q = Queue()
    consume_many = 1000

    producers = [
        multiprocessing.Process(target=produce, args=(shared_q, idx, 1000001))
        for idx in range(20)
    ]
    consumers = [
        multiprocessing.Process(target=consume, args=(shared_q, idx, consume_many))
        for idx in range(3)
    ]

    # consumers first so they are ready to drain as soon as producers start
    for worker in consumers:
        worker.start()
    for worker in producers:
        worker.start()

    for worker in producers:
        worker.join()

    shared_q.close()

    for worker in consumers:
        worker.join()

    log.info('Exit...')
def test_singleproc(self):
    """Produce and consume within the same process, then close the queue."""
    queue = Queue()
    produce(queue, 0, num_messages=20)
    consume(queue, 0, consume_many=2, total_num_messages=20)
    queue.close()