def test_multiprocess_tasks():
    wait_until_convenient()
    TAG = "message_q"

    def fetch_task(queue):
        # Worker body: drain tasks tagged TAG and report each one through the
        # shared multiprocessing queue so the parent can verify the results.
        pid = os.getpid()
        count = 0
        for dq in q.listen(TAG, timeout=1):
            s = {'pid': pid, 'data': dq}
            if dq:
                count += 1
                queue.put(s)
                sleep(uniform(0.1, 0.5))  # sleep 0.1~0.5 seconds randomly
            elif q.count(TAG) == 0:
                return count  # the number of tasks done by this process

    test_items = range(0, 10000)  # enqueue 10000 tasks
    for i in test_items:
        q.enqueue(TAG, i + 1)

    while q.count(TAG) != len(test_items):  # wait until test data is ready
        wait_until_convenient()

    jobs = []
    wait_until_convenient()
    queue = Queue()
    start = timer()
    num_p = 30  # the number of processes to use
    for _ in range(0, num_p):
        job = Process(target=fetch_task, args=(queue,))
        jobs.append(job)
        job.start()  # start task process

    remaining = q.count(TAG)
    while remaining > 0:  # wait until the queue is consumed completely
        remaining = q.count(TAG)
        sys.stdout.write('\rRunning test_multiprocess_tasks - remaining %5d/%5d'
                         % (remaining, len(test_items)))
        sys.stdout.flush()
        wait_until_convenient()

    # Collect everything the workers reported and make sure no task was
    # delivered to more than one process.
    processed_data = set()
    qsize = 0
    while not queue.empty():
        item = queue.get()
        data = item.get('data')
        qsize += 1
        assert data not in processed_data, \
            "failed test_multiprocess_tasks - data %s has been processed already" % (data,)
        processed_data.add(data)

    queue.close()
    queue.join_thread()
    for j in jobs:
        j.join()

    assert qsize == len(test_items), \
        "failed test_multiprocess_tasks - tasks are not complete %d/%d" % (qsize, len(test_items))
    end = timer()
    print("\rOK test_multiprocess_tasks - %d done in %5d seconds" % (qsize, end - start))
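The test relies on names defined elsewhere in the suite: the queue object under test (`q`), the `wait_until_convenient()` helper, and a few standard-library imports. A minimal sketch of the assumed imports follows; mapping `timer` to `timeit.default_timer` and taking `Process`/`Queue` from `multiprocessing` are assumptions inferred from how they are used above.

# Assumed context for test_multiprocess_tasks; `q` and wait_until_convenient()
# come from earlier in the test module and are not shown here.
import os
import sys
from time import sleep
from random import uniform
from timeit import default_timer as timer   # assumed source of `timer`
from multiprocessing import Process, Queue  # assumed multiprocessing primitives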