def gather_all_unsorted(
    threadpool: ThreadPool,
    func: Callable,
    params: Sequence,
    *args: Any,
    **kwargs: Any,
) -> Iterator[Tuple[Any, Any]]:
    """Run ``func`` over every element of ``params`` on ``threadpool``,
    yielding each result as soon as it finishes (completion order, not
    submission order).

    Each task is tagged with its position in ``params`` so callers can
    match results back to inputs; per the return annotation, each yielded
    item is the pair produced by ``threadpool.get()``.
    """
    # Submit one task per parameter, tagged with its index.
    for index, item in enumerate(params):
        threadpool.add_task(index, func, item, *args, **kwargs)

    # Drain exactly as many results as tasks were submitted.
    for _ in range(len(params)):
        yield threadpool.get()
# Demo: flood a ThreadPool with randomly-sized sleep jobs.
delays = [randrange(1, 10) for _ in range(100)]

from time import sleep


def wait_delay(d):
    """Print the delay, then sleep for it (simulated unit of work)."""
    print('sleeping for (%d)sec' % d)
    sleep(d)


# 1) Init a Thread pool with the desired number of threads
pool = ThreadPool(20)

for task_no, delay in enumerate(delays):
    # print the percentage of tasks placed in the queue
    print('%.2f%c' % ((float(task_no) / float(len(delays))) * 100.0, '%'))
    # 2) Add the task to the queue
    pool.add_task(wait_delay, delay)

# 3) Wait for completion
pool.wait_completion()


class asynchronous(object):
    """Decorator wrapping a callable so it can also be run on a thread,
    pushing its return value onto ``self.queue``.

    NOTE(review): ``self.queue`` is never initialized in the code visible
    here — presumably a ``start()`` method outside this chunk creates it
    before ``self.threaded`` runs; confirm against the full class.
    """

    def __init__(self, func):
        self.func = func

        # Closure variant of the call: same invocation, but the result is
        # delivered through the queue instead of being returned.
        def threaded(*args, **kwargs):
            self.queue.put(self.func(*args, **kwargs))

        self.threaded = threaded

    def __call__(self, *args, **kwargs):
        # Synchronous path: behave exactly like the wrapped callable.
        return self.func(*args, **kwargs)
self._q = Queue(num_t) for _ in range(num_t): Worker(self._q) def add_task(self, f, *args, **kwargs): self._q.put((f, args, kwargs)) def wait_complete(self): self._q.join() pool = ThreadPool() for _ in range(8): wt = random() pool.add_task(double, wt) time.sleep(wt) pool.wait_complete() """ USE: Thread-202 0.3289221727443976 USE: Thread-203 1.941427406228165 USE: Thread-204 1.4668008298757944 USE: Thread-205 0.23496561121950066 USE: Thread-206 1.8221948868451534 USE: Thread-202