def crawl(self):
    """Breadth-first crawl: pull URLs off ``self.queue`` and fetch each one
    on a worker thread until ``self.visited`` reaches the target size.

    Each successfully dequeued, not-yet-visited URL is recorded in
    ``self.visited`` and submitted to a shared thread pool;
    ``self.callbackContinue`` runs when the fetch finishes (presumably it
    enqueues newly discovered links — TODO confirm).
    """
    # One pool for the whole crawl. The original built a fresh
    # ThreadPoolExecutor per URL inside the loop, leaking threads.
    executor = ThreadPoolExecutor(max_workers=self.threads)
    while len(self.visited) < (self.n + self.new_size_visited):
        url = self.queue.get()
        if url in self.visited:
            # Already crawled: skip this URL. (Bug fix: the original
            # called self.queue.get() again here, popping and silently
            # discarding an unrelated, possibly unvisited URL.)
            continue
        self.visited.append(url)
        task = executor.submit(requestUrl, url)
        task.add_done_callback(self.callbackContinue)
def generate(self):
    """Start background generation/upload of this file's audio to S3.

    If a task for ``self.id`` is already registered in
    ``self.audio_gen_threads``, this is a no-op, so repeated calls while a
    job is in flight do not start duplicate uploads. The bookkeeping entry
    is removed automatically when the upload finishes.
    """
    if self.id in self.audio_gen_threads:
        # Audio generation for this file is already processing.
        return
    # Capture the id now: the done-callback may fire much later, and the
    # original read self.id at callback time, deleting the wrong entry if
    # it had changed meanwhile.
    file_id = self.id
    # Keep a handle on the executor. The original rebound the executor
    # variable to the Future (X = X.submit(...)), so the pool could never
    # be shut down and its worker thread leaked.
    executor = ThreadPoolExecutor()
    future = executor.submit(self._writeAudioToS3)
    self.audio_gen_threads[file_id] = (future, 0)

    def _on_complete(_future):
        # Drop the bookkeeping entry and release the pool's threads.
        # pop() instead of del so a concurrent removal cannot raise KeyError.
        self.audio_gen_threads.pop(file_id, None)
        executor.shutdown(wait=False)

    future.add_done_callback(_on_complete)
def _wrapper(self, *args, **kwargs):
    """Run the wrapped ``func`` on a worker thread and block for its result.

    When the future completes, it is also handed to the Tornado IOLoop via
    ``add_callback_from_signal`` so the loop thread observes the result
    (``callback`` re-raises any worker exception there).

    Returns:
        Whatever ``func(self, *args, **kwargs)`` returns, or ``None`` if an
        exception was raised (the exception is printed, not propagated).
    """
    def callback(future):
        # Re-raises on the IOLoop thread if func failed on the worker.
        return future.result()

    def notify_ioloop(future):
        tornado.ioloop.IOLoop.current().add_callback_from_signal(
            functools.partial(callback, future))

    try:
        # `with` releases the pool's 4 threads once the result is in; the
        # original constructed a new pool per call and never shut it down.
        # Behavior is unchanged because we block on future.result() anyway.
        with ThreadPoolExecutor(max_workers=4) as executor:
            future = executor.submit(
                functools.partial(func, self, *args, **kwargs))
            future.add_done_callback(notify_ioloop)
            return future.result()
    except Exception as e:
        # Bug fix: `except Exception, e` / `print e` is Python-2-only
        # syntax and is a SyntaxError under the Python 3 this file
        # otherwise requires (see the async def elsewhere in the module).
        print(e)
async def zhanji(session):
    """Reply with a progress message, then fetch results in the background.

    ``fetch_async`` runs on a worker thread so the event loop is not
    blocked; ``callback(future, session)`` is invoked once it completes.
    """
    await session.send('正在查询')
    executor = ThreadPoolExecutor(5)
    future = executor.submit(fetch_async)
    # Bug fix: the original wrote add_done_callback(callback(v, session)),
    # which invoked callback immediately — before the fetch finished — and
    # registered its *return value* as the done-callback. Defer the call
    # until the future actually completes.
    future.add_done_callback(lambda fut: callback(fut, session))
    # Bug fix: the original shut down a brand-new, unrelated executor,
    # leaking this one. wait=False so this coroutine never blocks the
    # event loop; pending work still runs to completion before exit.
    executor.shutdown(wait=False)