def test_threads_die():
    """Worker threads start alive, die on kill_threads(), and are dead
    after exiting a with-block."""
    tq = ThreadedQueue(n_threads=40)
    assert tq.are_threads_alive()
    tq.kill_threads()
    assert not tq.are_threads_alive()

    # With zero threads there is nothing to be alive.
    tq = ThreadedQueue(n_threads=0)
    assert not tq.are_threads_alive()

    with ThreadedQueue(n_threads=40) as tq:
        threads = tq._threads
    # Fix: Thread.isAlive() was deprecated in Python 3.8 and removed in 3.9;
    # the supported spelling is Thread.is_alive().
    assert not any(map(lambda t: t.is_alive(), threads))
def test_thread_exceptions():
    """An exception raised inside a queued task must surface from wait()."""
    def explode(interface):
        raise NotImplementedError("Not implemented at all.")

    tq = ThreadedQueue(n_threads=40)
    for _ in range(1000):
        tq.put(explode)

    # wait() is expected to re-raise the task's exception.
    try:
        tq.wait()
    except NotImplementedError:
        pass
def test_gc_stresstest():
    """Stress the Google Cloud connection pool with many concurrent checkouts."""
    with Storage('gs://seunglab-test/cloudvolume/connection_pool/', n_threads=0) as stor:
        stor.put_file('test', 'some string')

    n_trials = 500
    pbar = tqdm(total=n_trials)

    @retry
    def download_task(interface):
        # NOTE(review): an invariant one might check here:
        # assert GC_POOL.total_connections() <= GC_POOL.max_connections * 5
        bucket = GC_POOL.get_connection()
        blob = bucket.get_blob('cloudvolume/connection_pool/test')
        blob.download_as_string()
        GC_POOL.release_connection(bucket)
        pbar.update()

    with ThreadedQueue(n_threads=20) as tq:
        for _ in range(n_trials):
            tq.put(download_task)

    pbar.close()
def test_threading():
    """Every queued task runs, on both 1 and 40 threads; a with-statement
    over a 0-thread queue must raise ValueError."""
    execution_count = 1000

    def fresh_flags():
        return [False] * execution_count

    executions = fresh_flags()

    def mark_done(idnum, should_be_none):
        executions[idnum] = True
        assert should_be_none is None

    # Single worker thread: the context manager drains the queue on exit.
    with ThreadedQueue(n_threads=1) as tq:
        for idnum in range(execution_count):
            tq.put(partial(mark_done, idnum))
    assert all(executions)

    # Many worker threads, explicit wait + teardown.
    executions = fresh_flags()
    tq = ThreadedQueue(n_threads=40)
    for idnum in range(execution_count):
        tq.put(partial(mark_done, idnum))
    tq.wait().kill_threads()
    assert tq.processed == execution_count
    assert all(executions)

    # Base class with 0 threads on a with statement would never terminate,
    # so entering the context must fail with ValueError.
    try:
        with ThreadedQueue(n_threads=0) as tq:
            assert False
    except ValueError:
        assert True
    except Exception:
        assert False
def test_s3_stresstest():
    """Stress the S3 connection pool with many concurrent checkouts."""
    with Storage('s3://seunglab-test/cloudvolume/connection_pool/', n_threads=0) as stor:
        stor.put_file('test', 'some string')

    n_trials = 500
    pbar = tqdm(total=n_trials)

    @retry
    def fetch_task(interface):
        conn = S3_POOL.get_connection()
        # NOTE(review): an invariant one might check here:
        # assert S3_POOL.total_connections() <= S3_POOL.max_connections * 5
        bucket = conn.get_object(
            Bucket='seunglab-test',
            Key='cloudvolume/connection_pool/test',
        )
        S3_POOL.release_connection(conn)
        pbar.update()

    with ThreadedQueue(n_threads=20) as tq:
        for _ in range(n_trials):
            tq.put(fetch_task)

    pbar.close()
def __exit__(self, exception_type, exception_value, traceback):
    """Run the parent queue's context-manager teardown first, then release
    the interface connection acquired by this instance.

    Order matters: the parent's __exit__ runs before the connection is
    released, so any still-queued work can finish using it.
    """
    ThreadedQueue.__exit__(self, exception_type, exception_value, traceback)
    self._interface.release_connection()
def __del__(self):
    """On garbage collection, run the parent's finalizer and then release
    the interface connection held by this instance."""
    ThreadedQueue.__del__(self)
    self._interface.release_connection()
def _consume_queue(self, terminate_evt):
    """Run the parent's queue-consumption loop; when it returns (after
    terminate_evt is handled by the parent), release the instance's
    interface connection."""
    ThreadedQueue._consume_queue(self, terminate_evt)
    self._interface.release_connection()
def __init__(self, layer_path, n_threads=20, progress=False):
    """Initialize both base classes and acquire a connection interface.

    layer_path: storage location, forwarded to StorageBase.
    n_threads: number of worker threads for ThreadedQueue (default 20).
    progress: forwarded to StorageBase.

    Explicit base-class __init__ calls (rather than super()) because the
    two bases take different argument lists.
    """
    StorageBase.__init__(self, layer_path, progress)
    ThreadedQueue.__init__(self, n_threads)
    # Held for the object's lifetime; released in __exit__/__del__.
    self._interface = self.get_connection()