def test_run(self):
    """ Test drain's run function (in the same thread) """
    source = range(5)
    destination = Queue()
    drain = Drain(source, destination)
    drain.run()
    assert destination.qsize() == 5
def test_partially_reversed_data(self, data):
    """ Aggregation should survive two adjacent chunks arriving out of order """
    results_queue = Queue()
    chunks = list(random_split(data))
    chunks[5], chunks[6] = chunks[6], chunks[5]
    pipeline = Aggregator(
        TimeChopper(
            DataPoller(source=chunks, poll_period=0.1),
            cache_size=3),
        AGGR_CONFIG,
        False)
    drain = Drain(pipeline, results_queue)
    drain.run()
    assert results_queue.qsize() == MAX_TS
def test_slow_producer(self, data):
    """ Aggregation should survive a producer that intermittently yields None """
    results_queue = Queue()
    chunks = list(random_split(data))
    chunks[5], chunks[6] = chunks[6], chunks[5]

    def producer():
        for chunk in chunks:
            if np.random.random() > 0.5:
                yield None
            yield chunk

    pipeline = Aggregator(
        TimeChopper(
            DataPoller(source=producer(), poll_period=0.1),
            cache_size=3),
        AGGR_CONFIG,
        False)
    drain = Drain(pipeline, results_queue)
    drain.run()
    assert results_queue.qsize() == MAX_TS