Example #1
    def test_run(self):
        """
        Test Drain's run() function (in the same thread)
        """
        source = range(5)
        destination = Queue()
        drain = Drain(source, destination)
        drain.run()
        assert destination.qsize() == 5
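For context, here is a minimal sketch of what a Drain-like helper could look like, based only on the behaviour these tests exercise. It is an illustration under assumptions, not the actual yandextank implementation: it copies every item from an iterable source into a destination queue, and can be driven either synchronously via run() or in a background thread via start()/wait().

import threading
from queue import Queue


class Drain(threading.Thread):
    """Illustrative sketch: moves items from an iterable source into a queue."""

    def __init__(self, source, destination):
        super().__init__()
        self.source = source
        self.destination = destination
        self._interrupted = threading.Event()

    def run(self):
        # Pull each item from the source and push it into the destination queue.
        for item in self.source:
            if self._interrupted.is_set():
                break
            self.destination.put(item)

    def close(self):
        # Ask a running drain to stop early.
        self._interrupted.set()

    def wait(self, timeout=None):
        # Block until the drain thread has finished.
        self.join(timeout)


if __name__ == "__main__":
    # Usage mirroring Example #1: run() drains the whole source synchronously.
    destination = Queue()
    Drain(range(5), destination).run()
    assert destination.qsize() == 5

Under this sketch, the assertion in Example #1 holds because run() puts every item into the queue before returning, and the final, threaded example holds because wait() blocks until the drain thread has emptied its source.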
Example #2
    def test_partially_reversed_data(self, data):
        results_queue = Queue()
        results = []
        chunks = list(random_split(data))
        # Swap two chunks so the data arrives partially out of order.
        chunks[5], chunks[6] = chunks[6], chunks[5]

        pipeline = Aggregator(
            TimeChopper(DataPoller(source=chunks, poll_period=0.1),
                        cache_size=3), AGGR_CONFIG, False)
        drain = Drain(pipeline, results_queue)
        drain.run()
        assert results_queue.qsize() == MAX_TS
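The pipeline here is a chain of stages (DataPoller feeding TimeChopper feeding Aggregator) that Drain then consumes like any other iterable. The sketch below only illustrates that composition idea with stand-in generator stages; the names and behaviour are assumptions for illustration, not the real yandextank classes.

def data_poller(source, poll_period=0.1):
    # Stand-in for DataPoller: yields chunks from the source as they arrive.
    for chunk in source:
        yield chunk


def time_chopper(stream, cache_size=3):
    # Stand-in for TimeChopper: groups incoming items into fixed-size batches.
    batch = []
    for item in stream:
        batch.append(item)
        if len(batch) == cache_size:
            yield batch
            batch = []
    if batch:
        yield batch


def aggregator(stream):
    # Stand-in for Aggregator: reduces each batch to a single summary value.
    for batch in stream:
        yield sum(batch)


# Drain(pipeline, queue) would simply iterate this composed generator.
pipeline = aggregator(time_chopper(data_poller(range(10))))
print(list(pipeline))  # [3, 12, 21, 9]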
Example #3
    def test_slow_producer(self, data):
        results_queue = Queue()
        results = []
        chunks = list(random_split(data))
        # Swap two chunks so the data arrives partially out of order.
        chunks[5], chunks[6] = chunks[6], chunks[5]

        def producer():
            # Simulate a slow producer: randomly yield None ("no data yet")
            # before yielding the real chunk.
            for chunk in chunks:
                if np.random.random() > 0.5:
                    yield None
                yield chunk

        pipeline = Aggregator(
            TimeChopper(DataPoller(source=producer(), poll_period=0.1),
                        cache_size=3), AGGR_CONFIG, False)
        drain = Drain(pipeline, results_queue)
        drain.run()
        assert results_queue.qsize() == MAX_TS
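This test suggests that the poller treats a None chunk as "no data yet" and waits poll_period before trying again. Below is a minimal sketch of such a polling loop, offered as an assumption about how DataPoller might behave rather than its actual implementation.

import time


def poll(source, poll_period=0.1):
    # Illustrative poller: pass real chunks through, and sleep whenever the
    # producer signals a gap by yielding None.
    for chunk in source:
        if chunk is None:
            time.sleep(poll_period)
            continue
        yield chunk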
Example #4
    def test_interrupt_and_wait(self):
        """
        Test we can start the drain in a background thread and wait
        until it has drained the whole source
        """
        source = range(1000000)
        destination = Queue()
        drain = Drain(source, destination)
        drain.start()
        drain.wait()
        assert destination.qsize() == 1000000