def test_run(self):
    """Running the drain synchronously moves every source item into the queue."""
    items = range(5)
    out_queue = Queue()
    worker = Drain(items, out_queue)
    worker.run()
    assert out_queue.qsize() == 5
def test_interrupt_and_wait(self):
    """Start the drain in its own thread and join it; no items may be lost."""
    total = 1000000
    src = range(total)
    dst = Queue()
    worker = Drain(src, dst)
    worker.start()
    worker.join()
    assert dst.qsize() == total
def test_partially_reversed_data(self, data):
    """Aggregation still produces MAX_TS results when two chunks arrive swapped."""
    out_queue = Queue()
    pieces = list(random_split(data))
    # Swap two neighbouring chunks to simulate slightly out-of-order input.
    pieces[5], pieces[6] = pieces[6], pieces[5]
    poller = DataPoller(source=pieces, poll_period=0.1)
    aggregator = Aggregator(
        TimeChopper(poller, cache_size=3), AGGR_CONFIG, False)
    Drain(aggregator, out_queue).run()
    assert out_queue.qsize() == MAX_TS
def start_test(self):
    """Build the aggregation pipelines from the generator's readers and start them.

    Wires reader -> DataPoller -> TimeChopper -> Aggregator -> Drain for the
    main data stream, and stats_reader -> DataPoller -> Chopper -> Drain for
    statistics, starting one Drain thread per stream.  If the generator does
    not provide both readers, a warning is logged and nothing is started.
    """
    self.reader = self.generator.get_reader()
    self.stats_reader = self.generator.get_stats_reader()
    aggregator_config = self.load_config()
    # Verbose histograms are currently hard-wired on; the flag is kept as a
    # variable so it can be made configurable later.  (The original code
    # guarded the log call with `if verbose_histogram:` — an always-true,
    # dead conditional — so logging unconditionally is equivalent.)
    verbose_histogram = True
    logger.info("using verbose histogram")
    if self.reader and self.stats_reader:
        pipeline = Aggregator(
            TimeChopper(
                DataPoller(source=self.reader, poll_period=1),
                cache_size=3),
            aggregator_config,
            verbose_histogram)
        self.drain = Drain(pipeline, self.results)
        self.drain.start()
        self.stats_drain = Drain(
            Chopper(DataPoller(source=self.stats_reader, poll_period=1)),
            self.stats)
        self.stats_drain.start()
    else:
        logger.warning(
            "Generator not found. Generator must provide a reader and a stats_reader interface"
        )
def start_test(self):
    """Build the aggregation pipelines from the generator's readers and start them.

    Loads the aggregator configuration from the packaged phout.json resource,
    then wires reader -> DataPoller -> TimeChopper -> Aggregator -> Drain for
    the main data stream and stats_reader -> DataPoller -> Chopper -> Drain
    for statistics, starting one Drain thread per stream.

    Raises:
        PluginImplementationError: if the generator does not provide both a
            reader and a stats_reader.
    """
    self.reader = self.generator.get_reader()
    self.stats_reader = self.generator.get_stats_reader()
    aggregator_config = json.loads(
        resource_string(__name__, 'config/phout.json').decode('utf8'))
    # Verbose histograms are currently hard-wired on; the flag is kept as a
    # variable so it can be made configurable later.  (The original code
    # guarded the log call with `if verbose_histogram:` — an always-true,
    # dead conditional — so logging unconditionally is equivalent.)
    verbose_histogram = True
    logger.info("using verbose histogram")
    if self.reader and self.stats_reader:
        pipeline = Aggregator(
            TimeChopper(
                DataPoller(source=self.reader, poll_period=1),
                cache_size=3),
            aggregator_config,
            verbose_histogram)
        self.drain = Drain(pipeline, self.results)
        self.drain.start()
        self.stats_drain = Drain(
            Chopper(DataPoller(source=self.stats_reader, poll_period=1)),
            self.stats)
        self.stats_drain.start()
    else:
        raise PluginImplementationError(
            "Generator must pass a Reader and a StatsReader"
            " to Aggregator before starting test")
def test_slow_producer(self, data):
    """A producer that intermittently yields None must not lose any data."""
    out_queue = Queue()
    pieces = list(random_split(data))
    pieces[5], pieces[6] = pieces[6], pieces[5]

    def emit_slowly():
        # Randomly interleave None before chunks to mimic a source that
        # sometimes has no data ready yet.
        for piece in pieces:
            if np.random.random() > 0.5:
                yield None
            yield piece

    poller = DataPoller(source=emit_slowly(), poll_period=0.1)
    aggregator = Aggregator(
        TimeChopper(poller, cache_size=3), AGGR_CONFIG, False)
    Drain(aggregator, out_queue).run()
    assert out_queue.qsize() == MAX_TS