コード例 #1
0
 def test_run(self):
     """
     Run the drain synchronously (no extra thread) and verify that
     every source item ends up in the destination queue.
     """
     items = range(5)
     out_queue = Queue()
     worker = Drain(items, out_queue)
     worker.run()
     assert out_queue.qsize() == 5
コード例 #2
0
ファイル: test_util.py プロジェクト: takanabe/yandex-tank
 def test_run(self):
     """Drain.run() executed inline must transfer the whole source
     into the destination queue before returning."""
     expected_count = 5
     dst = Queue()
     Drain(range(expected_count), dst).run()
     assert dst.qsize() == expected_count
コード例 #3
0
 def test_interrupt_and_wait(self):
     """
     Start the drain on its own thread and wait for it to finish.
     NOTE(review): despite the name, nothing here interrupts the
     drain — it is simply joined until the source is exhausted.
     """
     total = 1000000
     sink = Queue()
     worker = Drain(range(total), sink)
     worker.start()
     worker.join()
     assert sink.qsize() == total
コード例 #4
0
ファイル: test_pipeline.py プロジェクト: evro23x/yandex-tank
    def test_partially_reversed_data(self, data):
        """Aggregation must emit a result for every timestamp even when
        two adjacent chunks arrive out of order."""
        out = Queue()
        pieces = list(random_split(data))
        # Swap two neighbouring chunks to simulate out-of-order arrival.
        pieces[5], pieces[6] = pieces[6], pieces[5]

        poller = DataPoller(source=pieces, poll_period=0.1)
        pipeline = Aggregator(TimeChopper(poller, cache_size=3),
                              AGGR_CONFIG, False)
        Drain(pipeline, out).run()
        assert out.qsize() == MAX_TS
コード例 #5
0
    def test_partially_reversed_data(self, data):
        """
        Swap two neighbouring chunks and check that the aggregator
        still produces a result for every timestamp.
        """
        results = Queue()
        split = list(random_split(data))
        split[5], split[6] = split[6], split[5]

        chopper = TimeChopper(
            DataPoller(source=split, poll_period=0.1), cache_size=3)
        Drain(Aggregator(chopper, AGGR_CONFIG, False), results).run()
        assert results.qsize() == MAX_TS
コード例 #6
0
ファイル: test_pipeline.py プロジェクト: evro23x/yandex-tank
    def test_slow_producer(self, data):
        """A source that intermittently yields None ("no data yet")
        must not lose any timestamps."""
        sink = Queue()
        parts = list(random_split(data))
        parts[5], parts[6] = parts[6], parts[5]

        def stuttering_source():
            # Randomly emit a None before a chunk to mimic a producer
            # that sometimes has nothing ready.
            for part in parts:
                if np.random.random() > 0.5:
                    yield None
                yield part

        poller = DataPoller(source=stuttering_source(), poll_period=0.1)
        pipeline = Aggregator(TimeChopper(poller, cache_size=3),
                              AGGR_CONFIG, False)
        Drain(pipeline, sink).run()
        assert sink.qsize() == MAX_TS
コード例 #7
0
    def test_slow_producer(self, data):
        """
        Verify aggregation is complete when the producer sometimes has
        nothing ready and yields None between chunks.
        """
        out_q = Queue()
        chunk_list = list(random_split(data))
        chunk_list[5], chunk_list[6] = chunk_list[6], chunk_list[5]

        def producer():
            for item in chunk_list:
                # None means "no data available yet".
                if np.random.random() > 0.5:
                    yield None
                yield item

        agg = Aggregator(
            TimeChopper(DataPoller(source=producer(), poll_period=0.1),
                        cache_size=3),
            AGGR_CONFIG, False)
        Drain(agg, out_q).run()
        assert out_q.qsize() == MAX_TS
コード例 #8
0
 def start_test(self):
     """
     Wire the generator's readers into aggregation pipelines and start
     draining them on background threads.

     Starts two Drain threads: one aggregating load data into
     ``self.results`` and one chopping stats into ``self.stats``.
     Logs a warning (instead of raising) when the generator does not
     provide both readers.
     """
     self.reader = self.generator.get_reader()
     self.stats_reader = self.generator.get_stats_reader()
     # NOTE(review): load_config() presumably returns the aggregator
     # config mapping — confirm against its definition.
     aggregator_config = self.load_config()
     verbose_histogram = True  # hard-coded on; kept as a named toggle
     if verbose_histogram:
         logger.info("using verbose histogram")
     if self.reader and self.stats_reader:
         # Load data: poll the reader -> group by time -> aggregate.
         pipeline = Aggregator(
             TimeChopper(DataPoller(source=self.reader, poll_period=1),
                         cache_size=3), aggregator_config,
             verbose_histogram)
         self.drain = Drain(pipeline, self.results)
         self.drain.start()
         # Stats: poll -> chop; no aggregation step needed.
         self.stats_drain = Drain(
             Chopper(DataPoller(source=self.stats_reader, poll_period=1)),
             self.stats)
         self.stats_drain.start()
     else:
         logger.warning(
             "Generator not found. Generator must provide a reader and a stats_reader interface"
         )
コード例 #9
0
 def start_test(self):
     """
     Build the aggregation pipelines from the generator's readers and
     start draining them on background threads.

     Raises PluginImplementationError when the generator does not
     provide both a reader and a stats reader.
     """
     self.reader = self.generator.get_reader()
     self.stats_reader = self.generator.get_stats_reader()
     # Aggregator configuration shipped with the package.
     aggregator_config = json.loads(
         resource_string(__name__, 'config/phout.json').decode('utf8'))
     verbose_histogram = True  # hard-coded on; kept as a named toggle
     if verbose_histogram:
         logger.info("using verbose histogram")
     if self.reader and self.stats_reader:
         # Load data: poll the reader -> group by time -> aggregate.
         pipeline = Aggregator(
             TimeChopper(DataPoller(source=self.reader, poll_period=1),
                         cache_size=3), aggregator_config,
             verbose_histogram)
         self.drain = Drain(pipeline, self.results)
         self.drain.start()
         # Stats: poll -> chop; no aggregation step needed.
         self.stats_drain = Drain(
             Chopper(DataPoller(source=self.stats_reader, poll_period=1)),
             self.stats)
         self.stats_drain.start()
     else:
         raise PluginImplementationError(
             "Generator must pass a Reader and a StatsReader"
             " to Aggregator before starting test")
コード例 #10
0
ファイル: test_util.py プロジェクト: takanabe/yandex-tank
 def test_interrupt_and_wait(self):
     """Run the drain on a background thread and block until it has
     pushed the entire source into the destination queue."""
     n_items = 1000000
     dest = Queue()
     background_drain = Drain(range(n_items), dest)
     background_drain.start()
     background_drain.join()
     assert dest.qsize() == n_items
コード例 #11
0
class TankAggregator(object):
    """
    Plugin that manages aggregation and stats collection.

    Drains load data and stats coming from a generator plugin through
    two pipelines, pairs the results by timestamp, and forwards each
    (data, stats) pair to the registered listeners.
    """

    SECTION = 'aggregator'

    @staticmethod
    def get_key():
        return __file__

    def __init__(self, generator):
        # AbstractPlugin.__init__(self, core, cfg)
        """
        :type generator: GeneratorPlugin
        """
        self.generator = generator
        self.listeners = []  # [LoggingListener()]
        self.results = q.Queue()
        self.stats = q.Queue()
        # Per-timestamp caches used to pair a data item with the stats
        # item carrying the same 'ts'.
        self.data_cache = {}
        self.stat_cache = {}
        # Initialize the attributes that start_test() assigns, so that
        # end_test() does not raise AttributeError when start_test()
        # was never called or failed before reaching the assignments.
        self.reader = None
        self.stats_reader = None
        self.drain = None
        self.stats_drain = None

    def start_test(self):
        """
        Build the aggregation pipelines from the generator's readers
        and start draining them on background threads.

        Raises:
            PluginImplementationError: when the generator does not
                provide both a reader and a stats reader.
        """
        self.reader = self.generator.get_reader()
        self.stats_reader = self.generator.get_stats_reader()
        # Aggregator configuration shipped with the package.
        aggregator_config = json.loads(
            resource_string(__name__, 'config/phout.json').decode('utf8'))
        verbose_histogram = True  # hard-coded on; kept as a named toggle
        if verbose_histogram:
            logger.info("using verbose histogram")
        if self.reader and self.stats_reader:
            # Load data: poll the reader -> group by time -> aggregate.
            pipeline = Aggregator(
                TimeChopper(DataPoller(source=self.reader, poll_period=1),
                            cache_size=3), aggregator_config,
                verbose_histogram)
            self.drain = Drain(pipeline, self.results)
            self.drain.start()
            # Stats: poll -> chop by timestamp; no aggregation needed.
            self.stats_drain = Drain(
                Chopper(DataPoller(source=self.stats_reader, poll_period=1)),
                self.stats)
            self.stats_drain.start()
        else:
            raise PluginImplementationError(
                "Generator must pass a Reader and a StatsReader"
                " to Aggregator before starting test")

    def _collect_data(self):
        """
        Collect data, cache it and send to listeners.

        Data and stats arrive on independent queues; a pair is only
        delivered once both halves with the same timestamp are known,
        the earlier half waiting in its cache until the other arrives.
        """
        data = get_from_queue(self.results)
        stats = get_from_queue(self.stats)
        # Lazy %-args: the lists are only rendered when DEBUG is enabled.
        logger.debug("Data timestamps:\n%s", [d.get('ts') for d in data])
        logger.debug("Stats timestamps:\n%s", [d.get('ts') for d in stats])
        for item in data:
            ts = item['ts']
            if ts in self.stat_cache:
                # Matching stats already arrived -- deliver the pair.
                self.__notify_listeners(item, self.stat_cache.pop(ts))
            else:
                self.data_cache[ts] = item
        for item in stats:
            ts = item['ts']
            if ts in self.data_cache:
                # Matching data already arrived -- deliver the pair.
                self.__notify_listeners(self.data_cache.pop(ts), item)
            else:
                self.stat_cache[ts] = item

    def is_test_finished(self):
        """Poll hook: ship whatever has arrived; -1 means 'keep running'."""
        self._collect_data()
        return -1

    def end_test(self, retcode):
        """
        Shut down readers and drains, then flush any remaining data.

        :param retcode: proposed test return code; the generator may
            adjust it in its own end_test()
        :return: the (possibly updated) return code
        """
        retcode = self.generator.end_test(retcode)
        if self.reader:
            self.reader.close()
        if self.stats_reader:
            self.stats_reader.close()
        if self.drain:
            self.drain.join()
        if self.stats_drain:
            self.stats_drain.join()
        self._collect_data()
        return retcode

    def add_result_listener(self, listener):
        """Register a listener for paired (data, stats) notifications."""
        self.listeners.append(listener)

    def __notify_listeners(self, data, stats):
        """ notify all listeners about aggregate data and stats """
        for listener in self.listeners:
            listener.on_aggregated_data(data, stats)