Example 1
def test_agregator(phout, expected_rps):
    generator = PhantomMock(phout)
    aggregator = TankAggregator(generator)
    listener = ListenerMock(expected_rps)
    aggregator.add_result_listener(listener)
    aggregator.start_test(poll_period=0)
    # keep polling until the mock's phout reader reports it is closed
    while not generator.reader.closed:
        aggregator.is_test_finished()
    aggregator.end_test(1)
    # the average RPS observed by the listener should be within 10% of the expected rate
    assert abs(listener.avg - expected_rps) < 0.1 * expected_rps
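ListenerMock and PhantomMock are helpers from the test's conftest and are not shown in these excerpts. A minimal sketch of the listener side, assuming aggregated seconds arrive through an on_aggregated_data(data, stats) callback and that each second reports its request count under data['overall']['interval_real']['len'] (both are assumptions, not taken from the project):

class ListenerMock(object):
    """Hypothetical result listener that averages the observed per-second RPS."""

    def __init__(self, expected_rps):
        self.expected_rps = expected_rps
        self.rps_samples = []

    def on_aggregated_data(self, data, stats):
        # assumed payload shape: one aggregated record per second with a request count
        self.rps_samples.append(data['overall']['interval_real']['len'])

    @property
    def avg(self):
        # average requests per second over the whole run; 0 if nothing arrived
        if not self.rps_samples:
            return 0.0
        return float(sum(self.rps_samples)) / len(self.rps_samples)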
Example 2
def test_agregator(phout, expected_rps):
    generator = PhantomMock(phout)
    aggregator = TankAggregator(generator)
    listener = ListenerMock(expected_rps)
    aggregator.add_result_listener(listener)
    aggregator.start_test(poll_period=0)
    # signal that the mocked generator has produced all of its data
    generator.finished.set()
    # keep polling until the aggregator has drained everything it buffered
    while not aggregator.is_aggr_finished():
        aggregator.is_test_finished()
    aggregator.end_test(1)
    assert abs(listener.avg - expected_rps) < 0.1 * expected_rps
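The generator stand-in is also a conftest helper. A rough sketch under stated assumptions: the aggregator pulls data through get_reader()/get_stats_reader() and the test signals completion via the finished event (the method names are assumptions about the GeneratorPlugin interface; Example 1's variant additionally expects the reader to expose a closed flag, which this sketch omits):

import threading

from yandextank.plugins.Phantom.reader import string_to_df


class PhantomMock(object):
    """Hypothetical generator stub that replays a recorded phout file."""

    def __init__(self, phout_path):
        self.phout_path = phout_path
        self.finished = threading.Event()
        self.reader = None
        self.stats_reader = None

    def get_reader(self):
        # parse the whole phout file into a single DataFrame chunk
        if self.reader is None:
            with open(self.phout_path) as phout_file:
                self.reader = iter([string_to_df(phout_file.read())])
        return self.reader

    def get_stats_reader(self):
        # no per-second stats in this sketch
        if self.stats_reader is None:
            self.stats_reader = iter([])
        return self.stats_reader

    def end_test(self, retcode):
        return retcode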
Example 3
def job(self):
    if not self._job:
        # monitoring plugin
        monitorings = [plugin for plugin in self.plugins.values() if isinstance(plugin, MonitoringPlugin)]
        # generator plugin
        try:
            gen = self.get_plugin_of_type(GeneratorPlugin)
        except KeyError:
            logger.warning("Load generator not found")
            gen = GeneratorPlugin(self, {}, 'generator dummy')
        # aggregator
        aggregator = TankAggregator(gen)
        self._job = Job(monitoring_plugins=monitorings,
                        generator_plugin=gen,
                        aggregator=aggregator,
                        tank=socket.getfqdn())
    return self._job
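The property builds the Job once and caches it in self._job; other plugins then reach the shared aggregator through core.job. A hedged sketch of that consumption pattern (the plugin class and its methods below are illustrative, not taken from the project):

class ReportingPlugin(object):
    """Hypothetical plugin that subscribes to aggregated results via the shared job."""

    def __init__(self, core):
        self.core = core
        self.results = []

    def configure(self):
        # first access to core.job triggers the lazy construction shown above
        self.core.job.aggregator.add_result_listener(self)

    def on_aggregated_data(self, data, stats):
        # assumed listener callback signature
        self.results.append((data, stats))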
Example 4
def job(self):
    if not self._job:
        # monitoring plugin
        try:
            mon = self.get_plugin_of_type(TelegrafPlugin)
        except KeyError:
            logger.debug("Telegraf plugin not found:", exc_info=True)
            mon = None
        # generator plugin
        try:
            gen = self.get_plugin_of_type(GeneratorPlugin)
        except KeyError:
            logger.warning("Load generator not found")
            gen = GeneratorPlugin()
        # aggregator
        aggregator = TankAggregator(gen)
        self._job = Job(monitoring_plugin=mon,
                        generator_plugin=gen,
                        aggregator=aggregator,
                        tank=socket.getfqdn())
    return self._job
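Both versions fall back to a dummy generator when get_plugin_of_type raises KeyError. Inferred from these callers, the lookup behaves roughly like the sketch below (an assumption about the core's implementation, not its actual code):

def get_plugin_of_type(self, plugin_class):
    # return the first registered plugin that is an instance of plugin_class,
    # or raise KeyError so callers can substitute a default
    matches = [p for p in self.plugins.values() if isinstance(p, plugin_class)]
    if not matches:
        raise KeyError('no plugin of type %s is registered' % plugin_class.__name__)
    return matches[0]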
Example 5
import json
import numpy as np
import pytest

from queue import Queue
from conftest import MAX_TS, random_split

from yandextank.aggregator import TankAggregator
from yandextank.aggregator.aggregator import Aggregator, DataPoller
from yandextank.aggregator.chopper import TimeChopper
from yandextank.plugins.Phantom.reader import string_to_df

from netort.data_processing import Drain

AGGR_CONFIG = TankAggregator.load_config()


class TestPipeline(object):
    def test_partially_reversed_data(self, data):
        results_queue = Queue()
        chunks = list(random_split(data))
        # swap two neighbouring chunks so the data arrives partially out of order
        chunks[5], chunks[6] = chunks[6], chunks[5]

        pipeline = Aggregator(
            TimeChopper(DataPoller(source=chunks, poll_period=0.1),
                        cache_size=3), AGGR_CONFIG, False)
        drain = Drain(pipeline, results_queue)
        drain.run()
        assert results_queue.qsize() == MAX_TS

    def test_slow_producer(self, data):
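The data fixture, random_split and MAX_TS come from the test's conftest, which is not part of these excerpts. A rough sketch under stated assumptions: data is a per-second DataFrame with a 'ts' column (as string_to_df would produce from a phout file) and MAX_TS is the number of distinct seconds in it; the column names and sizes below are hypothetical:

import numpy as np
import pandas as pd
import pytest

MAX_TS = 300  # assumed: number of distinct per-second timestamps in the fixture


@pytest.fixture
def data():
    # hypothetical fixture: MAX_TS seconds of synthetic records, 10 rows per second
    return pd.DataFrame({
        'ts': np.repeat(np.arange(MAX_TS), 10),
        'interval_real': np.random.randint(1000, 2000, MAX_TS * 10),
    })


def random_split(df, min_size=5, max_size=50):
    # hypothetical helper: yield the frame in consecutive chunks of random size
    start = 0
    while start < len(df):
        size = np.random.randint(min_size, max_size)
        yield df.iloc[start:start + size]
        start += size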
Example 6
def test_agregator(phout, expected_rps):
    generator = PhantomMock(os.path.join(PATH, phout))
    aggregator = TankAggregator(generator)
    listener = ListenerMock(expected_rps)
    aggregator.add_result_listener(listener)
    aggregator.start_test(poll_period=0)
    generator.finished.set()
    while not aggregator.is_aggr_finished():
        aggregator.is_test_finished()
    aggregator.end_test(1)
    assert abs(listener.avg - expected_rps) < 0.1 * expected_rps
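This variant resolves the phout name against a PATH base directory, which suggests the test is parametrized over recorded phout files with known, constant request rates. A hypothetical parametrization (the file names and rates are made up for illustration):

import os

import pytest

PATH = os.path.dirname(__file__)  # assumed: phout fixtures live next to the test


@pytest.mark.parametrize('phout, expected_rps', [
    ('phout_100rps.log', 100),
    ('phout_10rps.log', 10),
])
def test_agregator(phout, expected_rps):
    ...  # body as in the example above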
Example 7
import json
import numpy as np
import pytest

from queue import Queue
from conftest import MAX_TS, random_split

from yandextank.aggregator import TankAggregator
from yandextank.aggregator.aggregator import Aggregator, DataPoller
from yandextank.aggregator.chopper import TimeChopper
from yandextank.plugins.Phantom.reader import string_to_df

from netort.data_processing import Drain


AGGR_CONFIG = TankAggregator.load_config()


class TestPipeline(object):
    def test_partially_reversed_data(self, data):
        results_queue = Queue()
        chunks = list(random_split(data))
        chunks[5], chunks[6] = chunks[6], chunks[5]

        pipeline = Aggregator(
            TimeChopper(
                DataPoller(
                    source=chunks, poll_period=0.1), cache_size=3),
            AGGR_CONFIG,
            False)
        drain = Drain(pipeline, results_queue)
        drain.run()
        assert results_queue.qsize() == MAX_TS
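Drain comes from netort.data_processing; here it is driven synchronously (run() rather than start()), so it simply exhausts the pipeline iterable into the results queue. Functionally that is roughly equivalent to the sketch below (an approximation, not netort's implementation):

def drain_into(pipeline, results_queue):
    # pull every aggregated item out of the pipeline and park it in the queue
    for item in pipeline:
        results_queue.put(item)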