Example #1
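A pytest unit test that checks DataSession creates its artifacts directory and writes meta.json; per the TODO, those are only guaranteed to exist after close() for now.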
import json
import os
import pathlib
import time

# NOTE: import path assumed; these examples use DataSession from the netort package
from netort.data_manager import DataSession


def test_dir_created(tmp_path):
    artifacts_base_dir = tmp_path / "logs"
    config = {
        'clients': [{
            'type': 'local_storage',
        }],
        'test_start': int(time.time() * 1e6),
        'artifacts_base_dir': str(artifacts_base_dir)
    }
    data_session = DataSession(config=config)
    # TODO: make this pass. The DataSession dir and meta.json should be created as soon as possible
    # assert os.path.isdir(artifacts_base_dir), "Artifacts base dir should exist after the datasession has been created"
    # assert os.path.isdir(data_session.artifacts_dir), "Artifacts dir should exist after the datasession has been created"
    data_session.close()
    assert os.path.isdir(
        artifacts_base_dir
    ), "Artifacts base dir should exist after the datasession has ended"
    assert os.path.isdir(
        data_session.artifacts_dir
    ), "Artifacts dir should exist after the datasession has ended"
    assert os.path.isfile(
        pathlib.Path(data_session.artifacts_dir) /
        'meta.json'), "Metadata file should have been created"

    with open(pathlib.Path(data_session.artifacts_dir) /
              'meta.json') as meta_file:
        meta = json.load(meta_file)

    assert 'job_meta' in meta, "Metadata should have been written to meta.json"
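To run it, invoke pytest on the containing file (tmp_path is a built-in pytest fixture), e.g. pytest -q test_data_session.py; the file name here is illustrative.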
Example #2
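A command-line script that replays a phantom output (phout) file into a DataSession, reading the file in chunks and uploading to a luna backend plus local storage.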
import argparse
import signal
from datetime import datetime

from netort.data_manager import DataSession  # import path assumed
# string_to_df_microsec, get_uploader and get_handler are project-local helpers
# whose imports were omitted from the original excerpt


def main():
    parser = argparse.ArgumentParser(description='Process phantom output.')
    parser.add_argument('phout', type=str, help='path to phantom output file')
    parser.add_argument(
        '--url',
        type=str,
        default='https://volta-back-testing.common-int.yandex-team.ru/')
    parser.add_argument('--name',
                        type=str,
                        help='test name',
                        default=str(datetime.utcnow()))
    parser.add_argument('--db_name',
                        type=str,
                        help='ClickHouse database name',
                        default='luna_test')
    args = parser.parse_args()

    clients = [{
        'type': 'luna',
        'api_address': args.url,
        'db_name': args.db_name
    }, {
        'type': 'local_storage'
    }]
    data_session = DataSession({'clients': clients})
    data_session.update_job({'name': args.name})
    print('Test name: %s' % args.name)

    col_map_aggr = {
        name: 'metric %s' % name
        for name in [
            'interval_real', 'connect_time', 'send_time', 'latency',
            'receive_time', 'interval_event'
        ]
    }
    uploader = get_uploader(data_session, col_map_aggr, True)

    # install a SIGINT handler (from get_handler) so Ctrl+C can stop the upload cleanly
    signal.signal(signal.SIGINT, get_handler(data_session))

    # Stream the phout file in 128 KiB chunks, keeping any trailing partial
    # line in `buffer` so that only whole lines are parsed and uploaded.
    with open(args.phout) as f:
        buffer = ''
        while True:
            parts = f.read(128 * 1024)
            try:
                # split off the last, possibly incomplete, line
                chunk, new_buffer = parts.rsplit('\n', 1)
                chunk = buffer + chunk + '\n'
                buffer = new_buffer
            except ValueError:
                # no newline in this read (typically EOF): flush what is left
                chunk = buffer + parts
                buffer = ''
            if len(chunk) > 0:
                df = string_to_df_microsec(chunk)
                uploader(df)
            else:
                break
    data_session.close()
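A typical invocation (script name illustrative): python process_phout.py ./phout.log --name 'smoke test'. The SIGINT handler installed above presumably lets Ctrl+C close the data session cleanly before exit.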
Example #3
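A Yandex.Tank plugin (neuploader section) that feeds the generator plugin's reader into a DataSession uploader and attaches job metadata on cleanup.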
import logging

# NOTE: import paths assumed; get_uploader is a project-local helper whose
# import was omitted from the original excerpt
from netort.data_manager import DataSession
from yandextank.common.interfaces import AbstractPlugin, MonitoringDataListener
from yandextank.plugins.Phantom.reader import string_to_df_microsec

logger = logging.getLogger(__name__)


class Plugin(AbstractPlugin, MonitoringDataListener):
    SECTION = 'neuploader'

    def __init__(self, core, cfg, name):
        super(Plugin, self).__init__(core, cfg, name)
        self._is_telegraf = None
        self.clients_cfg = [{
            'type': 'luna',
            'api_address': self.cfg.get('api_address'),
            'db_name': self.cfg.get('db_name')
        }]

    def configure(self):
        pass

    def start_test(self):
        try:
            self.reader = self.core.job.generator_plugin.get_reader(
                parser=string_to_df_microsec)
        except TypeError:
            logger.error('Generator plugin does not support NeUploader')
            self.is_test_finished = lambda: -1
            self.reader = []
        else:
            self.data_session = DataSession({'clients': self.clients_cfg})
            self.add_cleanup(self.cleanup)
            self.data_session.update_job({'name': self.cfg.get('test_name')})
            col_map_aggr = {
                name: 'metric %s' % name
                for name in [
                    'interval_real', 'connect_time', 'send_time', 'latency',
                    'receive_time', 'interval_event'
                ]
            }
            self.uploader = get_uploader(self.data_session, col_map_aggr, True)

    def cleanup(self):
        uploader_metainfo = self.map_uploader_tags(
            self.core.status.get('uploader'))
        self.data_session.update_job(uploader_metainfo)
        self.data_session.close()

    def is_test_finished(self):
        # upload the next chunk; -1 tells the tank core the test is still running
        df = next(self.reader)
        if df is not None:
            self.uploader(df)
        return -1

    def monitoring_data(self, data_list):
        pass

    def post_process(self, retcode):
        for chunk in self.reader:
            if chunk is not None:
                self.uploader(chunk)
        return retcode

    @property
    def is_telegraf(self):
        return True

    @staticmethod
    def map_uploader_tags(uploader_tags):
        tags = {
            'component': uploader_tags.get('component'),
            'description': uploader_tags.get('job_dsc'),
            'name': uploader_tags.get('job_name'),
            'person': uploader_tags.get('person'),
            'task': uploader_tags.get('task'),
            'version': uploader_tags.get('version'),
            'lunapark_jobno': uploader_tags.get('job_no'),
        }
        tags.update(uploader_tags.get('meta', {}))
        return tags
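For illustration (values invented): keys missing from uploader_tags come back as None, and everything under 'meta' is merged on top, overriding the fixed keys on collision:

Plugin.map_uploader_tags({'component': 'api', 'job_no': 123, 'meta': {'env': 'prod'}})
# -> {'component': 'api', 'description': None, 'name': None, 'person': None,
#     'task': None, 'version': None, 'lunapark_jobno': 123, 'env': 'prod'}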
Example #4
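A later revision of the same plugin: instead of one aggregate uploader it keeps a col_map of metric constructors (true metrics for timings, event metrics for proto/net codes) and lazily caches per-case metric objects.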
import logging

# NOTE: import paths assumed, as in the previous example
from netort.data_manager import DataSession
from yandextank.common.interfaces import AbstractPlugin, MonitoringDataListener
from yandextank.plugins.Phantom.reader import string_to_df_microsec

logger = logging.getLogger(__name__)


class Plugin(AbstractPlugin, MonitoringDataListener):
    SECTION = 'neuploader'
    importance_high = {'interval_real', 'proto_code', 'net_code'}

    def __init__(self, core, cfg, name):
        super(Plugin, self).__init__(core, cfg, name)
        self._is_telegraf = None
        self.clients_cfg = [{
            'type': 'luna',
            'api_address': self.cfg.get('api_address'),
            'db_name': self.cfg.get('db_name')
        }]
        self.metrics_objs = {}  # map of case names and metric objects
        self.monitoring_metrics = {}

    def configure(self):
        pass

    def start_test(self):
        try:
            self.reader = self.core.job.generator_plugin.get_reader(
                parser=string_to_df_microsec)
        except TypeError:
            logger.error('Generator plugin does not support NeUploader')
            self.is_test_finished = lambda: -1
            self.reader = []
        else:
            self.data_session = DataSession({'clients': self.clients_cfg})
            self.add_cleanup(self._cleanup)
            self.data_session.update_job({
                'name': self.cfg.get('test_name'),
                '__type': 'tank'
            })
            self.col_map = {
                'interval_real': self.data_session.new_true_metric,
                'connect_time': self.data_session.new_true_metric,
                'send_time': self.data_session.new_true_metric,
                'latency': self.data_session.new_true_metric,
                'receive_time': self.data_session.new_true_metric,
                'interval_event': self.data_session.new_true_metric,
                'net_code': self.data_session.new_event_metric,
                'proto_code': self.data_session.new_event_metric
            }

    def _cleanup(self):
        uploader_metainfo = self.map_uploader_tags(
            self.core.status.get('uploader'))
        self.data_session.update_job(uploader_metainfo)
        self.data_session.close()

    def is_test_finished(self):
        df = next(self.reader)
        if df is not None:
            self.upload(df)
        return -1

    def monitoring_data(self, data_list):
        self.upload_monitoring(data_list)

    def post_process(self, retcode):
        for chunk in self.reader:
            if chunk is not None:
                self.upload(chunk)
        return retcode

    @property
    def is_telegraf(self):
        return True

    def get_metric_obj(self, col, case):
        """
        Lazy accessor for metric objects:
        returns the existing metric for (col, case), creating all of the
        case's metrics on first access.
        :param col:  str with column name
        :param case: str with case name
        :return: metric object
        """
        case_metrics = self.metrics_objs.get(case)
        if case_metrics is None:
            # parent = self.metrics_objs.get('__overall__', {}).get(col)
            case_metrics = {
                col_name: constructor(
                    name='{} {}'.format(col_name, case),
                    raw=False,
                    aggregate=True,
                    source='tank',
                    importance='high' if col_name in self.importance_high else '')
                for col_name, constructor in self.col_map.items()
            }
            self.metrics_objs[case] = case_metrics
        # the excerpt was cut off here; the docstring implies the metric is
        # returned, so the return is restored below (not in the original)
        return self.metrics_objs[case][col]
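The upload()/upload_monitoring() methods referenced above are outside this excerpt. A call site would fetch a metric lazily, roughly as below (the column and case names are taken from col_map and the commented-out '__overall__' line; the put()-style writer on the netort metric object is an assumption):

metric = plugin.get_metric_obj('interval_real', '__overall__')
metric.put(df)  # hand the parsed dataframe chunk to the metric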