Example #1
import json
import os
import pathlib
import time

# Assumption: DataSession is provided by the netort data manager used by the
# plugin examples below; the exact import path may differ in your checkout.
from netort.data_manager import DataSession


def test_dir_created(tmp_path):
    artifacts_base_dir = tmp_path / "logs"
    config = {
        'clients': [{
            'type': 'local_storage',
        }],
        'test_start': int(time.time() * 1e6),
        'artifacts_base_dir': str(artifacts_base_dir)
    }
    data_session = DataSession(config=config)
    # TODO: make this pass. The DataSession dir and meta.json should be created as soon as possible.
    # assert os.path.isdir(artifacts_base_dir), "Artifacts base dir should exist after the DataSession has been created"
    # assert os.path.isdir(data_session.artifacts_dir), "Artifacts dir should exist after the DataSession has been created"
    data_session.close()
    assert os.path.isdir(
        artifacts_base_dir
    ), "Artifacts base dir should exist after the DataSession has ended"
    assert os.path.isdir(
        data_session.artifacts_dir
    ), "Artifacts dir should exist after the DataSession has ended"
    assert os.path.isfile(
        pathlib.Path(data_session.artifacts_dir) /
        'meta.json'), "Metadata file should have been created"

    with open(pathlib.Path(data_session.artifacts_dir) /
              'meta.json') as meta_file:
        meta = json.load(meta_file)

    assert 'job_meta' in meta, "Metadata should have been written to meta.json"
Example #2
def start_test(self):
    try:
        self.reader = self.core.job.generator_plugin.get_reader(
            parser=string_to_df_microsec)
    except TypeError:
        logger.error('Generator plugin does not support NeUploader')
        self.is_test_finished = lambda: -1
        self.reader = []
    else:
        self.data_session = DataSession({'clients': self.clients_cfg})
        self.add_cleanup(self._cleanup)
        self.data_session.update_job({
            'name': self.cfg.get('test_name'),
            '__type': 'tank'
        })
        self.col_map = {
            'interval_real': self.data_session.new_true_metric,
            'connect_time': self.data_session.new_true_metric,
            'send_time': self.data_session.new_true_metric,
            'latency': self.data_session.new_true_metric,
            'receive_time': self.data_session.new_true_metric,
            'interval_event': self.data_session.new_true_metric,
            'net_code': self.data_session.new_event_metric,
            'proto_code': self.data_session.new_event_metric
        }
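The col_map built above pairs each phout column with a DataSession metric factory: new_true_metric for timings, new_event_metric for response codes. A minimal sketch of how such a map might be consumed inside the plugin, assuming (as in the get_metric_obj examples further down) that each factory takes a meta dict as its first argument:

# Hypothetical consumption of col_map: one metric object per phout column.
metrics = {
    col: make_metric(dict(name=col, source='tank'))
    for col, make_metric in self.col_map.items()
}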
Example #3
def data_session(self):
    """
    :rtype: DataSession
    """
    if self._data_session is None:
        self._data_session = DataSession({'clients': self.clients_cfg},
                                         test_start=self.core.status['generator']['test_start'] * 10**6)
        self.add_cleanup(self._cleanup)
        self._data_session.update_job({'name': self.cfg.get('test_name'),
                                       '__type': 'tank'})
    return self._data_session
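Note the * 10**6 factor: the generator publishes test_start in epoch seconds, while the DataSession API in these examples takes timestamps in microseconds (compare int(time.time() * 1e6) in Example #1).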
Example #4
def data_session(self):
    """
    :rtype: DataSession
    """
    if self._data_session is None:
        self._data_session = DataSession(
            {'clients': self.clients_cfg},
            test_start=self.core.status['generator']['test_start'] * 10**6)
        self.add_cleanup(self._cleanup)
        self._data_session.update_job(
            dict({
                'name': self.test_name,
                '__type': 'tank'
            }, **self.meta))
        job_no = self._data_session.clients[0].job_number
        if job_no:
            self.publish('job_no', int(job_no))
            self.publish('web_link', urljoin(self.LUNA_LINK, job_no))
    return self._data_session
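Because LUNA_LINK ends with a trailing slash (see Example #10, where it is 'https://luna.yandex-team.ru/tests/'), urljoin appends the job number as the final path segment:

# urljoin('https://luna.yandex-team.ru/tests/', '12345')
# -> 'https://luna.yandex-team.ru/tests/12345'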
Example #5
def start_test(self):
    try:
        self.reader = self.core.job.generator_plugin.get_reader(
            parser=string_to_df_microsec)
    except TypeError:
        logger.error('Generator plugin does not support NeUploader')
        self.is_test_finished = lambda: -1
        self.reader = []
    else:
        self.data_session = DataSession({'clients': self.clients_cfg})
        self.add_cleanup(self.cleanup)
        self.data_session.update_job({'name': self.cfg.get('test_name')})
        col_map_aggr = {
            name: 'metric %s' % name
            for name in [
                'interval_real', 'connect_time', 'send_time', 'latency',
                'receive_time', 'interval_event'
            ]
        }
        self.uploader = get_uploader(self.data_session, col_map_aggr, True)
Example #6
def data_session(self):
    """
    :rtype: DataSession
    """
    if self._data_session is None:
        config_filenames = {'validated_conf.yaml', 'configinitial.yaml'}
        self._data_session = DataSession({'clients': self.clients_cfg},
                                         tankapi_info=self.tankapi_info(),
                                         config_filenames=config_filenames,
                                         artifacts_dir=self.core.artifacts_dir,
                                         test_start=self.core.info.get_value(['generator', 'test_start'], 0) * 10**6)
        self.add_cleanup(self._cleanup)
        self._data_session.update_job(dict({'name': self.test_name,
                                            '__type': 'tank'},
                                           **self.meta))
        job_no = self._data_session.clients[0].job_number
        if job_no:
            self.publish('job_no', int(job_no))
            self.publish('web_link', urljoin(self.LUNA_LINK, job_no))
    return self._data_session
Example #7
def data_session(tmp_path):
    artifacts_base_dir = tmp_path / "logs"
    config = {
        'clients': [{
            'type': 'local_storage',
        }],
        'test_start': int(time.time() * 1e6),
        'artifacts_base_dir': str(artifacts_base_dir)
    }
    data_session = DataSession(config=config)
    return data_session
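A minimal sketch of a test consuming this fixture, assuming it is registered with @pytest.fixture and, as asserted in Example #1, that the artifacts directory exists once the session is closed:

def test_artifacts_dir_after_close(data_session):
    # pytest injects the fixture above (once decorated with @pytest.fixture)
    data_session.close()
    assert os.path.isdir(data_session.artifacts_dir)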
Example #8
def start_test(self):
    try:
        self.reader = self.core.job.generator_plugin.get_reader(parser=string_to_df_microsec)
    except TypeError:
        logger.error('Generator plugin does not support NeUploader')
        self.is_test_finished = lambda: -1
        self.reader = []
    else:
        self.data_session = DataSession({'clients': self.clients_cfg})
        self.add_cleanup(self.cleanup)
        self.data_session.update_job({'name': self.cfg.get('test_name')})
        col_map_aggr = {name: 'metric %s' % name for name in
                        ['interval_real', 'connect_time', 'send_time', 'latency',
                         'receive_time', 'interval_event']}
        self.uploader = get_uploader(self.data_session, col_map_aggr, True)
Example #9
import argparse
import signal
from datetime import datetime

# Assumed available from the surrounding project (exact module paths may
# differ): DataSession, string_to_df_microsec, get_uploader, get_handler.


def main():
    parser = argparse.ArgumentParser(description='Process phantom output.')
    parser.add_argument('phout', type=str, help='path to phantom output file')
    parser.add_argument(
        '--url',
        type=str,
        default='https://volta-back-testing.common-int.yandex-team.ru/')
    parser.add_argument('--name',
                        type=str,
                        help='test name',
                        default=str(datetime.utcnow()))
    parser.add_argument('--db_name',
                        type=str,
                        help='ClickHouse database name',
                        default='luna_test')
    args = parser.parse_args()

    clients = [{
        'type': 'luna',
        'api_address': args.url,
        'db_name': args.db_name
    }, {
        'type': 'local_storage'
    }]
    data_session = DataSession({'clients': clients})
    data_session.update_job({'name': args.name})
    print('Test name: %s' % args.name)

    col_map_aggr = {
        name: 'metric %s' % name
        for name in [
            'interval_real', 'connect_time', 'send_time', 'latency',
            'receive_time', 'interval_event'
        ]
    }
    uploader = get_uploader(data_session, col_map_aggr, True)

    signal.signal(signal.SIGINT, get_handler(data_session))

    with open(args.phout) as f:
        buffer = ''
        while True:
            parts = f.read(128 * 1024)  # read the phout file in 128 KiB chunks
            try:
                # Split off the trailing partial line; parse only complete lines.
                chunk, new_buffer = parts.rsplit('\n', 1)
                chunk = buffer + chunk + '\n'
                buffer = new_buffer
            except ValueError:
                # No newline in this read (or EOF): flush whatever is buffered.
                chunk = buffer + parts
                buffer = ''
            if len(chunk) > 0:
                df = string_to_df_microsec(chunk)
                uploader(df)
            else:
                break
    data_session.close()
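The read loop above parses only complete lines from each 128 KiB read and carries the trailing partial line over in buffer. One iteration on a small input, for illustration:

# parts  = 'a\tb\nc\td\ne'  ->  rsplit('\n', 1)  ->  ('a\tb\nc\td', 'e')
# chunk  = buffer + 'a\tb\nc\td' + '\n'   parsed right away
# buffer = 'e'   completed by the next read, or flushed via the ValueError path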
Example #10
class Plugin(AbstractPlugin, MonitoringDataListener):
    SECTION = 'neuploader'
    importance_high = {'interval_real', 'proto_code', 'net_code'}
    OVERALL = '__overall__'
    LUNA_LINK = 'https://luna.yandex-team.ru/tests/'
    PLANNED_RPS_METRICS_NAME = 'planned_rps'
    ACTUAL_RPS_METRICS_NAME = 'actual_rps'

    def __init__(self, core, cfg, name):
        super(Plugin, self).__init__(core, cfg, name)
        self.clients_cfg = [{
            'type': 'luna',
            'api_address': self.cfg.get('api_address'),
            'db_name': self.cfg.get('db_name')
        }]
        self.metrics_objs = {}  # map of case names to metric objects
        self.monitoring_metrics = {}
        self.rps_metrics = {
            'actual_rps_metrics_obj': None,
            'planned_rps_metrics_obj': None,
            # explicit dtype avoids a pandas FutureWarning on empty Series
            'actual_rps_latest': pandas.Series([], dtype=float)
        }
        self.rps_uploader = th.Thread(target=self.upload_planned_rps)

        self._col_map = None
        self._data_session = None
        self._meta = None
        self._test_name = None

    @property
    def meta(self):
        if self._meta is None:
            self._meta = dict(self.cfg.get('meta', {}),
                              component=self.core.status.get(
                                  'uploader', {}).get('component'))
        return self._meta

    @property
    def test_name(self):
        if self._test_name is None:
            self._test_name = self.cfg.get(
                'test_name') or self.core.status.get('uploader',
                                                     {}).get('job_name')
        return self._test_name

    def configure(self):
        pass

    def start_test(self):
        try:
            self.reader = self.core.job.generator_plugin.get_reader(
                parser=string_to_df_microsec)
        except TypeError:
            logger.error('Generator plugin does not support NeUploader')
            self.is_test_finished = lambda: -1
            self.reader = []

    @thread_safe_property
    def col_map(self):
        return {
            'interval_real': self.data_session.new_true_metric,
            'connect_time': self.data_session.new_true_metric,
            'send_time': self.data_session.new_true_metric,
            'latency': self.data_session.new_true_metric,
            'receive_time': self.data_session.new_true_metric,
            'interval_event': self.data_session.new_true_metric,
            'net_code': self.data_session.new_event_metric,
            'proto_code': self.data_session.new_event_metric
        }

    @thread_safe_property
    def data_session(self):
        """
        :rtype: DataSession
        """
        if self._data_session is None:
            self._data_session = DataSession(
                {'clients': self.clients_cfg},
                test_start=self.core.status['generator']['test_start'] * 10**6)
            self.add_cleanup(self._cleanup)
            self._data_session.update_job(
                dict({
                    'name': self.test_name,
                    '__type': 'tank'
                }, **self.meta))
            job_no = self._data_session.clients[0].job_number
            if job_no:
                self.publish('job_no', int(job_no))
                self.publish('web_link', urljoin(self.LUNA_LINK, job_no))
        return self._data_session

    def _cleanup(self):
        self.upload_actual_rps(data=pandas.DataFrame([]), last_piece=True)
        uploader_metainfo = self.map_uploader_tags(
            self.core.status.get('uploader'))
        autostop_info = self.get_autostop_info()
        regressions = self.get_regressions_names(uploader_metainfo)
        lp_link = self.core.status.get('uploader', {}).get('web_link')

        uploader_metainfo.update(self.meta)
        uploader_metainfo.update(autostop_info)
        uploader_metainfo['regression'] = regressions
        uploader_metainfo['lunapark_link'] = lp_link

        self.data_session.update_job(uploader_metainfo)
        self.data_session.close(
            test_end=self.core.status.get('generator', {}).get('test_end', 0) *
            10**6)

    def is_test_finished(self):
        df = next(self.reader)
        if df is not None:
            self.upload(df)
        return -1

    def monitoring_data(self, data_list):
        self.upload_monitoring(data_list)

    def post_process(self, retcode):
        try:
            self.rps_uploader.start()
            for chunk in self.reader:
                if chunk is not None:
                    self.upload(chunk)
            self.upload_actual_rps(data=pandas.DataFrame([]), last_piece=True)
            if self.rps_uploader.is_alive():
                self.rps_uploader.join()
        except KeyboardInterrupt:
            logger.warning('Caught KeyboardInterrupt on Neuploader')
            self._cleanup()
        return retcode

    @property
    def is_telegraf(self):
        return True

    def get_metric_obj(self, col, case):
        """
        Generator of metric objects:
        Checks existent metrics and creates new metric if it does not exist.
        :param col:  str with column name
        :param case: str with case name
        :return: metric object
        """

        case_metrics = self.metrics_objs.get(case)
        if case_metrics is None:
            # Use a distinct loop variable so the requested `col` argument is
            # not shadowed before the lookup in the return statement below.
            for col_name, constructor in self.col_map.items():
                self.metrics_objs.setdefault(case, {})[col_name] = constructor(
                    dict(self.meta,
                         name=col_name,
                         source='tank',
                         importance='high'
                         if col_name in self.importance_high else ''),
                    raw=False,
                    aggregate=True,
                    parent=self.get_metric_obj(col_name, self.OVERALL)
                    if case != self.OVERALL else None,
                    case=case if case != self.OVERALL else None)
        return self.metrics_objs[case][col]
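A hypothetical call against the plugin above: the first access for a case builds metric objects for every column and wires each one to its __overall__ parent, while subsequent calls are plain dictionary lookups.

# 'plugin' stands for a configured Plugin instance (hypothetical usage):
latency_metric = plugin.get_metric_obj('latency', 'search_page')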
Example #11
class Plugin(AbstractPlugin, MonitoringDataListener):
    SECTION = 'neuploader'
    importance_high = {'interval_real', 'proto_code', 'net_code'}
    OVERALL = '__overall__'

    def __init__(self, core, cfg, name):
        super(Plugin, self).__init__(core, cfg, name)
        self._is_telegraf = None
        self.clients_cfg = [{
            'type': 'luna',
            'api_address': self.cfg.get('api_address'),
            'db_name': self.cfg.get('db_name')
        }]
        self.metrics_objs = {}  # map of case names to metric objects
        self.monitoring_metrics = {}
        self._col_map = None
        self._data_session = None

    def configure(self):
        pass

    def start_test(self):
        try:
            self.reader = self.core.job.generator_plugin.get_reader(
                parser=string_to_df_microsec)
        except TypeError:
            logger.error('Generator plugin does not support NeUploader')
            self.is_test_finished = lambda: -1
            self.reader = []

    @thread_safe_property
    def col_map(self):
        return {
            'interval_real': self.data_session.new_true_metric,
            'connect_time': self.data_session.new_true_metric,
            'send_time': self.data_session.new_true_metric,
            'latency': self.data_session.new_true_metric,
            'receive_time': self.data_session.new_true_metric,
            'interval_event': self.data_session.new_true_metric,
            'net_code': self.data_session.new_event_metric,
            'proto_code': self.data_session.new_event_metric
        }

    @thread_safe_property
    def data_session(self):
        """
        :rtype: DataSession
        """
        if self._data_session is None:
            self._data_session = DataSession(
                {'clients': self.clients_cfg},
                test_start=self.core.status['generator']['test_start'] * 10**6)
            self.add_cleanup(self._cleanup)
            self._data_session.update_job(
                dict({
                    'name': self.cfg.get('test_name'),
                    '__type': 'tank'
                }, **self.cfg.get('meta', {})))
        return self._data_session

    def _cleanup(self):
        uploader_metainfo = self.map_uploader_tags(
            self.core.status.get('uploader'))
        if self.core.status.get('autostop'):
            autostop_rps = self.core.status.get('autostop', {}).get('rps', 0)
            autostop_reason = self.core.status.get('autostop',
                                                   {}).get('reason', '')
            self.log.warning('Autostop: %s %s', autostop_rps, autostop_reason)
            uploader_metainfo.update({
                'autostop_rps': autostop_rps,
                'autostop_reason': autostop_reason
            })
        uploader_metainfo.update(self.cfg.get('meta', {}))
        self.data_session.update_job(uploader_metainfo)
        self.data_session.close(
            test_end=self.core.status.get('generator', {}).get('test_end', 0) *
            10**6)

    def is_test_finished(self):
        df = next(self.reader)
        if df is not None:
            self.upload(df)
        return -1

    def monitoring_data(self, data_list):
        self.upload_monitoring(data_list)

    def post_process(self, retcode):
        try:
            for chunk in self.reader:
                if chunk is not None:
                    self.upload(chunk)
        except KeyboardInterrupt:
            logger.warning('Caught KeyboardInterrupt on Neuploader')
            self._cleanup()
        return retcode

    @property
    def is_telegraf(self):
        return True

    def get_metric_obj(self, col, case):
        """
        Generator of metric objects:
        Checks existent metrics and creates new metric if it does not exist.
        :param col:  str with column name
        :param case: str with case name
        :return: metric object
        """

        case_metrics = self.metrics_objs.get(case)
        if case_metrics is None:
            # Use a distinct loop variable so the requested `col` argument is
            # not shadowed before the lookup in the return statement below.
            for col_name, constructor in self.col_map.items():
                self.metrics_objs.setdefault(case, {})[col_name] = constructor(
                    dict(self.cfg.get('meta', {}),
                         name=col_name,
                         source='tank',
                         importance='high'
                         if col_name in self.importance_high else ''),
                    raw=False,
                    aggregate=True,
                    parent=self.get_metric_obj(col_name, self.OVERALL)
                    if case != self.OVERALL else None,
                    case=case if case != self.OVERALL else None)
        return self.metrics_objs[case][col]
Example #12
class Plugin(AbstractPlugin, MonitoringDataListener):
    SECTION = 'neuploader'

    def __init__(self, core, cfg, name):
        super(Plugin, self).__init__(core, cfg, name)
        self._is_telegraf = None
        self.clients_cfg = [{
            'type': 'luna',
            'api_address': self.cfg.get('api_address'),
            'db_name': self.cfg.get('db_name')
        }]

    def configure(self):
        pass

    def start_test(self):
        try:
            self.reader = self.core.job.generator_plugin.get_reader(
                parser=string_to_df_microsec)
        except TypeError:
            logger.error('Generator plugin does not support NeUploader')
            self.is_test_finished = lambda: -1
            self.reader = []
        else:
            self.data_session = DataSession({'clients': self.clients_cfg})
            self.add_cleanup(self.cleanup)
            self.data_session.update_job({'name': self.cfg.get('test_name')})
            col_map_aggr = {
                name: 'metric %s' % name
                for name in [
                    'interval_real', 'connect_time', 'send_time', 'latency',
                    'receive_time', 'interval_event'
                ]
            }
            self.uploader = get_uploader(self.data_session, col_map_aggr, True)

    def cleanup(self):
        uploader_metainfo = self.map_uploader_tags(
            self.core.status.get('uploader'))
        self.data_session.update_job(uploader_metainfo)
        self.data_session.close()

    def is_test_finished(self):
        df = next(self.reader)
        if df is not None:
            self.uploader(df)
        return -1

    def monitoring_data(self, data_list):
        pass

    def post_process(self, retcode):
        for chunk in self.reader:
            if chunk is not None:
                self.uploader(chunk)
        return retcode

    @property
    def is_telegraf(self):
        return True

    @staticmethod
    def map_uploader_tags(uploader_tags):
        uploader_tags = uploader_tags or {}  # core.status may have no 'uploader' section yet
        return dict([('component', uploader_tags.get('component')),
                     ('description', uploader_tags.get('job_dsc')),
                     ('name', uploader_tags.get('job_name')),
                     ('person', uploader_tags.get('person')),
                     ('task', uploader_tags.get('task')),
                     ('version', uploader_tags.get('version')),
                     ('lunapark_jobno', uploader_tags.get('job_no'))] +
                    list(uploader_tags.get('meta', {}).items()))
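For illustration, a hypothetical uploader tag dict and the job metainfo that map_uploader_tags derives from it:

# Plugin.map_uploader_tags({'component': 'backend', 'job_dsc': 'smoke run',
#                           'job_name': 'nightly', 'person': 'alice',
#                           'task': 'LOAD-1', 'version': '1.2', 'job_no': 42,
#                           'meta': {'branch': 'dev'}})
# -> {'component': 'backend', 'description': 'smoke run', 'name': 'nightly',
#     'person': 'alice', 'task': 'LOAD-1', 'version': '1.2',
#     'lunapark_jobno': 42, 'branch': 'dev'}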
Example #13
class Plugin(AbstractPlugin, MonitoringDataListener):
    SECTION = 'neuploader'
    importance_high = {'interval_real', 'proto_code', 'net_code'}

    def __init__(self, core, cfg, name):
        super(Plugin, self).__init__(core, cfg, name)
        self._is_telegraf = None
        self.clients_cfg = [{
            'type': 'luna',
            'api_address': self.cfg.get('api_address'),
            'db_name': self.cfg.get('db_name')
        }]
        self.metrics_objs = {}  # map of case names to metric objects
        self.monitoring_metrics = {}

    def configure(self):
        pass

    def start_test(self):
        try:
            self.reader = self.core.job.generator_plugin.get_reader(
                parser=string_to_df_microsec)
        except TypeError:
            logger.error('Generator plugin does not support NeUploader')
            self.is_test_finished = lambda: -1
            self.reader = []
        else:
            self.data_session = DataSession({'clients': self.clients_cfg})
            self.add_cleanup(self._cleanup)
            self.data_session.update_job({
                'name': self.cfg.get('test_name'),
                '__type': 'tank'
            })
            self.col_map = {
                'interval_real': self.data_session.new_true_metric,
                'connect_time': self.data_session.new_true_metric,
                'send_time': self.data_session.new_true_metric,
                'latency': self.data_session.new_true_metric,
                'receive_time': self.data_session.new_true_metric,
                'interval_event': self.data_session.new_true_metric,
                'net_code': self.data_session.new_event_metric,
                'proto_code': self.data_session.new_event_metric
            }

    def _cleanup(self):
        uploader_metainfo = self.map_uploader_tags(
            self.core.status.get('uploader'))
        self.data_session.update_job(uploader_metainfo)
        self.data_session.close()

    def is_test_finished(self):
        df = next(self.reader)
        if df is not None:
            self.upload(df)
        return -1

    def monitoring_data(self, data_list):
        self.upload_monitoring(data_list)

    def post_process(self, retcode):
        for chunk in self.reader:
            if chunk is not None:
                self.upload(chunk)
        return retcode

    @property
    def is_telegraf(self):
        return True

    def get_metric_obj(self, col, case):
        """
        Generator of metric objects:
        Checks existent metrics and creates new metric if it does not exist.
        :param col:  str with column name
        :param case: str with case name
        :return: metric object
        """
        case_metrics = self.metrics_objs.get(case)
        if case_metrics is None:
            # parent = self.metrics_objs.get('__overall__', {}).get(col)
            case_metrics = {
                col: constructor(
                    name='{} {}'.format(col, case),
                    raw=False,
                    aggregate=True,
                    source='tank',
                    importance='high' if col in self.importance_high else '')
                for col, constructor in self.col_map.items()
            }
            self.metrics_objs[case] = case_metrics
        return self.metrics_objs[case][col]
Example #14
class Plugin(AbstractPlugin, MonitoringDataListener):
    SECTION = 'neuploader'

    def __init__(self, core, cfg, name):
        super(Plugin, self).__init__(core, cfg, name)
        self._is_telegraf = None
        self.clients_cfg = [{'type': 'luna',
                             'api_address': self.cfg.get('api_address'),
                             'db_name': self.cfg.get('db_name')}]

    def configure(self):
        pass

    def start_test(self):
        try:
            self.reader = self.core.job.generator_plugin.get_reader(parser=string_to_df_microsec)
        except TypeError:
            logger.error('Generator plugin does not support NeUploader')
            self.is_test_finished = lambda: -1
            self.reader = []
        else:
            self.data_session = DataSession({'clients': self.clients_cfg})
            self.add_cleanup(self.cleanup)
            self.data_session.update_job({'name': self.cfg.get('test_name')})
            col_map_aggr = {name: 'metric %s' % name for name in
                            ['interval_real', 'connect_time', 'send_time', 'latency',
                             'receive_time', 'interval_event']}
            self.uploader = get_uploader(self.data_session, col_map_aggr, True)

    def cleanup(self):
        uploader_metainfo = self.map_uploader_tags(self.core.status.get('uploader'))
        self.data_session.update_job(uploader_metainfo)
        self.data_session.close()

    def is_test_finished(self):
        df = next(self.reader)
        if df is not None:
            self.uploader(df)
        return -1

    def monitoring_data(self, data_list):
        pass

    def post_process(self, retcode):
        for chunk in self.reader:
            if chunk is not None:
                self.uploader(chunk)
        return retcode

    @property
    def is_telegraf(self):
        return True

    @staticmethod
    def map_uploader_tags(uploader_tags):
        uploader_tags = uploader_tags or {}  # core.status may have no 'uploader' section yet
        return dict(
            [
                ('component', uploader_tags.get('component')),
                ('description', uploader_tags.get('job_dsc')),
                ('name', uploader_tags.get('job_name')),
                ('person', uploader_tags.get('person')),
                ('task', uploader_tags.get('task')),
                ('version', uploader_tags.get('version')),
                ('lunapark_jobno', uploader_tags.get('job_no'))
            ] + [
                (k, v) for k, v in uploader_tags.get('meta', {}).items()
            ]
        )