import logging
import re
from configparser import RawConfigParser, ParsingError
from functools import reduce

import pkg_resources
import yaml

from yandextank.common.util import recursive_dict_update
from yandextank.validator.validator import load_plugin_schema, load_yaml_schema

logger = logging.getLogger(__name__)

# Schema for the 'core' section, shipped as package data with yandextank.core.
CORE_SCHEMA = load_yaml_schema(
    pkg_resources.resource_filename(
        'yandextank.core', 'config/schema.yaml'))['core']['schema']

# Section names this converter treats as deprecated.
DEPRECATED_SECTIONS = ['lunaport', 'aggregator']


def old_plugin_mapper(package):
    """Translate a renamed plugin package to its current name.

    Names without a known rename pass through unchanged.
    """
    renames = {'Overload': 'DataUploader'}
    return renames.get(package, package)


def parse_package_name(package_path):
    """Derive a plugin package name from an old-style ``Tank/Plugins/...``
    path or from a dotted module path, applying legacy renames."""
    if package_path.startswith("Tank/Plugins/"):
        # old path form: take the file stem, e.g. "Tank/Plugins/Foo.py" -> "Foo"
        stem = package_path.rsplit('/', 1)[-1]
        name = stem.split('.', 1)[0]
    else:
        # dotted form: take the last component, trimming any trailing text
        tail = package_path.rsplit('.', 1)[-1]
        name = tail.split()[0]
    return old_plugin_mapper(name)
def post_loader():
    """Standalone CLI: upload artifacts of a finished test to Lunapark/Overload.

    Builds the config from a YAML file (``--config``) or from the test
    directory, overlays CLI options on top of it, validates it against the
    postloader schema, then creates an LP job and uploads test data, a config
    snapshot and monitoring logs to the selected backend.
    """
    CONFIG_SCHEMA = load_yaml_schema(
        pkg_resources.resource_filename('yandextank.plugins.DataUploader',
                                        'config/postloader_schema.yaml'))
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument(
        '-c', '--config',
        help='YAML config. Format:\n{}'.format(yaml.dump(CONFIG_SCHEMA)))
    parser.add_argument(
        '-a', '--api_address',
        help='service to upload test results to, e.g. https://overload.yandex.net')
    parser.add_argument(
        '-t', '--target', help='Address of the tested target, host[:port]')
    parser.add_argument('-o', '--operator', help='User who conducted the test')
    parser.add_argument('--task', help='task name, for Lunapark users only')
    parser.add_argument('--job_name', help='Job name')
    parser.add_argument('--job_dsc', help='Job description')
    parser.add_argument(
        '--token', help='path to token file, for Overload users only')
    parser.add_argument('test_dir', help='Directory containing test artifacts')
    args = parser.parse_args()
    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # with a raise would validate input more reliably.
    assert os.path.exists(args.test_dir), 'Directory {} not found'.format(
        args.test_dir)
    get_logger()  # presumably configures logging for this CLI run — defined elsewhere
    # load cfg
    if args.config:
        with open(args.config) as f:
            # NOTE(review): yaml.load without an explicit Loader is deprecated
            # since PyYAML 5.1 and unsafe on untrusted input; yaml.safe_load
            # would be preferable here.
            config = yaml.load(f)
    else:
        # no explicit config given: recover one from the tank test directory
        config = from_tank_config(args.test_dir)
    # parse target host and port
    if args.target is not None:
        try:
            # split on the LAST ':' so only a trailing port is separated
            target_host, target_port = args.target.rsplit(':', 1)
        except ValueError:
            # no ':' in the address: the whole string is the host, no port
            target_host, target_port = args.target, None
    else:
        target_host, target_port = None, None
    # update cfg from cli options: CLI values override the loaded config,
    # but only when actually provided (None means "not set on CLI")
    for key, value in [('api_address', args.api_address),
                       ('target_host', target_host),
                       ('target_port', target_port),
                       ('operator', args.operator),
                       ('task', args.task),
                       ('job_name', args.job_name),
                       ('job_dsc', args.job_dsc),
                       ('token_file', args.token)]:
        if value is not None:
            config[key] = value
    # Validation
    v = Validator(schema=CONFIG_SCHEMA, allow_unknown=True)
    if not v.validate(config):
        raise ValidationError(v.errors)
    config = v.normalized(config)  # presumably fills in schema defaults — confirm
    # lunapark or overload?
    backend_type = BackendTypes.identify_backend(config['api_address'])
    if backend_type == BackendTypes.LUNAPARK:
        client = APIClient
        api_token = None  # Lunapark backend takes no token
    elif backend_type == BackendTypes.OVERLOAD:
        client = OverloadClient
        try:
            api_token = DataUploader.read_token(config["token_file"])
        except KeyError:
            # Overload requires a token file; fail early with a clear error
            raise ConfigError('Token file required')
    else:
        raise RuntimeError("Backend type doesn't match any of the expected")
    user_agent = ' '.join(('Uploader/{}'.format(DataUploader.VERSION),
                           TankCore.get_user_agent()))
    api_client = client(
        base_url=config['api_address'],
        user_agent=user_agent,
        api_token=api_token,
        core_interrupted=threading.Event()
        # todo: add timeouts
    )
    lp_job = LPJob(
        client=api_client,
        target_host=config.get('target_host'),
        target_port=config.get('target_port'),
        # fall back to the current OS user when no operator is configured
        person=config.get('operator') or pwd.getpwuid(os.geteuid())[0],
        task=config.get('task'),
        name=config['job_name'],
        description=config['job_dsc'],
        tank=socket.getfqdn())
    edit_metainfo(config, lp_job)
    upload_data(args.test_dir, DATA_LOG, lp_job)
    send_config_snapshot(config, lp_job)
    try:
        # an AssertionError from upload_monitoring is logged, not fatal
        upload_monitoring(args.test_dir, MONITORING_LOG, lp_job)
    except AssertionError as e:
        logger.error(e)
    lp_job.close(0)  # 0 presumably marks the job as finished successfully — confirm
    make_symlink(args.test_dir, lp_job.number)
    logger.info(
        'LP job created: {}'.format(
            urljoin(
                api_client.base_url,
                str(
                    lp_job.number))))
# NOTE(review): duplicate definition — this second `post_loader` shadows the
# earlier identical one at import time; one of the two should be removed.
def post_loader():
    """Standalone CLI: upload artifacts of a finished test to Lunapark/Overload.

    Loads the config (YAML file via ``--config``, otherwise reconstructed from
    the test directory), applies CLI overrides, validates against the
    postloader schema, then creates an LP job and uploads data, config
    snapshot and monitoring logs.
    """
    CONFIG_SCHEMA = load_yaml_schema(
        pkg_resources.resource_filename('yandextank.plugins.DataUploader',
                                        'config/postloader_schema.yaml'))
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-c', '--config',
                        help='YAML config. Format:\n{}'.format(
                            yaml.dump(CONFIG_SCHEMA)))
    parser.add_argument(
        '-a',
        '--api_address',
        help=
        'service to upload test results to, e.g. https://overload.yandex.net')
    parser.add_argument('-t', '--target',
                        help='Address of the tested target, host[:port]')
    parser.add_argument('-o', '--operator', help='User who conducted the test')
    parser.add_argument('--task', help='task name, for Lunapark users only')
    parser.add_argument('--job_name', help='Job name')
    parser.add_argument('--job_dsc', help='Job description')
    parser.add_argument('--token',
                        help='path to token file, for Overload users only')
    parser.add_argument('test_dir', help='Directory containing test artifacts')
    args = parser.parse_args()
    # NOTE(review): input validation via assert disappears under `python -O`
    assert os.path.exists(args.test_dir), 'Directory {} not found'.format(
        args.test_dir)
    get_logger()  # logging setup helper defined elsewhere in the module
    # load cfg
    if args.config:
        with open(args.config) as f:
            # NOTE(review): Loader-less yaml.load is deprecated (PyYAML >= 5.1)
            # and unsafe on untrusted input — yaml.safe_load preferred
            config = yaml.load(f)
    else:
        config = from_tank_config(args.test_dir)
    # parse target host and port
    if args.target is not None:
        try:
            # rsplit(':', 1): only the trailing ':port' is separated from host
            target_host, target_port = args.target.rsplit(':', 1)
        except ValueError:
            # no colon present: whole value is the host
            target_host, target_port = args.target, None
    else:
        target_host, target_port = None, None
    # update cfg from cli options — only options that were actually passed
    for key, value in [('api_address', args.api_address),
                       ('target_host', target_host),
                       ('target_port', target_port),
                       ('operator', args.operator),
                       ('task', args.task),
                       ('job_name', args.job_name),
                       ('job_dsc', args.job_dsc),
                       ('token_file', args.token)]:
        if value is not None:
            config[key] = value
    # Validation
    v = Validator(schema=CONFIG_SCHEMA, allow_unknown=True)
    if not v.validate(config):
        raise ValidationError(v.errors)
    config = v.normalized(config)  # normalization presumably applies schema defaults
    # lunapark or overload?
    backend_type = BackendTypes.identify_backend(config['api_address'])
    if backend_type == BackendTypes.LUNAPARK:
        client = APIClient
        api_token = None  # no token needed for Lunapark
    elif backend_type == BackendTypes.OVERLOAD:
        client = OverloadClient
        try:
            api_token = DataUploader.read_token(config["token_file"])
        except KeyError:
            # a token file is mandatory for the Overload backend
            raise ConfigError('Token file required')
    else:
        raise RuntimeError("Backend type doesn't match any of the expected")
    user_agent = ' '.join(('Uploader/{}'.format(DataUploader.VERSION),
                           TankCore.get_user_agent()))
    api_client = client(
        base_url=config['api_address'],
        user_agent=user_agent,
        api_token=api_token,
        core_interrupted=threading.Event()
        # todo: add timeouts
    )
    lp_job = LPJob(client=api_client,
                   target_host=config.get('target_host'),
                   target_port=config.get('target_port'),
                   # default operator: the current OS user name
                   person=config.get('operator')
                   or pwd.getpwuid(os.geteuid())[0],
                   task=config.get('task'),
                   name=config['job_name'],
                   description=config['job_dsc'],
                   tank=socket.getfqdn())
    edit_metainfo(config, lp_job)
    upload_data(args.test_dir, DATA_LOG, lp_job)
    send_config_snapshot(config, lp_job)
    try:
        # monitoring upload failures (AssertionError) are logged, not fatal
        upload_monitoring(args.test_dir, MONITORING_LOG, lp_job)
    except AssertionError as e:
        logger.error(e)
    lp_job.close(0)  # closing with 0 presumably means success — confirm
    make_symlink(args.test_dir, lp_job.number)
    logger.info('LP job created: {}'.format(
        urljoin(api_client.base_url, str(lp_job.number))))
# NOTE(review): Python 2-style import — on Python 3 this raises ImportError
# (the module was renamed to `configparser`); also duplicates the header above.
from ConfigParser import ConfigParser
import re
import logging
import pkg_resources
import yaml
from functools import reduce
from yandextank.common.util import recursive_dict_update
from yandextank.validator.validator import load_plugin_schema, load_yaml_schema

logger = logging.getLogger(__name__)

# Schema for the 'core' section, read from yandextank.core package data.
CORE_SCHEMA = load_yaml_schema(pkg_resources.resource_filename('yandextank.core', 'config/schema.yaml'))['core']['schema']
# Section names this converter treats as deprecated.
DEPRECATED_SECTIONS = ['lunaport', 'aggregator']


def old_plugin_mapper(package):
    """Map a legacy plugin package name to its current one; others pass through."""
    MAP = {'Overload': 'DataUploader'}
    return MAP.get(package, package)


def parse_package_name(package_path):
    """Extract the plugin package name from an old 'Tank/Plugins/*.py' path
    or from a dotted module path, then apply legacy renames."""
    if package_path.startswith("Tank/Plugins/"):
        # old path form: file stem, e.g. "Tank/Plugins/Foo.py" -> "Foo"
        package = package_path.split('/')[-1].split('.')[0]
    else:
        # dotted form: last component, trimmed at the first whitespace
        package = package_path.split('.')[-1].split()[0]
    return old_plugin_mapper(package)


# Section-name lookup; values look like regex alternations — presumably matched
# against INI section names. (Definition continues beyond this chunk.)
SECTIONS_PATTERNS = {
    'tank': 'core|tank',