def __init__(self, configs, cli_options=None, cfg_patches=None, cli_args=None, no_local=False, log_handlers=None, wait_lock=False, files=None, ammo_file=None, api_start=False, manager=None, debug=False):
    """Assemble the effective config list, create the TankCore and refuse to
    start if another tank run already holds the lock.

    :param configs: paths to run configs (stored as self.config_paths)
    :param cli_options: raw CLI option strings merged into the config
    :param cfg_patches: config patches merged into the config
    :param cli_args: pre-parsed config dicts appended last
    :param no_local: if True, skip the local base configs when combining
    :param log_handlers: extra logging handlers to attach (defaults to [])
    :param wait_lock: whether a later get_lock() should retry instead of fail
    :param files: auxiliary files to move into the artifacts folder
    :param ammo_file: ammo file to move into the artifacts folder
    :param api_start: True when started via API — switches to process-safe
        primitives (ProcessEvent, manager-backed dict)
    :param manager: multiprocessing.Manager, required when api_start is True
    :param debug: force debug-level logging regardless of config
    :raises LockError: if the lock dir is locked and 'ignore_lock' is not set
    """
    self.api_start = api_start
    self.wait_lock = wait_lock
    # Avoid mutable-default pitfalls: normalize None to fresh lists.
    self.log_handlers = log_handlers if log_handlers is not None else []
    self.files = [] if files is None else files
    self.ammo_file = ammo_file
    self.config_paths = configs
    # API mode runs across processes, so use process-aware primitives;
    # otherwise plain thread-level ones suffice.
    self.interrupted = ProcessEvent() if api_start else ThreadEvent()
    self.info = TankInfo(manager.dict()) if api_start else TankInfo(dict())
    self.config_list = self._combine_configs(configs, cli_options, cfg_patches, cli_args, no_local)
    self.core = TankCore(self.config_list, self.interrupted, self.info)
    self.folder = self.init_folder()
    # CLI --debug wins over the config's 'debug' option.
    self.init_logging(debug or self.core.get_option(self.core.SECTION, 'debug'))
    is_locked = Lock.is_locked(self.core.lock_dir)
    if is_locked and not self.core.config.get_option(
            self.SECTION, 'ignore_lock'):
        raise LockError(is_locked)
def test_core_load_plugins(config, expected):
    """Loading plugins from the base config plus *config* yields exactly *expected*."""
    base_cfg = load_yaml(os.path.join(os.path.dirname(__file__), '../config'), '00-base.yaml')
    core = TankCore(configs=[base_cfg, config])
    core.load_plugins()
    assert set(core.plugins) == expected
def test_ammo(stepper_kwargs, expected_stpd):
    """Generated stpd stream matches the *expected_stpd* fixture line by line."""
    stepper = Stepper(
        TankCore([{}], threading.Event(), TankInfo({})),
        rps_schedule=["const(10,10s)"],
        http_ver="1.1",
        instances_schedule=None,
        instances=10,
        loop_limit=1000,
        ammo_limit=1000,
        enum_ammo=False,
        **stepper_kwargs
    )
    buf = io.BytesIO()
    stepper.write(buf)
    buf.seek(0)
    fixture = read_resource(os.path.join(get_test_path(), expected_stpd), 'rb').split(b'\n')
    for line_no, (got, want) in enumerate(zip(buf, fixture)):
        assert got.strip() == want.strip(), 'Line {} mismatch'.format(line_no)
def test_ammo():
    """Stepping a phantom ammo file reproduces the reference stpd output."""
    ammo_path = os.path.join(PATH, 'yandextank/stepper/tests/test-ammo.txt')
    stepper = Stepper(
        TankCore([{}], threading.Event(), TankInfo({})),
        rps_schedule=["const(10,30s)"],
        http_ver="1.1",
        ammo_file=ammo_path,
        instances_schedule=None,
        instances=10,
        loop_limit=1000,
        ammo_limit=1000,
        ammo_type='phantom',
        autocases=0,
        enum_ammo=False,
    )
    out = io.StringIO()
    stepper.write(out)
    out.seek(0)
    reference = read_resource(
        os.path.join(PATH, 'yandextank/stepper/tests/expected.stpd')).split('\n')
    for line_no, (got, want) in enumerate(zip(out, reference)):
        assert got.strip() == want.strip(), 'Line {} mismatch'.format(line_no)
def test_core_plugins_configure(config, expected):
    """Configuring plugins from a single config completes without raising."""
    TankCore(configs=[config]).plugins_configure()
def post_loader():
    """CLI entry point: upload an already-finished test run to Lunapark/Overload.

    Reads settings from a YAML config (or from the tank config found inside the
    test directory), overrides them with CLI options, validates the result
    against the postloader schema, then creates an LP job and uploads test
    data, the config snapshot and monitoring artifacts to it.
    """
    CONFIG_SCHEMA = load_yaml_schema(
        pkg_resources.resource_filename('yandextank.plugins.DataUploader',
                                        'config/postloader_schema.yaml'))
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-c', '--config',
                        help='YAML config. Format:\n{}'.format(yaml.dump(CONFIG_SCHEMA)))
    parser.add_argument(
        '-a', '--api_address',
        help='service to upload test results to, e.g. https://overload.yandex.net')
    parser.add_argument('-t', '--target', help='Address of the tested target, host[:port]')
    parser.add_argument('-o', '--operator', help='User who conducted the test')
    parser.add_argument('--task', help='task name, for Lunapark users only')
    parser.add_argument('--job_name', help='Job name')
    parser.add_argument('--job_dsc', help='Job description')
    parser.add_argument('--token', help='path to token file, for Overload users only')
    parser.add_argument('test_dir', help='Directory containing test artifacts')
    args = parser.parse_args()
    assert os.path.exists(args.test_dir), 'Directory {} not found'.format(args.test_dir)
    get_logger()
    # load cfg
    if args.config:
        with open(args.config) as f:
            # FIX: yaml.load() without an explicit Loader is unsafe and
            # deprecated for external input — the config file is user-supplied,
            # so parse it with safe_load.
            config = yaml.safe_load(f)
    else:
        config = from_tank_config(args.test_dir)
    # parse target host and port
    if args.target is not None:
        try:
            target_host, target_port = args.target.rsplit(':', 1)
        except ValueError:
            # no ':' in the address — port left unset
            target_host, target_port = args.target, None
    else:
        target_host, target_port = None, None
    # update cfg from cli options (CLI values win over the file)
    for key, value in [('api_address', args.api_address),
                       ('target_host', target_host),
                       ('target_port', target_port),
                       ('operator', args.operator),
                       ('task', args.task),
                       ('job_name', args.job_name),
                       ('job_dsc', args.job_dsc),
                       ('token_file', args.token)]:
        if value is not None:
            config[key] = value
    # Validation
    v = Validator(schema=CONFIG_SCHEMA, allow_unknown=True)
    if not v.validate(config):
        raise ValidationError(v.errors)
    config = v.normalized(config)
    # lunapark or overload?
    backend_type = BackendTypes.identify_backend(config['api_address'])
    if backend_type == BackendTypes.LUNAPARK:
        client = APIClient
        api_token = None
    elif backend_type == BackendTypes.OVERLOAD:
        client = OverloadClient
        try:
            api_token = DataUploader.read_token(config["token_file"])
        except KeyError:
            raise ConfigError('Token file required')
    else:
        raise RuntimeError("Backend type doesn't match any of the expected")
    user_agent = ' '.join(('Uploader/{}'.format(DataUploader.VERSION),
                           TankCore.get_user_agent()))
    api_client = client(base_url=config['api_address'],
                        user_agent=user_agent,
                        api_token=api_token,
                        core_interrupted=threading.Event()
                        # todo: add timeouts
                        )
    lp_job = LPJob(client=api_client,
                   target_host=config.get('target_host'),
                   target_port=config.get('target_port'),
                   person=config.get('operator') or pwd.getpwuid(os.geteuid())[0],
                   task=config.get('task'),
                   name=config['job_name'],
                   description=config['job_dsc'],
                   tank=socket.getfqdn())
    edit_metainfo(config, lp_job)
    upload_data(args.test_dir, DATA_LOG, lp_job)
    send_config_snapshot(config, lp_job)
    # monitoring upload is best-effort: a missing/bad monitoring log is
    # logged, not fatal
    try:
        upload_monitoring(args.test_dir, MONITORING_LOG, lp_job)
    except AssertionError as e:
        logger.error(e)
    lp_job.close(0)
    make_symlink(args.test_dir, lp_job.number)
    logger.info('LP job created: {}'.format(
        urljoin(api_client.base_url, str(lp_job.number))))
def test_start_test(config):
    """The prepare/start/end plugin lifecycle completes without raising."""
    core = TankCore(configs=[config])
    for stage in (core.plugins_prepare_test, core.plugins_start_test):
        stage()
    core.plugins_end_test(1)
def test_plugins_prepare_test(config, expected):
    """plugins_prepare_test completes without raising for *config*."""
    TankCore([config], threading.Event()).plugins_prepare_test()
def test_core_plugins_configure(config, expected):
    """plugins_configure completes without raising for *config*."""
    interrupt_flag = threading.Event()
    core = TankCore([config], interrupt_flag)
    core.plugins_configure()
def get_core(self):
    """Create a TankCore whose artifacts live in a fresh temporary directory."""
    core = TankCore()
    core.artifacts_base_dir = tempfile.mkdtemp()
    # artifacts_dir points at the same throwaway directory
    core.artifacts_dir = core.artifacts_base_dir
    self.core = core
    return self.core
class TankWorker():
    """Drives one tank run: combines configs, owns the TankCore, manages the
    run folder, per-run logging and the inter-run lock file."""

    # Config section this worker reads its own options from.
    SECTION = 'core'
    # Name of the YAML file the final run status is dumped to.
    FINISH_FILENAME = 'finish_status.yaml'
    # Config used when the caller supplies no run configs at all.
    DEFAULT_CONFIG = 'load.yaml'

    def __init__(self, configs, cli_options=None, cfg_patches=None, cli_args=None, no_local=False, log_handlers=None, wait_lock=False, files=None, ammo_file=None, api_start=False, manager=None, debug=False):
        """Build the effective config, create the core, set up logging and
        bail out early (LockError) if another run holds the lock.

        :param api_start: True when started via API — switches to
            process-safe primitives (ProcessEvent, manager-backed dict)
        :param manager: multiprocessing.Manager, required when api_start is True
        :raises LockError: lock dir busy and 'ignore_lock' not set
        """
        self.api_start = api_start
        self.wait_lock = wait_lock
        # Normalize None to fresh lists (mutable-default safety).
        self.log_handlers = log_handlers if log_handlers is not None else []
        self.files = [] if files is None else files
        self.ammo_file = ammo_file
        self.config_paths = configs
        # API mode spans processes; CLI mode only needs thread-level events.
        self.interrupted = ProcessEvent() if api_start else ThreadEvent()
        self.info = TankInfo(manager.dict()) if api_start else TankInfo(dict())
        self.config_list = self._combine_configs(configs, cli_options, cfg_patches, cli_args, no_local)
        self.core = TankCore(self.config_list, self.interrupted, self.info)
        self.folder = self.init_folder()
        # CLI --debug wins over the config's 'debug' option.
        self.init_logging(debug or self.core.get_option(self.core.SECTION, 'debug'))
        is_locked = Lock.is_locked(self.core.lock_dir)
        if is_locked and not self.core.config.get_option(
                self.SECTION, 'ignore_lock'):
            raise LockError(is_locked)

    @staticmethod
    def _combine_configs(run_cfgs, cli_options=None, cfg_patches=None, cli_args=None, no_local=False):
        """Merge base configs, run configs, CLI options and patches into the
        ordered config list consumed by TankCore (later entries override
        earlier ones)."""
        if cli_options is None:
            cli_options = []
        if cfg_patches is None:
            cfg_patches = []
        if cli_args is None:
            cli_args = []
        # Fall back to the default config when no run configs were given.
        run_cfgs = run_cfgs if len(run_cfgs) > 0 else [
            TankWorker.DEFAULT_CONFIG
        ]
        if no_local:
            configs = [load_cfg(cfg) for cfg in run_cfgs] + \
                parse_options(cli_options) + \
                parse_and_check_patches(cfg_patches) + \
                cli_args
        else:
            # Local mode prepends the packaged base config and any
            # machine-local base configs.
            configs = [load_core_base_cfg()] + \
                load_local_base_cfgs() + \
                [load_cfg(cfg) for cfg in run_cfgs] + \
                parse_options(cli_options) + \
                parse_and_check_patches(cfg_patches) + \
                cli_args
        return configs

    def init_folder(self):
        """Return the artifacts dir, moving API-supplied inputs into it and
        making it the current working directory."""
        folder = self.core.artifacts_dir
        # NOTE(review): api_start is a bool; '> 0' works but reads oddly —
        # presumably just a truthiness check.
        if self.api_start > 0:
            # In API mode the uploaded configs/files/ammo are staged elsewhere
            # and must be collected into the artifacts folder.
            for cfg in self.config_paths:
                shutil.move(cfg, folder)
            for f in self.files:
                shutil.move(f, folder)
            if self.ammo_file:
                shutil.move(self.ammo_file, folder)
        os.chdir(folder)
        return folder

    def stop(self):
        """Signal the running test to interrupt."""
        self.interrupted.set()
        logger.warning('Interrupting')

    def get_status(self):
        """Return a status dict for API consumers.

        NOTE(review): relies on self.status / self.retcode / self.msg being
        set elsewhere (not in __init__ as shown here) — verify against the
        rest of the class.
        """
        return {'status_code': self.status,
                'left_time': None,
                'exit_code': self.retcode,
                'lunapark_id': self.get_info('uploader', 'job_no'),
                'tank_msg': self.msg,
                'lunapark_url': self.get_info('uploader', 'web_link'),
                'luna_id': self.get_info('neuploader', 'job_no'),
                'luna_url': self.get_info('neuploader', 'web_link')}

    def save_finish_status(self):
        """Dump the final status dict to FINISH_FILENAME in the run folder."""
        # NOTE(review): safe_dump with encoding= emits bytes while f is opened
        # in text mode — confirm this combination actually works with the
        # PyYAML version in use.
        with open(os.path.join(self.folder, self.FINISH_FILENAME), 'w') as f:
            yaml.safe_dump(self.get_status(), f, encoding='utf-8', allow_unicode=True)

    def get_info(self, section_name, key_name):
        """Look up a single value from the shared TankInfo store."""
        return self.info.get_value([section_name, key_name])

    def init_logging(self, debug=False):
        """Route all logging into tank.log inside the artifacts dir, plus any
        externally supplied handlers.

        :param debug: when True the root tank logger runs at DEBUG level
        """
        filename = os.path.join(self.core.artifacts_dir, 'tank.log')
        open(filename, 'a').close()
        # Make the log world-readable so non-owner tools can inspect it.
        current_file_mode = os.stat(filename).st_mode
        os.chmod(filename, current_file_mode | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
        # Replace any previously attached handlers for a clean per-run setup.
        logger.handlers = []
        logger.setLevel(logging.DEBUG if debug else logging.INFO)
        file_handler = logging.FileHandler(filename)
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(name)s %(filename)s:%(lineno)d\t%(message)s"))
        file_handler.addFilter(TankapiLogFilter())
        logger.addHandler(file_handler)
        logger.info("Log file created")
        for handler in self.log_handlers:
            logger.addHandler(handler)
            logger.info("Logging handler {} added".format(handler))

    def get_lock(self):
        """Acquire the run lock, retrying every 5 s while wait_lock is set.

        :returns: the acquired Lock
        :raises RuntimeError: lock busy and wait_lock is False
        :raises KeyboardInterrupt: interrupted while waiting for the lock
        """
        while not self.interrupted.is_set():
            try:
                lock = Lock(self.test_id, self.folder).acquire(self.core.lock_dir,
                                                               self.core.config.get_option(self.SECTION, 'ignore_lock'))
                break
            except LockError as e:
                # NOTE(review): e.message is not a standard exception attribute
                # in Python 3 — presumably LockError defines it; confirm.
                self.upd_msg(e.message)
                if not self.wait_lock:
                    raise RuntimeError("Lock file present, cannot continue")
                logger.warning("Couldn't get lock. Will retry in 5 seconds...")
                time.sleep(5)
        else:
            # while-else: loop exited via the interrupt flag, not via break.
            raise KeyboardInterrupt
        return lock

    def upd_msg(self, msg):
        """Append a non-empty message to the accumulated tank message."""
        if msg:
            self.msg = self.msg + '\n' + msg
def test_plugins_prepare_test(config, expected):
    """Smoke test: plugin preparation succeeds for the given config."""
    interrupt_flag = threading.Event()
    core = TankCore([config], interrupt_flag)
    core.plugins_prepare_test()
def test_core_plugins_configure(config, expected):
    """Configuring plugins succeeds with an empty TankInfo store."""
    TankCore([config], threading.Event(), TankInfo({})).plugins_configure()
def test_core_load_plugins(config, expected):
    """Loaded plugin set for base + *config* equals *expected*."""
    base_cfg = load_yaml(os.path.join(PATH, '../config'), '00-base.yaml')
    core = TankCore([base_cfg, config], threading.Event(), TankInfo({}))
    core.load_plugins()
    assert set(core.plugins) == expected
def test_plugins_prepare_test(config, expected):
    """plugins_prepare_test completes for a single-config core."""
    TankCore(configs=[config]).plugins_prepare_test()
def test_core_load_plugins(config, expected):
    """Plugin keys after loading equal *expected* for base + *config*."""
    config_dir = os.path.join(os.path.dirname(__file__), '../config')
    core = TankCore([load_yaml(config_dir, '00-base.yaml'), config],
                    threading.Event())
    core.load_plugins()
    assert set(core.plugins) == expected
def test_start_test(config):
    """Running prepare, start and end of the plugin lifecycle must not raise."""
    core = TankCore(configs=[config])
    core.plugins_prepare_test()
    core.plugins_start_test()
    # retcode 1 is what this fixture has always passed to end_test
    core.plugins_end_test(1)
def post_loader():
    """CLI entry point: upload a finished test-run directory to Lunapark/Overload.

    Builds the upload config from a YAML file (or the tank config inside the
    test directory), applies CLI overrides, validates against the postloader
    schema, then creates an LP job and uploads data, config snapshot and
    monitoring artifacts.
    """
    CONFIG_SCHEMA = load_yaml_schema(
        pkg_resources.resource_filename('yandextank.plugins.DataUploader',
                                        'config/postloader_schema.yaml'))
    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-c', '--config',
                        help='YAML config. Format:\n{}'.format(yaml.dump(CONFIG_SCHEMA)))
    parser.add_argument('-a', '--api_address',
                        help='service to upload test results to, e.g. https://overload.yandex.net')
    parser.add_argument('-t', '--target', help='Address of the tested target, host[:port]')
    parser.add_argument('-o', '--operator', help='User who conducted the test')
    parser.add_argument('--task', help='task name, for Lunapark users only')
    parser.add_argument('--job_name', help='Job name')
    parser.add_argument('--job_dsc', help='Job description')
    parser.add_argument('--token', help='path to token file, for Overload users only')
    parser.add_argument('test_dir', help='Directory containing test artifacts')
    args = parser.parse_args()
    assert os.path.exists(args.test_dir), 'Directory {} not found'.format(args.test_dir)
    get_logger()
    # load cfg
    if args.config:
        with open(args.config) as f:
            # FIX: yaml.load() without a Loader is unsafe/deprecated for
            # user-supplied files — use safe_load.
            config = yaml.safe_load(f)
    else:
        config = from_tank_config(args.test_dir)
    # parse target host and port
    if args.target is not None:
        try:
            target_host, target_port = args.target.rsplit(':', 1)
        except ValueError:
            # plain host with no ':' — port left unset
            target_host, target_port = args.target, None
    else:
        target_host, target_port = None, None
    # update cfg from cli options (CLI values take precedence)
    for key, value in [('api_address', args.api_address),
                       ('target_host', target_host),
                       ('target_port', target_port),
                       ('operator', args.operator),
                       ('task', args.task),
                       ('job_name', args.job_name),
                       ('job_dsc', args.job_dsc),
                       ('token_file', args.token)]:
        if value is not None:
            config[key] = value
    # Validation
    v = Validator(schema=CONFIG_SCHEMA, allow_unknown=True)
    if not v.validate(config):
        raise ValidationError(v.errors)
    config = v.normalized(config)
    # lunapark or overload?
    backend_type = BackendTypes.identify_backend(config['api_address'])
    if backend_type == BackendTypes.LUNAPARK:
        client = APIClient
        api_token = None
    elif backend_type == BackendTypes.OVERLOAD:
        client = OverloadClient
        try:
            api_token = DataUploader.read_token(config["token_file"])
        except KeyError:
            raise ConfigError('Token file required')
    else:
        raise RuntimeError("Backend type doesn't match any of the expected")
    user_agent = ' '.join(('Uploader/{}'.format(DataUploader.VERSION),
                           TankCore.get_user_agent()))
    api_client = client(base_url=config['api_address'],
                        user_agent=user_agent,
                        api_token=api_token,
                        core_interrupted=threading.Event()
                        # todo: add timeouts
                        )
    lp_job = LPJob(
        client=api_client,
        target_host=config.get('target_host'),
        target_port=config.get('target_port'),
        person=config.get('operator') or pwd.getpwuid(os.geteuid())[0],
        task=config.get('task'),
        name=config['job_name'],
        description=config['job_dsc'],
        tank=socket.getfqdn())
    edit_metainfo(config, lp_job)
    upload_data(args.test_dir, DATA_LOG, lp_job)
    send_config_snapshot(config, lp_job)
    # best-effort: missing monitoring data is logged, not fatal
    try:
        upload_monitoring(args.test_dir, MONITORING_LOG, lp_job)
    except AssertionError as e:
        logger.error(e)
    lp_job.close(0)
    make_symlink(args.test_dir, lp_job.number)
    logger.info('LP job created: {}'.format(
        urljoin(api_client.base_url, str(lp_job.number))))