def parse_and_check_patches(patches):
    """Parse YAML config patches and check that each one is a mapping.

    :param patches: iterable of YAML strings, each expected to parse to a dict
    :return: list of parsed dicts, in the same order as ``patches``
    :raises ValidationError: if any patch parses to a non-dict value
    """
    # safe_load instead of yaml.load: patches are user-supplied text, and
    # yaml.load without an explicit Loader is deprecated and can construct
    # arbitrary Python objects via !!python tags.
    parsed = [yaml.safe_load(p) for p in patches]
    for patch in parsed:
        if not isinstance(patch, dict):
            raise ValidationError(
                'Config patch "{}" should be a dict'.format(patch))
    return parsed
def __init__(self, configs, interrupted_event, artifacts_base_dir=None,
             artifacts_dir_name=None):
    """
    Validate the raw configs, derive the test id, and persist the initial
    and validated config snapshots as artifact files.

    :param configs: list of dict
    :param interrupted_event: threading.Event
    :param artifacts_base_dir: base directory for artifacts; None means
        "use the configured/default base dir" -- TODO confirm against the
        artifacts_base_dir property, which is not visible here
    :param artifacts_dir_name: explicit artifacts directory name; None
        means "derive from the test id" -- TODO confirm against the
        artifacts_dir property
    """
    self.output = {}
    self.raw_configs = configs
    self.status = {}
    self._plugins = None
    # FIX: both constructor arguments were previously ignored (the
    # attributes were hard-coded to None), silently dropping any
    # caller-supplied artifact locations; the sibling __init__ variant in
    # this file stores them, so do the same here.
    self._artifacts_dir = artifacts_dir_name
    self.artifact_files = {}
    self.artifacts_to_send = []
    self._artifacts_base_dir = artifacts_base_dir
    self.manual_start = False
    self.scheduled_start = None
    self.taskset_path = None
    self.taskset_affinity = None
    self._job = None
    self._cfg_snapshot = None
    self.interrupted = interrupted_event
    self.error_log = None
    error_output = 'validation_error.yaml'
    # Merge and validate all configs; validation errors are also written
    # to error_output by TankConfig.
    self.config, self.errors, self.configinitial = TankConfig(
        self.raw_configs,
        with_dynamic_options=True,
        core_section=self.SECTION,
        error_output=error_output).validate()
    if not self.config:
        raise ValidationError(self.errors)
    # Test id defaults to a timestamp unless the artifacts_dir option is set.
    self.test_id = self.get_option(
        self.SECTION, 'artifacts_dir',
        datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
    self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
    # Persist the initial (pre-validation) config snapshot.
    with open(os.path.join(self.artifacts_dir, CONFIGINITIAL), 'w') as f:
        yaml.dump(self.configinitial, f)
    self.add_artifact_file(error_output)
    self.add_artifact_to_send(LPRequisites.CONFIGINITIAL,
                              yaml.dump(self.configinitial))
    # Snapshot of the validated config, tagged with this test's id.
    configinfo = self.config.validated.copy()
    configinfo.setdefault(self.SECTION, {})
    configinfo[self.SECTION][self.API_JOBNO] = self.test_id
    self.add_artifact_to_send(LPRequisites.CONFIGINFO, yaml.dump(configinfo))
    with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
        yaml.dump(configinfo, f)
    logger.info('New test id %s' % self.test_id)
def post_loader():
    """CLI entry point: upload an existing test-artifacts directory to a
    Lunapark or Overload backend.

    Reads the upload config from ``--config`` (or from the tank config found
    in the artifacts dir), overrides it with CLI options, validates it
    against the postloader schema, then creates an LP job and uploads data,
    monitoring and a config snapshot.
    """
    CONFIG_SCHEMA = load_yaml_schema(
        pkg_resources.resource_filename(
            'yandextank.plugins.DataUploader',
            'config/postloader_schema.yaml'))
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-c', '--config',
                        help='YAML config. Format:\n{}'.format(
                            yaml.dump(CONFIG_SCHEMA)))
    parser.add_argument(
        '-a', '--api_address',
        help='service to upload test results to, e.g. https://overload.yandex.net')
    parser.add_argument('-t', '--target',
                        help='Address of the tested target, host[:port]')
    parser.add_argument('-o', '--operator',
                        help='User who conducted the test')
    parser.add_argument('--task', help='task name, for Lunapark users only')
    parser.add_argument('--job_name', help='Job name')
    parser.add_argument('--job_dsc', help='Job description')
    parser.add_argument('--token',
                        help='path to token file, for Overload users only')
    parser.add_argument('test_dir',
                        help='Directory containing test artifacts')
    args = parser.parse_args()
    # NOTE(review): assert is stripped under `python -O`, so this check
    # silently disappears in optimized runs; kept for backward compatibility.
    assert os.path.exists(args.test_dir), 'Directory {} not found'.format(
        args.test_dir)
    get_logger()
    # load cfg
    if args.config:
        with open(args.config) as f:
            # safe_load instead of yaml.load: the config file is plain data
            # and yaml.load without a Loader is deprecated/unsafe.
            config = yaml.safe_load(f)
    else:
        config = from_tank_config(args.test_dir)
    # parse target host and port; rsplit keeps IPv6-ish hosts with multiple
    # colons intact except for the final :port part
    if args.target is not None:
        try:
            target_host, target_port = args.target.rsplit(':', 1)
        except ValueError:
            target_host, target_port = args.target, None
    else:
        target_host, target_port = None, None
    # update cfg from cli options; CLI values win over file values
    for key, value in [('api_address', args.api_address),
                       ('target_host', target_host),
                       ('target_port', target_port),
                       ('operator', args.operator),
                       ('task', args.task),
                       ('job_name', args.job_name),
                       ('job_dsc', args.job_dsc),
                       ('token_file', args.token)]:
        if value is not None:
            config[key] = value
    # Validation
    v = Validator(schema=CONFIG_SCHEMA, allow_unknown=True)
    if not v.validate(config):
        raise ValidationError(v.errors)
    config = v.normalized(config)
    # lunapark or overload?
    backend_type = BackendTypes.identify_backend(config['api_address'])
    if backend_type == BackendTypes.LUNAPARK:
        client = APIClient
        api_token = None
    elif backend_type == BackendTypes.OVERLOAD:
        client = OverloadClient
        try:
            api_token = DataUploader.read_token(config["token_file"])
        except KeyError:
            # Overload requires a token file; surface a config error
            raise ConfigError('Token file required')
    else:
        raise RuntimeError("Backend type doesn't match any of the expected")
    user_agent = ' '.join(('Uploader/{}'.format(DataUploader.VERSION),
                           TankCore.get_user_agent()))
    api_client = client(base_url=config['api_address'],
                        user_agent=user_agent,
                        api_token=api_token,
                        core_interrupted=threading.Event()
                        # todo: add timeouts
                        )
    # operator defaults to the current OS user when not given
    lp_job = LPJob(client=api_client,
                   target_host=config.get('target_host'),
                   target_port=config.get('target_port'),
                   person=config.get('operator') or pwd.getpwuid(os.geteuid())[0],
                   task=config.get('task'),
                   name=config['job_name'],
                   description=config['job_dsc'],
                   tank=socket.getfqdn())
    edit_metainfo(config, lp_job)
    upload_data(args.test_dir, DATA_LOG, lp_job)
    send_config_snapshot(config, lp_job)
    # Monitoring data is best-effort: a missing/empty log only logs an error.
    try:
        upload_monitoring(args.test_dir, MONITORING_LOG, lp_job)
    except AssertionError as e:
        logger.error(e)
    lp_job.close(0)
    make_symlink(args.test_dir, lp_job.number)
    logger.info('LP job created: {}'.format(
        urljoin(api_client.base_url, str(lp_job.number))))
def __init__(self, configs, interrupted_event, local_configs=None,
             user_configs=None, artifacts_base_dir=None,
             artifacts_dir_name=None):
    """
    Validate the merged configs, derive the test id, and persist the
    local/user/validated config snapshots as artifact files.

    :param configs: list of dict
    :param interrupted_event: threading.Event
    :param local_configs: site-local configs, fed through load_multiple --
        exact type (paths vs dicts) depends on that helper; TODO confirm
    :param user_configs: user-supplied configs, same handling as
        local_configs
    :param artifacts_base_dir: base directory for artifacts; None means
        "use the default" -- resolved by the artifacts_base_dir property,
        not visible here
    :param artifacts_dir_name: explicit artifacts directory name; None
        means "derive from the test id" -- resolved by the artifacts_dir
        property, not visible here
    """
    self.output = {}
    self.raw_configs = configs
    self.status = {}
    self._plugins = None
    # Caller-provided overrides; presumably resolved lazily by the
    # artifacts_dir / artifacts_base_dir properties -- TODO confirm.
    self._artifacts_dir = artifacts_dir_name
    self.artifact_files = {}
    self.artifacts_to_send = []
    self._artifacts_base_dir = artifacts_base_dir
    self.manual_start = False
    self.scheduled_start = None
    self.taskset_path = None
    self.taskset_affinity = None
    self._job = None
    self._cfg_snapshot = None
    # load_multiple is a project helper (not visible here) that merges the
    # given configs into a single structure.
    self.local_configs = load_multiple(local_configs)
    self.user_configs = load_multiple(user_configs)
    self.configinitial = self.user_configs
    self.interrupted = interrupted_event
    self.error_log = None
    # Merge and validate all configs; validation errors are also written
    # to ERROR_OUTPUT by TankConfig.
    self.config, self.errors = TankConfig(
        self.raw_configs,
        with_dynamic_options=True,
        core_section=self.SECTION,
        error_output=ERROR_OUTPUT).validate()
    if not self.config:
        raise ValidationError(self.errors)
    # Test id defaults to a timestamp unless the artifacts_dir option is set.
    self.test_id = self.get_option(
        self.SECTION, 'artifacts_dir',
        datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
    self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
    # Persist the raw local and user config snapshots into the artifacts dir.
    with open(os.path.join(self.artifacts_dir, LOCAL_CONFIG), 'w') as f:
        yaml.dump(self.local_configs, f)
    with open(os.path.join(self.artifacts_dir, USER_CONFIG), 'w') as f:
        yaml.dump(self.user_configs, f)
    # Snapshot of the validated config, tagged with this test's id.
    configinfo = self.config.validated.copy()
    configinfo.setdefault(self.SECTION, {})
    configinfo[self.SECTION][self.API_JOBNO] = self.test_id
    with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
        yaml.dump(configinfo, f)
    # Register all persisted snapshots as artifacts of this test run.
    self.add_artifact_file(os.path.join(self.artifacts_dir, USER_CONFIG))
    self.add_artifact_file(os.path.join(self.artifacts_dir, LOCAL_CONFIG))
    self.add_artifact_file(os.path.join(self.artifacts_dir, VALIDATED_CONF))
    # Validation error log is only an artifact when errors actually occurred.
    if self.errors:
        self.add_artifact_file(
            os.path.join(self.artifacts_dir, ERROR_OUTPUT))