def loadYaml(config_file):
    # We might be able to remove this and use it like the config_properties
    # skipcq: PYL-W0603
    global yamldata
    import yaml
    from cerberus import Validator
    import os

    with open(config_file) as yamlfile:
        try:
            yamldata = yaml.safe_load(yamlfile)
        except yaml.YAMLError as exception:
            raise exception

    with open(os.path.dirname(os.path.abspath(__file__)) + '/' + 'schema.py', 'r') as sma:
        # skipcq: PYL-W0123
        schema = eval(sma.read())

    v = Validator(schema)
    if v.validate(yamldata, schema):
        yamldata = v.normalized(yamldata)
    else:
        raise ValueError(v.errors)

    # Set default values
    for key, val in yamldata.items():
        config_properties[str(key)] = val
def from_dict(data: Dict) -> Model:
    """Build Model from dict

    Parameters
    ----------
    data : Dict
        Serialized Dictionary

    Returns
    -------
    Model
    """
    # Validate data using cerberus
    schema = Validator(ModelAssembler.SCHEMA)
    data = schema.normalized(data)
    if not schema.validate(data):
        raise ValueError(schema.errors)

    # Create vocab
    vocab = Vocab(data["vocab"])

    # Create layers
    layers = []
    for layer_data in data["layers"]:
        layer_type = layer_data["layer_type"]
        layer_name = layer_data["layer_name"]
        if layer_type == "TimesTwoLayer":
            layer = TimesTwoLayer(layer_name, vocab)
        elif layer_type == "PlusOneLayer":
            layer = PlusOneLayer(layer_name, vocab)
        else:
            raise ValueError(f"{layer_type} not recognized.")
        layers.append(layer)

    return Model(layers)
def __validate_core(self):
    v = Validator(self.BASE_SCHEMA)
    result = v.validate(self.__raw_config_dict, self.BASE_SCHEMA)
    if not result:
        raise ValidationError(v.errors)
    normalized = v.normalized(self.__raw_config_dict)
    return self.__set_core_dynamic_options(normalized) if self.with_dynamic_options else normalized
def check_config(config):
    v = Validator()
    if not v.validate(config, CONFIG_SCHEMA, normalize=False):
        raise ValidationError(v.errors)
    return v.normalized(config)
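# Hedged usage sketch, not from the source: CONFIG_SCHEMA below is a hypothetical
# stand-in for the module-level schema that check_config() reads. It shows the
# contract: validation happens without normalization, then a normalized copy
# (with schema defaults applied) is returned.
CONFIG_SCHEMA = {'workers': {'type': 'integer', 'default': 4}}
cfg = check_config({'workers': 2})  # -> {'workers': 2}; an empty dict would yield {'workers': 4}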
def wrapper(*args, **kwargs):
    validation_schema = {
        'dataset': {
            'type': 'string',
            'required': True
        },
        'variable': {
            'type': 'string',
            'required': True
        },
        'years': {
            'type': 'list',
            'required': True
        },
        'depth': {
            'type': 'string',
            'required': True
        },
        'geometry': {
            'type': 'dict',  # cerberus's built-in mapping type is 'dict'
            'required': True
        }
    }
    try:
        logging.debug(f"[VALIDATOR - data and mask params]: {kwargs}")
        validator = Validator(validation_schema, allow_unknown=True, purge_unknown=True)
        if not validator.validate(kwargs['params']):
            return error(status=400, detail=validator.errors)
        kwargs['sanitized_params'] = validator.normalized(kwargs['params'])
        return func(*args, **kwargs)
    except Exception as err:
        return error(status=502, detail=f'{err}')
def linode_action_input_validated(schema, definition, args):
    from cerberus import Validator
    from json import dumps

    log.vvvvv('linode_action_input_validated(%s): %s' % (definition, str(args)))

    v = Validator(
        schema=schema.get(definition),
        purge_unknown=True,
    )
    normalized = v.normalized(args)
    log.vvvvv('linode_action_input_validated(%s): normalized %s' % (definition, str(normalized)))

    if not v.validate(normalized):
        for err in dumps(v.errors, indent=2).split("\n"):
            log.warning('linode_action_input_validated(%s): %s' % (definition, err))
        raise AnsibleError('while validating %s got errors: %s' % (definition, str(v.errors)))

    validated = v.validated(args)
    log.vvv('linode_action_input_validated(%s): validated %s' % (definition, str(validated)))

    return validated
def _parse_config(self) -> dict:
    """
    Parses configuration from the config file, checked against _Constants.cfg_schema.
    This will throw an exception if the config is invalid.
    After checking, default values will be inserted and the config saved,
    *whether it was found valid or not*.

    :return: The config dict
    """
    v = Validator(self.constants.config_schema)
    loaded = None
    try:
        with open(self.constants.cfg_file, 'r') as f:
            loaded = json.load(f)
        if not v.validate(loaded):
            with open(self.constants.cache_dir + "cfg_errors.json", "w") as f:
                json.dump(v.errors, f, indent=2)
            raise EarlyExitException(
                'There were errors with your config!\n'
                'Error details were dumped to cfg_errors.json')
    except FileNotFoundError:
        if not os.path.isdir(self.constants.cache_dir):
            os.mkdir(self.constants.cache_dir)
        raise EarlyExitException(
            "Config file doesn't exist! Default config generated at "
            + self.constants.cfg_file)
    finally:
        normalized = v.normalized(loaded) if loaded is not None else self.constants.default_config
        # Insert default values for missing keys
        with open(self.constants.cfg_file, 'w') as f:
            json.dump(normalized, f, indent=2)
    return normalized
def validate_skills(self) -> None:
    """
    Validates skill.yaml file.

    :rtype: None
    """
    v = Validator(skills_validation_schema)
    for key, value in self.skills.items():
        if not v.validate(value, skills_validation_schema):
            self.error_handler(v.errors, key)
        normalized_skill = v.normalized(value)
        if normalized_skill.get('job', None) not in self.jobs:
            error_string = 'The skill {skill} has an unknown job assigned to it.'.format(
                skill=normalized_skill.get('name'))
            self._logger.error(error_string)
            raise ConfigFileError(error_string)
        if 'side_effects' in normalized_skill:
            for element in normalized_skill.get('side_effects', []):
                if element not in self.side_effects.keys():
                    error_string = 'The skill {skill} has an unknown side-effect assigned to it.'.format(
                        skill=normalized_skill.get('name'))
                    self._logger.error(error_string)
                    raise ConfigFileError(error_string)
        self.skills[key] = normalized_skill
def validate_settings(settings: dict) -> Union[dict, list]:
    """Validate the configuration settings against the configuration schema.

    :param settings: The settings parsed from the configuration file
    :type settings: dict
    :return: The validated and normalised settings if they are valid, otherwise a list of error messages
    :rtype: dict or list
    """
    validator = Validator(schema)
    if settings and validator.validate(settings):
        return validator.normalized(settings)
    elif settings is None:
        return ['Your configuration file is empty']
    else:
        error_list = []

        def walk_error_tree(err, path):
            if isinstance(err, dict):
                for key, value in err.items():
                    walk_error_tree(value, path + (str(key), ))
            elif isinstance(err, list):
                for sub_err in err:
                    walk_error_tree(sub_err, path)
            else:
                error_list.append(f'{".".join(path)}: {err}')

        walk_error_tree(validator.errors, ())
        return error_list
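# Hedged usage sketch (hypothetical caller and settings literal): validate_settings()
# returns a dict on success and a list of flattened error messages otherwise, so a
# caller typically branches on the return type.
result = validate_settings({'debug': True})
if isinstance(result, dict):
    print('settings OK:', result)
else:
    for message in result:  # e.g. "server.port: must be of integer type"
        print('config error:', message)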
def wrapper(*args, **kwargs):
    validation_schema = {
        'dataset_names': {
            'type': 'string',
            'required': True,
            'default': None
        },
        'geojson': {
            'type': 'string',
            'excludes': 'geostore',
            'required': False
        }
    }
    try:
        logging.debug(f"[VALIDATOR - prediction params]: {kwargs}")
        validator = Validator(validation_schema, allow_unknown=True, purge_unknown=True)
        logging.info(
            f"[VALIDATOR - prediction params]: {validator.validate(kwargs['params'])}"
        )
        if not validator.validate(kwargs['params']):
            return error(status=400, detail=validator.errors)
        kwargs['sanitized_params'] = validator.normalized(kwargs['params'])
        return func(*args, **kwargs)
    except Exception as err:
        return error(status=502, detail=f'{err}')
def validate_config(config):
    schema = {
        'name': {'type': 'string', 'required': True},
        'restart': {'type': 'string', 'required': True},
        'virtualenv': {'type': 'string', 'required': True},
        'config': {'type': 'string', 'required': True},
        'user': {'type': 'string', 'required': True},
        'group': {'type': 'string', 'required': True}
    }
    v = Validator(schema)
    if v.validate(config):
        return (v.normalized(config), False)
    else:
        return (config, v.errors)
def load_config(name: str = "settings"):
    """Load TOML config file and validate it with the associated schema"""
    # Enforce standard directory structure and a schema for each config file
    filename = f"{name}.toml"
    schema = load(join("config", "schemas", filename))

    # Allow non-existent files to handle default-valued only schemas
    file_not_found = None
    try:
        config = load(join("config", filename))
    except FileNotFoundError as error:
        config, file_not_found = {}, error

    validator = Validator(_preprocess_schema_rec(schema, name))
    if not validator.validate(config):
        if file_not_found is not None:
            raise FileNotFoundError(file_not_found)
        else:
            errors = validator.errors
            raise ValidationError(_format_validation_errors(name, errors))

    config_with_empty_sections = _create_empty_sections_rec(schema, config)
    return validator.normalized(config_with_empty_sections)
async def post(self):
    schema = {
        'BeerType': {
            'type': 'string',
            'required': True,
            'allowed': ['Caña', 'Jarra', 'Doble']
        },
        'TotalML': {
            'type': 'float',
            'required': True
        },
        'Date': {
            'type': 'datetime',
            'required': True,
            'coerce': lambda s: parse(s)
        }
    }
    v = Validator(schema)
    data = tornado.escape.json_decode(self.request.body)
    if not v.validate(data):
        raise HTTPError(status_code=400, log_message=json.dumps(v.errors),
                        reason=json.dumps(v.errors))
    normalized_data = v.normalized(data)
    beer_type = normalized_data['BeerType']
    total_ml = normalized_data['TotalML']
    date = normalized_data['Date']
    try:
        store_beer(beer_type, total_ml, date)
    except BeerMLException as e:
        raise HTTPError(status_code=400, log_message=str(e), reason=str(e))
    send_ws_message(normalized_data)
async def get(self):
    schema = {
        'beginDate': {
            'type': 'date',
            'required': True,
            'coerce': lambda s: parse(s).date()
        },
        'endDate': {
            'type': 'date',
            'required': True,
            'coerce': lambda s: parse(s).date()
        },
        'beerType': {
            'type': 'string',
            'nullable': True,
            'allowed': ['Caña', 'Jarra', 'Doble']
        }
    }
    v = Validator(schema)
    params = {
        'beginDate': self.get_query_argument("beginDate"),
        'endDate': self.get_query_argument("endDate"),
        'beerType': self.get_query_argument("beerType", None)
    }
    if not v.validate(params):
        raise HTTPError(400, log_message=json.dumps(v.errors))
    normalized_params = v.normalized(params)
    begin_date = normalized_params['beginDate']
    end_date = normalized_params['endDate']
    beer_type = normalized_params.get('beerType', None)
    response = get_beer_records(begin_date, end_date, beer_type)
    self.write(json.dumps(response))
def validate(document, schema):
    validator = Validator(_schema[schema], purge_unknown=True, allow_unknown=False)
    normalized = validator.normalized(document)
    return validator.validate(normalized), validator.errors, normalized
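# Hedged usage sketch ('item' is a hypothetical key in _schema): validate() returns an
# (is_valid, errors, normalized) triple, so callers usually unpack all three values.
ok, errors, doc = validate({'name': 'example'}, 'item')
if not ok:
    raise ValueError(errors)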
def get_data(self, task, driver):
    config = DriverConfig(driver=driver)
    validator = Validator(config.get_task_schema(), purge_unknown=True)
    if not validator.validate(task):
        raise InvalidTask(errors=validator.errors)
    return validator.normalized(task)
def validate(self, schema=None, allow_unknown=False, purge_unknown=False):
    schema = schema if schema else self.DefaultSchema
    validator = Validator(schema, allow_unknown=allow_unknown, purge_unknown=purge_unknown)
    is_valid = validator.validate(self.__dict__)
    if not is_valid:
        raise ValidatorError('Validation failed', validator.errors)
    return validator.normalized(self.__dict__)
def test_raises(schema, data):
    validator = Validator(schema)
    document = validator.normalized(data)
    self.assertIsNotNone(document)

    r = Redis()
    db = CerbeRedis(r)

    with self.assertRaises(TypeError):
        db.save('test', validator.schema, 1, document)
def get_driver(self, task):
    validator = Validator(BASE_TASK_SCHEMA, purge_unknown=True)
    if not validator.validate(task):
        raise InvalidTask(errors=validator.errors)
    data = validator.normalized(task)
    driver_name = data['driver']
    self.validate_driver(driver_name=driver_name)
    return driver_name
def _validate_data(self, data):
    """
    Function to validate and normalize data.

    :param data: Dictionary with the info to write on log.
    :type data: dict
    :return: JSON string with the normalized data, or the original data when no schema is set.
    :rtype: str or dict
    """
    if self.schema is not None:
        val = Validator(self.schema, purge_unknown=True)
        val.validate(data)
        val.normalized(data)
        return json.dumps(val.document)
    else:
        return data
def post(self):
    data = request.get_json()
    validator = Validator(self.new_user_schema)
    is_valid = validator.validate(data)
    if not is_valid:
        return self.json_response(validator.errors, HTTPStatus.BAD_REQUEST)

    user = User.create(**validator.normalized(data))
    if not user:
        return self.json_response({'username': ['already exists']}, HTTPStatus.CONFLICT)

    return self.json_response({'id': user.id}, HTTPStatus.CREATED)
def validate_dashboard_query(self, query):
    """
    Validate the dashboard search query

    :query: the search query to get the dashboard data
    """
    validator = Validator(Schemas().dashboard, purge_unknown=True)
    query = validator.normalized(query)
    validation = validator.validate(query)
    if not validation:
        raise ValidationError(validator.errors)
    return query
def validate_search(self, query):
    """
    Validate the search query payload

    :query: the search query object
    """
    validator = Validator(Schemas().search, purge_unknown=True)
    query = validator.normalized(query)
    validation = validator.validate(query)
    if not validation:
        raise ValidationError(validator.errors)
    return query
def read_config():
    """
    Parse YAML config from the file.
    """
    config = None

    # If the file is absent, FileNotFoundError exception will be thrown.
    with open(CONFIG, 'r') as file:
        config = yaml.load(file, Loader=yaml.FullLoader)

    # Validate config.
    v = Validator(allow_unknown=True)
    if not v.validate(config, CONFIG_SCHEMA):
        raise Exception(f"Config validation failed: {v.errors}")

    return v.normalized(config)
def validate_step_add(self, report_id, step):
    """
    Validate a step object

    :report_id: the report id
    :step: the step data
    """
    validator = Validator(Schemas().step, purge_unknown=True)
    step = validator.normalized(step)
    validation = validator.validate(step)
    if not validation:
        raise ValidationError(validator.errors)
    return step
def validate_scenario_add(self, report_id, scenario):
    """
    Validate a scenario object

    :report_id: the report id
    :scenario: the scenario data
    """
    validator = Validator(Schemas().scenario, purge_unknown=True)
    scenario = validator.normalized(scenario)
    validation = validator.validate(scenario)
    if not validation:
        raise ValidationError(validator.errors)
    return scenario
def normalize_inputs(current_file, config: dict):
    spec_path = os.path.join(os.path.dirname(current_file), 'spec.yml')
    if not os.path.isfile(spec_path):
        raise FileNotFoundError(f"Can't find spec.yml file for {current_file}.")

    with open(spec_path, 'r') as f:
        spec_schema = yaml.safe_load(f)

    spec_schema['inputs'] = spec_schema['inputs'] and TemplateDictionaryBuilder(spec_schema['inputs']).build()
    spec = Validator(schema).normalized(spec_schema)['inputs']

    validator = Validator(spec)
    config = validator.normalized(config)
    if not validator.validate(config):
        raise ValidationError(f"{','.join(validator.errors)}")
    return config
async def get(self):
    schema = {
        'date': {
            'type': 'date',
            'required': True,
            'coerce': lambda s: parse(s).date()
        }
    }
    v = Validator(schema)
    params = {'date': self.get_query_argument("date")}
    if not v.validate(params):
        raise HTTPError(400, log_message=json.dumps(v.errors))
    normalized_params = v.normalized(params)
    date = normalized_params['date']
    response = get_daily_report(date)
    self.write(json.dumps(response))
def put(self):
    data = request.get_json()
    validator = Validator(self.login_schema)
    is_valid = validator.validate(data)
    if not is_valid:
        return self.json_response(validator.errors, HTTPStatus.BAD_REQUEST)

    clean_data = validator.normalized(data)
    user = self.logged_user
    if 'fullname' in clean_data:
        user.fullname = clean_data['fullname']
    user.set_password(clean_data['password'])

    db.session.add(user)
    db.session.commit()
    return self.json_response({'id': user.id}, HTTPStatus.ACCEPTED)
def decorator(*args, **kwargs):
    body = request.get_json()
    if not isinstance(body, dict):
        return abort(400, 'Request body must be an object')

    validator = Validator(schema)
    normalized = validator.normalized(body)
    if not validator.validate(normalized):
        return abort(400, validator.errors)

    g.body = normalized  # !
    return func(*args, **kwargs)
def validate_value(v: cerberus.Validator, value: Dict[str, Any]):
    validation_result = v.validate(value, normalize=True)
    if not validation_result:
        raise ValidationError(v.errors)  # pragma: no cover
    else:
        return v.normalized(value)
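# Hedged usage sketch (hypothetical schema): validate_value() takes a pre-built
# cerberus Validator plus a dict, raises ValidationError on failure, and returns
# the normalized document (defaults applied) on success.
v = cerberus.Validator({'count': {'type': 'integer', 'default': 0}})
clean = validate_value(v, {})  # -> {'count': 0}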
def post_loader():
    CONFIG_SCHEMA = load_yaml_schema(
        pkg_resources.resource_filename('yandextank.plugins.DataUploader',
                                        'config/postloader_schema.yaml'))

    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-c', '--config',
                        help='YAML config. Format:\n{}'.format(yaml.dump(CONFIG_SCHEMA)))
    parser.add_argument('-a', '--api_address',
                        help='service to upload test results to, e.g. https://overload.yandex.net')
    parser.add_argument('-t', '--target', help='Address of the tested target, host[:port]')
    parser.add_argument('-o', '--operator', help='User who conducted the test')
    parser.add_argument('--task', help='task name, for Lunapark users only')
    parser.add_argument('--job_name', help='Job name')
    parser.add_argument('--job_dsc', help='Job description')
    parser.add_argument('--token', help='path to token file, for Overload users only')
    parser.add_argument('test_dir', help='Directory containing test artifacts')
    args = parser.parse_args()
    assert os.path.exists(args.test_dir), 'Directory {} not found'.format(args.test_dir)
    get_logger()

    # load cfg
    if args.config:
        with open(args.config) as f:
            config = yaml.load(f)
    else:
        config = from_tank_config(args.test_dir)

    # parse target host and port
    if args.target is not None:
        try:
            target_host, target_port = args.target.rsplit(':', 1)
        except ValueError:
            target_host, target_port = args.target, None
    else:
        target_host, target_port = None, None

    # update cfg from cli options
    for key, value in [('api_address', args.api_address),
                       ('target_host', target_host),
                       ('target_port', target_port),
                       ('operator', args.operator),
                       ('task', args.task),
                       ('job_name', args.job_name),
                       ('job_dsc', args.job_dsc),
                       ('token_file', args.token)]:
        if value is not None:
            config[key] = value

    # Validation
    v = Validator(schema=CONFIG_SCHEMA, allow_unknown=True)
    if not v.validate(config):
        raise ValidationError(v.errors)
    config = v.normalized(config)

    # lunapark or overload?
    backend_type = BackendTypes.identify_backend(config['api_address'])
    if backend_type == BackendTypes.LUNAPARK:
        client = APIClient
        api_token = None
    elif backend_type == BackendTypes.OVERLOAD:
        client = OverloadClient
        try:
            api_token = DataUploader.read_token(config["token_file"])
        except KeyError:
            raise ConfigError('Token file required')
    else:
        raise RuntimeError("Backend type doesn't match any of the expected")

    user_agent = ' '.join(('Uploader/{}'.format(DataUploader.VERSION),
                           TankCore.get_user_agent()))
    api_client = client(base_url=config['api_address'],
                        user_agent=user_agent,
                        api_token=api_token,
                        core_interrupted=threading.Event()
                        # todo: add timeouts
                        )
    lp_job = LPJob(
        client=api_client,
        target_host=config.get('target_host'),
        target_port=config.get('target_port'),
        person=config.get('operator') or pwd.getpwuid(os.geteuid())[0],
        task=config.get('task'),
        name=config['job_name'],
        description=config['job_dsc'],
        tank=socket.getfqdn())
    edit_metainfo(config, lp_job)
    upload_data(args.test_dir, DATA_LOG, lp_job)
    send_config_snapshot(config, lp_job)
    try:
        upload_monitoring(args.test_dir, MONITORING_LOG, lp_job)
    except AssertionError as e:
        logger.error(e)
    lp_job.close(0)
    make_symlink(args.test_dir, lp_job.number)
    logger.info('LP job created: {}'.format(
        urljoin(api_client.base_url, str(lp_job.number))))