def make_schema(self, *keys) -> None:
    if len(keys) == 0:
        # create full schema
        self._schema = yamale.make_schema(schema_path, validators=validators)
        self._keys = None
        return
    self._keys = keys
    # create schema for specified keys
    kv = {k: v for k, v in self.yaml_.items() if k in keys}
    self._schema = yamale.make_schema(content=yaml.dump(kv), validators=validators)
def make_from_config(cls, config_path: str, **kwargs) -> YABU:
    # Load the config schema used to validate the config
    schema = yamale.make_schema(
        path.join(path.dirname(__file__), "resources/config.schema.yaml"))
    # Try to load the config file
    try:
        config = yamale.make_data(config_path)
    except FileNotFoundError:
        _LOGGER.error("Configuration file '{}' not found".format(config_path))
        raise exceptions.ConfigNotFound(config_path)
    except ScannerError as e:
        _LOGGER.error("Invalid configuration file '{}'\n{}".format(config_path, e))
        raise exceptions.InvalidConfig(e)
    # Try to validate the configuration with the schema
    try:
        yamale.validate(schema, config)
    except YamaleError as e:
        _LOGGER.error("Invalid configuration file '{}'\n{}".format(config_path, e))
        raise exceptions.InvalidConfig(e)
    _LOGGER.info("Configuration loaded")
    # create instance from config
    config, _ = config[0]
    return cls(**config, **kwargs)
def load_yaml(file):
    with open(file) as f:
        spec = yaml.load(f.read())
    schema = yamale.make_schema('../schema.yaml')
    schema.validate(spec, file, True)
    file_name = os.path.split(file)[1].split('.')[0]
    return file_name, spec
def _validate(schema_path, data_path, parser, strict, _raise_error):
    schema = schemas.get(schema_path)
    if not schema:
        schema = yamale.make_schema(schema_path, parser)
        schemas[schema_path] = schema
    data = yamale.make_data(data_path, parser)
    return yamale.validate(schema, data, strict, _raise_error)
def validate(self, validators=None):
    schema = self.schema
    yaml = self.yaml
    base_dir = self.base_dir
    if schema is None:
        return
    if type(yaml) != list:
        yaml = [yaml]
    if base_dir is not None:
        schema = os.path.join(base_dir, schema)
        yaml = {os.path.join(base_dir, y) for y in yaml}
    # Run yaml through glob and flatten list
    yaml = set(itertools.chain(*map(glob.glob, yaml)))
    # Remove schema from set of data files
    yaml = yaml - {schema}
    yamale_schema = yamale.make_schema(schema, validators=validators)
    yamale_data = itertools.chain(*map(yamale.make_data, yaml))
    return yamale.validate(yamale_schema, yamale_data) is not None
def test_config_file_schema():
    schema = yamale.make_schema(os.path.join(ROOT_DIR, 'src/config/schema.yaml'))
    source_files = os.listdir(SOURCE_DIR)
    for source_file in source_files:
        print(source_file)
        data = yamale.make_data(os.path.join(SOURCE_DIR, source_file))
        yamale.validate(schema, data, strict=True)
def recipe_with_schema(filename):
    """Check if the recipe content matches schema."""
    schema_file = os.path.join(os.path.dirname(__file__), 'recipe_schema.yml')
    logger.debug("Checking recipe against schema %s", schema_file)
    recipe = yamale.make_data(filename)
    schema = yamale.make_schema(schema_file)
    yamale.validate(schema, recipe)
def validate(cls, cfg):
    """Validate arguments

    :param cfg: project configuration
    :type cfg: dict
    """
    schema_yml = """
        measures_to_ignore: list(str(), required=False)
        build_existing_model: map(required=False)
        emissions: list(include('scenario-spec'), required=False)
        reporting_measures: list(include('measure-spec'), required=False)
        simulation_output_report: map(required=False)
        server_directory_cleanup: map(required=False)
        ---
        scenario-spec:
            scenario_name: str(required=True)
            type: str(required=True)
            elec_folder: str(required=True)
            gas_value: num(required=False)
            propane_value: num(required=False)
            oil_value: num(required=False)
            wood_value: num(required=False)
        measure-spec:
            measure_dir_name: str(required=True)
            arguments: map(required=False)
        """
    workflow_generator_args = cfg['workflow_generator']['args']
    schema_yml = re.sub(r'^ {8}', '', schema_yml, flags=re.MULTILINE)
    schema = yamale.make_schema(content=schema_yml, parser='ruamel')
    data = yamale.make_data(content=json.dumps(workflow_generator_args), parser='ruamel')
    yamale.validate(schema, data, strict=True)
    return True
def validate(cls, cfg):
    """Validate arguments

    :param cfg: project configuration
    :type cfg: dict
    """
    schema_yml = """
        measures_to_ignore: list(str(), required=False)
        residential_simulation_controls: map(required=False)
        measures: list(include('measure-spec'), required=False)
        reporting_measures: list(include('measure-spec'), required=False)
        simulation_output: map(required=False)
        timeseries_csv_export: map(required=False)
        server_directory_cleanup: map(required=False)
        ---
        measure-spec:
            measure_dir_name: str(required=True)
            arguments: map(required=False)
        """
    workflow_generator_args = cfg['workflow_generator']['args']
    schema_yml = re.sub(r'^ {8}', '', schema_yml, flags=re.MULTILINE)
    schema = yamale.make_schema(content=schema_yml, parser='ruamel')
    data = yamale.make_data(content=json.dumps(workflow_generator_args), parser='ruamel')
    yamale.validate(schema, data, strict=True)
    return cls.validate_measures_and_arguments(cfg)
def validate(self, yaml_path):
    # TODO: this is weird, observer should handle this instead
    validated_at = self.validated_timestamps.get(yaml_path)
    if validated_at is not None and (datetime.now() - validated_at).seconds < 3:
        return
    self.validated_timestamps[yaml_path] = datetime.now()

    schema_path = self.schema
    base_dir = self.base_dir
    if schema_path is None:
        return
    if base_dir is not None:
        schema_path = os.path.join(base_dir, schema_path)

    yamale_schema = yamale.make_schema(schema_path)
    try:
        yamale_data = yamale.make_data(yaml_path)
        for result in yamale.validate(yamale_schema, yamale_data, _raise_error=False):
            if result.isValid():
                logging.info('[valid] %s', yaml_path)
            else:
                logging.error(
                    '[invalid] %s',
                    str(yaml_path) + '\n\t' + '\n\t'.join(result.errors))
    except ScannerError as se:
        logging.error('[exception] %s', str(yaml_path) + '\n\t' + str(se))
def _run(schema_path, output_path):
    """Code generator"""
    init_name = 'pcgen_loader'
    type_name = 'types'
    schema = yamale.make_schema(schema_path)

    c_types_header = C_TypesGenerator(schema)
    c_types_header.define_types()
    c_types_header.dump_types_definition(type_name + '.h')

    c_loader = C_DataLoader(schema, init_name, type_name)
    c_loader.gen_init_code()
    c_free_code, c_free_header = c_free_root(schema)
    c_loader.init_code.extend(c_free_code)
    c_loader.init_header.extend(c_free_header)
    c_loader.dump_code()
    c_loader.dump_header()

    if output_path is not None:
        with open(output_path + '.c', 'w') as f:
            f.write(MAIN_FUNCTION)
def qc_metadata(metadatafile):
    print("Start metadata validation...")
    schema = yamale.make_schema('../example/dummyschema.yaml')
    data = yamale.make_data(metadatafile)
    # Validate data against the schema. Throws a ValueError if data is invalid.
    yamale.validate(schema, data)
    print("...complete!")
def validate(data: Dict, schema: Path) -> None:
    schema = yamale.make_schema(schema, validators=yamale_validators.all_validators())
    try:
        yamale.validate(schema, [(data, "<dict literal>")], strict=True)
    except ValueError as validation_error:
        raise YamaleValidationException(validation_error)
def validate_yaml(input: str) -> list:
    """
    Validate the input YAML data in the request
    """
    if not input:
        return None
    try:
        raw_data = yaml.safe_load(input)
    except yaml.YAMLError:
        LOGGER.exception("YAML parsing error")
        return None
    # format yamale.validate() is expecting
    data = [(raw_data, None)]
    schema_file = os.path.dirname(os.path.realpath(__file__)) + "/schema.yml"
    schema = yamale.make_schema(schema_file)
    try:
        data = yamale.validate(schema, data, strict=True)
    except ValueError:
        LOGGER.exception("YAML validation error")
        return None
    if not isinstance(raw_data, list):
        LOGGER.error("Request data is not a list")
        return None
    return raw_data
def main(directory=None, file=None, schema=None, recursive=False, quiet=False):
    'Program entry point.'
    yamale_schema = yamale.make_schema(schema)
    search = "**/*.yaml" if recursive else "*.yaml"
    has_errors = []
    files = list(file)
    for d in directory:
        files = files + glob.glob(os.path.join(d, search), recursive=recursive)
    for document in files:
        yamale_data = yamale.make_data(document)
        try:
            yamale.validate(yamale_schema, yamale_data)
            if not quiet:
                print(f'✅ {document} -> {os.path.basename(schema)}')
        except ValueError as e:
            has_errors.append(document)
            print(e)
            print(f'❌ {document} -> {os.path.basename(schema)}')
    if len(has_errors) > 0:
        raise SystemExit(f"❌ Errors found in {len(has_errors)} documents.")
def test_site_infra(self):
    schema = yamale.make_schema(
        './config_validation_engine/validators/schema/site_infra_schema.yaml',
        validators=val.all_config_validators())
    data = yamale.make_data(
        './config_validation_engine/validators/tests/site_infra_data.yaml')
    assert yamale.validate(schema, data)
def test_config_file_schema():
    schema = yamale.make_schema(path_utils.path_to('schema_yaml'))
    source_files = os.listdir(path_utils.path_to('sources_dir'))
    for source_file in source_files:
        print(source_file)
        data = yamale.make_data(
            os.path.join(path_utils.path_to('sources_dir'), source_file))
        yamale.validate(schema, data, strict=True)
def validate_schema(file):
    validators = DefaultValidators.copy()
    PathValidator.configuration_file = file
    validators[PathValidator.tag] = PathValidator
    data = yamale.make_data(file)
    schema = yamale.make_schema(V2_SCHEMA, validators=validators)
    yamale.validate(schema, data)
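# A minimal sketch (assumed, not taken from the snippet above) of how a custom
# validator such as PathValidator is typically defined before being registered
# in a copy of DefaultValidators: subclass yamale's Validator, give it a schema
# tag, and implement _is_valid(). The class name, tag, and 'schema.yaml' path
# below are hypothetical.
import os
import yamale
from yamale.validators import DefaultValidators, Validator

class PathExists(Validator):
    """Hypothetical validator accepting values that are existing filesystem paths."""
    tag = 'path_exists'

    def _is_valid(self, value):
        return isinstance(value, str) and os.path.exists(value)

validators = DefaultValidators.copy()
validators[PathExists.tag] = PathExists
# Any schema passed here may now use path_exists() as a type.
schema = yamale.make_schema('schema.yaml', validators=validators)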
def _validate_config(cls, name):
    config_file = ConfigSet.__CONFIG_MAP[name]
    schema_file = ConfigSet.__SCHEMA_MAP[name]
    logger.info("validating config " + config_file + " against schema " +
                schema_file + " for " + name)
    schema = yamale.make_schema(schema_file)
    data = yamale.make_data(config_file)
    yamale.validate(schema, data)
def __init__(self, schema_file):
    validators_list = validators.DefaultValidators
    validators_list[RequiredReferenceValidator.__name__] = RequiredReferenceValidator
    validators_list[RequiredReferenceValidator.tag] = RequiredReferenceValidator
    self.yamale_schema = yamale.make_schema(schema_file, validators=validators_list)
def __init__(self, configFile):
    self.loadedYaml = yamale.make_data(configFile)
    loadedYamlSchema = yamale.make_schema(
        str(Path(__file__).parent) + "/resources/configuration_schema.yml")
    try:
        yamale.validate(loadedYamlSchema, self.loadedYaml)
    except yamale.YamaleError as e:
        print('Validation failed!\n%s' % str(e))
        exit(1)
def validate_yaml(schema_file: str, data_file: str):
    if not os.path.isfile(schema_file):
        raise RuntimeError(f"Schema yaml file is missing: {schema_file}")
    if not os.path.isfile(data_file):
        raise RuntimeError(f"Data yaml file is missing: {data_file}")
    schema = yamale.make_schema(schema_file)
    data = yamale.make_data(data_file)
    yamale.validate(schema, data)
def test_validate_errors(use_schema_string, use_data_string, expected_message_re):
    schema_path = get_fixture('types.yaml')
    data_path = get_fixture('types_bad_data.yaml')
    if use_schema_string:
        with io.open(schema_path, encoding='utf-8') as f:
            schema = yamale.make_schema(content=f.read())
    else:
        schema = yamale.make_schema(schema_path)
    if use_data_string:
        with io.open(data_path, encoding='utf-8') as f:
            data = yamale.make_data(content=f.read())
    else:
        data = yamale.make_data(data_path)
    with pytest.raises(yamale.yamale_error.YamaleError) as excinfo:
        yamale.validate(schema, data)
    assert re.match(expected_message_re, excinfo.value.message, re.MULTILINE), \
        'Message {} should match {}'.format(excinfo.value.message, expected_message_re)
def load_yaml_to_dict(self):
    """
    Load default runconfig, override with user input, and convert to dict
    Leading namespaces can be stripped off down the line
    """
    # assign default config and yamale schema
    # assume defaults have already been yamale validated
    try:
        default_cfg = f'{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/{self.workflow_name}.yaml'
        schema = yamale.make_schema(
            f'{helpers.WORKFLOW_SCRIPTS_DIR}/schemas/{self.workflow_name}.yaml',
            parser='ruamel')
    except:
        err_str = f'workflow {self.workflow_name} does not have a schema.'
        raise ValueError(err_str)

    # set run config type
    run_config_is_txt = False
    # if newlines then run_config is a YAML string (primarily for unit tests)
    if self.args.run_config_path is not None:
        if '\n' in self.args.run_config_path:
            run_config_is_txt = True

    # validate yaml file taken from command line
    try:
        if run_config_is_txt:
            data = yamale.make_data(content=self.args.run_config_path, parser='ruamel')
        else:
            data = yamale.make_data(self.args.run_config_path, parser='ruamel')
    except yamale.YamaleError as e:
        err_str = (f'Yamale unable to load {self.workflow_name} runconfig yaml '
                   f'{self.args.run_config_path} for validation.')
        raise yamale.YamaleError(err_str) from e

    try:
        yamale.validate(schema, data)
    except yamale.YamaleError as e:
        err_str = (f'Validation failed for {self.workflow_name} runconfig yaml '
                   f'{self.args.run_config_path}.')
        raise yamale.YamaleError(err_str) from e

    # load default config
    parser = YAML(typ='safe')
    with open(default_cfg, 'r') as f:
        self.cfg = parser.load(f)

    # load user config based on input type
    if run_config_is_txt:
        self.user = parser.load(self.args.run_config_path)
    else:
        with open(self.args.run_config_path) as f_yaml:
            self.user = parser.load(f_yaml)

    # copy user supplied config into default config
    helpers.deep_update(self.cfg, self.user)
def __init__(self):
    if YamlVerifier.__instance is None:
        YamlVerifier.__instance = self
    else:
        raise Exception("Multiple YamlVerifiers created")
    path = os.path.join(os.path.dirname(__file__), "../schemas/recipe.yaml")
    self.recipe_schema = yamale.make_schema(path, parser='ruamel', validators=validators)
def main(assets_glob, assets_schema, custom_validators):
    # Run only if a schema is set
    if assets_schema:
        schema = yamale.make_schema(assets_schema,
                                    validators=validators.load(custom_validators))
        for f in sorted(glob.glob(assets_glob, recursive=True)):
            log('INFO', f"Validating {f} against schema {assets_schema}")
            yamale.validate(schema, yamale.make_data(f))
        log('INFO', "... finished")
def valid_conf(schema_file, config_file):
    schema_yamale = yamale.make_schema(schema_file)
    config_yamale = yamale.make_data(config_file)
    try:
        yamale.validate(schema_yamale, config_yamale)
    except ValueError as e:
        for r in e.results:
            for err in r.errors:
                print(f"[ERROR] {err}")
        sys.exit(1)
def validate_project_schema(project_file):
    cfg = get_project_configuration(project_file)
    schema_version = cfg.get('schema_version')
    version_schema = os.path.join(os.path.dirname(__file__), 'schemas', f'v{schema_version}.yaml')
    if not os.path.isfile(version_schema):
        logger.error(f'Could not find validation schema for YAML version {schema_version}')
        raise FileNotFoundError(version_schema)
    schema = yamale.make_schema(version_schema)
    data = yamale.make_data(project_file, parser='ruamel')
    return yamale.validate(schema, data, strict=True)
def read_config(config_file: str) -> Config:
    config_file_path = Path(config_file)
    if not config_file_path.is_file():
        logger.error(f'Config file does not exist at {config_file_path.resolve()}')
        sys.exit(1)
    schema = yamale.make_schema(Path('schema.yml'))
    data = yamale.make_data(config_file_path)
    try:
        yamale.validate(schema, data)
    except ValueError:
        logger.exception('Config file validation failed')
        sys.exit(1)
    return parse_config_file(config_file_path)
def _validate(schema_path, data_path, parser, strict, _raise_error):
    schema = schemas.get(schema_path)
    try:
        if not schema:
            schema = yamale.make_schema(schema_path, parser)
            schemas[schema_path] = schema
    except (SyntaxError, ValueError) as e:
        results = [Result([str(e)])]
        if not _raise_error:
            return results
        raise YamaleError(results)
    data = yamale.make_data(data_path, parser)
    return yamale.validate(schema, data, strict, _raise_error)
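# A minimal sketch (placeholder file names) of consuming the results yielded when
# yamale.validate() is called with _raise_error=False, as the cached _validate()
# helpers and the watcher example above rely on: each result reports isValid()
# and an errors list instead of raising YamaleError.
import yamale

schema = yamale.make_schema('schema.yaml')
data = yamale.make_data('data.yaml')
for result in yamale.validate(schema, data, strict=True, _raise_error=False):
    if not result.isValid():
        for error in result.errors:
            print(error)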
def _validate(schema_path, data_path, parser):
    schema = schemas.get(schema_path)
    try:
        if not schema:
            schema = yamale.make_schema(schema_path, parser)
            schemas[schema_path] = schema
        data = yamale.make_data(data_path, parser)
        yamale.validate(schema, data)
    except Exception as e:
        error = '\nError!\n'
        error += 'Schema: %s\n' % schema_path
        error += 'Data file: %s\n' % data_path
        error += traceback.format_exc()
        print(error)
        raise ValueError('Validation failed!')
def test_bad_schema():
    with pytest.raises(SyntaxError) as excinfo:
        yamale.make_schema(get_fixture('bad_schema.yaml'))
    assert 'fixtures/bad_schema.yaml' in str(excinfo.value)
    'bad': 'keywords_bad.yaml',
    'good': 'keywords_good.yaml'
}

lists = {
    'schema': 'lists.yaml',
    'bad': 'lists_bad.yaml',
    'good': 'lists_good.yaml'
}

test_data = [types, nested, custom, keywords, lists]

for d in test_data:
    for key in d.keys():
        if key == 'schema':
            d[key] = sch.make_schema(get_fixture(d[key]))
        else:
            d[key] = sch.make_data(get_fixture(d[key]))


def test_tests():
    ''' Make sure the test runner is working.'''
    assert 1 + 1 == 2


def test_flat_make_schema():
    assert isinstance(types['schema']['string'], val.String)


def test_nested_schema():
    nested_schema = nested['schema']
    'bad': 'lists_bad.yaml',
    'good': 'lists_good.yaml'
}

maps = {
    'schema': 'map.yaml',
    'bad': 'map_bad.yaml',
    'good': 'map_good.yaml'
}

test_data = [types, nested, custom, keywords, lists, maps]

for d in test_data:
    for key in d.keys():
        if key == 'schema':
            d[key] = yamale.make_schema(get_fixture(d[key]))
        else:
            d[key] = yamale.make_data(get_fixture(d[key]))


def test_tests():
    ''' Make sure the test runner is working.'''
    assert 1 + 1 == 2


def test_flat_make_schema():
    assert isinstance(types['schema']['string'], val.String)


def test_nested_schema():
    nested_schema = nested['schema']
from glob import glob
import sys

import yamale

schema = yamale.make_schema('./docs/schema.yaml')
data = yamale.make_data('./docs/current.settings.yaml')
yamale.validate(schema, data)

templates = glob('mia/templates/*/settings.yaml')
for template in templates:
    sys.stdout.write('Checking %s against schema... ' % template)
    data = yamale.make_data(template)
    yamale.validate(schema, data)
    print("done!")
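# A minimal, self-contained sketch (invented schema and data contents) of the
# kind of schema/data pair the snippets above consume, using the content=
# keyword several of them demonstrate; validate() raises yamale.YamaleError on
# failure.
import yamale

schema = yamale.make_schema(content="""
name: str()
port: int(min=1, max=65535)
tags: list(str(), required=False)
""")
data = yamale.make_data(content="""
name: example
port: 8080
tags: [api, internal]
""")
yamale.validate(schema, data, strict=True)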