def all_config_validators():
    """Return yamale's default validators extended with the config-specific ones."""
    registry = DefaultValidators.copy()
    for validator_cls in (Email, Longitude, Latitude, URL):
        registry[validator_cls.tag] = validator_cls
    return registry
def validate_schema(file):
    """Validate *file* against the v2 schema using a path-aware validator.

    Propagates yamale's validation error when the file does not conform.
    """
    # PathValidator reads the file under validation from a class attribute.
    PathValidator.configuration_file = file
    registry = DefaultValidators.copy()
    registry[PathValidator.tag] = PathValidator
    data = yamale.make_data(file)
    schema = yamale.make_schema(V2_SCHEMA, validators=registry)
    yamale.validate(schema, data)
def all_validators() -> dict:
    """Return the default yamale validators plus the custom string/replica ones."""
    custom = (StringBool, StringInt, StringFloat, ReplicaList, StringEnum)
    registry = DefaultValidators.copy()
    registry.update({cls.tag: cls for cls in custom})
    return registry
def all_config_validators():
    """Build the full config validator registry on top of yamale's defaults.

    Note: String and Range replace yamale's built-in entries of the same tag.
    """
    extensions = (Email, Longitude, Latitude, URL, ipAddress, String, Range)
    registry = DefaultValidators.copy()
    for cls in extensions:
        registry[cls.tag] = cls
    return registry
def validate_schema(file):
    """Check *file* against V2_SCHEMA; raises via yamale on failure."""
    # The path validator resolves paths relative to the file being validated.
    PathValidator.configuration_file = file
    custom = DefaultValidators.copy()
    custom[PathValidator.tag] = PathValidator
    parsed = yamale.make_data(file)
    compiled = yamale.make_schema(V2_SCHEMA, validators=custom)
    yamale.validate(compiled, parsed)
def main():
    """Validate the doc_gen metadata files against their yamale schemas.

    Exits with status 1 when any schema check fails.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "doc_gen", help="The folder that contains schema and metadata files.")
    opts = arg_parser.parse_args()
    root = opts.doc_gen

    with open(os.path.join(root, 'metadata/sdks.yaml')) as fh:
        # NOTE(review): sdks_yaml is loaded but never read below — confirm intent.
        sdks_yaml = yaml.safe_load(fh)
    with open(os.path.join(root, 'metadata/services.yaml')) as fh:
        services_yaml = yaml.safe_load(fh)

    # Hand the custom validators the data they cross-check against.
    ServiceName.services = services_yaml
    ExampleId.services = services_yaml
    BlockContent.block_names = os.listdir(os.path.join(root, 'cross-content'))

    registry = DefaultValidators.copy()
    registry[ServiceName.tag] = ServiceName
    registry[ServiceVersion.tag] = ServiceVersion
    registry[ExampleId.tag] = ExampleId
    registry[BlockContent.tag] = BlockContent
    registry[String.tag] = StringExtension

    # Validate sdks.yaml file.
    ok = validate_files(
        os.path.join(root, 'validation/sdks_schema.yaml'),
        glob.glob(os.path.join(root, 'metadata/sdks.yaml')),
        registry)
    # Validate services.yaml file.
    ok &= validate_files(
        os.path.join(root, 'validation/services_schema.yaml'),
        glob.glob(os.path.join(root, 'metadata/services.yaml')),
        registry)
    # Validate example (*_metadata.yaml) files.
    ok &= validate_files(
        os.path.join(root, 'validation/example_schema.yaml'),
        glob.glob(os.path.join(root, 'metadata/*_metadata.yaml')),
        registry)

    if ok:
        print("Validation succeeded! 👍👍👍")
    else:
        print("\n********************************************")
        print("* Validation failed, please check the log! *")
        print("********************************************")
        exit(1)
def __init__(self, settings, mode):
    """Build the validator registry, binding settings and mode to each custom validator."""
    self.settings = settings
    self.validators = DefaultValidators.copy()
    # Each custom validator reads its configuration from class attributes.
    for validator_cls in (Netmask, NetworkTag, ServiceAccount, NetworkPorts):
        validator_cls.settings = self.settings
        validator_cls.mode = mode
        self.validators[validator_cls.tag] = validator_cls
def load(files=None):
    """Collect validators from the builtin module plus optional user scripts.

    files: optional iterable of paths to Python scripts defining validators.
    Returns a dict mapping validator tag -> validator class.
    """
    registry = DefaultValidators.copy()

    # Built-in validators shipped with this package.
    for attr in dir(builtin):
        candidate = getattr(builtin, attr)
        if _is_validator(candidate):
            registry[candidate.tag] = candidate

    # User-supplied validator scripts.
    # SECURITY: exec() runs arbitrary code from these files — only load
    # scripts from trusted sources.
    for script_path in files or []:
        log('INFO', f'Loading validators from user script {script_path!r}')
        with open(script_path, 'rb') as fh:
            code = compile(fh.read(), script_path, 'exec')
            globals_ = {}
            locals_ = {}
            exec(code, globals_, locals_)
            for candidate in locals_.values():
                if _is_validator(candidate):
                    registry[candidate.tag] = candidate
    return registry
def _get_yamale_errors(
        data: Any,
        schema_path: str,
        extra_validators: Optional[List[Type[Validator]]] = None) -> List[str]:
    """Run yamale over *data* and return a flat list of error strings."""
    registry = DefaultValidators.copy()
    for extra in extra_validators or []:
        registry[extra.tag] = extra
    schema = yamale.make_schema(schema_path,
                                validators=registry,
                                parser="ruamel")
    # yamale expects (data, path) pairs; the path is unused here.
    results = yamale.validate(schema, [(data, None)], _raise_error=False)
    # Flatten: each ValidationResult carries its own `errors` list.
    return [err for result in results for err in result.errors]
# --- fragment: the first lines below are the tail of a validator method whose
# `def`/`try` header lies outside this view; `value` and `self` come from it. ---
        # Syntax-check `value` as a Python eval-expression; success means valid.
        compile(str(value), '<string>', 'eval')
        return True
    except Exception as err:
        traceback.print_exc(limit=0)
        self.failure_reason = self.ERROR_STRING_INVALID_PYTHON_EXPRESSION
        return False

    def get_name(self) -> str:
        # Human-readable validator name used in messages.
        return 'Expression'

    def fail(self, value):
        # failure_reason is a %-format template filled with the offending value.
        return self.failure_reason % value


# Registry: yamale's defaults plus the custom tagged validators.
VALIDATORS = {
    **DefaultValidators.copy(),
    DataType.TAG: DataType,
    DimensionDataType.TAG: DimensionDataType,
    Identifier.TAG: Identifier,
    Expression.TAG: Expression,
}

# Schemas are compiled once at import time, resolved relative to this package.
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
STREAMING_SCHEMA = yamale.make_schema(os.path.join(PACKAGE_DIR, 'dtc_streaming_schema.yml'),
                                      validators=VALIDATORS)
WINDOW_SCHEMA = yamale.make_schema(os.path.join(PACKAGE_DIR, 'dtc_window_schema.yml'),
                                   validators=VALIDATORS)
import logging
import os

import yamale
from yamale import YamaleError
from yamale.schema import Schema
from yamale.validators import DefaultValidators

# Shared validator registry for the schemas this module builds.
validators = DefaultValidators.copy()


class YamlVerifier:
    """Singleton wrapper around the compiled recipe schema."""

    __instance = None
    recipe_schema: Schema

    def __init__(self):
        # Enforce at most one instance; get_instance() returns the first one.
        if YamlVerifier.__instance is not None:
            raise Exception("Multiple YamlVerifiers created")
        YamlVerifier.__instance = self
        schema_path = os.path.join(os.path.dirname(__file__), "../schemas/recipe.yaml")
        self.recipe_schema = yamale.make_schema(schema_path, parser='ruamel', validators=validators)

    @staticmethod
    def get_instance() -> 'YamlVerifier':
        """Return the sole instance, or None if none was created yet."""
        return YamlVerifier.__instance
def runTest(self):
    """validate() should accept data once the custom 'card' validator is registered."""
    custom = DefaultValidators.copy()
    custom['card'] = Card
    self.assertTrue(self.validate(custom))
def single_dir_validation(yaml_fp, working_dir, silent=False):
    """Validate a competition bundle: schema check, cross-reference checks, file checks.

    yaml_fp: path to the competition yaml file.
    working_dir: directory against which relative file paths are resolved.
    silent: when True, suppress progress and warning output (errors still print).

    Returns the parsed competition dict on success, or False when any
    validation error was collected. Raises when the initial yamale check
    reports a falsy result.
    """
    # ######### Initial Formatting Check ######### #
    validators = DefaultValidators.copy()  # This is a dictionary
    validators[Date.tag] = Date
    schema = yamale.make_schema(os.path.join(BASE_DIR, 'schema.yaml'), validators=validators)
    data = yamale.make_data(yaml_fp)
    is_valid = yamale.validate(schema, data)
    if not is_valid:
        raise Exception("Empty input")
    if not silent:
        print("Yaml file passed initial formatting tests")

    # ######### Deep Formatting Check ######### #
    # Standard UUID pattern; files referenced by UUID are allowed to be absent.
    re_uuid = re.compile(r'^[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}$')
    with open(yaml_fp) as f:
        competition = yaml.load(f.read(), Loader=yaml.Loader)
    validation_errors = []
    warnings = []

    # ###### Index Validation ###### #
    # 'solutions' is optional; None here means "no duplicates to report".
    duplicated_indexes = {
        "task": find_duplicate_indexes(competition["tasks"]),
        "solution": find_duplicate_indexes(competition["solutions"]) if 'solutions' in competition else None,
        "phase": find_duplicate_indexes(competition["phases"]),
        "leaderboard": find_duplicate_indexes(competition["leaderboards"]),
    }
    for key, value in duplicated_indexes.items():
        validation_errors.append(f'Duplicate {key} index(es): {value}') if value else None
    for leaderboard in competition['leaderboards']:
        column_indexes = []
        for column in leaderboard['columns']:
            if column["index"] not in column_indexes:
                column_indexes.append(column["index"])
            else:
                validation_errors.append(f"Duplicate column index: {column['index']} on leaderboard: {leaderboard['title']}")

    # Tasks Validation
    for task in competition['tasks']:
        task_keys = task.keys()
        if 'key' in task_keys:
            # A task given by key ignores all other fields except its index.
            if any([key not in ['key', 'index'] for key in task_keys]):
                warnings.append(f'Task with index {task["index"]}: If specifying a key, all other fields will be ignored on upload')
        else:
            for required_field in ['name', 'description', 'scoring_program']:
                if required_field not in task_keys:
                    validation_errors.append(f'Task with index {task["index"]}: missing required field - {required_field}')

    # Solution Tasks
    if 'solutions' in competition:
        for solution in competition['solutions']:
            for index in set([task for task in solution['tasks']]).difference(set(map(lambda t: t['index'], competition['tasks']))):
                validation_errors.append(f'Task index: "{index}" on solution: "{solution["name"]}" not present in tasks')

    # Phase Tasks
    for phase in competition['phases']:
        for index in set([task for task in phase['tasks']]).difference(set(map(lambda t: t['index'], competition['tasks']))):
            validation_errors.append(f'Task index: "{index}" on phase: "{phase["name"]}" not present in tasks')

    # Phase Solutions
    # NOTE(review): if a phase lists 'solutions' but the competition has none,
    # competition['solutions'] raises KeyError here — confirm whether the
    # schema guarantees that cannot happen.
    for phase in competition['phases']:
        if 'solutions' in phase:
            for index in set([solution for solution in phase['solutions']]).difference(set(map(lambda s: s['index'], competition['solutions']))):
                validation_errors.append(f'Solution index: "{index}" on phase: "{phase["name"]}" not present in solutions')

    # ###### Leaderboard Key Validation ###### #
    leaderboard_keys = []
    duplicate_leaderboard_keys = []
    column_errors = []
    for leaderboard in competition['leaderboards']:
        if leaderboard['key'] in leaderboard_keys:
            duplicate_leaderboard_keys.append(leaderboard['key'])
        else:
            leaderboard_keys.append(leaderboard['key'])
        column_keys = []
        for column in leaderboard['columns']:
            if column["key"] not in column_keys:
                column_keys.append(column["key"])
            else:
                column_errors.append(f"Duplicate column key: {column['key']} on leaderboard: {leaderboard['title']}")
    if duplicate_leaderboard_keys:
        validation_errors.append(f'Duplicate leaderboard keys: {duplicate_leaderboard_keys}')
    validation_errors += column_errors

    # ###### File Path Validation ###### #
    if not os.path.exists(os.path.join(working_dir, competition['image'])):
        validation_errors.append(f'Image file - ({competition["image"]}) - not found')
    for page in competition['pages']:
        if not os.path.exists(os.path.join(working_dir, page['file'])):
            validation_errors.append(f'File for page "{page["title"]}" - ({page["file"]}) - not found')
    task_files = [
        'scoring_program',
        'ingestion_program',
        'reference_data',
        'input_data',
    ]
    for task in competition['tasks']:
        for file in task_files:
            if file in task:
                if not os.path.exists(os.path.join(working_dir, task[file])):
                    # File doesn't exist, see if it was a UUID
                    if re.match(re_uuid, task[file]) is None:
                        validation_errors.append(f'File for {file} - ({task[file]}) - not found')
    if 'solutions' in competition:
        for solution in competition['solutions']:
            if not os.path.exists(os.path.join(working_dir, solution['path'])):
                validation_errors.append(f'File for "{solution["name"]}" - ({solution["path"]}) - not found')

    # ###### Warning Printing ###### #
    if warnings and not silent:
        print('WARNINGS:')
        for warning in warnings:
            print(f'- {warning}')

    # ###### Error Printing ###### #
    if validation_errors:
        print("ERRORS:")
        for error in validation_errors:
            print(f'- {error}')
        return False
    else:
        if not silent:
            print("Yaml bundle is valid")
        return competition
# --- fragment: the lines just below are the tail of a function that begins
# outside this view; the module-level registries that follow are complete. ---
                                parser="ruamel")
    formatted_data = [(data, None)]
    # This is an array of `ValidationResult`s, each of which has an
    # array of errors in its `errors` field
    validation_results = yamale.validate(schema, formatted_data, _raise_error=False)
    all_errors = []
    for result in validation_results:
        all_errors.extend(result.errors)
    return all_errors


# One registry per provider: yamale's defaults plus that provider's Opta validator.
vanilla_validators = DefaultValidators.copy()
vanilla_validators[Opta.tag] = Opta
aws_validators = DefaultValidators.copy()
aws_validators[AwsOpta.tag] = AwsOpta
gcp_validators = DefaultValidators.copy()
gcp_validators[GcpOpta.tag] = GcpOpta
azure_validators = DefaultValidators.copy()
# NOTE(review): "AureOpta" looks like a typo for AzureOpta — confirm against
# the class definition before renaming.
azure_validators[AureOpta.tag] = AureOpta
local_validators = DefaultValidators.copy()
local_validators[LocalOpta.tag] = LocalOpta
helm_validators = DefaultValidators.copy()
helm_validators[HelmOpta.tag] = HelmOpta

# Dump the validator registry to a temp file (the with-block may continue
# past this view).
with NamedTemporaryFile(mode="w") as f:
    yaml.dump(REGISTRY["validator"], f)
    f.flush()
def runTest(self):
    """The schema must validate when 'card' is present in the validator set."""
    extended = DefaultValidators.copy()
    extended.update({'card': Card})
    self.assertTrue(self.validate(extended))
def __init__(self):
    """Register the Path and File validators on top of yamale's defaults."""
    registry = DefaultValidators.copy()
    registry[Path.tag] = Path
    registry[File.tag] = File
    self._validators = registry
def __load_schema():
    """Compile the configuration schema with the custom Options validator."""
    custom = DefaultValidators.copy()
    custom[Options.tag] = Options
    return yamale.make_schema(
        ConfigurationParser.__SCHEMA_PATH, validators=custom)
def get_validators(cfg_file=None, approved_apis_file=None, backend=False):
    """Build the project-factory validator registry.

    cfg_file / approved_apis_file may be file paths, pre-parsed dicts, or
    None (None triggers a search in the current directory, then ../).
    Exits the process when a file cannot be located or parsed.
    """

    def _read_yaml(path):
        # Parse a yaml file, exiting on parse errors.
        with open(path, 'r') as stream:
            try:
                return yaml.safe_load(stream)
            except yaml.YAMLError as exc:
                print(exc, file=sys.stderr)
                sys.exit(1)

    # Locate the config file when none was given.
    # NOTE(review): when both ./config.yaml and ../config.yaml exist, the
    # parent-directory copy wins — confirm that precedence is intended.
    if not cfg_file:
        if os.path.exists('config.yaml'):
            cfg_file = 'config.yaml'
        if os.path.exists('../config.yaml'):
            cfg_file = '../config.yaml'
        if cfg_file is None:
            print('Could not find config file in current directory or ../',
                  file=sys.stderr)
            sys.exit(1)

    # Locate the approved-APIs file when none was given (same precedence).
    if not approved_apis_file:
        if os.path.exists('projectApprovedApis.yaml'):
            approved_apis_file = 'projectApprovedApis.yaml'
        if os.path.exists('../projectApprovedApis.yaml'):
            approved_apis_file = '../projectApprovedApis.yaml'
        if approved_apis_file is None:
            print(
                'Could not find approved APIs file in current directory or ../',
                file=sys.stderr)
            sys.exit(1)

    if isinstance(cfg_file, dict):
        # Coming from Project Factory frontend
        projectFactoryConfig = cfg_file
    else:
        projectFactoryConfig = _read_yaml(cfg_file)

    if backend:
        projectFactoryConfig['terraformServiceAccount'] = (
            projectFactoryConfig['app']['backend']['serviceAccount'])

    if isinstance(approved_apis_file, dict):
        approvedApisConfig = approved_apis_file
    else:
        approvedApisConfig = _read_yaml(approved_apis_file)

    validators = DefaultValidators.copy()
    # Validator bound to the approved-APIs config.
    Apis.config = approvedApisConfig
    validators[Apis.tag] = Apis
    # Validators bound to the project-factory config.
    for cls in (Environments, Environment, Folder, Teams, User):
        cls.config = projectFactoryConfig
        validators[cls.tag] = cls
    # Stateless validators.
    validators[Email.tag] = Email
    validators[_False.tag] = _False
    validators[_True.tag] = _True
    Labels.config = projectFactoryConfig
    validators[Labels.tag] = Labels
    return validators