def _validate_dataset_file_tags(self, manifest_kind, manifest):
    """Check every file's tags in each of the manifest's datasets against the file tags template
    declared for that dataset in the corresponding manifest field of the twine.

    :param str manifest_kind: the kind of manifest that's being validated (so the correct schema can be accessed)
    :param octue.resources.manifest.Manifest manifest: the manifest whose datasets' files are to be validated
    :return None:
    """
    # This is the manifest schema included in the twine.json file, not the schema for `manifest.json` files.
    manifest_schema = getattr(self.twine, manifest_kind)

    for dataset_name, dataset_schema in manifest_schema["datasets"].items():
        dataset = manifest.datasets.get(dataset_name)
        file_tags_template = dataset_schema.get("file_tags_template")

        # Allow optional datasets in future (not currently allowed by `twined`).
        if not dataset or not file_tags_template:
            continue

        for datafile in dataset.files:
            try:
                jsonschema_validate(instance=dict(datafile.tags), schema=file_tags_template)
            except ValidationError as error:
                message = (
                    error.message
                    + f" for files in the {dataset_name!r} dataset. The affected datafile is "
                    f"{datafile.path!r}. Add the property to the datafile as a tag to fix this."
                )
                raise twined.exceptions.invalid_contents_map[manifest_kind](message)
def assertValidResponse(self, item, schema=None):
    """Assert that ``item`` is a valid JSON response.

    A ``status`` string field is always required, in addition to any fields
    described by ``schema``.

    :param item: response object whose ``.json`` attribute holds the decoded body.
    :param schema: optional list of field descriptors (objects exposing ``name``,
        ``as_property()`` and ``as_required()``). Defaults to no extra fields.
    """
    # Bug fix: the original used a mutable default argument (`schema=[]`).
    # Copy the caller's list (or start fresh) so we never mutate their input.
    schema = list(schema) if schema else []
    schema.append(JSONString('status', required=True))

    # Build a JSON Schema object from the field descriptors.
    d = {
        'type': 'object',
        'properties': {},
        "additionalProperties": False,
        "required": []
    }
    for field in schema:
        d['properties'][field.name] = field.as_property()
        for req in field.as_required():
            if req not in d["required"]:
                d["required"].append(req)

    try:
        jsonschema_validate(item.json, schema=d)
    except JSONValidationError:
        self.fail("Response JSON is not valid")
def validate(
    self, declaration: Union[dict, str], version: Union[str, None] = None
) -> None:
    """Validate a declaration against the AS3 Schema. Raises AS3ValidationError on failure.

    :param declaration: The declaration to be validated against the AS3 Schema.
    :param version: Allows to validate the declaration against the specified version
        instead of this AS3 Schema instance version. If set to "auto", the version of
        the declaration is used.
    """
    if isinstance(declaration, str):
        declaration = json.loads(declaration)

    if not version:
        version = self.version
    elif version == "auto":
        # Take the version from the declaration itself.
        version = declaration["declaration"]["schemaVersion"]
    else:
        version = self._check_version(version=version)

    # Bug fix: the original also caught RefResolutionError but then fell through
    # the except block without re-raising, silently swallowing the error. Catch
    # only the types we translate and let resolution errors propagate.
    try:
        jsonschema_validate(
            declaration, schema=self._schema_ref_updated(version=version)
        )
    except ValidationError as exc:
        raise AS3ValidationError("AS3 Validation Error", exc) from exc
    except SchemaError as exc:
        raise AS3SchemaError("JSON Schema Error", exc) from exc
def validate(content: dict) -> bool:
    """Return True when ``content`` conforms to the preference JSON schema.

    On any validation failure the error is logged and False is returned.
    """
    try:
        jsonschema_validate(instance=content, schema=PreferenceValidator.__json_validator)
    except Exception as exception:
        Logger.error("PreferenceValidator.validate", str(exception))
        return False
    return True
def main(args):
    """Validate the JSON sample at ``args.sample_path`` against the ``SystemProfile``
    definition contained in the YAML spec at ``args.spec_path``.
    """
    with open(args.sample_path) as sample_data, open(args.spec_path) as spec_data:
        sample_dict = json_load(sample_data)
        spec_dict = yaml_safe_load(spec_data)

    # Point the schema's top-level $ref at the SystemProfile definition.
    schema_dict = dict(spec_dict)
    schema_dict["$ref"] = "#/$defs/SystemProfile"
    jsonschema_validate(instance=sample_dict, schema=schema_dict)
def system_profile_is_valid(self, system_profile):
    """Validate ``system_profile`` against the system profile JSON schema and reject
    empty keys inside any disk device's options.

    :raises MarshmallowValidationError: on a schema violation or an empty option key.
    """
    try:
        jsonschema_validate(system_profile, self.system_profile_normalizer.schema)
    except JsonSchemaValidationError as error:
        raise MarshmallowValidationError(f"System profile does not conform to schema.\n{error}") from error

    for dd_i, device in enumerate(system_profile.get("disk_devices", [])):
        if check_empty_keys(device.get("options")):
            continue
        raise MarshmallowValidationError(f"Empty key in /system_profile/disk_devices/{dd_i}/options.")
def validate_yaml(data):
    """Decode ``data`` as YAML and validate it against the schema in ``yaml-schema.yml``.

    :param data: YAML document (string or stream).
    :return: True on success; yaml/jsonschema errors propagate on failure.
    """
    # attempt to yaml_decode the file (will throw exception if it fails):
    # SECURITY NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary objects from untrusted input — consider yaml.safe_load.
    data = yaml.load(data)
    # Bug fix: `print data` is Python 2 syntax and a SyntaxError under Python 3.
    print(data)
    with open('yaml-schema.yml','r') as f:
        yaml_schema = yaml.load(f.read())
    jsonschema_validate(data, yaml_schema)
    return True
def validate_yaml(data):
    """Decode ``data`` as YAML and validate it against the schema in ``yaml-schema.yml``.

    :param data: YAML document (string or stream).
    :return: True on success; yaml/jsonschema errors propagate on failure.
    """
    # attempt to yaml_decode the file (will throw exception if it fails):
    # SECURITY NOTE(review): yaml.load without an explicit Loader can construct
    # arbitrary objects from untrusted input — consider yaml.safe_load.
    data = yaml.load(data)
    # Bug fix: `print data` is Python 2 syntax and a SyntaxError under Python 3.
    print(data)
    with open('yaml-schema.yml', 'r') as f:
        yaml_schema = yaml.load(f.read())
    jsonschema_validate(data, yaml_schema)
    return True
def validate(request: Union[Dict, List], schema: dict) -> Union[Dict, List]:
    """Validate a request's JSON structure with jsonschema.

    Wraps jsonschema.validate, returning the same object passed in.

    Args:
        request: The deserialized-from-json request.
        schema: The jsonschema schema to validate against.

    Raises:
        jsonschema.ValidationError
    """
    jsonschema_validate(request, schema=schema)
    return request
def validate(data, schema_name=None):
    """Validate the given dictionary against the given schema.

    :param data: Dict to validate.
    :type data: dict
    :param schema_name: String with the name of the schema to validate, for
        example, 'authors' or 'jobs'. If `None` passed it will expect for the data
        to have the schema specified in the `$ref` key.
    :type schema_name: str
    :return: None
    :raises inspire_schemas.errors.SchemaNotFound: if the given schema was not found.
    :raises inspire_schemas.errors.SchemaKeyNotFound: if the given schema was not found.
    :raises jsonschema.SchemaError: if the schema is invalid
    :raises jsonschema.ValidationError: if the data is invalid
    """
    if schema_name is None:
        if '$schema' not in data:
            raise SchemaKeyNotFound(data=data)
        schema_name = data['$schema']

    schema = load_schema(schema_name=schema_name)
    resolver = LocalRefResolver.from_schema(schema)
    return jsonschema_validate(instance=data, schema=schema, resolver=resolver)
def validate(data, schema=None):
    """Validate the given dictionary against the given schema.

    Args:
        data (dict): record to validate.
        schema (Union[dict, str]): schema to validate against. A string is
            interpreted as the name of the schema to load (e.g. ``authors`` or
            ``jobs``); ``None`` means use ``data['$schema']``; a dictionary is
            used directly.

    Raises:
        SchemaNotFound: if the given schema was not found.
        SchemaKeyNotFound: if ``schema`` is ``None`` and no ``$schema`` key was
            found in ``data``.
        jsonschema.SchemaError: if the schema is invalid.
        jsonschema.ValidationError: if the data is invalid.
    """
    resolved_schema = _load_schema_for_record(data, schema)
    return jsonschema_validate(
        instance=data,
        schema=resolved_schema,
        resolver=LocalRefResolver.from_schema(resolved_schema),
        format_checker=inspire_format_checker,
    )
def main(args):
    """Validate a YAML data file against a YAML-encoded JSON schema.

    Usage: ``python schema_validator.py schema_filepath input_filepath``

    :param args: positional CLI arguments: [schema_filepath, data_filepath].
    """
    # Handle input arguments (the original also built an unused usage string here).
    schema_filepath = args[0]
    data_filepath = args[1]

    # SECURITY NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input and deprecated in PyYAML >= 5.1 — consider yaml.safe_load.
    with open(data_filepath) as f:
        data = yaml.load(f.read())
    with open(schema_filepath) as f:
        yaml_schema = yaml.load(f.read())

    jsonschema_validate(data, yaml_schema)
    print("Your YAML file is compliant with the schema!")
def validate_openapi_document(instance_dict):
    """
    Validate an OpenAPI document against the OpenAPI schema

    :param instance_dict: dict of OpenAPI instance

    :returns: `bool` of validation
    """
    schema_path = os.path.join(THISDIR, 'schemas', 'openapi', 'openapi-3.0.x.json')
    with open(schema_path) as schema_fh:
        schema = json.load(schema_fh)
    jsonschema_validate(instance_dict, schema)
    return True
def is_valid(self, entry):
    """
    Validate the object against the HRIS schema json file included in the project.
    :param entry: a single json entry from the hris.json
    :return: bool (truthy)
    """
    schema_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'schema.json')
    try:
        with open(schema_path, 'r') as schema_data:
            hris_schema = json.load(schema_data)
        jsonschema_validate(entry, hris_schema)
    except ValidationError:
        return False
    return True
def _load_data_sources_interactive(
        data_sources_file_path: Path) -> List[Dict[str, str]]:
    """
    Shared method for loading and validating data sources from a configuration file

    See the project README and referenced JSON Schema for how data sources should be defined.

    :param data_sources_file_path: file path to a data sources file
    :return: list of data source dictionaries
    """
    echo(
        f"Loading sources from {click_style(str(data_sources_file_path), fg='blue')}"
    )
    # Read raw file contents first so JSON decode errors can be reported separately.
    with open(Path(data_sources_file_path), "r") as data_sources_file:
        data_sources_data = data_sources_file.read()
    try:
        data_sources = json.loads(data_sources_data)
    except ValueError:
        # Report to the user via click, then fail with a plain ValueError for callers.
        echo(
            f"* data sources in {click_style(str(data_sources_file_path.absolute()), fg='blue')} contains "
            f"{click_style('invalid JSON', fg='red')} and cannot be validated")
        raise ValueError(
            f"{str(data_sources_file_path.absolute())} is invalid JSON")
    # The JSON Schema is bundled as package data; resources.path yields a real
    # filesystem path for it (works even when installed as a zip/egg).
    with resources.path(
            "bas_web_map_inventory.resources.json_schemas",
            "data-sources-schema.json") as data_sources_schema_file_path:
        with open(data_sources_schema_file_path, "r") as data_sources_schema_file:
            data_sources_schema_data = data_sources_schema_file.read()
        try:
            data_sources_schema = json.loads(data_sources_schema_data)
            jsonschema_validate(instance=data_sources, schema=data_sources_schema)
            echo(
                f"* data sources in {click_style(str(data_sources_file_path.absolute()), fg='blue')} have "
                f"{click_style('valid', fg='green')} syntax")
            # Only the "servers" list is returned to the caller.
            return data_sources["servers"]
        except ValidationError:
            echo(
                f"* data sources in {click_style(str(data_sources_file_path.absolute()), fg='blue')} have "
                f"{click_style('invalid', fg='red')} syntax")
            raise ValueError(
                f"{str(data_sources_file_path.absolute())} does not validate against JSON schema"
            )
def validate_config(instance_dict):
    """
    Validate pygeoapi configuration against pygeoapi schema

    :param instance_dict: dict of configuration

    :returns: `bool` of validation
    """
    schema_path = os.path.join(THISDIR, 'schemas', 'config', 'pygeoapi-config-0.x.yml')
    with open(schema_path) as schema_fh:
        schema = yaml_load(schema_fh)
    # Round-trip the configuration through JSON so only JSON-compatible types are validated.
    jsonschema_validate(json.loads(to_json(instance_dict)), schema)
    return True
def __validate_json_schema(self, schema_filename):
    """Validate this object's file (``self.filepath``) against a JSON schema from the
    project's ``config/schemas`` directory.

    :param schema_filename: file name of the schema within ``config/schemas``.
    :return: dict with "status" (Status.SUCCESS or Status.FAILURE) and "message"
        (the validation error text, empty on success).
    """
    schema_dir = os.path.join(
        os.path.dirname(os.path.dirname(inspect.getmodule(self).__file__)),
        "config", "schemas")
    schema_file = os.path.join(schema_dir, schema_filename)
    # Resolver allows relative $ref paths within the schema directory.
    resolver = RefResolver('file://' + schema_dir + "/", None)

    # Fix: the original used json.load(open(...)) and leaked both file handles.
    with open(schema_file, "r") as schema_fh:
        schema_obj = json.load(schema_fh)
    with open(self.filepath, "r") as instance_fh:
        instance_obj = json.load(instance_fh)

    result = {"status": Status.SUCCESS, "message": ""}
    try:
        jsonschema_validate(instance=instance_obj, schema=schema_obj, resolver=resolver)
    except ValidationError as e:
        # Bug fix: the original line ended with a stray comma
        # (`result["status"] = Status.FAILURE,`), which stored the one-element
        # tuple (Status.FAILURE,) instead of Status.FAILURE.
        result["status"] = Status.FAILURE
        result["message"] = str(e)
    return result
def _validate_against_schema(self, strand, data):
    """Validate data against a schema, raises exceptions of type Invalid<strand>Json if not compliant.

    Can be used to validate:
        - the twine file contents itself against the present version twine spec
        - children data against the required schema for the present version twine spec
        - values data for compliance with schema written in the twine (for strands like input_values_schema)

    :param str strand:
    :param dict data:
    :return None:
    """
    schema = self._get_schema(strand)
    try:
        jsonschema_validate(instance=data, schema=schema)
    except ValidationError as e:
        raise exceptions.invalid_contents_map[strand](str(e))
    else:
        logger.debug("Validated %s against schema", strand)
def validate_mcf(instance_dict: dict) -> bool:
    """
    Validate an MCF document against the MCF schema

    :param instance_dict: dict of MCF instance

    :returns: `bool` of validation
    """
    schema_path = os.path.join(SCHEMAS, 'mcf', 'core.yml')
    with open(schema_path) as schema_fh:
        schema = yaml.load(schema_fh, Loader=yaml.FullLoader)

    try:
        jsonschema_validate(instance_dict, schema)
    except ValidationError as err:
        # Translate into the project's own validation error type.
        raise MCFValidationError(err)
    return True
def validate_metadata_json(metadata_json: Dict[str, Any]) -> None:
    """Validate metadata JSON against the metadata JSON Schema; raises on failure."""
    jsonschema_validate(schema=metadata_schema, instance=metadata_json)
def validate_package(zf):
    """Validate a package's info JSON (read from the zip ``zf``) against the bundled package JSON schema."""
    info = json.loads(zf.read(BASE_PACKAGE_NAME))
    schema = json.loads(zf.read(BASE_PACKAGE_JSON_SCHEMA))
    jsonschema_validate(info, schema)
def validate_token_metadata_json(metadata_json: Dict[str, Any]) -> None:
    """Validate token metadata JSON with JSONSchema"""
    jsonschema_validate(schema=token_metadata_schema, instance=metadata_json)
def run(publisher, user, profile_json):
    """Check ``profile_json`` for content validity against the CIS jsonschema.

    Returns True on success; a jsonschema error propagates on failure.
    """
    jsonschema_validate(instance=profile_json, schema=cis_schema)
    return True
def run(publisher, vault_json, profile_json):
    """Check ``profile_json`` for content validity against the CIS jsonschema (raises on failure)."""
    jsonschema_validate(instance=profile_json, schema=cis_schema)
def validate(instance, schema):
    """Validate ``instance`` against ``schema``, resolving $refs with a local resolver."""
    resolver = LocalRefResolver('', {})
    return jsonschema_validate(instance, schema, resolver=resolver)
from sys import argv
from json import load as json_load

from jsonschema import validate as jsonschema_validate
from yaml import safe_load as yaml_safe_load

# CLI: <script> <spec_path> <sample_path>
spec_path = argv[1]
sample_path = argv[2]

with open(sample_path) as sample_data, open(spec_path) as spec_data:
    sample_dict = json_load(sample_data)
    spec_dict = yaml_safe_load(spec_data)

# Point the schema's top-level $ref at the SystemProfile definition in the spec.
schema_dict = dict(spec_dict)
schema_dict["$ref"] = "#/$defs/SystemProfile"
jsonschema_validate(instance=sample_dict, schema=schema_dict)
def prepare_export(
    evaluations: List[Evaluation],
    summary_evaluations: Dict[str, dict],
    operations: List[Operation],
) -> dict:
    """
    Structures data results of evaluations for use in an export.

    The intention of this method is to structure information in a way that makes it easy to use in reporting tools
    (i.e. exports), as a result there is lots of duplication and simplification of data types for example.

    Note: The structure and contents of this data have not yet been discussed or agreed.

    :type evaluations: List[Evaluation]
    :param evaluations: list of evaluations
    :type summary_evaluations: Dict
    :param summary_evaluations: summarised evaluations
    :type operations: List[Operation]
    :param operations: list of operations

    :rtype dict
    :return: processed data ready for use in exports
    """
    export_format_version: int = 1
    export_format_schema_name: str = "export_format_v1_schema.json"

    # Flatten operations three ways: as a list, keyed by operation id, and a
    # country-code -> country-name lookup.
    _operations: List[dict] = list()
    _operations_by_id: Dict[str, dict] = dict()
    _countries: Dict[str, str] = dict()
    for operation in operations:
        _operations.append(operation.export())
        _operations_by_id[operation.operation_id] = operation.export()
        _countries[operation.affected_country.
                   alpha_3] = operation.affected_country.name

    # Group evaluation results by operation, by layer, by result type, and flat.
    _results_by_operation: Dict[str, dict] = dict()
    _results_by_layer: Dict[str, dict] = dict()
    _results_by_result: Dict[str, List[Dict[str, str]]] = {
        EvaluationResult.NOT_EVALUATED.name: [],
        EvaluationResult.PASS.name: [],
        EvaluationResult.PASS_WITH_WARNINGS.name: [],
        EvaluationResult.FAIL.name: [],
        EvaluationResult.ERROR.name: [],
    }
    _ungrouped_results: List[Dict[str, str]] = list()
    for evaluation in evaluations:
        if evaluation.operation_id not in _results_by_operation.keys():
            _results_by_operation[evaluation.operation_id] = dict()
        _results_by_operation[evaluation.operation_id][
            evaluation.layer.layer_id] = evaluation.result.name
        if evaluation.layer.layer_id not in _results_by_layer.keys():
            _results_by_layer[evaluation.layer.layer_id] = dict()
        _results_by_layer[evaluation.layer.layer_id][
            evaluation.operation_id] = evaluation.result.name
        _results_by_result[evaluation.result.name].append({
            "operation_id": evaluation.operation_id,
            "layer_id": evaluation.layer.layer_id,
        })
        _ungrouped_results.append({
            "operation_id": evaluation.operation_id,
            "layer_id": evaluation.layer.layer_id,
            "result": evaluation.result.name,
        })

    # Assemble the export payload: "meta" carries versions and display labels,
    # "data" carries the grouped results built above.
    export_data: Dict = {
        "meta": {
            "app_version": __version__,
            "export_version": export_format_version,
            "export_datetime":
            datetime.utcnow().isoformat(timespec="milliseconds"),
            "display_labels": {
                "result_types": {
                    EvaluationResult.NOT_EVALUATED.name: "Not Evaluated",
                    EvaluationResult.PASS.name: "Pass",
                    EvaluationResult.PASS_WITH_WARNINGS.name: "Warning",
                    EvaluationResult.FAIL.name: "Fail",
                    EvaluationResult.ERROR.name: "Error",
                },
                "layer_aggregation_categories": {
                    "admn": "Admin",
                    "carto": "Cartographic",
                    "elev": "Elevation",
                    "phys": "Physical features",
                    "stle": "Settlements",
                    "tran": "Transport",
                },
            },
        },
        "data": {
            "operations": _operations,
            "operations_by_id": _operations_by_id,
            "countries": _countries,
            "results_by_operation": _results_by_operation,
            "results_by_layer": _results_by_layer,
            "results_by_result": _results_by_result,
            "ungrouped_results": _ungrouped_results,
            "summary_statistics": summary_evaluations,
        },
    }

    # Validate the payload against the bundled export-format JSON Schema before returning.
    with resource_path(
            package="mapy_rds_dashboard",
            resource=export_format_schema_name) as export_format_schema_path:
        with open(str(export_format_schema_path),
                  mode="r") as export_format_schema_file:
            export_format_schema: Dict = json.load(
                fp=export_format_schema_file)
            jsonschema_validate(instance=export_data,
                                schema=export_format_schema)

    return export_data
def clean(self):
    """Validate this object's ``data`` against its order-data JSON schema.

    :raises ValidationError: carrying the first schema violation's message.
    """
    try:
        jsonschema_validate(self.data, self.ORDER_DATA_JSONSCHEMA)
    except JsonSchemaValidationError as schema_error:
        raise ValidationError(message=schema_error.message)
def validate(instance, schema):
    """Validate ``instance`` against ``schema``, resolving $refs with a local resolver."""
    local_resolver = LocalRefResolver('', {})
    return jsonschema_validate(instance, schema, resolver=local_resolver)
def _load_data(data_file_path: Path) -> None:
    """
    Shared method for loading and validating data from a resources file

    This data consists of components (layers, repositories, etc.) previously fetched from data sources. See the
    project README and JSON Schema `resources/json_schemas/data-resources-schema.json` for how components should be
    defined.

    :param data_file_path: file path to a data resources file
    :return: None (loaded components are stored in ``app.config["data"]``)
    """
    app.logger.info(f"Loading data from {str(data_file_path.absolute())} ...")
    # Read raw file contents first so JSON decode errors can be reported separately.
    with open(Path(data_file_path), "r") as data_file:
        _data = data_file.read()
    try:
        data = json.loads(_data)
    except ValueError:
        echo(
            f"* data in {click_style(str(data_file_path.absolute()), fg='blue')} contains "
            f"{click_style('invalid JSON', fg='red')} and cannot be validated")
        raise ValueError(f"{str(data_file_path.absolute())} is invalid JSON")
    # The JSON Schema is bundled as package data; resources.path yields a real
    # filesystem path for it.
    with resources.path(
            "bas_web_map_inventory.resources.json_schemas",
            "data-resources-schema.json") as data_resources_schema_file_path:
        with open(data_resources_schema_file_path,
                  "r") as data_resources_schema_file:
            data_resources_schema_data = data_resources_schema_file.read()
        try:
            data_resources_schema = json.loads(data_resources_schema_data)
            jsonschema_validate(instance=data, schema=data_resources_schema)
            echo(
                f"* data resources in {click_style(str(data_file_path.absolute()), fg='blue')} have "
                f"{click_style('valid', fg='green')} syntax")
        except ValidationError:
            # NOTE(review): this message says "data sources" although this function
            # loads data *resources* — likely a copy/paste slip; confirm with the
            # project before changing the user-facing text.
            echo(
                f"* data sources in {click_style(str(data_file_path.absolute()), fg='blue')} have "
                f"{click_style('invalid', fg='red')} syntax")
            raise ValueError(
                f"{str(data_file_path.absolute())} does not validate against JSON schema"
            )
    # Build component collections in dependency order:
    # servers -> namespaces -> repositories -> styles -> layers -> layer groups.
    servers = Servers()
    for server in data["servers"]:
        server = Server(
            server_id=server["id"],
            label=server["label"],
            hostname=server["hostname"],
            server_type=server["type"],
            version=server["version"],
        )
        servers[server.id] = server
    namespaces = Namespaces()
    for namespace in data["namespaces"]:
        namespace = Namespace(
            namespace_id=namespace["id"],
            label=namespace["label"],
            title=namespace["title"],
            namespace=namespace["namespace"],
            server=servers[namespace["relationships"]["servers"]],
        )
        namespaces[namespace.id] = namespace
    repositories = Repositories()
    for repository in data["repositories"]:
        repository = Repository(
            repository_id=repository["id"],
            label=repository["label"],
            title=repository["title"],
            repository_type=repository["type"],
            hostname=repository["hostname"],
            database=repository["database"],
            schema=repository["schema"],
            namespace=namespaces[repository["relationships"]["namespaces"]],
        )
        repositories[repository.id] = repository
    styles = Styles()
    for style in data["styles"]:
        # A style's namespace is optional.
        _namespace = None
        if style["relationships"]["namespaces"] is not None:
            _namespace = namespaces[style["relationships"]["namespaces"]]
        style = Style(
            style_id=style["id"],
            label=style["label"],
            title=style["title"],
            style_type=style["type"],
            namespace=_namespace,
        )
        styles[style.id] = style
    layers = Layers()
    for layer in data["layers"]:
        # Resolve style ids to the Style objects built above.
        _styles = []
        for style_id in layer["relationships"]["styles"]:
            _styles.append(styles[style_id])
        layer = Layer(
            layer_id=layer["id"],
            label=layer["label"],
            title=layer["title"],
            layer_type=layer["type"],
            geometry_type=layer["geometry"],
            services=layer["services"],
            table_view=layer["table_view"],
            namespace=namespaces[layer["relationships"]["namespaces"]],
            repository=repositories[layer["relationships"]["repositories"]],
            styles=_styles,
        )
        layers[layer.id] = layer
    layer_groups = LayerGroups()
    for layer_group in data["layer-groups"]:
        # A layer group's namespace is optional.
        _namespace = None
        if layer_group["relationships"]["namespaces"] is not None:
            _namespace = namespaces[layer_group["relationships"]["namespaces"]]
        _layers = []
        for layer_id in layer_group["relationships"]["layers"]:
            _layers.append(layers[layer_id])
        _styles = []
        for style_id in layer_group["relationships"]["styles"]:
            _styles.append(styles[style_id])
        layer_group = LayerGroup(
            layer_group_id=layer_group["id"],
            label=layer_group["label"],
            title=layer_group["title"],
            services=layer_group["services"],
            namespace=_namespace,
            layers=_layers,
            styles=_styles,
        )
        layer_groups[layer_group.id] = layer_group
    # Expose the loaded components application-wide.
    app.config["data"] = {
        "servers": servers,
        "namespaces": namespaces,
        "repositories": repositories,
        "styles": styles,
        "layers": layers,
        "layer_groups": layer_groups,
    }