def __init__(self, kernel_name):
    """Set up the Jupyter kernel manager and the message-schema validator.

    :param kernel_name: name of the Jupyter kernel to manage
    """
    self.kernel = jupyter_client.KernelManager(kernel_name=kernel_name)
    self.pending = {}
    schema_path = os.path.join(os.path.dirname(__file__), "message-schema.json")
    with open(schema_path) as schema_file:
        message_schema = json.load(schema_file)
    # Fail fast if the bundled schema itself is malformed.
    jsonschema.Draft7Validator.check_schema(message_schema)
    self.message_validator = jsonschema.Draft7Validator(message_schema)
def keyboard_api_validate(data):
    """Validate *data* against the api_keyboard jsonschema.

    The api_keyboard schema references definitions in the base keyboard
    schema, so the resolver is rooted at the base schema.
    """
    base_schema = load_jsonschema('keyboard')
    api_schema = load_jsonschema('api_keyboard')
    ref_resolver = jsonschema.RefResolver.from_schema(base_schema)
    return jsonschema.Draft7Validator(api_schema, resolver=ref_resolver).validate(data)
class ScanConfigHandler(ApiHandlerBase):
    """Handler that validates and applies scanner configuration updates."""

    # NOTE: the attribute name keeps the original (misspelled) spelling so
    # any external references to it remain valid.
    config_validtor = jsonschema.Draft7Validator(ApiHandlerBase.config_schema)

    async def handle_request(self, data):
        """Validate a scan-config request and apply it via ScannerManager.

        Expected request format::

            {
                "host": "1.2.3.4",
                "port": 80,
                "config": {
                    "scan_plugin_status": {
                        "command_basic": {
                            "enable": true,
                            "show_name": "...",
                            "description": "..."
                        },
                        ...
                    },
                    "scan_rate": {
                        "max_concurrent_request": 20,
                        "max_request_interval": 1000,
                        "min_request_interval": 0
                    },
                    "white_url_reg": "^/logout",
                    "scan_proxy": "http://127.0.0.1:8080"
                }
            }

        :param data: decoded request JSON
        :return: dict with ``status`` (0 on success) and ``description``
        """
        try:
            module_params = {
                "host": data["host"],
                "port": data.get("port", 80),
                "config": data.get("config", {})
            }
            self.config_validtor.validate(module_params["config"])
            if "white_url_reg" in module_params["config"]:
                re.compile(module_params["config"]["white_url_reg"])
            if "scan_proxy" in module_params["config"]:
                proxy_url = module_params["config"]["scan_proxy"]
                if proxy_url != "":
                    scheme = urllib.parse.urlparse(proxy_url).scheme
                    if scheme not in ("http", "https"):
                        # Raise explicitly instead of `assert False`:
                        # assertions are stripped under `python -O`, which
                        # would silently accept invalid proxy schemes.
                        raise AssertionError("invalid proxy scheme")
        except (KeyError, TypeError, jsonschema.exceptions.ValidationError):
            ret = {"status": 1, "description": "请求json格式非法!"}
        except re.error:
            ret = {"status": 2, "description": "白名单正则不合法!"}
        except AssertionError:
            ret = {"status": 3, "description": "代理协议应为http或https!"}
        else:
            ScannerManager().mod_config(module_params)
            ret = {"status": 0, "description": "ok"}
        return ret
def validate_schema(parent_key, instance, schema):
    """
    Given a parent key, an instance of a spec section, and a json schema for
    that section, validate the instance against the schema, raising a
    jsonschema.ValidationError with a user-friendly message for the first
    error encountered.
    """
    validator = jsonschema.Draft7Validator(schema)
    for error in validator.iter_errors(instance):
        # error.path may contain integer array indices; coerce each element
        # to str before joining (".".join() on ints raises TypeError).
        path = ".".join(str(part) for part in error.path)
        if error.validator == "additionalProperties":
            unrecognized = re.search(r"'(.+)'", error.message).group(1)
            raise jsonschema.ValidationError(
                "Unrecognized key '{0}' found in {1}.".format(
                    unrecognized, parent_key))
        elif error.validator == "type":
            # Capture the type name with a group instead of the original
            # .strip("is not of type "), which strips *characters* (not a
            # prefix) and can mangle the extracted name.
            expected_type = re.search(
                r"is not of type '(.+)'", error.message).group(1)
            raise jsonschema.ValidationError(
                "In {0}, {1} must be of type '{2}'.".format(
                    parent_key, path, expected_type))
        elif error.validator == "required":
            missing = re.search(r"'(.+)'", error.message).group(1)
            raise jsonschema.ValidationError(
                "Key '{0}' is missing from {1}.".format(
                    missing, parent_key))
        elif error.validator == "uniqueItems":
            raise jsonschema.ValidationError(
                "Non-unique step names in {0}.run.depends.".format(
                    parent_key))
        elif error.validator == "minLength":
            raise jsonschema.ValidationError(
                "In {0}, empty string found as value for {1}.".format(
                    parent_key, path))
        elif error.validator == "anyOf":
            # (The original also computed a rewritten context message here,
            # but never used it in the raised error; that dead code is gone.)
            raise jsonschema.ValidationError(
                ("The value '{0}' in field {1} of {2} is not of type "
                 "'{3}' or does not conform to the format '$(VARNAME)'."
                 ).format(error.instance, path, parent_key,
                          error.validator_value[0]["type"]))
        else:
            raise ValueError("Validation error: " + error.message)
def test_invalid_solar_electric(hpxml_filebase):
    """Missing or contradictory solar_electric fields must be reported."""
    js_schema = jsonschema.Draft7Validator(
        get_json_schema(), format_checker=jsonschema.FormatChecker())
    base_js = get_example_json(hpxml_filebase)

    def errors_for(solar_electric):
        # Build a fresh document whose systems section holds only the
        # given solar_electric payload, then collect its schema errors.
        doc = copy.deepcopy(base_js)
        doc['building']['systems'] = {
            'generation': {'solar_electric': solar_electric}}
        return get_error_messages(doc, js_schema)

    # capacity_known == False forbids system_capacity and requires panel info.
    errors = errors_for({'capacity_known': False, 'system_capacity': 50})
    for key in ('num_panels', 'year', 'array_azimuth', 'array_tilt'):
        assert "'{}' is a required property".format(key) in errors
    assert (
        "{'required': ['system_capacity']} is not allowed for {'capacity_known': False, "
        "'system_capacity': 50}") in errors

    # capacity_known == True forbids num_panels and requires the capacity.
    errors = errors_for({'capacity_known': True, 'num_panels': 5})
    for key in ('system_capacity', 'year', 'array_azimuth', 'array_tilt'):
        assert "'{}' is a required property".format(key) in errors
    assert (
        "{'required': ['num_panels']} is not allowed for {'capacity_known': True, 'num_panels': 5}"
    ) in errors

    # With only a year, the core fields are required, but neither
    # num_panels nor system_capacity may be demanded.
    errors = errors_for({'year': 2021})
    for key in ('capacity_known', 'array_azimuth', 'array_tilt'):
        assert "'{}' is a required property".format(key) in errors
    assert "'num_panels' is a required property" not in errors
    assert "'system_capacity' is a required property" not in errors
def handle(self, *args, **options):
    """Migrate every application's stored steps (1.0 -> 1.1 -> 1.2.1) and
    validate the result against the generated JSON schema.

    Each application's steps are decrypted, migrated, then either dumped
    to files under inspect/before and inspect/after (when
    --write_to_file_boolean is set, for testing) or printed to stdout.
    Validation errors are printed per application.
    """
    file_path = options["file_path"]
    write_to_file = bool(options["write_to_file_boolean"])  # for testing.
    print(
        f"Ensure generate_schema was recently ran, so your schema is up to date."
    )
    print(f"Reading schema from {file_path}")
    # Context manager closes the file even if json.load raises.
    with open(file_path) as f:
        schema = json.load(f)
    for application in Application.objects.filter(
            last_updated__gte=datetime.datetime(2021, 6, 14).astimezone(),
    ).order_by('id'):
        steps_json = json.loads(
            settings.ENCRYPTOR.decrypt(application.key_id,
                                       application.steps).decode("utf-8"))
        if write_to_file:
            # os.path.join keeps the path portable; the original hard-coded
            # Windows "\\" separators, so os.path.dirname returned '' on
            # POSIX and os.makedirs('') crashed.
            filename = os.path.join("inspect", "before",
                                    f"{application.id}.json")
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            with open(filename, "w") as f:
                json.dump(steps_json, skipkeys=False, fp=f, sort_keys=True,
                          indent=4)
        else:
            print('Before: ')
            print(json.dumps(steps_json, indent=4).replace('\r\n', ''))
        print(
            f"Validating steps schema for application Id: {application.id}"
        )
        steps_json = Migration_1_0_to_1_1().migrate(steps_json)
        steps_json = Migration_1_1_to_1_2_1().migrate(steps_json)
        if write_to_file:
            filename = os.path.join("inspect", "after",
                                    f"{application.id}.json")
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            with open(filename, "w") as f:
                json.dump(steps_json, fp=f, skipkeys=False, sort_keys=True,
                          indent=4)
        else:
            print('After: ')
            print(json.dumps(steps_json, indent=4).replace('\r\n', ''))
        validator = jsonschema.Draft7Validator(schema)
        errors = validator.iter_errors({"steps": steps_json})
        for error in errors:
            print(error)
            print("------")
def validate_schema(REPO_PATH, detection_type, objects, verbose):
    """Validate every manifest of *detection_type* under REPO_PATH against
    its JSON spec, collecting parsed objects and error strings.

    :param REPO_PATH: repository root containing spec/ and detection folders
    :param detection_type: detection folder / spec name (may be "ba_"-prefixed)
    :param objects: dict accumulating parsed manifests per detection type
    :param verbose: when True, print each manifest as it is processed
    :return: (objects, error, errors) — *error* is True if anything failed
    """
    # Default regex does NOT match ssa___*.yml files: "^(?!ssa___).*\.yml$"
    # The following search will match ssa___*.yml files: "^ssa___.*\.yml$"
    # Raw strings so "\." is a literal dot escape for the regex engine,
    # not an invalid string escape (DeprecationWarning on CPython >= 3.6).
    if detection_type.startswith("ba_"):
        filename_regex = r"^ssa___.*\.yml$"
    else:
        filename_regex = r"^(?!ssa___).*\.yml$"
    error = False
    errors = []
    schema_file = path.join(path.expanduser(REPO_PATH),
                            'spec/' + detection_type + '.spec.json')
    # remove the prefix if the detection type starts with ba_ so we can
    # get the files from the proper folders and proceed correctly
    if detection_type.startswith("ba_"):
        detection_type = detection_type[3:]
    try:
        # Context manager closes the handle (the original leaked it).
        with open(schema_file, 'rb') as schema_fh:
            schema = json.loads(schema_fh.read())
    except IOError:
        # Return early: the original only printed and then fell through,
        # hitting a NameError on the undefined `schema` below.
        print("ERROR: reading schema file {0}".format(schema_file))
        errors.append("ERROR: reading schema file {0}".format(schema_file))
        return objects, True, errors
    manifest_files = []
    for root, dirs, files in walk(REPO_PATH + "/" + detection_type):
        for file in files:
            if re.search(filename_regex, path.basename(file)) is not None:
                manifest_files.append(path.join(root, file))
    for manifest_file in manifest_files:
        if verbose:
            print("processing manifest {0}".format(manifest_file))
        with open(manifest_file, 'r') as stream:
            try:
                # `obj` avoids shadowing the `object` builtin.
                obj = list(yaml.safe_load_all(stream))[0]
            except yaml.YAMLError as exc:
                print(exc)
                print("Error reading {0}".format(manifest_file))
                errors.append("ERROR: Error reading {0}".format(manifest_file))
                error = True
                continue
        validator = jsonschema.Draft7Validator(
            schema, format_checker=jsonschema.FormatChecker())
        for schema_error in validator.iter_errors(obj):
            errors.append("ERROR: {0} at:\n\t{1}".format(
                json.dumps(schema_error.message), manifest_file))
            error = True
        # setdefault replaces the manual "create list on first use" dance.
        objects.setdefault(detection_type, []).append(obj)
    print("***END OF VALIDATE SCHEMA ***")
    return objects, error, errors
def __init__(self, schema_path):
    """Load the JSON schema at *schema_path* and build a Draft-7 validator
    whose $ref resolver is rooted at the schema's directory.

    :param schema_path: filesystem path to the (meta)schema JSON file
    """
    with open(schema_path) as meta_schema_file:
        meta_schema = json.load(meta_schema_file)
    uri_path = os.path.abspath(os.path.dirname(schema_path))
    if os.sep != posixpath.sep:
        # On Windows, prepend a slash so f'file://{uri_path}/' forms a
        # valid file:// URI (e.g. file:///C:/...).
        uri_path = posixpath.sep + uri_path
    # Pass the parsed schema as the resolver's referrer; the original
    # passed the open file object, which is not a schema mapping.
    resolver = jsonschema.RefResolver(f'file://{uri_path}/', meta_schema)
    self.validator = jsonschema.Draft7Validator(meta_schema,
                                                resolver=resolver)
def __init__(
        self, path_schema: str, error: str
) -> None:
    """Store schema metadata and build the validator for it.

    :param path_schema: path to the schema file; the final path component
        is used as the schema name
    :param error: error identifier/message associated with this schema
    """
    self.schema = path_schema
    # Last path component, i.e. the schema file name.
    self.schema_name = path_schema.split(os.sep)[-1]
    self.error = error
    # NOTE(review): Draft7Validator is handed the *path string*, not a
    # parsed schema mapping — confirm callers intend this; it looks like
    # the schema may need to be loaded from the file first.
    self.validator = jsonschema.Draft7Validator(self.schema)
    self.sef = SchemaErrorFactory()
def _validate_data(self, data):
    """Validate *data* as the "steps" payload against the loaded schema.

    Prints every validation error and asserts that none occurred.
    """
    schema = self._load_schema()
    validator = jsonschema.Draft7Validator(schema)
    # Materialize the generator once: the original iterated it in the
    # for-loop and then called len(list(errors)) on the exhausted
    # iterator, so the assertion always saw an empty list and the test
    # could never fail.
    errors = list(validator.iter_errors({"steps": data}))
    for error in errors:
        print(error)
        print("------")
    self.assertEqual(len(errors), 0)
def test_invalid_wall_window_construction_same(hpxml_filebase):
    """Removing the *_construction_same flags must yield required-property errors."""
    flags = ('wall_construction_same', 'window_construction_same')
    validator = jsonschema.Draft7Validator(
        get_json_schema(), format_checker=jsonschema.FormatChecker())
    example = get_example_json(hpxml_filebase)
    for flag in flags:
        del example['building']['zone'][flag]
    errors = get_error_messages(example, validator)
    for flag in flags:
        assert "'{}' is a required property".format(flag) in errors
def create_data_dict(c):
    ''' creates a div for each file

    `c` is a data-URL-style string; the payload after the first comma is
    base64-decoded and parsed as a Python literal, then validated against
    the module-level `schema`. Returns the parsed data, or None when it
    fails validation.
    '''
    data = base64.b64decode(c[c.find(',')+1:].encode('ascii'))
    # NOTE(review): ast.literal_eval implies the payload is a Python
    # literal, not strict JSON — confirm against the producers.
    data = ast.literal_eval(data.decode("UTF-8"))
    v = jsonschema.Draft7Validator(schema)
    for error in v.iter_errors(data):
        # TODO: raise an error for files with an invalid format
        # NOTE(review): this dict is built but never used, and the bare
        # `return` yields None on the first validation error — confirm
        # callers treat None as "invalid file".
        error = {'JSON_FORMAT_ERROR': str(error.message)}
        return
    return data
def validate_dict_against_schema_url(value: dict, schema_url: str) -> list[ValidationError]:
    """Fetch a JSON schema from *schema_url* and validate *value* against it.

    :param value: the document to validate
    :param schema_url: URL of a Draft-7 JSON schema
    :return: validation errors, sorted by their (stringified) error path
    """
    # NOTE(review): no timeout on the HTTP request — consider
    # requests.get(schema_url, timeout=...) to avoid hanging forever.
    schema: dict = json.loads(requests.get(schema_url).text)
    v = jsonschema.Draft7Validator(schema)
    # sorted() already returns a fresh list; the original copied it
    # element-by-element into a second list for no benefit.
    return sorted(v.iter_errors(value), key=lambda e: str(e.path))
def _build_validator(filename: str) -> jsonschema.Draft7Validator:
    """
    Build a Draft-7 validator for a schema stored alongside this module.

    :param filename: The relative path to the schema, e.g. "base_node.schema.json"
    """
    schema_path = os.path.join(HERE, filename)
    with open(schema_path) as schema_fh:
        loaded_schema = json.load(schema_fh)
    return jsonschema.Draft7Validator(loaded_schema, resolver=RESOLVER)
def get_validator(self, url) -> jsonschema.Draft7Validator:
    """
    Build the jsonschema validator for the schema registered under *url*.

    :param url: name of the schema validator to create
    :type url: str
    :return: jsonschema validator
    :rtype: jsonschema.Draft7Validator
    """
    schema = self.get_schema(url)
    return jsonschema.Draft7Validator(schema, resolver=self.resolver)
def _fetch_validator(name):
    """Return a Draft-7 validator for the named assay (or clinical data) schema."""
    schema_root = SCHEMA_DIR
    # clinical_data lives at the schema root; assays live under assays/.
    if name == "clinical_data":
        schema_path = os.path.join(SCHEMA_DIR, "%s.json" % name)
    else:
        schema_path = os.path.join(SCHEMA_DIR, "assays/%s_assay.json" % name)
    schema = load_and_validate_schema(schema_path, schema_root)
    # create validator assert schemas are valid.
    return jsonschema.Draft7Validator(schema)
def validate_path(schema_url, resource_url):
    """Validate the JSON resource at *resource_url* against the schema at
    *schema_url* (both path-like objects supporting ``.open()``), returning
    the formatted, sorted errors."""
    with schema_url.open(encoding="utf8") as fh:
        loaded_schema = json.load(fh)
    with resource_url.open(encoding="utf8") as fh:
        resource_data = json.load(fh)
    checker = jsonschema.Draft7Validator(loaded_schema)
    return _errors(sorted(checker.iter_errors(resource_data), key=str))
def _validate_jsonschema(self):
    """Validate ``self._data`` against each schema in ``self._criteria``.

    Appends one structured entry per validation error to
    ``self._result["errors"]`` and accumulates the human-readable messages
    into ``self._result["msg"]``.
    """
    # (The original also assigned error_messages = None first; that dead
    # assignment is removed.)
    error_messages = []
    draft = self._get_sub_plugin_options("draft")
    # Dispatch table replaces the if/elif chain; unknown or unset drafts
    # fall back to Draft 7, matching the original else branch.
    draft_validators = {
        "draft3": jsonschema.Draft3Validator,
        "draft4": jsonschema.Draft4Validator,
        "draft6": jsonschema.Draft6Validator,
    }
    validator_cls = draft_validators.get(draft, jsonschema.Draft7Validator)
    for criteria in self._criteria:
        validator = validator_cls(criteria)
        # NOTE(review): e.path is a deque; sorting raises TypeError when
        # two errors' paths get compared — confirm whether key=str (or
        # tuple(e.path)) was intended before changing it.
        validation_errors = sorted(
            validator.iter_errors(self._data), key=lambda e: e.path
        )
        if validation_errors:
            if "errors" not in self._result:
                self._result["errors"] = []
            for validation_error in validation_errors:
                if isinstance(validation_error, jsonschema.ValidationError):
                    error = {
                        "message": validation_error.message,
                        "data_path": to_path(validation_error.absolute_path),
                        "json_path": json_path(validation_error.absolute_path),
                        "schema_path": to_path(
                            validation_error.relative_schema_path
                        ),
                        "relative_schema": validation_error.schema,
                        "expected": validation_error.validator_value,
                        "validator": validation_error.validator,
                        "found": validation_error.instance,
                    }
                    self._result["errors"].append(error)
                    error_message = "At '{schema_path}' {message}. ".format(
                        schema_path=error["schema_path"],
                        message=error["message"],
                    )
                    error_messages.append(error_message)
    if error_messages:
        if "msg" not in self._result:
            self._result["msg"] = "\n".join(error_messages)
        else:
            # NOTE(review): appended without a separating newline between
            # the existing msg and the new block — confirm intentional.
            self._result["msg"] += "\n".join(error_messages)
def verify(json_dir, schema_dir):
    """Validate every JSON event file in *json_dir* against its event's
    schema in *schema_dir*, writing all problems to log.txt."""
    logs = {}
    for filename in os.listdir(json_dir):
        file_log = logs[filename] = []
        try:
            data = json_load(json_dir + filename)
            # Structural checks first; each failure skips to the next file.
            if not data:
                file_log.append("JSON file is empty.")
                continue
            if 'event' not in data or not data['event']:
                file_log.append("Event is not specified.")
                continue
            if 'data' not in data or not data['data']:
                file_log.append("Data is not specified.")
                continue
            event = data['event']
            schema_file = schema_dir + event + '.schema'
            if not os.path.exists(schema_file):
                file_log.append("Schema doesn't exist for a '" + event + "' event.")
                continue
        except json.decoder.JSONDecodeError as e:
            file_log.append("Invalid JSON. " + e.args[0])
            continue
        try:
            schema = json_load(schema_file)
        except json.decoder.JSONDecodeError as e:
            file_log.append("Invalid '" + schema_file + "' schema JSON." + e.args[0])
            continue
        validator = jsonschema.Draft7Validator(schema)
        for error in validator.iter_errors(data['data']):
            cause = error.message
            if error.path:
                # Render the error path as data/key... with (entry N) for
                # integer list indices.
                readable_path = "data"
                for element in error.path:
                    if isinstance(element, int):
                        readable_path += " (entry " + str(element) + ")"
                    else:
                        readable_path += "/" + str(element)
                cause += " in " + readable_path
            file_log.append(cause)
    with open('log.txt', 'w') as out:
        for filename, problems in logs.items():
            if problems:
                out.write(filename + "\n")
                for problem in problems:
                    out.write("\t - " + problem + "\n")
                out.write("\n")
def _validate_obj_json(data, schema, missing_ok=False):
    """Validate *data* against *schema* with format checking.

    :param data: the object to validate
    :param schema: a Draft-7 JSON schema
    :param missing_ok: when True, "required property" errors are ignored
        (useful for partial payloads)
    :raises JsonschemaValidationError: carrying the remaining errors
    """
    validator = jsonschema.Draft7Validator(
        schema,
        # FormatChecker() replaces the deprecated
        # jsonschema.draft7_format_checker alias (removed in jsonschema
        # 4.x) and matches how other validators in this codebase are built.
        format_checker=jsonschema.FormatChecker(),
    )
    error_list = []
    for error in sorted(validator.iter_errors(data), key=str):
        if missing_ok and "is a required property" in error.message:
            continue
        error_list.append(error)
    if error_list:
        raise JsonschemaValidationError(error_list)
def test_example_files():
    """Every JSON example under examples/ must validate without errors."""
    rootdir = pathlib.Path(__file__).resolve().parent.parent
    pattern = str(rootdir / 'examples' / '*.json')
    for examplefile in glob.glob(pattern):
        hpxml_filebase = os.path.basename(examplefile).split('.')[0]
        validator = jsonschema.Draft7Validator(
            get_json_schema(), format_checker=jsonschema.FormatChecker())
        example = get_example_json(hpxml_filebase)
        errors = get_error_messages(example, validator)
        assert len(errors) == 0
def create_validator(schema):
    """Creates a validator for the given schema id."""
    schema_store = compile_schema_store()
    target_schema = schema_store[schema]
    resolver = jsonschema.RefResolver.from_schema(target_schema,
                                                  store=schema_store)
    # TODO: Remove this after the jsonschema>=4 requirement had time to reach users
    try:
        # Older jsonschema releases don't expose Draft202012Validator;
        # accessing the attribute raises AttributeError there.
        validate = jsonschema.Draft202012Validator(
            target_schema, resolver=resolver).validate
    except AttributeError:
        validate = jsonschema.Draft7Validator(
            target_schema, resolver=resolver).validate
    return validate
def _validate_all(data: InDataExT, schema: InDataT, **_options) -> ResultT:
    """
    Validate ``data`` against ``schema``, collecting *every* error message
    instead of stopping at the first failure.

    See the description of :func:`validate` for more details of parameters
    and return value.

    :return: (True if valid, list of validation error messages)
    :seealso: https://python-jsonschema.readthedocs.io/en/latest/validate/,
        a section of 'iter_errors' especially
    """
    checker = jsonschema.Draft7Validator(schema)  # :raises: SchemaError, ...
    messages = [err.message for err in checker.iter_errors(data)]
    return (not messages, messages)
def validate_json(json_message):
    """Validate *json_message* against the "example" schema, logging each
    validation error as a warning.

    :param json_message: decoded JSON object to validate
    :return: True when the message is valid
    """
    schemas = load_schemas()
    validator = jsonschema.Draft7Validator(schemas["example"],
                                           format_checker=FormatChecker())
    # Collect errors once: the original looped over iter_errors() and then
    # called is_valid(), validating the entire message a second time.
    errors = list(validator.iter_errors(json_message))
    for error in errors:
        logging.warning('{}'.format(error))
    return not errors
def validate_data(finalized_data):
    """Validate each item against the module-level schema; on the first
    invalid item print its errors and return "Error in data", otherwise
    report success."""
    for entry in finalized_data:
        try:
            jsonschema.validate(entry, schema)
        except jsonschema.exceptions.ValidationError:
            # Re-validate with iter_errors to report every problem in the
            # failing item, not just the first one.
            checker = jsonschema.Draft7Validator(schema)
            for problem in sorted(checker.iter_errors(entry),
                                  key=lambda e: e.path):
                print(f'{problem.message} in {entry["model"]}, {entry["url"]}')
            return "Error in data"
    print('data passes')
    return "data passes"
def __validate(self):
    """Run jsonschema validation over self.config, recording each invalid
    parameter and its location, then raise if any errors were collected."""
    checker = jsonschema.Draft7Validator(self.__schema)
    for issue in sorted(checker.iter_errors(self.config), key=str):
        # The offending parameter name is the first quoted token of the
        # error message.
        param = issue.message.split("'")[1]
        location = list(issue.relative_path) + [param]
        self.contain_errors = True
        self.invalid_params_objs.append({param: location})
        self.validation_errors.append(
            PollinatorPlatformConfigError(issue.relative_path,
                                          issue.message).message)
    self.is_validated = True
    if self.errors:
        raise PollinatorPlatformConfigErrorList(self.validation_errors)
def load_schema(self):
    """Read the schema JSON from disk and build a Draft-7 validator whose
    $ref resolver is rooted at the schema directory."""
    self.schema_filepath = os.path.join(self.schema_path, self.schema_filename)
    with open(self.schema_filepath, 'r') as schema_fh:
        self.schema = json.load(schema_fh)
    # create validator
    base_uri = 'file:///{}/'.format(self.schema_path)
    resolver = jsonschema.RefResolver(base_uri=base_uri, referrer=self.schema)
    self.validator = jsonschema.Draft7Validator(self.schema, resolver=resolver)
def _add_validation_errors(doc):
    '''
    Validate *doc* against its entity-type schema and attach the results
    under doc['mapper_metadata']['validation_errors'].

    >>> from pprint import pprint
    >>> doc = {'entity_type': 'JUST WRONG'}
    >>> try:
    ...     _add_validation_errors(doc)
    ... except FileNotFoundError as e:
    ...     assert 'just wrong.schema.yaml' in str(e)

    >>> doc = {'entity_type': 'dataset'}
    >>> _add_validation_errors(doc)
    >>> pprint(doc['mapper_metadata']['validation_errors'][0])
    {'absolute_path': '/entity_type',
     'absolute_schema_path': '/properties/entity_type/enum',
     'message': "'dataset' is not one of ['Collection', 'Dataset', 'Donor', "
                "'Sample']"}

    >>> doc = {
    ...     'entity_type': 'Donor',
    ...     'create_timestamp': 'FAKE',
    ...     'created_by_user_displayname': 'FAKE',
    ...     'created_by_user_email': 'FAKE',
    ...     'data_access_level': 'public',
    ...     'group_name': 'FAKE',
    ...     'group_uuid': 'FAKE',
    ...     'last_modified_timestamp': 'FAKE',
    ...     'uuid': 'FAKE',
    ...     'access_group': 'FAKE',
    ...     'ancestor_ids': 'FAKE',
    ...     'ancestors': 'FAKE',
    ...     'descendant_ids': 'FAKE',
    ...     'descendants': 'FAKE'
    ... }
    >>> _add_validation_errors(doc)
    >>> pprint(doc['mapper_metadata']['validation_errors'])
    []
    '''
    schema = _get_schema(doc)
    if not schema.keys():
        # No schema could be loaded for this entity type.
        doc['mapper_metadata'] = {'validation_errors': ["Can't load schema"]}
        return
    checker = jsonschema.Draft7Validator(schema)
    found = []
    for err in checker.iter_errors(doc):
        found.append({
            'message': err.message,
            'absolute_schema_path': _as_path_string(err.absolute_schema_path),
            'absolute_path': _as_path_string(err.absolute_path),
        })
    doc['mapper_metadata'] = {'validation_errors': found}
def validate_against_template(input_dict, template, index):
    """
    Validate a dictionary (one spreadsheet row) against a template schema.

    Creates a JSON schema validator from *template* and builds a list of
    error dictionaries, logging each error.message to log.errors.

    :param input_dict: a dictionary representing a row of a spreadsheet to
        be validated
    :param template: a template dictionary to validate against
    :param index: the row index, recorded on each error entry
    :return: validation_errors, a list of dicts describing each error
    """
    # Initialize json schema validator
    validator = jsonschema.Draft7Validator(template)
    # Initialize list object for storing validation errors
    validation_errors = []
    for error in sorted(validator.iter_errors(input_dict), key=str):
        # Create a temporary dictionary for the individual error
        tmp_dict = dict()
        # BUG FIX: the original assigned the undefined name `row` here,
        # raising NameError on any validation error; the row number is the
        # `index` parameter.
        tmp_dict['row'] = index
        # Get error type
        tmp_dict['error_type'] = error.validator
        # Get error message and log it
        tmp_dict['error_message'] = error.message
        log.error(error.message)
        # Required field errors are a little special and need to be handled
        # separately to get the field. We don't get the schema because it
        # will print the entire template schema
        if error.validator == "required":
            # Get the item failing validation from the error message
            tmp_dict['item'] = 'info.' + error.message.split("'")[1]
        # Get additional information for pattern and type errors
        elif error.validator in ("pattern", "type"):
            # Get the value of the field that failed validation
            tmp_dict['error_value'] = error.instance
            # Get the field that failed validation
            tmp_dict['item'] = 'Column: {}'.format(str(error.path.pop()))
            # Get the schema object used to validate in failed validation
            tmp_dict['schema'] = error.schema
        elif error.validator == "anyOf":
            tmp_dict['schema'] = {"anyOf": error.schema['anyOf']}
        else:
            pass
        # revalidate key so that validation errors can be revalidated in the future
        tmp_dict['revalidate'] = False
        # Append individual error object to the return validation_errors object
        validation_errors.append(tmp_dict)
    return validation_errors
def validate_json(data, schema, input_name):
    """Validate *data* against *schema*; print every error and raise
    AMLConfigurationException when the object does not conform."""
    checker = jsonschema.Draft7Validator(schema)
    problems = list(checker.iter_errors(data))
    if problems:
        for problem in problems:
            print(f"::error::JSON validation error: {problem}")
        raise AMLConfigurationException(
            f"JSON validation error for '{input_name}'. Provided object does not match schema. Please check the output for more details."
        )
    else:
        print(
            f"::debug::JSON validation passed for '{input_name}'. Provided object does match schema."
        )