def test_should_validate(self):
    """Check that our data validates given the JSON schemas."""
    cases = [
        (SCHEMAS[ARBITRARY], VALID_ARBITRARY),
        (SCHEMAS[DAY], VALID_DAY),
        (SCHEMAS[MONTH], VALID_MONTH),
        (SCHEMAS[WEEK], VALID_WEEK),
        (CATEGORIES_SCHEMA, VALID_CATEGORIES),
    ]
    for schema, payload in cases:
        self.assertEqual(True, Validator(schema).is_valid(payload))
def test_should_fail_missing_parameters(self):
    """Missing parameters should cause JSONSchema validation failure."""
    # (schema, fixture, required keys, whether the fixture is a list of dicts)
    cases = [
        (SCHEMAS[ARBITRARY], VALID_ARBITRARY, ['start', 'end'], False),
        (SCHEMAS[DAY], VALID_DAY, ['day', 'month', 'year'], False),
        (SCHEMAS[MONTH], VALID_MONTH, ['month', 'year'], False),
        (SCHEMAS[WEEK], VALID_WEEK, ['isoweek', 'isoyear'], False),
        (CATEGORIES_SCHEMA, VALID_CATEGORIES, ['main_slug', 'sub_slug'], True),
    ]
    for schema, fixture, required_keys, nested in cases:
        validator = Validator(schema)
        for key in required_keys:
            data = copy.deepcopy(fixture)
            target = data[0] if nested else data
            del target[key]
            self.assertEqual(False, validator.is_valid(data))
def validate_doc(data, schema):
    """Validate *data* against *schema*, raising descriptive errors"""
    found = [str(err) for err in Validator().iter_errors(data, schema)]
    if found:
        # Collapse every schema violation into one readable message.
        raise ForbiddenError("Validation errors: {0}".format(', '.join(found)))
    _validate_formats(data, schema)
def validate(instance, schema, **kargs):
    """Validate *instance* against *schema*, collecting every error.

    Raises ValidationError carrying the list of error dicts
    (``code``, ``msg``, ``key``, ``field``) when any violation is found.
    """
    v = Validator()
    errorObj = []
    # iter_errors was deprecated in the jsonschema code
    for error in v.iter_errors(instance, schema):
        # error.path is empty for failures at the document root; guard
        # against IndexError instead of crashing while reporting.  Using
        # str.format also tolerates integer path elements (array indexes),
        # which the old string concatenation did not.
        key = error.path[0] if error.path else ''
        errorMsg = "ERROR #{0} in '{1}': {2}".format(error.errorCode, key, error)
        errorObj.append({
            'code': error.errorCode,
            'msg': errorMsg,
            'key': key,
            'field': error.validator,
        })
    if errorObj:
        # Create exception loaded with errorObj and throw it
        raise ValidationError("Validation error...", errorObj)
def load(self, filename: str, use=None, encoding="utf-8"):
    """Load *filename*, parse it with a format loader, and validate it.

    Parameters
    ----------
    filename : str
        Path of the instance document to load.
    use : callable, optional
        ``load(fp)`` callable overriding extension-based detection.
    encoding : str
        Text encoding used to open the file.

    Raises ValueError for unknown extensions; the validator raises on
    schema violations.  Returns the parsed, validated instance.
    """
    path = Path(filename)
    if use is not None:
        load = use
    else:
        ext = path.suffix[1:]
        if ext not in self.formats:
            # No registered loader for this extension: fail loudly.
            # (A stray debug print() used to leak here before the raise.)
            raise ValueError(
                "Don't know how to load this file extension: {!r}".format(
                    path.suffix))
        load = self.formats[ext]
    with path.open(encoding=encoding) as fp:
        instance = load(fp)
    # Resolve $ref against every schema registered in the directory.
    resolver = RefResolver(
        self.id,
        self.definition,
        {sch.id: sch.definition for sch in self.directory.schemas()},
    )
    validator = Validator(self.definition, resolver=resolver)
    validator.validate(instance)
    return instance
def test_multiple_nesting(self):
    """Errors inside nested containers carry the full reversed path."""
    instance = [1, {"foo" : 2, "bar" : {"baz" : [1]}}, "quux"]
    schema = {
        "type" : "string",
        "items" : {
            "type" : ["string", "object"],
            "properties" : {
                "foo" : {"enum" : [1, 3]},
                "bar" : {
                    "type" : "array",
                    "properties" : {
                        "bar" : {"required" : True},
                        "baz" : {"minItems" : 2},
                    }
                }
            }
        }
    }
    found = list(sorted_errors(Validator().iter_errors(instance, schema)))
    expected = [
        ([], "type"),
        ([0], "type"),
        (["bar", 1], "type"),
        (["bar", "bar", 1], "required"),
        (["baz", "bar", 1], "minItems"),
        (["foo", 1], "enum"),
    ]
    self.assertEqual(len(found), len(expected))
    for error, (path, validator) in zip(found, expected):
        self.assertEqual(error.path, path)
        self.assertEqual(error.validator, validator)
def get(self, section_name):
    """Serve the raw settings for *section_name* along with its schema."""
    self.set_header("Content-Type", "application/json")
    schema = _get_schema(self.schemas_dir, section_name, self.overrides)
    path = _path(self.settings_dir, section_name, _file_extension)
    raw, settings = "{}", {}
    if os.path.exists(path):
        with open(path) as fid:
            # Attempt to load and parse the settings file.
            try:
                contents = fid.read()
                if contents:
                    raw = contents
                settings = json.loads(json_minify(raw))
            except Exception as error:
                self.log.warn(str(error))
    # Validate the parsed data against the schema.
    if len(settings):
        try:
            Validator(schema).validate(settings)
        except ValidationError as error:
            # Invalid settings: log and fall back to an empty payload.
            self.log.warn(str(error))
            raw = "{}"
    # Send back the raw data to the client.
    self.finish(json.dumps(dict(id=section_name, raw=raw, schema=schema)))
def _get_settings(settings_dir, schema_name, schema):
    """
    Returns a tuple containing the raw user settings, the parsed user
    settings, and a validation warning for a schema.
    """
    path = _path(settings_dir, schema_name, False, SETTINGS_EXTENSION)
    raw, settings, warning = '{}', {}, ''
    if os.path.exists(path):
        with open(path) as fid:
            try:  # to load and parse the settings file.
                contents = fid.read()
                if contents:
                    raw = contents
                settings = json5.loads(raw)
            except Exception as e:
                raise web.HTTPError(
                    500, 'Failed loading settings (%s): %s' % (schema_name, str(e)))
    # Validate the parsed data against the schema.
    if len(settings):
        try:
            Validator(schema).validate(settings)
        except ValidationError as e:
            # Keep going, but warn and serve an empty payload instead.
            warning = 'Failed validating settings (%s): %s' % (schema_name, str(e))
            raw = '{}'
    return (raw, settings, warning)
def validate(data, schema=None):
    """Validate *data*; return [] when valid, otherwise error details."""
    if schema is None:
        schema = generate()
        Validator.check_schema(schema)
    validator = Validator(schema)
    errors = list(validator.iter_errors(data))
    if not errors:
        # Schema-valid: additionally enforce unique policy names.
        counter = Counter(p['name'] for p in data.get('policies'))
        dupes = [name for name, seen in counter.items() if seen > 1]
        if dupes:
            return [ValueError(
                "Only one policy with a given name allowed, duplicates: %s"
                % (", ".join(dupes)))]
        return []
    try:
        resp = specific_error(errors[0])
        name = 'unknown'
        if isinstance(errors[0].instance, dict):
            name = errors[0].instance.get('name', 'unknown') or 'unknown'
        return [resp, name]
    except Exception:
        logging.exception(
            "specific_error failed, traceback, followed by fallback")
        return list(filter(None, [
            errors[0],
            best_match(validator.iter_errors(data)),
        ]))
def check_syntax(config, double_validate=True):
    """
    check the given configuration against the schema definition

    :param config:
    :param double_validate: validate invalid syntax a second time with raising exception
    :return: True if the syntax is valid, False otherwise
    """
    try:
        with open(ROOT_SCHEMA, 'r') as handle:
            schema = json.load(handle)
    except FileNotFoundError as not_found:
        factory_logger.error(
            'JSON root schema could not be found: {}'.format(ROOT_SCHEMA_NAME))
        factory_logger.error(not_found)
        sys.exit()
    except Exception as exc:
        factory_logger.error(exc)
        sys.exit()
    # resolver: reference sub-schemas from relative URIs
    # -> https://github.com/Julian/jsonschema/issues/313
    resolver = RefResolver(base_uri=ROOT_SCHEMA_BASE_URI, referrer=schema)
    validator = Validator(schema, resolver=resolver)
    valid = validator.is_valid(config)
    if double_validate and not valid:
        # if configuration is invalid, run validate to get error message
        try:
            validator.validate(config)
        except jsonschema.exceptions.ValidationError as err:
            factory_logger.error(err)
    return valid
def test_iter_errors(self):
    """iter_errors yields one error per failed validator keyword."""
    instance = [1, 2]
    schema = {
        "disallow" : "array",
        "enum" : [["a", "b", "c"], ["d", "e", "f"]],
        "minItems" : 3
    }
    if PY3:
        expected = [
            "'array' is disallowed for [1, 2]",
            "[1, 2] is too short",
            "[1, 2] is not one of [['a', 'b', 'c'], ['d', 'e', 'f']]",
        ]
    else:
        expected = [
            "u'array' is disallowed for [1, 2]",
            "[1, 2] is too short",
            "[1, 2] is not one of [[u'a', u'b', u'c'], [u'd', u'e', u'f']]",
        ]
    actual = [str(error) for error in Validator().iter_errors(instance, schema)]
    self.assertEqual(sorted(actual), sorted(expected))
def patch(self, section_name):
    """Overwrite the settings for *section_name* after schema validation."""
    if not self.settings_dir:
        raise web.HTTPError(404, "No current settings directory")
    schema_path = os.path.join(self.schemas_dir, section_name + '.json')
    if not os.path.exists(schema_path):
        raise web.HTTPError(404, "Schema not found for: %r" % section_name)
    data = self.get_json_body()  # Will raise 400 if content is not valid JSON
    # Validate the data against the schema.
    if Validator is not None:
        with open(schema_path) as fid:
            schema = json.load(fid)
        try:
            Validator(schema).validate(data)
        except ValidationError as e:
            raise web.HTTPError(400, str(e))
    # Create the settings dir as needed.
    if not os.path.exists(self.settings_dir):
        os.makedirs(self.settings_dir)
    settings_path = os.path.join(self.settings_dir, section_name + '.json')
    with open(settings_path, 'w') as fid:
        json.dump(data, fid)
    self.set_status(204)
def _get_user_settings(settings_dir, schema_name, schema):
    """
    Returns a dictionary containing the raw user settings, the parsed
    user settings, and a validation warning for a schema.
    """
    # NOTE(review): the original also computed os.stat() and initialized
    # last_modified/created but never used or returned them (the docstring
    # promised "file times" that were never delivered); the dead code is
    # removed here.  If callers need file times, add them to the returned
    # dict explicitly.
    path = _path(settings_dir, schema_name, False, SETTINGS_EXTENSION)
    raw = '{}'
    settings = {}
    warning = ''
    validation_warning = 'Failed validating settings (%s): %s'
    parse_error = 'Failed loading settings (%s): %s'
    if os.path.exists(path):
        with open(path) as fid:
            try:  # to load and parse the settings file.
                raw = fid.read() or raw
                settings = json5.loads(raw)
            except Exception as e:
                raise web.HTTPError(500, parse_error % (schema_name, str(e)))
    # Validate the parsed data against the schema.
    if len(settings):
        validator = Validator(schema)
        try:
            validator.validate(settings)
        except ValidationError as e:
            warning = validation_warning % (schema_name, str(e))
            raw = '{}'
    return dict(raw=raw, settings=settings, warning=warning)
def validate(data):
    """Validate policy *data* against the generated schema.

    Returns [] when valid, a single-element list describing the first
    specific error otherwise, or (as a last resort) a list with the raw
    error plus jsonschema's best match.
    """
    schema = generate()
    Validator.check_schema(schema)
    validator = Validator(schema)
    errors = list(validator.iter_errors(data))
    if not errors:
        # Schema-valid: additionally enforce unique policy names.
        counter = Counter([p['name'] for p in data.get('policies')])
        dupes = [k for k, v in counter.items() if v > 1]
        if dupes:
            return [
                ValueError(
                    "Only one policy with a given name allowed, duplicates: %s"
                    % (", ".join(dupes)))
            ]
        return []
    try:
        return [specific_error(errors[0])]
    except Exception:
        logging.exception(
            "specific_error failed, traceback, followed by fallback")
        # Materialize to a list so every return path yields a sequence;
        # a bare filter() is a lazy iterator on Python 3 and cannot be
        # indexed or len()-ed like the other return values.
        return list(filter(None, [
            errors[0],
            best_match(validator.iter_errors(data)),
        ]))
def get(self, section_name):
    """Return raw settings and the schema for *section_name* as JSON."""
    schema = _get_schema(self.schemas_dir, section_name, self.overrides)
    path = _path(self.settings_dir, section_name, _file_extension)
    raw, settings = '{}', {}
    if os.path.exists(path):
        with open(path) as fid:
            # Attempt to load and parse the settings file.
            try:
                contents = fid.read()
                if contents:
                    raw = contents
                settings = json.loads(json_minify(raw))
            except Exception as e:
                message = 'Failed loading settings ({}): {}'
                self.log.warn(message.format(section_name, str(e)))
    # Validate the parsed data against the schema.
    if len(settings):
        try:
            Validator(schema).validate(settings)
        except ValidationError as e:
            message = 'Failed validating settings ({}): {}'
            self.log.warn(message.format(section_name, str(e)))
            raw = '{}'
    # Send back the raw data to the client.
    self.finish(json.dumps(dict(id=section_name, raw=raw, schema=schema)))
def validate(data, schema=None):
    """Validate *data*; return [] when valid, otherwise error details."""
    if schema is None:
        schema = generate()
        Validator.check_schema(schema)
    validator = Validator(schema)
    errors = list(validator.iter_errors(data))
    if not errors:
        # Fall through to the uniqueness check once the schema passes.
        return check_unique(data) or []
    try:
        resp = policy_error_scope(specific_error(errors[0]), data)
        name = 'unknown'
        if isinstance(errors[0].instance, dict):
            name = errors[0].instance.get('name', 'unknown') or 'unknown'
        return [resp, name]
    except Exception:
        logging.exception(
            "specific_error failed, traceback, followed by fallback")
        return list(filter(None, [
            errors[0],
            best_match(validator.iter_errors(data)),
        ]))
def put(self, schema_name):
    """Persist raw settings for *schema_name* after validating them."""
    if not self.settings_dir:
        raise web.HTTPError(500, 'No current settings directory')
    body = self.request.body.strip().decode(u'utf-8')
    try:
        # The request wraps the raw (possibly commented) JSON5 text.
        raw_settings = json.loads(body)['raw']
        payload = json5.loads(raw_settings)
    except json.decoder.JSONDecodeError as e:
        raise web.HTTPError(400, 'Failed parsing JSON payload: %s' % str(e))
    except KeyError as e:
        raise web.HTTPError(400, 'Invalid format for JSON payload. Must be in the form {\'raw\': ...}')
    # Validate the data against the schema.
    schema = _get_schema(self.schemas_dir, schema_name, self.overrides)
    try:
        Validator(schema).validate(payload)
    except ValidationError as e:
        raise web.HTTPError(400, 'Failed validating input: %s' % str(e))
    # Write the raw data (comments included) to a file.
    path = _path(self.settings_dir, schema_name, True, SETTINGS_EXTENSION)
    with open(path, 'w') as fid:
        fid.write(raw_settings)
    self.set_status(204)
def get_validator(version=None, version_minor=None):
    """Load the JSON schema into a Validator"""
    if version is None:
        from .. import current_nbformat
        version = current_nbformat
    module = import_item("nbformat.v%s" % version)
    current_minor = module.nbformat_minor
    if version_minor is None:
        version_minor = current_minor
    key = (version, version_minor)
    if key not in validators:
        if not hasattr(module, 'nbformat_schema'):
            # no validator
            return None
        schema_path = os.path.join(
            os.path.dirname(module.__file__), module.nbformat_schema)
        with open(schema_path) as fh:
            schema_json = json.load(fh)
        if current_minor < version_minor:
            # notebook from the future, relax all `additionalProperties: False` requirements
            schema_json = _relax_additional_properties(schema_json)
            # and allow undefined cell types and outputs
            schema_json = _allow_undefined(schema_json)
        validators[key] = Validator(schema_json)
    return validators[key]
def put(self, schema_name):
    """Validate and persist the raw settings payload for *schema_name*."""
    if not self.settings_dir:
        raise web.HTTPError(500, 'No current settings directory')
    raw = self.request.body.strip().decode('utf-8')
    # Validate the data against the schema.
    schema, _ = _get_schema(
        self.schemas_dir, schema_name, self.overrides,
        labextensions_path=self.labextensions_path)
    try:
        Validator(schema).validate(json5.loads(raw))
    except ValidationError as e:
        raise web.HTTPError(400, 'Failed validating input: %s' % str(e))
    # Write the raw data (comments included) to a file.
    path = _path(self.settings_dir, schema_name, True, SETTINGS_EXTENSION)
    with open(path, 'w', encoding='utf-8') as fid:
        fid.write(raw)
    self.set_status(204)
def merge_validator(request, json_schema_merge):
    """Build a Validator for the merge schema with local $ref resolution."""
    base_uri = 'file://localhost/' + schema_dir.replace('\\', '/') + '/'
    resolver = RefResolver(base_uri, json_schema_merge)
    # Ensure tuples validate to "array" schema type
    return Validator(
        json_schema_merge,
        resolver=resolver,
        types={"array": (list, tuple)},
    )
def validator(request, schema_json):
    """Build a Validator for *schema_json* with local $ref resolution."""
    base_uri = 'file://localhost/' + schema_dir.replace('\\', '/') + '/'
    resolver = RefResolver(base_uri, schema_json)
    # Ensure tuples validate to "array" schema type
    return Validator(
        schema_json,
        resolver=resolver,
        types={"array": (list, tuple)},
    )
def get_validator(version=None, version_minor=None, relax_add_props=False):
    """Load the JSON schema into a Validator"""
    if version is None:
        from . import current_nbformat
        version = current_nbformat
    v = import_item("nbformat.v%s" % version)
    current_minor = getattr(v, 'nbformat_minor', 0)
    if version_minor is None:
        version_minor = current_minor
    # BUG FIX: the cache must be keyed on relax_add_props as well.
    # Previously the relaxed and strict validators shared a
    # (version, minor) key, so whichever variant was requested first was
    # returned for every later call regardless of relax_add_props.
    version_tuple = (version, version_minor, relax_add_props)
    if version_tuple not in validators:
        try:
            schema_json = _get_schema_json(v, version=version,
                                           version_minor=version_minor)
        except AttributeError:
            # no schema available for this version
            return None
        if current_minor < version_minor:
            # notebook from the future, relax all
            # `additionalProperties: False` requirements
            schema_json = _relax_additional_properties(schema_json)
            # and allow undefined cell types and outputs
            schema_json = _allow_undefined(schema_json)
        if relax_add_props:
            # this allows properties to be added for intermediate
            # representations while validating for all other kinds of errors
            # (applied on top of any future-minor relaxation above;
            # _relax_additional_properties is safe to apply twice)
            schema_json = _relax_additional_properties(schema_json)
        validators[version_tuple] = Validator(schema_json)
    return validators[version_tuple]
def parse_schemas(self, in_schema, in_meta_schema,
                  out_schema, out_meta_schema):
    """Parse the input/output JSON schemas and wire up the key names.

    Each schema is first validated against its meta schema; then the
    ``self.key`` annotations on the item properties select which
    configured key each property maps to.

    Raises TypeError when a schema is supplied without its meta schema.
    """
    # load the two JSON schema objects
    if in_schema and in_meta_schema:
        m = json.loads(in_meta_schema)
        s = json.loads(in_schema)
        # add some sanity argument before changing the config
        Validator(m).validate(s)
        # search for the keys and change them if the schema requests
        for k in s[self.items][self.properties]:
            v = s[self.items][self.properties][k]
            if self.key not in v:
                continue
            if v[self.key] == self.id:
                self.id_key = k
            elif v[self.key] == self.value:
                self.value_key = k
            elif v[self.key] == self.unit:
                self.unit_key = k
            elif v[self.key] == self.threshold:
                self.threshold_key = k
            elif v[self.key] == self.time:
                if k == self.time_key:
                    # incoming data already uses the configured time key;
                    # move our own timestamp to the fallback name
                    self.time_key = self.fallback_time_key
                self.sensor_time_key = k
            elif v[self.key] == self.other:
                self.other_keys.append(k)
            # else: just throw it away..
    elif in_schema:
        raise TypeError('Received input schema but no meta schema..')
    if out_schema and out_meta_schema:
        m = json.loads(out_meta_schema)
        s = json.loads(out_schema)
        # add some sanity argument before changing the config
        Validator(m).validate(s)
        # search for the keys and change them if the schema requests
        for k in s[self.items][self.properties]:
            v = s[self.items][self.properties][k]
            if self.key in v:
                t = v[self.key]
                self.translation_keys[t] = k
    elif out_schema:
        # BUG FIX: this branch previously raised the copy-pasted
        # "input schema" message, misreporting which schema was missing.
        raise TypeError('Received output schema but no meta schema..')
def test_should_fail_extra_parameters(self):
    """Extra parameters should cause JSONSchema validation failure."""
    # (schema, fixture, extra key, value, whether fixture is a list of dicts)
    cases = [
        (SCHEMAS[ARBITRARY], VALID_ARBITRARY, 'isoweek', '21', False),
        (SCHEMAS[DAY], VALID_DAY, 'start', 'TBD', False),
        (SCHEMAS[MONTH], VALID_MONTH, 'start', 'TBD', False),
        (SCHEMAS[WEEK], VALID_WEEK, 'start', 'TBD', False),
        (CATEGORIES_SCHEMA, VALID_CATEGORIES, 'extra', 'not_allowed', True),
    ]
    for schema, fixture, key, value, nested in cases:
        data = copy.deepcopy(fixture)
        target = data[0] if nested else data
        target[key] = value
        self.assertEqual(False, Validator(schema).is_valid(data))
def test_iter_errors_multiple_failures_one_validator(self):
    """Several failing properties should each yield their own error."""
    schema = {
        "properties" : {
            "foo" : {"type" : "string"},
            "bar" : {"minItems" : 2},
            "baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]},
        }
    }
    instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
    found = sum(1 for _ in Validator().iter_errors(instance, schema))
    self.assertEqual(found, 4)
def register_schema(schema):
    """
    Register 'schema' to be used with class::Validator.
    Set module variable ('_v').
    Return None.
    """
    global _validator
    global _schema
    try:
        _tmp = Validator(schema)
    except Exception:
        # Reset so the finally block clears the module state, then
        # re-raise.  A bare `raise` preserves the original traceback
        # (the previous `raise err` rebound the exception needlessly).
        _tmp = None
        raise
    finally:
        _validator = _tmp
        _schema = schema if _validator else None
    return None
def validate(data):
    """Validate *data* against the generated schema.

    Returns [] when valid, a single-element list describing the first
    specific error otherwise, or (as a last resort) a list with the raw
    error plus jsonschema's best match.
    """
    schema = generate()
    Validator.check_schema(schema)
    validator = Validator(schema)
    errors = list(validator.iter_errors(data))
    if not errors:
        return []
    try:
        return [specific_error(errors[0])]
    except Exception:
        logging.exception(
            "specific_error failed, traceback, followed by fallback")
        # Materialize to a list so every return path yields a sequence;
        # a bare filter() is a lazy iterator on Python 3 and cannot be
        # indexed or len()-ed like the other return values.
        return list(filter(None, [
            errors[0],
            best_match(validator.iter_errors(data)),
        ]))
def validate(nbjson):
    """Checks whether the given notebook JSON conforms to the current
    notebook format schema, and returns the list of errors.
    """
    # load the schema file
    with open(schema_path, 'r') as fh:
        schema_json = json.load(fh)
    # resolve internal references, then narrow to the notebook subtree
    notebook_schema = jsonpointer.resolve_pointer(
        resolve_ref(schema_json), '/notebook')
    # collect every validation error
    return list(Validator(notebook_schema).iter_errors(nbjson))
def put(self, section_name):
    """Validate the request body against the section schema and save it."""
    if not self.settings_dir:
        raise web.HTTPError(404, 'No current settings directory')
    raw = self.request.body.strip().decode(u'utf-8')
    # Validate the data against the schema.
    schema = _get_schema(self.schemas_dir, section_name, self.overrides)
    try:
        Validator(schema).validate(json.loads(json_minify(raw)))
    except ValidationError as e:
        raise web.HTTPError(400, str(e))
    # Write the raw data (comments included) to a file.
    path = _path(self.settings_dir, section_name, _file_extension, True)
    with open(path, 'w') as fid:
        fid.write(raw)
    self.set_status(204)
def save_settings(schemas_dir, settings_dir, schema_name, raw_settings,
                  overrides, labextensions_path=None):
    """
    Save ``raw_settings`` settings for ``schema_name``.

    Parameters
    ----------
    schemas_dir: str
        Path to schemas.
    settings_dir: str
        Path to settings.
    schema_name str
        Schema name.
    raw_settings: str
        Raw serialized settings dictionary
    overrides: dict
        Settings overrides.
    labextensions_path: list, optional
        List of paths to federated labextensions containing their own schema files.
    """
    # Parse the JSON5 payload, then validate it against the schema.
    parsed = json5.loads(raw_settings)
    schema, _ = _get_schema(schemas_dir, schema_name, overrides,
                            labextensions_path=labextensions_path)
    Validator(schema).validate(parsed)
    # Write the raw data (comments included) to a file.
    target = _path(settings_dir, schema_name, True, SETTINGS_EXTENSION)
    with open(target, "w", encoding="utf-8") as fid:
        fid.write(raw_settings)
def test_single_nesting(self):
    """Errors one level deep report the offending property in .path."""
    instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
    schema = {
        "properties" : {
            "foo" : {"type" : "string"},
            "bar" : {"minItems" : 2},
            "baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]},
        }
    }
    found = list(sorted_errors(Validator().iter_errors(instance, schema)))
    expected = [
        (["bar"], "minItems"),
        (["baz"], "enum"),
        (["baz"], "maximum"),
        (["foo"], "type"),
    ]
    self.assertEqual(len(found), len(expected))
    for error, (path, validator) in zip(found, expected):
        self.assertEqual(error.path, path)
        self.assertEqual(error.validator, validator)
def validate(data):
    """Validate a job document, then each evaluator and its thresholds."""
    checker = Validator()

    def req(type_name):
        # Shorthand for a required draft-3 field of the given type.
        return {"type": type_name, "required": True}

    document_schema = {
        "type": "object",
        "additionalProperties": False,
        "required": True,
        "properties": {
            "destination": {
                "type": "object",
                "required": True,
                "additionalProperties": False,
                "properties": {
                    "name": req("string"),
                    "subject": req("string"),
                },
            },
            "report": {
                "type": "object",
                "required": True,
                "additionalProperties": False,
                "properties": {
                    "message": req("string"),
                },
            },
            "request": {
                "type": "object",
                "required": True,
                "additionalProperties": False,
                "properties": {
                    "uuid": req("string"),
                    "source": req("string"),
                    "time": req("string"),
                    "day_of_year": req("number"),
                    "day_of_week": req("number"),
                    "week_of_year": req("number"),
                    "month": req("number"),
                    "year": req("number"),
                    "day": req("number"),
                    "cycle": req("number"),
                },
            },
            "plugin": {
                "type": "object",
                "required": True,
                "additionalProperties": False,
                "properties": {
                    "name": req("string"),
                    "hash": req("string"),
                    "timeout": req("number"),
                    "parameters": req("array"),
                },
            },
            "evaluators": {"type": "object", "required": True},
            "tags": {"type": "array", "required": True},
        },
    }
    evaluator_schema = {
        "type": "object",
        "additionalProperties": False,
        "properties": {
            "evaluator": req("string"),
            "metric": req("string"),
            "thresholds": req("object"),
        },
    }
    threshold_schema = {"type": "string"}

    # Top-level document first, then drill into each evaluator entry.
    checker.validate(data, document_schema)
    for evaluator in data['evaluators']:
        evaluator_doc = data['evaluators'][evaluator]
        checker.validate(evaluator_doc, evaluator_schema)
        for threshold in evaluator_doc['thresholds']:
            checker.validate(evaluator_doc['thresholds'][threshold],
                             threshold_schema)