def test_validates_registers_meta_schema_draft6_id(self):
    meta_schema_key = "meta schema $id"
    my_meta_schema = {u"$id": meta_schema_key}

    validators.create(
        meta_schema=my_meta_schema,
        version="my version",
    )
    self.assertIn(meta_schema_key, validators.meta_schemas)
def test_validates_registers_meta_schema_id(self):
    meta_schema_key = "meta schema id"
    my_meta_schema = {u"id": meta_schema_key}

    validators.create(
        meta_schema=my_meta_schema,
        version="my version",
        id_of=lambda s: s.get("id", ""),
    )
    self.assertIn(meta_schema_key, validators.meta_schemas)
def test_providing_default_types_with_type_checker_errors(self):
    with self.assertRaises(TypeError) as e:
        validators.create(
            meta_schema={},
            validators={},
            default_types={"foo": object},
            type_checker=TypeChecker(),
        )

    self.assertIn(
        "Do not specify default_types when providing a type checker",
        str(e.exception),
    )
    self.assertFalse(self.flushWarnings())
def get_jsonschema_validator():
    """Get metaschema validator and format checker

    Older versions of jsonschema require some compatibility changes.

    @returns: Tuple: (jsonschema.Validator, FormatChecker)
    @raises: ImportError when jsonschema is not present
    """
    from jsonschema import Draft4Validator, FormatChecker
    from jsonschema.validators import create

    # Allow for bytes to be presented as an acceptable valid value for string
    # type jsonschema attributes in cloud-init's schema.
    # This allows #cloud-config to provide valid yaml "content: !!binary | ..."
    strict_metaschema = deepcopy(Draft4Validator.META_SCHEMA)
    strict_metaschema["additionalProperties"] = False

    # This additional label allows us to specify a different name
    # than the property key when generating docs.
    # This is especially useful when using a "patternProperties" regex,
    # otherwise the property label in the generated docs will be a
    # regular expression.
    # http://json-schema.org/understanding-json-schema/reference/object.html#pattern-properties
    strict_metaschema["properties"]["label"] = {"type": "string"}

    if hasattr(Draft4Validator, "TYPE_CHECKER"):
        # jsonschema 3.0+
        type_checker = Draft4Validator.TYPE_CHECKER.redefine(
            "string", is_schema_byte_string
        )
        cloudinitValidator = create(
            meta_schema=strict_metaschema,
            validators=Draft4Validator.VALIDATORS,
            version="draft4",
            type_checker=type_checker,
        )
    else:
        # jsonschema 2.6 workaround
        types = Draft4Validator.DEFAULT_TYPES  # pylint: disable=E1101
        # Allow bytes as well as string (and disable a spurious unsupported
        # assignment-operation pylint warning which appears because this
        # code path isn't written against the latest jsonschema).
        types["string"] = (str, bytes)  # pylint: disable=E1137
        cloudinitValidator = create(  # pylint: disable=E1123
            meta_schema=strict_metaschema,
            validators=Draft4Validator.VALIDATORS,
            version="draft4",
            default_types=types,
        )
    return (cloudinitValidator, FormatChecker)
def check_schema(schema):
    """
    Check a given schema to make sure it is valid YAML schema.
    """
    # We also want to validate the "default" values in the schema
    # against the schema itself.  jsonschema as a library doesn't do
    # this on its own.
    def validate_default(validator, default, instance, schema):
        if not validator.is_type(instance, "object"):
            return

        if "default" in instance:
            with instance_validator.resolver.in_scope(scope):
                for err in instance_validator.iter_errors(
                        instance["default"], instance):
                    yield err

    VALIDATORS = util.HashableDict(mvalidators.Draft4Validator.VALIDATORS.copy())
    VALIDATORS.update({"default": validate_default})

    meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID,
                              mresolver.default_url_mapping)

    resolver = _make_resolver(mresolver.default_url_mapping)
    cls = mvalidators.create(meta_schema=meta_schema, validators=VALIDATORS)
    validator = cls(meta_schema, resolver=resolver)

    instance_validator = mvalidators.Draft4Validator(schema, resolver=resolver)
    scope = schema.get("id", "")

    validator.validate(schema, _schema=meta_schema)
def create_validator():
    global _created_validator
    if _created_validator:
        return

    validator = validators.create(
        meta_schema=load_schema(
            'http://stsci.edu/schemas/yaml-schema/draft-01',
            mresolver.UrlMapping()),
        validators=YAML_VALIDATORS,
        version=str('yaml schema draft 1'))

    validator.orig_iter_errors = validator.iter_errors

    # We can't validate anything that looks like an external
    # reference, since we don't have the actual content, so we
    # just have to defer it for now.  If the user cares about
    # complete validation, they can call
    # `AsdfFile.resolve_references`.
    def iter_errors(self, instance, _schema=None):
        if ((isinstance(instance, dict) and '$ref' in instance) or
                isinstance(instance, reference.Reference)):
            return

        for x in self.orig_iter_errors(instance, _schema=_schema):
            yield x

    validator.iter_errors = iter_errors
    _created_validator = True
def setUp(self): all_validators = dict(validators.Draft7Validator.VALIDATORS) all_validators['async_valid'] = async_validator self.validator = validators.create( meta_schema=validators.Draft7Validator.META_SCHEMA, validators=all_validators, )({})
def check_schema(schema):
    """
    Check a given schema to make sure it is valid YAML schema.
    """
    # We also want to validate the "default" values in the schema
    # against the schema itself.  jsonschema as a library doesn't do
    # this on its own.
    def validate_default(validator, default, instance, schema):
        if not validator.is_type(instance, 'object'):
            return

        if 'default' in instance:
            with instance_validator.resolver.in_scope(scope):
                for err in instance_validator.iter_errors(
                        instance['default'], instance):
                    yield err

    VALIDATORS = util.HashableDict(
        mvalidators.Draft4Validator.VALIDATORS.copy())
    VALIDATORS.update({'default': validate_default})

    meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID,
                              mresolver.default_resolver)

    resolver = _make_resolver(mresolver.default_resolver)
    cls = mvalidators.create(meta_schema=meta_schema, validators=VALIDATORS)
    validator = cls(meta_schema, resolver=resolver)

    instance_validator = mvalidators.Draft4Validator(schema, resolver=resolver)
    scope = schema.get('id', '')

    validator.validate(schema, _schema=meta_schema)
def _create_validator():
    global _validator
    if _validator is not None:
        return _validator

    validator = validators.create(
        meta_schema=load_schema(
            'http://stsci.edu/schemas/yaml-schema/draft-01',
            mresolver.default_url_mapping),
        validators=YAML_VALIDATORS)

    validator.orig_iter_errors = validator.iter_errors

    # We can't validate anything that looks like an external
    # reference, since we don't have the actual content, so we
    # just have to defer it for now.  If the user cares about
    # complete validation, they can call
    # `AsdfFile.resolve_references`.
    def iter_errors(self, instance, _schema=None, _seen=set()):
        if id(instance) in _seen:
            return

        if _schema is None:
            schema = self.schema
        else:
            schema = _schema

        if ((isinstance(instance, dict) and '$ref' in instance) or
                isinstance(instance, reference.Reference)):
            return

        if _schema is None:
            tag = tagged.get_tag(instance)
            if tag is not None:
                schema_path = self.ctx.tag_to_schema_resolver(tag)
                if schema_path != tag:
                    s = load_schema(schema_path, self.ctx.url_mapping)
                    if s:
                        with self.resolver.in_scope(schema_path):
                            for x in self.orig_iter_errors(instance, s):
                                yield x

            if isinstance(instance, dict):
                new_seen = _seen | set([id(instance)])
                for val in six.itervalues(instance):
                    for x in self.iter_errors(val, _seen=new_seen):
                        yield x
            elif isinstance(instance, list):
                new_seen = _seen | set([id(instance)])
                for val in instance:
                    for x in self.iter_errors(val, _seen=new_seen):
                        yield x
        else:
            for x in self.orig_iter_errors(instance, _schema=schema):
                yield x

    validator.iter_errors = iter_errors
    _validator = validator
    return validator
def test_if_a_version_is_provided_it_is_registered(self):
    Validator = validators.create(
        meta_schema={u"$id": "something"},
        version="my version",
    )
    self.addCleanup(validators.meta_schemas.pop, "something")
    self.assertEqual(Validator.__name__, "MyVersionValidator")
def validate_homology_inputs(cromwell_inputs):
    with pkg_resources.path("validation", "homology.schema.json") as schema_file:
        with open(schema_file, 'r') as schema:
            schema = json.load(schema)

    all_validators = dict(Draft7Validator.VALIDATORS)
    reat_validator = validators.create(meta_schema=schema, validators=all_validators)
    reat_validator(schema).validate(cromwell_inputs)
def validate(data, schema):
    from jsonschema import RefResolver, validators, FormatChecker, Draft4Validator
    from urllib.parse import urljoin
    from snakemake.io import _load_configfile

    schemafile = schema
    schema = _load_configfile(schema, filetype="Schema")

    resolver = RefResolver(
        urljoin('file:', schemafile), schema,
        handlers={
            'file': lambda uri: _load_configfile(re.sub("^file://", "", uri))
        })

    format_checker = FormatChecker()

    def path_exists(validator, properties, instance, schema):
        if properties and not Path(instance).expanduser().exists():
            yield jsonschema.exceptions.ValidationError(
                "{} does not exist".format(instance))

    @format_checker.checks('file')
    def check_filepath(value):
        path = Path(value)
        return path.is_file() if path.exists() else True

    @format_checker.checks('directory')
    def check_directory(value):
        path = Path(value)
        return path.is_dir() if path.exists() else True

    all_validators = dict(Draft4Validator.VALIDATORS)
    all_validators['must_exist'] = path_exists

    Validator = validators.create(meta_schema=Draft4Validator.META_SCHEMA,
                                  validators=all_validators)
    validator = Validator(schema, resolver=resolver,
                          format_checker=format_checker)

    errors = []
    if not isinstance(data, dict):
        for row in data.to_dict('records'):
            print(row)
            for ve in validator.iter_errors(row):
                key = ve.relative_path.pop() if len(ve.relative_path) > 0 else None
                errors.append(MarsValidationError(ve.instance, key, ve.message))
    else:
        for ve in validator.iter_errors(data):
            key = ve.relative_path.pop() if len(ve.relative_path) > 0 else None
            errors.append(MarsValidationError(ve.instance, key, ve.message))

    return errors
def _create_validator(validators=YAML_VALIDATORS):
    meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID, default_ext_resolver)

    base_cls = mvalidators.create(meta_schema=meta_schema, validators=validators)

    class ASDFValidator(base_cls):
        DEFAULT_TYPES = base_cls.DEFAULT_TYPES.copy()
        DEFAULT_TYPES['array'] = (list, tuple)

        def iter_errors(self, instance, _schema=None, _seen=set()):
            # We can't validate anything that looks like an external reference,
            # since we don't have the actual content, so we just have to defer
            # it for now.  If the user cares about complete validation, they
            # can call `AsdfFile.resolve_references`.
            if id(instance) in _seen:
                return

            if _schema is None:
                schema = self.schema
            else:
                schema = _schema

            if ((isinstance(instance, dict) and '$ref' in instance) or
                    isinstance(instance, reference.Reference)):
                return

            if _schema is None:
                tag = getattr(instance, '_tag', None)
                if tag is not None:
                    schema_path = self.ctx.resolver(tag)
                    if schema_path != tag:
                        try:
                            s = load_schema(schema_path, self.ctx.resolver)
                        except FileNotFoundError:
                            msg = "Unable to locate schema file for '{}': '{}'"
                            warnings.warn(msg.format(tag, schema_path))
                            s = {}
                        if s:
                            with self.resolver.in_scope(schema_path):
                                for x in super(ASDFValidator, self).iter_errors(instance, s):
                                    yield x

                if isinstance(instance, dict):
                    new_seen = _seen | set([id(instance)])
                    for val in instance.values():
                        for x in self.iter_errors(val, _seen=new_seen):
                            yield x
                elif isinstance(instance, list):
                    new_seen = _seen | set([id(instance)])
                    for val in instance:
                        for x in self.iter_errors(val, _seen=new_seen):
                            yield x
            else:
                for x in super(ASDFValidator, self).iter_errors(instance, _schema=schema):
                    yield x

    return ASDFValidator
def test_if_a_version_is_provided_it_is_registered(self):
    with mock.patch("jsonschema.validators.validates") as validates:
        validates.side_effect = lambda version: lambda cls: cls
        Validator = validators.create(
            meta_schema={u"id": ""},
            version="my version",
        )
    validates.assert_called_once_with("my version")
    self.assertEqual(Validator.__name__, "MyVersionValidator")
def _create_validator(validators=YAML_VALIDATORS):
    meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID,
                              mresolver.default_resolver)

    base_cls = mvalidators.create(meta_schema=meta_schema, validators=validators)

    class ASDFValidator(base_cls):
        DEFAULT_TYPES = base_cls.DEFAULT_TYPES.copy()
        DEFAULT_TYPES['array'] = (list, tuple)

        def iter_errors(self, instance, _schema=None, _seen=set()):
            # We can't validate anything that looks like an external reference,
            # since we don't have the actual content, so we just have to defer
            # it for now.  If the user cares about complete validation, they
            # can call `AsdfFile.resolve_references`.
            if id(instance) in _seen:
                return

            if _schema is None:
                schema = self.schema
            else:
                schema = _schema

            if ((isinstance(instance, dict) and '$ref' in instance) or
                    isinstance(instance, reference.Reference)):
                return

            if _schema is None:
                tag = getattr(instance, '_tag', None)
                if tag is not None:
                    schema_path = self.ctx.resolver(tag)
                    if schema_path != tag:
                        s = load_schema(schema_path, self.ctx.resolver)
                        if s:
                            with self.resolver.in_scope(schema_path):
                                for x in super(ASDFValidator, self).iter_errors(instance, s):
                                    yield x

                if isinstance(instance, dict):
                    new_seen = _seen | set([id(instance)])
                    for val in instance.values():
                        for x in self.iter_errors(val, _seen=new_seen):
                            yield x
                elif isinstance(instance, list):
                    new_seen = _seen | set([id(instance)])
                    for val in instance:
                        for x in self.iter_errors(val, _seen=new_seen):
                            yield x
            else:
                for x in super(ASDFValidator, self).iter_errors(instance, _schema=schema):
                    yield x

    return ASDFValidator
def test_providing_explicit_type_checker_does_not_warn(self):
    Validator = validators.create(
        meta_schema={},
        validators={},
        type_checker=TypeChecker(),
    )
    self.assertFalse(self.flushWarnings())

    Validator({})
    self.assertFalse(self.flushWarnings())
def validate_transcriptome_inputs(cromwell_inputs):
    with pkg_resources.path("validation", "transcriptome.schema.json") as schema_file:
        with open(schema_file, 'r') as schema:
            reat_schema = json.load(schema)

    all_validators = dict(Draft7Validator.VALIDATORS)
    all_validators["is_name"] = is_valid_name
    reat_validator = validators.create(meta_schema=reat_schema, validators=all_validators)
    reat_validator(reat_schema).validate(cromwell_inputs)
def setUp(self):
    self.meta_schema = {u"properties": {u"smelly": {}}}
    self.smelly = mock.MagicMock()
    self.validators = {u"smelly": self.smelly}
    self.types = {u"dict": dict}

    self.Validator = create(meta_schema=self.meta_schema,
                            validators=self.validators,
                            default_types=self.types)

    self.validator_value = 12
    self.schema = {u"smelly": self.validator_value}
    self.validator = self.Validator(self.schema)
def test_custom_validator(self):
    Validator = validators.create(
        meta_schema={"id": "meta schema id"},
        version="12",
    )
    schema = {"$schema": "meta schema id"}
    self.assertIs(
        validators.validator_for(schema),
        Validator,
    )
def test_custom_validator_draft6(self):
    Validator = validators.create(
        meta_schema={"$id": "meta schema $id"},
        version="13",
    )
    schema = {"$schema": "meta schema $id"}
    self.assertIs(
        validators.validator_for(schema),
        Validator,
    )
def test_default_types_update_type_checker(self):
    Validator = validators.create(meta_schema=self.meta_schema,
                                  validators=self.validators,
                                  default_types={u"array": list})

    self.assertEqual(set(Validator.DEFAULT_TYPES), {u"array"})

    Extended = validators.extend(
        Validator,
        type_checker=Validator.TYPE_CHECKER.remove(u"array"))
    self.assertEqual(set(Extended.DEFAULT_TYPES), set())
def test_custom_validator(self):
    Validator = validators.create(
        meta_schema={"id": "meta schema id"},
        version="12",
        id_of=lambda s: s.get("id", ""),
    )
    schema = {"$schema": "meta schema id"}
    self.assertIs(
        validators.validator_for(schema),
        Validator,
    )
def validator(self):
    def is_unique(validator, value, instance, schema):
        if is_exists(self.store, key=value, value=instance):
            yield ValidationError(f"`{value}` `{instance}` already exists")

    all_validators = dict(Draft4Validator.VALIDATORS)
    all_validators["is_unique"] = is_unique

    MyValidator = validators.create(
        meta_schema=Draft4Validator.META_SCHEMA,
        validators=all_validators)
    return MyValidator(self.schema)
def _create_validator(_validators=YAML_VALIDATORS):
    validator = validators.create(meta_schema=load_schema(
        'http://stsci.edu/schemas/yaml-schema/draft-01',
        mresolver.default_url_mapping),
        validators=_validators)

    validator.orig_iter_errors = validator.iter_errors

    # We can't validate anything that looks like an external
    # reference, since we don't have the actual content, so we
    # just have to defer it for now.  If the user cares about
    # complete validation, they can call
    # `AsdfFile.resolve_references`.
    def iter_errors(self, instance, _schema=None, _seen=set()):
        if id(instance) in _seen:
            return

        if _schema is None:
            schema = self.schema
        else:
            schema = _schema

        if ((isinstance(instance, dict) and '$ref' in instance) or
                isinstance(instance, reference.Reference)):
            return

        if _schema is None:
            tag = tagged.get_tag(instance)
            if tag is not None:
                schema_path = self.ctx.tag_to_schema_resolver(tag)
                if schema_path != tag:
                    s = load_schema(schema_path, self.ctx.url_mapping)
                    if s:
                        with self.resolver.in_scope(schema_path):
                            for x in self.orig_iter_errors(instance, s):
                                yield x

            if isinstance(instance, dict):
                new_seen = _seen | set([id(instance)])
                for val in six.itervalues(instance):
                    for x in self.iter_errors(val, _seen=new_seen):
                        yield x
            elif isinstance(instance, list):
                new_seen = _seen | set([id(instance)])
                for val in instance:
                    for x in self.iter_errors(val, _seen=new_seen):
                        yield x
        else:
            for x in self.orig_iter_errors(instance, _schema=schema):
                yield x

    validator.iter_errors = iter_errors
    return validator
def test_default_types_used_if_no_type_checker_given(self):
    Validator = validators.create(
        meta_schema=self.meta_schema, validators=self.validators,
    )

    expected_types = {
        u"array",
        u"boolean",
        u"integer",
        u"null",
        u"number",
        u"object",
        u"string",
    }
    self.assertEqual(set(Validator.DEFAULT_TYPES), expected_types)
def setUp(self):
    self.meta_schema = {u"properties": {u"smelly": {}}}
    self.smelly = mock.MagicMock()
    self.validators = {u"smelly": self.smelly}
    self.type_checker = TypeChecker()
    self.Validator = validators.create(meta_schema=self.meta_schema,
                                       validators=self.validators,
                                       type_checker=self.type_checker)
    self.validator_value = 12
    self.schema = {u"smelly": self.validator_value}
    self.validator = self.Validator(self.schema)
def validator(self):
    def unique(validator, value, instance, schema):
        if store.contains(self.__storage_name, key=value, value=instance):
            yield ValidationError(
                f"`{value}` `{instance}` already exists.")

    all_validators = dict(Draft4Validator.VALIDATORS)
    all_validators["unique"] = unique

    MyValidator = validators.create(
        meta_schema=Draft4Validator.META_SCHEMA,
        validators=all_validators)
    return MyValidator(self.validation_schema)
def setUp(self):
    self.addCleanup(
        self.assertEqual,
        validators.meta_schemas,
        dict(validators.meta_schemas),
    )

    self.meta_schema = {u"$id": "some://meta/schema"}
    self.validators = {u"startswith": startswith}
    self.type_checker = TypeChecker()
    self.Validator = validators.create(meta_schema=self.meta_schema,
                                       validators=self.validators,
                                       type_checker=self.type_checker)
def test_default_types_used_if_no_type_checker_given(self):
    Validator = validators.create(
        meta_schema=self.meta_schema, validators=self.validators,
    )

    expected_types = {u"array", u"boolean", u"integer", u"null", u"number",
                      u"object", u"string"}

    self.assertEqual(set(Validator.DEFAULT_TYPES), expected_types)
    self.assertEqual(set(Validator.TYPE_CHECKER._type_checkers), expected_types)
def _create_validator(validators=YAML_VALIDATORS):
    meta_schema = load_schema(YAML_SCHEMA_METASCHEMA_ID,
                              mresolver.default_url_mapping)

    base_cls = mvalidators.create(meta_schema=meta_schema, validators=validators)

    class ASDFValidator(base_cls):
        DEFAULT_TYPES = base_cls.DEFAULT_TYPES.copy()
        DEFAULT_TYPES["array"] = (list, tuple)

        def iter_errors(self, instance, _schema=None, _seen=set()):
            # We can't validate anything that looks like an external reference,
            # since we don't have the actual content, so we just have to defer
            # it for now.  If the user cares about complete validation, they
            # can call `AsdfFile.resolve_references`.
            if id(instance) in _seen:
                return

            if _schema is None:
                schema = self.schema
            else:
                schema = _schema

            if ((isinstance(instance, dict) and "$ref" in instance) or
                    isinstance(instance, reference.Reference)):
                return

            if _schema is None:
                tag = getattr(instance, "_tag", None)
                if tag is not None:
                    schema_path = self.ctx.tag_to_schema_resolver(tag)
                    if schema_path != tag:
                        s = load_schema(schema_path, self.ctx.url_mapping)
                        if s:
                            with self.resolver.in_scope(schema_path):
                                for x in super(ASDFValidator, self).iter_errors(instance, s):
                                    yield x

                if isinstance(instance, dict):
                    new_seen = _seen | set([id(instance)])
                    for val in six.itervalues(instance):
                        for x in self.iter_errors(val, _seen=new_seen):
                            yield x
                elif isinstance(instance, list):
                    new_seen = _seen | set([id(instance)])
                    for val in instance:
                        for x in self.iter_errors(val, _seen=new_seen):
                            yield x
            else:
                for x in super(ASDFValidator, self).iter_errors(instance, _schema=schema):
                    yield x

    return ASDFValidator
def validate_cloudconfig_schema(config, schema, strict=False):
    """Validate provided config meets the schema definition.

    @param config: Dict of cloud configuration settings validated against
        schema.
    @param schema: jsonschema dict describing the supported schema definition
        for the cloud config module (config.cc_*).
    @param strict: Boolean, when True raise SchemaValidationErrors instead of
        logging warnings.

    @raises: SchemaValidationError when provided config does not validate
        against the provided schema.
    """
    try:
        from jsonschema import Draft4Validator, FormatChecker
        from jsonschema.validators import create, extend
    except ImportError:
        logging.debug(
            'Ignoring schema validation. python-jsonschema is not present')
        return

    # Allow for bytes to be presented as an acceptable valid value for string
    # type jsonschema attributes in cloud-init's schema.
    # This allows #cloud-config to provide valid yaml "content: !!binary | ..."
    if hasattr(Draft4Validator, 'TYPE_CHECKER'):
        # jsonschema 3.0+
        type_checker = Draft4Validator.TYPE_CHECKER.redefine(
            'string', is_schema_byte_string)
        cloudinitValidator = extend(Draft4Validator, type_checker=type_checker)
    else:
        # jsonschema 2.6 workaround
        types = Draft4Validator.DEFAULT_TYPES
        # Allow bytes as well as string (and disable a spurious
        # unsupported-assignment-operation pylint warning which appears because
        # this code path isn't written against the latest jsonschema).
        types['string'] = (str, bytes)  # pylint: disable=E1137
        cloudinitValidator = create(
            meta_schema=Draft4Validator.META_SCHEMA,
            validators=Draft4Validator.VALIDATORS,
            version="draft4",
            default_types=types)

    validator = cloudinitValidator(schema, format_checker=FormatChecker())
    errors = ()
    for error in sorted(validator.iter_errors(config), key=lambda e: e.path):
        path = '.'.join([str(p) for p in error.path])
        errors += ((path, error.message),)
    if errors:
        if strict:
            raise SchemaValidationError(errors)
        else:
            messages = ['{0}: {1}'.format(k, msg) for k, msg in errors]
            logging.warning('Invalid config:\n%s', '\n'.join(messages))
def __init__(self, document, scheme):
    self.document = document
    self.scheme = scheme
    title = scheme.get('title') or 'Scheme'
    self.title = 'Invalid ' + title

    all_validators = dict(Draft4Validator.VALIDATORS)
    MyValidator = validators.create(
        meta_schema=Draft4Validator.META_SCHEMA,
        validators=all_validators)

    format_checker = FormatChecker()
    self.validator = MyValidator(self.scheme, format_checker=format_checker)
def test_accessing_default_types_warns(self):
    Validator = validators.create(meta_schema={}, validators={})
    self.assertFalse(self.flushWarnings())

    self.assertWarns(
        DeprecationWarning,
        (
            "The DEFAULT_TYPES attribute is deprecated. "
            "See the type checker attached to this validator instead."
        ),
        # https://tm.tl/9363 :'(
        sys.modules[self.assertWarns.__module__].__file__,

        getattr,
        Validator,
        "DEFAULT_TYPES",
    )
def get_validator(schema, default=False):
    """Return a validator that supports the "vocabulary" rule, or a plain
    Draft 7 validator if ``default`` is True.
    """
    if default:
        return Draft7Validator(schema)

    all_validators = dict(Draft7Validator.VALIDATORS)
    all_validators['vocabulary'] = is_controlled_vocabulary
    VocabularyValidator = validators.create(
        meta_schema=Draft7Validator.META_SCHEMA,
        validators=all_validators)
    vocab_validator = VocabularyValidator(schema)
    return vocab_validator
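The keyword-extension pattern used above can be exercised end to end with nothing but jsonschema itself. The following is a minimal, hypothetical sketch: the "vocabulary" checker below simply tests membership in a list and is a stand-in for illustration, not the project's actual is_controlled_vocabulary.

from jsonschema import Draft7Validator, ValidationError, validators

def vocabulary(validator, vocab, instance, schema):
    # Hypothetical checker: report an error unless the instance appears
    # in the schema's "vocabulary" array.
    if instance not in vocab:
        yield ValidationError("%r is not in the controlled vocabulary" % (instance,))

all_validators = dict(Draft7Validator.VALIDATORS)
all_validators["vocabulary"] = vocabulary
VocabularyValidator = validators.create(
    meta_schema=Draft7Validator.META_SCHEMA, validators=all_validators)

schema = {"type": "string", "vocabulary": ["cat", "dog"]}
VocabularyValidator(schema).validate("cat")      # passes silently
# VocabularyValidator(schema).validate("fox")    # would raise ValidationError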
def __init__(self, config: TransformConfig):
    """
    Initialize TypeTransformer instance.

    :param config: Transform config that will be applied to the object.
    """
    if TransformConfig.NoTransform in config and config != TransformConfig.NoTransform:
        raise Exception("NoTransform option cannot be combined with other flags.")
    self._config = config
    all_validators = {
        key: self.__get_normalizer(key, orig_validator)
        for key, orig_validator in Draft7Validator.VALIDATORS.items()
        # Do not validate fields we do not transform, for maximum performance.
        if key in ["type", "array", "$ref", "properties", "items"]
    }
    self._normalizer = validators.create(
        meta_schema=Draft7Validator.META_SCHEMA, validators=all_validators)
def test_providing_types_to_init_warns(self):
    Validator = validators.create(meta_schema={}, validators={})
    self.assertFalse(self.flushWarnings())

    self.assertWarns(
        category=DeprecationWarning,
        message=(
            "The types argument is deprecated. "
            "Provide a type_checker to jsonschema.validators.extend "
            "instead."
        ),
        # https://tm.tl/9363 :'(
        filename=sys.modules[self.assertWarns.__module__].__file__,

        f=Validator,
        schema={},
        types={"bar": object},
    )
def extend(validator, validators, version=None):
    all_validators = dict(validator.VALIDATORS)
    all_validators.update(validators)

    all_types = dict(validator.DEFAULT_TYPES)
    all_types.update({
        "fd": FileDescriptor,
        "binary": bytes,
        "datetime": datetime.datetime,
    })

    return create(
        meta_schema=validator.META_SCHEMA,
        validators=all_validators,
        version=version,
        default_types=all_types,
    )
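A hedged usage sketch for the extend() helper above. It assumes a jsonschema release that still accepts default_types (2.x; 3.x only emits a DeprecationWarning), and that the surrounding module defines FileDescriptor as the helper expects. The "maxBytes" keyword here is a hypothetical no-op added purely for illustration.

import datetime
from jsonschema import Draft4Validator

def max_bytes(validator, limit, instance, schema):
    # Hypothetical keyword implementation that never reports an error.
    return ()

MyValidator = extend(Draft4Validator, {"maxBytes": max_bytes})

# "datetime" is resolved through the extra default types registered above.
MyValidator({"type": "datetime"}).validate(datetime.datetime.now())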
def test_types_update_type_checker(self):
    tc = TypeChecker()
    tc = tc.redefine(u"integer", _types.is_integer)
    Validator = validators.create(
        meta_schema=self.meta_schema,
        validators=self.validators,
        type_checker=tc,
    )
    v = Validator({})
    self.assertEqual(
        v.TYPE_CHECKER,
        TypeChecker(type_checkers={u"integer": _types.is_integer}),
    )

    v = Validator({}, types={u"array": list})
    self.assertEqual(
        v.TYPE_CHECKER,
        TypeChecker(type_checkers={u"array": _types.is_array}),
    )
def check_schema(schema):
    """
    Check a given schema to make sure it is valid YAML schema.
    """
    # We also want to validate the "default" values in the schema
    # against the schema itself.  jsonschema as a library doesn't do
    # this on its own.
    def validate_default(validator, default, instance, schema):
        if not validator.is_type(instance, 'object'):
            return

        if 'default' in instance:
            with instance_validator.resolver.in_scope(scope):
                for err in instance_validator.iter_errors(
                        instance['default'], instance):
                    yield err

    VALIDATORS = util.HashableDict(
        validators.Draft4Validator.VALIDATORS.copy())
    VALIDATORS.update({
        'default': validate_default
    })

    meta_schema = load_schema(
        'http://stsci.edu/schemas/yaml-schema/draft-01',
        mresolver.default_url_mapping)

    resolver = _make_resolver(mresolver.default_url_mapping)
    cls = validators.create(meta_schema=meta_schema, validators=VALIDATORS)
    validator = cls(meta_schema, resolver=resolver)

    instance_validator = validators.Draft4Validator(schema, resolver=resolver)
    scope = schema.get('id', '')

    validator.validate(schema, _schema=meta_schema)
def check_schema(schema):
    """
    Check a given schema to make sure it is valid YAML schema.
    """
    # We also want to validate the "default" values in the schema
    # against the schema itself.  jsonschema as a library doesn't do
    # this on its own.
    def validate_default(validator, default, instance, schema):
        if not validator.is_type(instance, 'object'):
            return

        if 'default' in instance:
            with instance_validator.resolver.in_scope(scope):
                for err in instance_validator.iter_errors(
                        instance['default'], instance):
                    yield err

    VALIDATORS = util.HashableDict(
        mvalidators.Draft4Validator.VALIDATORS.copy())
    VALIDATORS.update({
        'default': validate_default
    })

    meta_schema_id = schema.get('$schema', YAML_SCHEMA_METASCHEMA_ID)
    meta_schema = load_schema(meta_schema_id, default_ext_resolver)

    resolver = _make_resolver(default_ext_resolver)
    cls = mvalidators.create(meta_schema=meta_schema, validators=VALIDATORS)
    validator = cls(meta_schema, resolver=resolver)

    instance_validator = mvalidators.Draft4Validator(schema, resolver=resolver)
    scope = schema.get('id', '')

    validator.validate(schema, _schema=meta_schema)
def test_if_a_version_is_not_provided_it_is_not_registered(self):
    with mock.patch("jsonschema.validators.validates") as validates:
        create(meta_schema={"id": "id"})
    self.assertFalse(validates.called)
class DummyView(requesthandlers.ViewHandler):
    """Dummy ViewHandler for coverage"""

    def delete(self):
        # Reference db_conn to test for AttributeError
        self.db_conn


meta_schema = Draft4Validator.META_SCHEMA.copy()
meta_schema['definitions']["simpleTypes"]['enum'].append('int')
default_types = Draft4Validator.DEFAULT_TYPES.copy()
default_types['int'] = int
ExtendedDraft4Validator = create(meta_schema, Draft4Validator.VALIDATORS,
                                 default_types=default_types)


class PeopleHandler(requesthandlers.APIHandler):
    """Example handler with input schema validation that uses
    custom Validator.
    """

    @schema.validate(
        input_schema={
            "type": "object",
            "properties": {
                "name": {'type': "string"},
                "age": {'type': "int"},
            },
            'required': ['name', 'age'],
        },
def test_providing_neither_does_not_warn(self):
    Validator = validators.create(meta_schema={}, validators={})
    self.assertFalse(self.flushWarnings())

    Validator({})
    self.assertFalse(self.flushWarnings())
def test_extending_a_legacy_validator_does_not_rewarn(self):
    Validator = validators.create(meta_schema={}, default_types={})
    self.assertTrue(self.flushWarnings())

    validators.extend(Validator)
    self.assertFalse(self.flushWarnings())
CustomValidator = create(
    meta_schema=get_draft_schema(version='custom', additional_properties=True),
    validators={
        u"$ref": _validators.ref,
        u"additionalItems": _validators.additionalItems,
        u"additionalProperties": _validators.additionalProperties,
        u"allOf": _validators.allOf_draft4,
        u"anyOf": _validators.anyOf_draft4,
        u"dependencies": _validators.dependencies,
        u"enum": _validators.enum,
        u"format": _validators.format,
        u"items": _validators.items,
        u"maxItems": _validators.maxItems,
        u"maxLength": _validators.maxLength,
        u"maxProperties": _validators.maxProperties_draft4,
        u"maximum": _validators.maximum,
        u"minItems": _validators.minItems,
        u"minLength": _validators.minLength,
        u"minProperties": _validators.minProperties_draft4,
        u"minimum": _validators.minimum,
        u"multipleOf": _validators.multipleOf,
        u"not": _validators.not_draft4,
        u"oneOf": _validators.oneOf_draft4,
        u"pattern": _validators.pattern,
        u"patternProperties": _validators.patternProperties,
        u"properties": _validators.properties_draft3,
        u"type": _validators.type_draft4,
        u"uniqueItems": _validators.uniqueItems,
    },
    version="custom_validator",
)
    ),
)

with warnings.catch_warnings():
    warnings.simplefilter("ignore", DeprecationWarning)

    TestDraft3LegacyTypeCheck = DRAFT3.to_unittest_testcase(
        # Interestingly the any part couldn't really be done w/the old API.
        (
            (test for test in each if test.schema != {"type": "any"})
            for each in DRAFT3.tests_of(name="type")
        ),
        name="TestDraft3LegacyTypeCheck",
        Validator=create(
            meta_schema=Draft3Validator.META_SCHEMA,
            validators=Draft3Validator.VALIDATORS,
            default_types=_DEPRECATED_DEFAULT_TYPES,
        ),
    )

    TestDraft4LegacyTypeCheck = DRAFT4.to_unittest_testcase(
        DRAFT4.tests_of(name="type"),
        name="TestDraft4LegacyTypeCheck",
        Validator=create(
            meta_schema=Draft4Validator.META_SCHEMA,
            validators=Draft4Validator.VALIDATORS,
            default_types=_DEPRECATED_DEFAULT_TYPES,
        ),
    )