def custom_card_check(args, card, pack_code, factions_data, types_data):
    """Performs more in-depth sanity checks than jsonschema validator is capable of.

    Assumes that the basic schema validation has already completed successfully.

    Raises:
        jsonschema.ValidationError: if the card's pack, faction or type code is
            inconsistent with the provided reference data.
    """
    if card["pack_code"] != pack_code:
        raise jsonschema.ValidationError(
            "Pack code '%s' of the card '%s' doesn't match the pack code '%s' of the file it appears in."
            % (card["pack_code"], card["code"], pack_code))
    # BUG FIX: both messages below previously said "of the pack" even though a
    # card is being validated, and the type-code message reported "Faction code".
    if card.get("faction_code") and card["faction_code"] not in [f["code"] for f in factions_data]:
        raise jsonschema.ValidationError(
            "Faction code '%s' of the card '%s' doesn't match any valid faction code."
            % (card["faction_code"], card["code"]))
    if card.get("type_code") and card["type_code"] not in [t["code"] for t in types_data]:
        raise jsonschema.ValidationError(
            "Type code '%s' of the card '%s' doesn't match any valid type code."
            % (card["type_code"], card["code"]))
def compareProperties(
    validator: jsonschema.Draft7Validator,
    compare: Dict,
    instance: Any,
    schema: Dict,
) -> Iterator[jsonschema.ValidationError]:
    """compareProperties allows a schema to compare values in the instance
    against each other.  Amazingly, json-schema does not have a built-in way
    to do this.

    Example: ensuring that hyperparmeter minval is less than maxval:

        "compareProperties": {
            "type": "a<b",
            "a": "minval",
            "b": "maxval"
        }
    """
    if not validator.is_type(instance, "object"):
        return

    def get_by_path(path: str) -> Any:
        # Walk dotted path segments; bail out with None on any missing link.
        obj = instance
        for key in path.split("."):
            if not obj:
                return None
            obj = obj.get(key)
        return obj

    a_path = compare["a"]
    a = get_by_path(a_path)
    b_path = compare["b"]
    b = get_by_path(b_path)
    # A comparison is only meaningful when both operands are present.
    if a is None or b is None:
        return
    typ = compare["type"]
    if typ == "a<b":
        if a >= b:
            yield jsonschema.ValidationError(
                f"{a_path} must be less than {b_path}")
        return
    if typ == "a_is_subdir_of_b":
        a_norm = os.path.normpath(a)
        b_norm = os.path.normpath(b)
        if os.path.isabs(a_norm):
            if not a_norm.startswith(b_norm):
                yield jsonschema.ValidationError(
                    f"{a_path} must be a subdirectory of {b_path}")
        else:
            if a_norm.startswith(".."):
                yield jsonschema.ValidationError(
                    f"{a_path} must be a subdirectory of {b_path}")
        return
    # BUG FIX: this previously interpolated compare[typ], which raises a
    # KeyError for any unrecognized comparison type instead of the intended
    # ValueError with a readable message.
    raise ValueError(f"unrecognized comparison {typ}")
def custom_card_check(args, card, pack_code, locale=None):
    """Performs more in-depth sanity checks than jsonschema validator is capable of.

    Assumes that the basic schema validation has already completed successfully.
    """
    if locale:
        # Translated card files have no extra checks at the moment.
        return
    if card["pack_code"] != pack_code:
        raise jsonschema.ValidationError(
            "Pack code '%s' of the card '%s' doesn't match the pack code '%s' of the file it appears in."
            % (card["pack_code"], card["code"], pack_code))
    if card["code"] in unique_card_codes:
        raise jsonschema.ValidationError(
            "Card code '%s' of the card '%s' has been used by '%s'."
            % (card["code"], card["name"], unique_card_codes[card["code"]]["name"]))
def validate_scid(scid):
    """Validates an SCID.

    :raises :exc:`jsonschema.ValidationError`: If SCID is invalid.
    """
    schema_name, revision = scid
    # Revisions are 1-based positive integers.
    if not isinstance(revision, integer_types) or revision < 1:
        raise jsonschema.ValidationError('Invalid revision ID: %s' % revision)
    # Schema names must be strings matching the module-level pattern.
    if not isinstance(schema_name, string_types) or not SCHEMA_RE.match(schema_name):
        raise jsonschema.ValidationError('Invalid schema name: %s' % schema_name)
def test_consistent_error_messages_required(self):
    """Both %r-formatted and literal u'...' spellings normalize identically."""
    for raw_message in ("%r is a required property" % u'name',
                        "u'name' is a required property"):
        error = jsonschema.ValidationError(raw_message, validator=u'required')
        msg = validator._get_consistent_error_message(error)
        self.assertEqual(msg, "'name' is a required property")
def validate_schema(instance):
    """
    Validates the pytest-workflow schema

    :param instance: an object that is validated against the schema
    :return: This function raises a ValidationError when the schema is not
        correct.
    """
    jsonschema.validate(instance, JSON_SCHEMA)

    # Extra checks below cannot be captured in jsonschema itself.

    # Name collisions after whitespace normalization would give pytest
    # collectors non-unique names, so they must be rejected here.
    test_names = [replace_whitespace(test['name'], ' ') for test in instance]
    if len(test_names) != len(set(test_names)):
        raise jsonschema.ValidationError(
            f"Some names were not unique when whitespace was removed. "
            f"Defined names: {test_names}")

    def test_contains_concordance(dictionary: dict, name: str):
        """
        Test whether contains and must_not_contain have the same members.

        :param dictionary: part of the schema dictionary.
        :param name: The name of the object the dictionary originates from;
            makes the error easier to comprehend for the user.
        :return: An error if the test fails.
        """
        contains = dictionary.get("contains", [])
        must_not_contain = dictionary.get("must_not_contain", [])
        if len(contains) > 0 and len(must_not_contain) > 0:
            common_members = set(contains).intersection(set(must_not_contain))
            if common_members != set():
                raise jsonschema.ValidationError(
                    f"contains and must_not_contain are not allowed to have "
                    f"the same members for the same object. "
                    f"Object: {name}. \n"
                    f"Common members: {common_members}")

    for test in instance:
        test_contains_concordance(test.get('stdout', {}), test['name'] + "/stdout")
        test_contains_concordance(test.get('stderr', {}), test['name'] + "/stderr")
        for test_file in test.get("files", []):
            test_contains_concordance(test_file, test_file['path'])
            if not test_file.get("should_exist", DEFAULT_FILE_SHOULD_EXIST):
                # Content checks are meaningless on a file that must not exist.
                for check in ("md5sum", "contains", "must_not_contain"):
                    if check in test_file:
                        raise jsonschema.ValidationError(
                            f"Content checking not allowed on non existing "
                            f"file: {test_file['path']}. Key = {check}")
def typeSize(validator, typeSize, instance, schema):
    """Yield errors when the first element's tagged size disagrees with typeSize."""
    head = instance[0]
    if isinstance(head, tagged_list):
        if typeSize != head.type_size:
            yield jsonschema.ValidationError(
                "size is %r, expected %r" % (head.type_size, typeSize))
    elif isinstance(head, list) and isinstance(head[0], int) and typeSize == 32:
        # 32-bit sizes aren't explicitly tagged
        return
    else:
        yield jsonschema.ValidationError("missing size tag in %r" % instance)
def linkFromValidator(self, validator, linkFrom, instance, schema):
    """Check that a QualityMetric's quality_metric_of target is present and resolvable.

    Yields:
        jsonschema.ValidationError: when the required tag is missing, or when
            the referenced object cannot be fetched via ``self.get_json``.
    """
    if schema["linkFrom"] == "QualityMetric.quality_metric_of":
        if "quality_metric_of" not in instance:
            yield jsonschema.ValidationError(
                "required tag quality_metric_of missing")
            # BUG FIX: the original fell through after yielding and indexed
            # the missing key, raising KeyError instead of reporting the error.
            return
        object_id = instance["quality_metric_of"]
        obj = self.get_json(object_id)
        if obj is None:
            yield jsonschema.ValidationError(
                "quality_metric_of {} was not found".format(object_id))
def custom_pack_check(args, pack, cycles_data, locale=None, en_packs=None):
    """Validate a pack entry: translations must exist in the original locale,
    and untranslated packs must belong to a known cycle."""
    if locale:
        known_codes = {p["code"] for p in en_packs}
        if pack["code"] not in known_codes:
            raise jsonschema.ValidationError(
                "Pack code '%s' in translation file for '%s' locale does not exists in original locale." % (pack["code"], locale))
        return
    valid_cycles = {c["code"] for c in cycles_data}
    if pack["cycle_code"] not in valid_cycles:
        raise jsonschema.ValidationError(
            "Cycle code '%s' of the pack '%s' doesn't match any valid cycle code." % (pack["cycle_code"], pack["code"]))
def validate_oneOf(self, schemas, instance, schema):
    """Yield an error unless exactly one of *schemas* validates *instance*."""
    matched = 0
    for candidate in schemas:
        if not self.is_valid(instance, candidate):
            continue
        matched += 1
        # Every match beyond the first is an over-match error, like the original.
        if matched > 1:
            yield jsonschema.ValidationError(
                "%r matches more than one schema in %r" % (instance, schemas))
    if not matched:
        yield jsonschema.ValidationError(
            "%r is not valid for any of listed schemas %r" % (instance, schemas))
def linkToValidator(self, validator, linkTo, instance, schema):
    # Resolve a linkTo reference (UUID, alias, or collection-relative id)
    # against the server and validate optional linkEnum / linkSubmitsFor
    # constraints, yielding ValidationErrors for anything unresolvable.
    if not validator.is_type(instance, "string"):
        return
    # hack for ['Dataset']
    if isinstance(linkTo, list):
        # In release 57 one linkTo property had a single item as a list
        # the dcc is trying to get rid of it and convert it to a
        # string, but on the off chance they change and start using
        # lists again, lets flag this implementation because it'd be
        # inadequate.
        linkTo = linkTo[0]
    try:
        try:
            # If the instance parses as a UUID it is already a usable id.
            UUID(instance)
            object_id = instance
        except ValueError:
            # a hack to detect if we have an alias?
            if ":" in instance:
                object_id = instance
            else:
                # Otherwise treat it as a name relative to the type's collection.
                collection = TYPE_TO_COLLECTION.get(linkTo)
                object_id = urljoin(collection, instance)
        item = self.get_json(object_id)
    except HTTPError as e:
        # NOTE(review): when this fires, `item` is left unbound, so a schema
        # that also carries linkEnum/linkSubmitsFor would hit a NameError
        # below — confirm whether callers ever combine those.
        yield jsonschema.ValidationError("%s doesn't exist: %s" % (object_id, str(e)))
    linkEnum = schema.get("linkEnum")
    if linkEnum is not None:
        if not validator.is_type(linkEnum, "array"):
            raise Exception("Bad schema")
        if not any(enum_uuid == item["uuid"] for enum_uuid in linkEnum):
            # NOTE(review): linkTo is a string at this point, so this joins the
            # repr of each *character* — looks like it was meant to iterate
            # linkEnum; verify against upstream before changing.
            reprs = ", ".join(repr(it) for it in linkTo)
            error = "%r is not one of %s" % (instance, reprs)
            yield jsonschema.ValidationError(error)
            return
    if schema.get("linkSubmitsFor"):
        if self._server.user is not None:
            # Fetch each lab the current user submits for and require the
            # linked item to be one of them.
            submits_for = [
                self.get_json(s) for s in self._server.user.get("submits_for")
            ]
            if submits_for is not None and not any(
                    lab["uuid"] == item["uuid"] for lab in submits_for):
                error = "%r is not in user submits_for" % instance
                yield jsonschema.ValidationError(error)
                return
def custom_card_check(args, card, pack_code, factions_data, types_data, sides_data):
    """Performs more in-depth sanity checks than jsonschema validator is capable of.

    Assumes that the basic schema validation has already completed successfully.
    """
    side = card["side_code"]
    if card["pack_code"] != pack_code:
        raise jsonschema.ValidationError("Pack code '%s' of the card '%s' doesn't match the pack code '%s' of the file it appears in." % (card["pack_code"], card["code"], pack_code))
    # Factions are constrained to the card's side.
    allowed_factions = [faction["code"] for faction in factions_data
                        if faction["side_code"] == side]
    if card["faction_code"] not in allowed_factions:
        raise jsonschema.ValidationError("Faction code '%s' of the card '%s' doesn't match any valid faction code for side '%s'." % (card["faction_code"], card["code"], side))
    if card["type_code"] not in [t["code"] for t in types_data]:
        raise jsonschema.ValidationError("Type code '%s' of the card '%s' doesn't match any valid type code." % (card["type_code"], card["code"]))
    if side not in [s["code"] for s in sides_data]:
        raise jsonschema.ValidationError("Side code '%s' of the card '%s' doesn't match any valid side code." % (side, card["code"]))
    # Types are either side-specific or side-agnostic (side_code of None).
    allowed_types = [t["code"] for t in types_data
                     if t["side_code"] == side or t["side_code"] is None]
    if card["type_code"] not in allowed_types:
        raise jsonschema.ValidationError("Type code '%s' of the card '%s' doesn't match any valid types for side '%s'." % (card["type_code"], card["code"], side))
def __superficial_check(cls, fd):
    """Check if the cis and links field are a list. If not, raise a
    jsonschema.ValidationError. It move the cursor of the fd to back 0."""
    # For each top-level field we record three facts while streaming the
    # document: (field seen at all, start_array seen, end_array seen).
    status = {"cis": [False, False, False], "links": [False, False, False]}
    for prefix, event, _ in ijson.parse(fd):
        if prefix in status:
            status[prefix][0] = True
            if event == "start_array":
                status[prefix][1] = True
            if event == "end_array":
                status[prefix][2] = True
    fd.seek(0)
    # A field is acceptable when it is a complete array (all three facts True)
    # or entirely absent (all three False).
    acceptable = ((True, True, True), (False, False, False))
    cis_ok = tuple(status["cis"]) in acceptable
    links_ok = tuple(status["links"]) in acceptable
    if cis_ok and links_ok:
        return True
    if not cis_ok and not links_ok:
        raise jsonschema.ValidationError(
            "CIS and LINKS should be an array.")
    if not cis_ok:
        raise jsonschema.ValidationError("CIS should be an array.")
    raise jsonschema.ValidationError("LINKS should be an array.")
def integer_and_range_validator(field_name, field_value, min_val=None, max_val=None):
    """Return True when *field_value* is empty or parses to an int within
    [min_val, max_val]; return False for unsupported types; raise a
    ValidationError for unparseable or out-of-range values."""
    if (not isinstance(field_value, str_types)
            and not isinstance(field_value, int)
            and field_value is not None):
        return False
    # Empty input is fine.
    if field_value is None or field_value == '':
        return True
    try:
        parsed = int(field_value)
    except Exception:
        raise jsonschema.ValidationError(message="Invalid input for %s. Not an integer." % field_name)
    if min_val is not None and parsed < min_val:
        raise jsonschema.ValidationError(message="%s field value should be an integer >= %s" % (field_name, min_val))
    if max_val is not None and parsed > max_val:
        raise jsonschema.ValidationError(message="%s field value should be an integer <= %s" % (field_name, max_val))
    return True
def wrapped_f(*args, **kwargs):
    """Validate the bound call arguments of ``f`` against its JSON schema,
    then invoke ``f`` unchanged."""
    try:
        callargs = inspect.getcallargs(f, *args, **kwargs)
    except TypeError:
        raise jsonschema.ValidationError(
            "Invalid number of arguments.")
    # hack to handle methods
    callargs.pop("self", None)
    # We might want to remove things like 'details' for authenticated
    # WAMPv2 calls
    for arg in drop_args:
        callargs.pop(arg, None)
    # json only accepts lists as arrays, not tuples
    for key in callargs:
        if type(callargs[key]) == tuple:
            callargs[key] = list(callargs[key])
    # validate
    f.validator.validate(callargs)
    return f(*args, **kwargs)
def qs_check_and_return(self, instance: MultiValueDict):
    """Validate a query-string MultiValueDict against the declared properties
    and return the converted values; wrap any validation error in DataError."""
    try:
        checked = {}
        for key, prop in self.properties.items():
            try:
                required = True
                if isinstance(prop, Optional):
                    required = False
                    prop = prop.schema
                # Absent optional keys are simply skipped.
                if not required and key not in instance:
                    continue
                if isinstance(prop, Array):
                    checked[key] = prop.qs_check_and_return(
                        instance.getlist(key))
                else:
                    checked[key] = prop.qs_check_and_return(instance.get(key))
            except ConvertError as err:
                raise jsonschema.ValidationError(message=err.message,
                                                 path=[key] + err.path)
        return checked
    except jsonschema.ValidationError as err:
        raise DataError([err])
def validate(doc, version='current'):
    """Validate *doc* against the schema for *version* and require an
    http(s) "weburl"; returns True on success."""
    jsonschema.validate(doc, schema_for_version(version))
    weburl = doc["weburl"]
    if not weburl.startswith('http'):
        raise jsonschema.ValidationError(
            'Expecting "weburl" field to start with http found "{}"'.format(weburl))
    return True
def __init__(self, name: str, config: dict):
    """A hyperparameter to optimize.

    >>> parameter = HyperParameter('int_unif_distributed', {'min': 1, 'max': 10})
    >>> assert parameter.config['min'] == 1
    >>> parameter = HyperParameter('normally_distributed', {'distribution': 'normal'})
    >>> assert np.isclose(parameter.config['mu'], 0)

    Args:
        name: The name of the hyperparameter.
        config: Hyperparameter config dict.
    """
    self.name = name
    result = fill_parameter(config)
    if result is None:
        raise jsonschema.ValidationError(
            f"invalid hyperparameter configuration: {name}"
        )
    self.type, self.config = result
    if self.type is None or self.config is None:
        raise ValueError(
            "list of allowed schemas has length zero; please provide some valid schemas"
        )
    # Only constant parameters have an immediate value; others are sampled later.
    self.value = (
        self.config["value"] if self.type == HyperParameter.CONSTANT else None
    )
def custom_check(self, thing, thing_data):
    """Verify that a translated entity's code exists in the base collection.

    Raises:
        jsonschema.ValidationError: when ``thing_data`` carries a code that is
            unknown in ``self.parent.collections[thing]``.
    """
    # BUG FIX: dict.has_key() was removed in Python 3; the `in` operator is
    # the equivalent membership test (and also works on Python 2).
    if "code" in thing_data and thing_data["code"] not in self.parent.collections[thing]:
        raise jsonschema.ValidationError(
            "- %s code '%s' does not exist in '%s' %s translations"
            % (thing, thing_data["code"], self.locale, thing))
def _check_value(value, schema):
    """ Perform the actual validation. """
    if value is None:
        # A missing value is only an error when the schema marks it required.
        if schema.get('fits_required'):
            name = schema.get("fits_keyword") or schema.get("fits_hdu")
            raise jsonschema.ValidationError("%s is a required value" % name)
        return
    validator_context = AsdfFile()
    validator_resolver = validator_context.resolver
    # Wrap the fragment in a full asdf-schema document before validating.
    temp_schema = {
        '$schema': 'http://stsci.edu/schemas/asdf-schema/0.1.0/asdf-schema'}
    temp_schema.update(schema)
    validator = asdf_schema.get_validator(temp_schema,
                                          validator_context,
                                          validator_callbacks,
                                          validator_resolver)
    value = yamlutil.custom_tree_to_tagged_tree(value, validator_context)
    validator.validate(value, _schema=temp_schema)
    validator_context.close()
def _validate_config_values_against_schema(self):
    """Validate this config's values against the pack's config schema, if any.

    Returns:
        The cleaned config values, or None when the pack has no schema.

    Raises:
        jsonschema.ValidationError: with a path-annotated message when
            validation fails.
    """
    try:
        config_schema_db = ConfigSchema.get_by_pack(value=self.pack)
    except StackStormDBObjectNotFoundError:
        # Config schema is optional
        return

    # Note: We are doing optional validation so for now, we do allow additional properties
    instance = self.values or {}
    schema = config_schema_db.attributes
    schema = util_schema.get_schema_for_resource_parameters(
        parameters_schema=schema, allow_additional_properties=True)

    try:
        cleaned = util_schema.validate(instance=instance, schema=schema,
                                       cls=util_schema.CustomValidator, use_default=True,
                                       allow_default_none=True)
    except jsonschema.ValidationError as e:
        # BUG FIX: e.path may contain integers (array indices); joining them
        # directly raises TypeError and masks the real validation error.
        attribute = getattr(e, 'path', [])
        attribute = '.'.join(str(item) for item in attribute)
        configs_path = os.path.join(cfg.CONF.system.base_path, 'configs/')
        config_path = os.path.join(configs_path, '%s.yaml' % (self.pack))
        msg = ('Failed validating attribute "%s" in config for pack "%s" (%s): %s' %
               (attribute, self.pack, config_path, str(e)))
        raise jsonschema.ValidationError(msg)

    return cleaned
def type_draft3(validator, types, instance, schema):
    """Draft-3 'type' keyword validator: accept when any listed type matches."""
    types = ensure_list(types)
    # NOTE(kiall): A datetime object is not a string, but is still valid.
    if ('format' in schema and schema['format'] == 'date-time'
            and isinstance(instance, datetime.datetime)):
        return
    all_errors = []
    for index, candidate in enumerate(types):
        # "any" accepts everything immediately.
        if candidate == "any":
            return
        if validator.is_type(candidate, "object"):
            # Embedded schema: descend and accept on a clean sub-validation.
            sub_errors = list(
                validator.descend(instance, candidate, schema_path=index))
            if not sub_errors:
                return
            all_errors.extend(sub_errors)
        elif validator.is_type(instance, candidate):
            return
    # Nothing matched: report a single aggregate error.
    yield jsonschema.ValidationError(
        types_msg(instance, types),
        context=all_errors,
    )
def validate_config_against_schema(config_schema, config_object, config_path,
                                   pack_name=None):
    """
    Validate provided config dictionary against the provided config schema
    dictionary.

    :return: the cleaned config values.
    :raises jsonschema.ValidationError: with a path-annotated message when
        validation fails.
    """
    pack_name = pack_name or 'unknown'

    schema = util_schema.get_schema_for_resource_parameters(
        parameters_schema=config_schema, allow_additional_properties=True)
    instance = config_object

    try:
        cleaned = util_schema.validate(instance=instance, schema=schema,
                                       cls=util_schema.CustomValidator, use_default=True,
                                       allow_default_none=True)
    except jsonschema.ValidationError as e:
        # BUG FIX: e.path may contain integer array indices; joining them
        # directly raises TypeError and hides the underlying validation error.
        attribute = getattr(e, 'path', [])
        attribute = '.'.join(str(item) for item in attribute)
        msg = ('Failed validating attribute "%s" in config for pack "%s" (%s): %s' %
               (attribute, pack_name, config_path, str(e)))
        raise jsonschema.ValidationError(msg)

    return cleaned
def validate(loaded_json):
    """Validate an NNCF config dict against the root schema and each entry of
    its optional "compression" section."""
    try:
        jsonschema.validate(loaded_json, schema=ROOT_NNCF_CONFIG_SCHEMA)
    except jsonschema.ValidationError as e:
        logger.error("Invalid NNCF config supplied!")
        # The default exception's __str__ result will contain the entire schema,
        # which is too large to be readable.
        msg = e.message + ". See documentation or {} for an NNCF configuration file JSON schema definition".format(
            nncf.config_schema.__file__)
        raise jsonschema.ValidationError(msg)

    compression_section = loaded_json.get("compression")
    if compression_section is None:
        # No compression specified
        return

    # The section is either a single algo dict or a list of them; normalize.
    if isinstance(compression_section, dict):
        algo_dicts = [compression_section]
    else:
        algo_dicts = compression_section
    try:
        for algo_dict in algo_dicts:
            validate_single_compression_algo_schema(algo_dict)
    except jsonschema.ValidationError:
        # No need to trim the exception output here since only the compression
        # algo specific sub-schema will be shown, which is much shorter than
        # the global schema.
        logger.error("Invalid NNCF config supplied!")
        raise
def wrapped_validate(cls, validator, value, instance, schema):
    r"""Wrapped validator that handles errors produced by the native
    validate method and ensures that the property is parsed by the base
    validator and raises the correct error if necessary.

    Args:
        *args: All arguments are passed to the validate class method.
        **kwargs: All keyword arguments are passed to the validate class
            method.
    """
    # When the validator is in normalization mode, normalize first and
    # validate the normalized instance instead of the raw one.
    if validator._normalizing:
        validator._normalized = cls.normalize(validator, value, instance, schema)
        instance = validator._normalized
    try:
        failed = False
        # Run this class's own validation; wrap each reported error in a
        # jsonschema.ValidationError before yielding it.
        errors = cls.validate(validator, value, instance, schema) or ()
        for e in errors:
            failed = True
            yield jsonschema.ValidationError(e)
        # Only if the custom validation passed, fall back to the base
        # validator's handler for the same keyword (already yields
        # ValidationError instances, so they pass through unwrapped).
        if (not failed) and (cls.name in validator._base_validator.VALIDATORS):
            errors = validator._base_validator.VALIDATORS[cls.name](
                validator, value, instance, schema) or ()
            for e in errors:
                failed = True
                yield e
    finally:
        # post_validate runs only on a fully clean normalizing pass.
        # NOTE(review): because this is a generator, `finally` executes when
        # the consumer exhausts or closes it — confirm callers always drain it.
        if validator._normalizing and (not failed):
            cls.post_validate(validator, value, instance, schema)
def test_consistent_error_messages_oneOf(self):
    """An enum-style error message is normalized to plain single quotes."""
    error = jsonschema.ValidationError(
        "%r is not one of %r" % (u'type', [u'vlan', u'interface']),
        validator=u'enum',
        validator_value=[u'vlan', u'interface'],
        instance=u'type')
    self.assertEqual(
        validator._get_consistent_error_message(error),
        "'type' is not one of ['vlan','interface']")
def is_date(validator, value, instance, schema):
    """Yield a ValidationError unless *instance* is a YYYY-MM-DD string;
    non-string instances are ignored."""
    if isinstance(instance, str):
        try:
            parsed = datetime.datetime.strptime(instance, "%Y-%m-%d")
        except Exception:
            yield jsonschema.ValidationError('Invalid date {!r}'.format(instance))
        else:
            return parsed
def custom_check_card(self, card):
    """Collect all reference-code problems for a card and raise them as one
    ValidationError."""
    problems = []
    # Check foreign codes against their respective collections.
    for collection in ["affiliation", "faction", "rarity", "type", "subtype"]:
        field = collection + "_code"
        if field in card and card.get(field) not in self.collections[collection]:
            problems.append("%s code '%s' does not exist in card '%s'" %
                            (collection, card.get(field), card.get('code')))
    # Check the reprint reference.
    if 'reprint_of' in card and card.get('reprint_of') not in self.collections['card']:
        problems.append("Reprinted card %s does not exist" %
                        (card.get('reprint_of')))
    # Dispatch to a type-specific checker when one is defined.
    type_check = getattr(self, "custom_check_%s_card" % card.get('type_code'), None)
    if callable(type_check):
        problems.extend(type_check(card))
    if problems:
        raise jsonschema.ValidationError("\n".join(
            ["- %s" % p for p in problems]))
def _deprecated_properties(validator, deprecated, instance, schema): if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")): return # Get a list of the deprecated properties, return if there is none deprecated_properties = [ x for x in instance if x in deprecated['properties'] ] if not deprecated_properties: return # Retrieve the template message msg_str_or_func = deprecated['message'] if isinstance(msg_str_or_func, six.string_types): msg = msg_str_or_func.format(properties=deprecated_properties) else: msg = msg_str_or_func(instance, deprecated_properties) if msg is None: return is_error = deprecated['error'] if not is_error: warnings.warn(msg) else: import jsonschema yield jsonschema.ValidationError(msg)
def test_check_cluster_templates_valid(self, validate):
    """check_cluster_templates_valid reports schema failures and substitutes
    node-group-template names with their UUIDs on success."""
    self.logger.clear_log()
    ng_templates = [{"template": c.SAMPLE_NGT, "path": "/foo"}]

    # Reference the node group template by name
    clt = copy.copy(c.SAMPLE_CLT)
    clt["node_groups"] = [
        {
            "name": "test",
            "count": 1,
            "node_group_template_id": "{%s}" % c.SAMPLE_NGT["name"],
        }
    ]
    cl_templates = [{"template": clt, "path": "/bar"}]

    # Test failed validation
    validate.side_effect = jsonschema.ValidationError("mistake")
    res = template_api.check_cluster_templates_valid(
        ng_templates, cl_templates)
    self.assertTrue(res)
    self.assertIn("Validation for /bar failed, mistake", self.logger.warnings)

    # Validation passes, name replaced
    validate.side_effect = None
    self.logger.clear_log()
    res = template_api.check_cluster_templates_valid(
        ng_templates, cl_templates)
    self.assertFalse(res)
    node_groups = validate.call_args[0][0]["node_groups"]
    self.assertTrue(
        uuidutils.is_uuid_like(node_groups[0]["node_group_template_id"]))