def test_import_examples(example):
    with open(IMPORT_SCHEMA) as schema_data:
        schema = json.load(schema_data)
    resolver = jsonschema.RefResolver('file://' + IMPORT_SCHEMA, schema)
    result = jsonschema.Draft4Validator(
        schema, resolver=resolver).validate(example)
    assert result is None
def resolver(self) -> jsonschema.RefResolver:
    if not hasattr(self, "_resolver"):
        # pylint: disable=attribute-defined-outside-init
        self._resolver = jsonschema.RefResolver(
            self.location or "", self.raw_schema, handlers={"": load_file}
        )
    return self._resolver
def generate_validator_from_schema(schema_uri):
    # Download the schema to a string.
    schema = None

    # Handle http and file schemes.
    uri_split = rfc3987.parse(schema_uri)
    if uri_split['scheme'] in ("http", "https"):
        # It's an http or https URI: use requests.
        schema = requests.get(schema_uri).json()
    elif uri_split['scheme'] == "file":
        # It's a file: open as normal.
        # Reconstitute the file path from the URI.
        with open(
                os.path.abspath(
                    os.path.join(uri_split['authority'], uri_split['path'])),
                'r') as schema_file:
            schema = json.load(schema_file)
    else:
        raise ValueError("schema uri must have file or url scheme")

    # Create a RefResolver to allow resolution of relative schema links.
    # This is required to use git branches / versions and local development
    # correctly. Don't use from_schema because it uses the $id baked into the
    # schema, and we want to avoid baking.
    handlers = dict(file=file_handler)
    resolver = jss.RefResolver(schema_uri, schema, handlers=handlers, store={})

    validator = jss.Draft7Validator(
        schema=schema,
        resolver=resolver,
    )

    return validator
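# The generate_validator_from_schema() snippet above passes a `file_handler`
# callable into RefResolver's handlers= mapping but does not define it. The
# sketch below is a hypothetical implementation (not from the original
# source): a handler receives the full "$ref" URI for its scheme and must
# return the referenced document as a Python object.
import json
import urllib.parse
import urllib.request


def file_handler(uri):
    """Hypothetical handler for file:// refs: read and parse the target JSON."""
    parsed = urllib.parse.urlparse(uri)
    with open(urllib.request.url2pathname(parsed.path), "r") as handle:
        return json.load(handle)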
def setup_class(cls):
    """Set the test up."""
    cls.runner = CliRunner()
    cls.agent_name = "myagent"
    cls.resource_name = "myresource"
    cls.cwd = os.getcwd()
    cls.t = tempfile.mkdtemp()
    cls.patch = unittest.mock.patch.object(aea.cli.common.logger, "error")
    cls.mocked_logger_error = cls.patch.__enter__()
    cls.schema = json.load(open(CONNECTION_CONFIGURATION_SCHEMA))
    cls.resolver = jsonschema.RefResolver(
        "file://{}/".format(Path(CONFIGURATION_SCHEMA_DIR).absolute()),
        cls.schema)
    cls.validator = Draft4Validator(cls.schema, resolver=cls.resolver)

    os.chdir(cls.t)
    result = cls.runner.invoke(
        cli, [*CLI_LOG_OPTION, "init", "--local", "--author", AUTHOR])
    assert result.exit_code == 0
    result = cls.runner.invoke(
        cli,
        [*CLI_LOG_OPTION, "create", "--local", cls.agent_name],
        standalone_mode=False,
    )
    assert result.exit_code == 0
    os.chdir(cls.agent_name)

    # scaffold connection
    cls.result = cls.runner.invoke(
        cli,
        [*CLI_LOG_OPTION, "scaffold", "connection", cls.resource_name],
        standalone_mode=False,
    )
def create_validator(simple=False):
    """Method to create a validator class (see extend_with_default).
    The simple keyword (boolean) is used to determine whether to keep only
    SimpleComment or full Comments from the schema.

    :type simple: bool
    :return validator
    :rtype: jsonschema.Draft7Validator
    """

    validator = extend_with_default(jsonschema.Draft7Validator, simple=simple)
    resolver = jsonschema.RefResolver("file:///{}".format(os.path.abspath(
        os.path.dirname(pkg_resources.resource_filename(
            "Mikado.configuration", os.path.basename(__file__)))
    )), None)
    with io.TextIOWrapper(resource_stream("Mikado.configuration",
                                          "configuration_blueprint.json")) as blue:
        blue_print = json.load(blue)

    validator = validator(blue_print, resolver=resolver)
    return validator
def __init__(self, versions, path, skip_unknown=False, min_date='2016.09.01',
             future_hours=24):
    """Validator for documents coming from KKT (cash register) devices,
    based on JSON schemas.

    :param versions: supported protocol versions, e.g. ['1.0', '1.05'].
    :param path: path to the directory that contains all the schema
        directories split by version; e.g. the schemas for protocol 1.0
        must live in <path>/1.0/
    :param skip_unknown: if the version number is not among the supported
        ones, skip validation
    """
    self._validators = {}
    self._skip_unknown = skip_unknown
    schema_dir = os.path.expanduser(path)
    schema_dir = os.path.abspath(schema_dir)
    self.min_date = datetime.datetime.strptime(
        min_date, '%Y.%m.%d') if min_date else None
    self.future_hours = future_hours
    for version in versions:
        full_path = os.path.join(schema_dir, version, 'document.schema.json')
        with open(full_path, encoding='utf-8') as fh:
            schema = json.loads(fh.read())
        resolver = jsonschema.RefResolver('file://' + full_path, None)
        validator = Draft4Validator(schema=schema, resolver=resolver)
        validator.check_schema(schema)  # check that the schema itself is valid
        self._validators[version] = validator
def check_schema(filepath, example, schema):
    example = resolve_references(filepath, example)
    schema = resolve_references(filepath, schema)
    resolver = jsonschema.RefResolver(filepath, schema,
                                      handlers={"file": load_file})
    jsonschema.validate(example, schema, resolver=resolver)
def validate(factoid, strict=True, spec_file=None):
    """Validate a single factoid.

    :param factoid: A single factoid
    :type factoid: dict
    :param strict: If set to True, formats are used for validation
    :type strict: bool
    :param spec_file: Path to the OpenAPI spec file to use for validation.
        If omitted, the default ipif spec file will be used.
    :type spec_file: str
    :raises: jsonschema.exceptions.ValidationError,
        jsonschema.exceptions.SchemaError
    :return: None
    """
    schemata = get_schema(spec_file)
    factoid_schema = schemata["components"]["schemas"]["Factoid"]
    schemastore = {"": schemata}
    resolver = jsonschema.RefResolver(base_uri="", referrer=schemata,
                                      store=schemastore)
    format_checker = None
    if strict:
        format_checker = jsonschema.FormatChecker()
    jsonschema.validate(factoid, factoid_schema,
                        format_checker=format_checker, resolver=resolver)
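# A minimal, self-contained sketch of the empty-base-URI trick used in
# validate() above; the document below is invented for illustration. With
# base_uri="" and the whole spec registered in the store, internal
# "#/components/schemas/..." references resolve against the full document
# even though validation starts from a nested component schema.
import jsonschema

_doc = {
    "components": {
        "schemas": {
            "Person": {
                "type": "object",
                "properties": {"name": {"$ref": "#/components/schemas/Name"}},
            },
            "Name": {"type": "string"},
        }
    }
}
_resolver = jsonschema.RefResolver(base_uri="", referrer=_doc, store={"": _doc})
jsonschema.validate(
    {"name": "Ada"},
    _doc["components"]["schemas"]["Person"],
    resolver=_resolver,
)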
def make_validator(url: Optional[str] = None) -> Any:
    # Use the experiment config schema by default.
    if url is None:
        url = "http://determined.ai/schemas/expconf/v1/experiment.json"

    global _validators
    if url in _validators:
        return _validators[url]

    schema = _gen.schemas[url]

    resolver = jsonschema.RefResolver(
        base_uri=url,
        referrer=schema,
        handlers={"http": lambda url: _gen.schemas[url]},
    )

    validator = jsonschema.Draft7Validator(schema=schema, resolver=resolver)

    ext = {
        "disallowProperties": extensions.disallowProperties,
        "union": extensions.union,
        "checks": extensions.checks,
        "compareProperties": extensions.compareProperties,
        "conditional": extensions.conditional,
        # "eventuallyRequired": extensions.eventuallyRequired,
        "optionalRef": extensions.optionalRef,
    }

    cls = jsonschema.validators.extend(validator, ext)

    _validators[url] = cls(schema=schema, resolver=resolver)
    return _validators[url]
def make_validator(url: Optional[str] = None, complete: Optional[bool] = False) -> Any:
    # Use the experiment config schema by default.
    if url is None:
        url = "http://determined.ai/schemas/expconf/v0/experiment.json"

    global _validators
    key = "completeness" if complete else "sanity"
    if url in _validators[key]:
        return _validators[key][url]

    schema = _gen.schemas[url]

    resolver = jsonschema.RefResolver(
        base_uri=url,
        referrer=schema,
        handlers={"http": lambda url: _gen.schemas[url]},
    )

    validator = jsonschema.Draft7Validator(schema=schema, resolver=resolver)

    ext = {
        "disallowProperties": extensions.disallowProperties,
        "union": extensions.union,
        "checks": extensions.checks,
        "compareProperties": extensions.compareProperties,
        "optionalRef": extensions.optionalRef,
    }
    if complete:
        ext["eventuallyRequired"] = extensions.eventuallyRequired
        ext["eventually"] = extensions.eventually

    cls = jsonschema.validators.extend(validator, ext)

    _validators[key][url] = cls(schema=schema, resolver=resolver)
    return _validators[key][url]
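# Both make_validator() variants above extend a Draft7 validator with custom
# keywords from an `extensions` module that is not shown here. The sketch
# below (assumed, not the project's actual extension code) illustrates the
# general jsonschema.validators.extend() mechanism with a made-up
# "disallowEmpty" keyword: each keyword maps to a callable that yields
# ValidationError instances for offending values.
import jsonschema


def _disallow_empty(validator, value, instance, schema):
    # `value` is the keyword's value in the schema, `instance` the data node.
    if value and instance == "":
        yield jsonschema.ValidationError("empty strings are not allowed here")


_CustomValidator = jsonschema.validators.extend(
    jsonschema.Draft7Validator, {"disallowEmpty": _disallow_empty}
)
_CustomValidator({"type": "string", "disallowEmpty": True}).validate("ok")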
def validate_schema(self, name, definition, module_name,
                    module_schema: str = None) -> None:
    """
    JSON validation of the resources module definition.
    """
    if not self.module.json_schema and not module_schema:
        return
    resolver_source = pkg_files("ecs_composex").joinpath(
        "specs/compose-spec.json")
    LOG.debug(f"Validating against input schema {resolver_source}")
    resolver = jsonschema.RefResolver(
        base_uri=f"file://{path.abspath(path.dirname(resolver_source))}/",
        referrer=self.module.json_schema,
    )
    try:
        jsonschema.validate(
            definition,
            module_schema if module_schema else self.module.json_schema,
            resolver=resolver,
        )
    except jsonschema.exceptions.ValidationError:
        LOG.error(
            f"{module_name}.{name} - Definition does not conform to the schema.")
        raise
def from_schema(name, schema=None, schema_file=None, base_classes=None):
    base_classes = base_classes or []
    base_classes.append(BaseModel)

    schema_file = schema_file or '{}.json'.format(name)
    class_name = '{}{}'.format(name[0].upper(), name[1:])

    if '/' not in schema_file:
        schema_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                   'schemas', schema_file)
    schema_path = 'file://' + schema_file

    with open(schema_file) as f:
        schema = json.load(f)

    dct = {}
    resolver = jsonschema.RefResolver(schema_path, schema)
    dct['@type'] = name
    dct['_schema_file'] = schema_file
    dct['schema'] = schema
    dct['_validator'] = jsonschema.Draft4Validator(schema, resolver=resolver)

    newclass = type(class_name, tuple(base_classes), dct)
    register(newclass, name)
    return newclass
def _verify_list(list_filename, schema_filename, name):
    """Verifies a list with elements of a given schema.

    Args:
        list_filename (pathlib.Path): The filename containing the list to
            verify.
        schema_filename (pathlib.Path): The filename containing the schema
            for the elements in the list.
        name (str): The name of the schema.

    Returns:
        bool: Indicates whether there were any failures.
    """
    with open(list_filename, "r") as file_obj:
        elements = json.load(file_obj)
    with open(schema_filename, "r") as file_obj:
        schema = json.load(file_obj)

    # NOTE: We need to set a custom resolver for ``$ref`` to the local
    #       filesystem.
    #       See: https://github.com/Julian/jsonschema/issues/313
    resolver = jsonschema.RefResolver(
        base_uri=f"file://{SCHEMA_DIR}{os.path.sep}", referrer=schema)

    failed = False
    for element in elements:
        id_ = element.get("id", "<unknown>")
        try:
            jsonschema.validate(element, schema, resolver=resolver)
        except jsonschema.ValidationError:
            print(f"{name} {id_} does not adhere to the schema.")
            failed = True

    return failed
def _verify_map(map_filename, schema_filename, name):
    """Verifies a map with string keys and values of a given schema.

    Args:
        map_filename (pathlib.Path): The filename containing the map to
            verify.
        schema_filename (pathlib.Path): The filename containing the schema
            for the values in the map.
        name (str): The name of the schema.

    Returns:
        bool: Indicates whether there were any failures.
    """
    with open(map_filename, "r") as file_obj:
        object_map = json.load(file_obj)
    with open(schema_filename, "r") as file_obj:
        schema = json.load(file_obj)

    # NOTE: We need to set a custom resolver for ``$ref`` to the local
    #       filesystem.
    #       See: https://github.com/Julian/jsonschema/issues/313
    resolver = jsonschema.RefResolver(
        base_uri=f"file://{SCHEMA_DIR}{os.path.sep}", referrer=schema)

    failed = False
    for object_id, info in object_map.items():
        try:
            jsonschema.validate(info, schema, resolver=resolver)
        except jsonschema.ValidationError:
            print(f"{name} {object_id} does not adhere to the schema.")
            failed = True

    return failed
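# A self-contained sketch (paths and schemas invented for illustration) of
# the file:// base_uri pattern used by _verify_list/_verify_map above: a
# relative "$ref" such as "child.json" is resolved from the local schema
# directory because the resolver's base URI points at that directory.
import json
import os
import tempfile

import jsonschema

_schema_dir = tempfile.mkdtemp()
with open(os.path.join(_schema_dir, "child.json"), "w") as _fh:
    json.dump({"type": "integer"}, _fh)
_parent = {"type": "object", "properties": {"count": {"$ref": "child.json"}}}
_resolver = jsonschema.RefResolver(
    base_uri=f"file://{_schema_dir}{os.path.sep}", referrer=_parent)
jsonschema.validate({"count": 3}, _parent, resolver=_resolver)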
def test_with_valid_token(self, mock_controller):
    """Client auth token has required public read scope."""
    document = mocks.document()
    docs = {
        "results": [document],
        "metadata": {"start": 0, "end": 1, "size": 50, "total": 1},
    }
    r_data = {"results": docs, "query": APIQuery()}
    mock_controller.search.return_value = r_data, HTTPStatus.OK, {}

    token = helpers.generate_token("1234", "*****@*****.**", "foouser",
                                   scope=[auth.scopes.READ_PUBLIC])
    response = self.client.get("/", headers={"Authorization": token})

    self.assertEqual(response.status_code, HTTPStatus.OK)
    data = json.loads(response.data)
    res = jsonschema.RefResolver(
        "file://%s/" % os.path.abspath(os.path.dirname(self.SCHEMA_PATH)),
        None,
    )
    self.assertIsNone(
        jsonschema.validate(data, self.schema, resolver=res),
        "Response content is valid per schema",
    )

    for field in get_required_fields():
        self.assertIn(field, data["results"][0])
def validate_json(self, data, filename, jsonType):
    print("Validating %s JSON." % (jsonType))

    schema_file_name = None
    if jsonType == 'disease':
        schema_file_name = 'schemas/disease/diseaseMetaDataDefinition.json'
    elif jsonType == 'BGI':
        schema_file_name = 'schemas/gene/basicGeneInfoFile.json'
    elif jsonType == 'orthology':
        schema_file_name = 'schemas/orthology/orthoHeader.json'
    elif jsonType == 'allele':
        schema_file_name = 'schemas/feature/featureMetadata.json'

    with open(schema_file_name) as schema_file:
        schema = json.load(schema_file)

    # Defining a resolver for relative paths and schema issues, see
    # https://github.com/Julian/jsonschema/issues/313
    # and https://github.com/Julian/jsonschema/issues/274
    sSchemaDir = os.path.dirname(os.path.abspath(schema_file_name))
    oResolver = js.RefResolver(base_uri='file://' + sSchemaDir + '/',
                               referrer=schema)

    try:
        js.validate(data, schema, format_checker=js.FormatChecker(),
                    resolver=oResolver)
        print("'%s' successfully validated against '%s'"
              % (filename, schema_file_name))
    except js.ValidationError as e:
        print(e.message)
        print(e)
        raise SystemExit("FATAL ERROR in JSON validation.")
    except js.SchemaError as e:
        print(e.message)
        print(e)
        raise SystemExit("FATAL ERROR in JSON validation.")
def load(kind):
    filepath = '{}/{}.json'.format(schema_path, kind)
    with open(filepath, 'r') as file:
        schema = json.load(file)
    resolver = jsonschema.RefResolver('file://{}/'.format(schema_path), schema)
    return (schema, resolver)
def setup_class(cls):
    """Set the test up."""
    cls.schema = json.load(open(AGENT_CONFIGURATION_SCHEMA))
    cls.resolver = jsonschema.RefResolver(
        make_jsonschema_base_uri(Path(CONFIGURATION_SCHEMA_DIR).absolute()),
        cls.schema,
    )
    cls.validator = Draft4Validator(cls.schema, resolver=cls.resolver)

    cls.runner = CliRunner()
    cls.agent_name = "myagent"
    cls.cwd = os.getcwd()
    cls.t = tempfile.mkdtemp()
    os.chdir(cls.t)
    cls.cli_config_file = f"{cls.t}/cli_config.yaml"
    cls.cli_config_patch = patch("aea.cli.utils.config.CLI_CONFIG_PATH",
                                 cls.cli_config_file)
    cls.cli_config_patch.start()
    result = cls.runner.invoke(
        cli, [*CLI_LOG_OPTION, "init", "--local", "--author", AUTHOR])
    assert result.exit_code == 0
    cls.result = cls.runner.invoke(
        cli,
        [*CLI_LOG_OPTION, "create", "--local", cls.agent_name],
        standalone_mode=False,
    )
    cls.agent_config = cls._load_config_file(cls.agent_name)
def _validate(self):
    """Validates self.redfish against self.schema_obj.

    Validates a redfish OrderedDict against the schema object passed on
    the object creation.

    Returns:
        None

    Exception:
        ValidationError: Raises this exception on validation failure.
        OneViewRedfishError: Raises this exception if schema is not found.
    """
    schema_version = util.schemas[self.schema_name]

    stored_schemas = util.stored_schemas

    try:
        schema_obj = stored_schemas[
            "http://redfish.dmtf.org/schemas/v1/" + schema_version]
    except KeyError:
        raise OneViewRedfishError("{} not found".format(schema_version))

    resolver = jsonschema.RefResolver('', schema_obj, store=stored_schemas)

    jsonschema.validate(self.redfish, schema_obj, resolver=resolver)
def set_content(self, kwargs, content=None, fully_load=True):
    """
    Method to initialize the compose content

    :param dict kwargs:
    :param dict content:
    :param bool fully_load:
    """
    files = (
        []
        if not keyisset(self.input_file_arg, kwargs)
        else kwargs[self.input_file_arg]
    )
    content_def = ComposeDefinition(files, content)
    self.compose_content = content_def.definition
    source = pkg_files("ecs_composex").joinpath("specs/compose-spec.json")
    LOG.info(f"Validating against input schema {source}")
    resolver = jsonschema.RefResolver(
        f"file://{path.abspath(path.dirname(source))}/", None
    )
    jsonschema.validate(
        content_def.definition,
        loads(source.read_text()),
        resolver=resolver,
    )
    if fully_load:
        self.set_secrets()
        self.set_volumes()
        self.set_services()
        self.set_families()
        self.set_efs()
def __init__(self, schema_filename: str, configuration_type: Type[T]):
    """Initialize the parser for configuration files."""
    self.schema = json.load(open(os.path.join(_SCHEMAS_DIR, schema_filename)))
    root_path = "file://{}{}".format(Path(_SCHEMAS_DIR).absolute(), os.path.sep)
    self.resolver = jsonschema.RefResolver(root_path, self.schema)
    self.validator = Draft4Validator(self.schema, resolver=self.resolver)
    self.configuration_type = configuration_type  # type: Type[T]
def _resolve_refs(uri, spec):
    """Resolve JSON references in a given dictionary.

    An OpenAPI spec may contain JSON references to its own nodes or to
    external sources, so any attempt to rely on some expected attribute
    being present in the spec may fail. We therefore resolve JSON
    references before using the spec (i.e. replace them with the
    referenced objects). For details see:

        https://tools.ietf.org/html/draft-pbryan-zyp-json-ref-02

    The input spec is modified in-place despite being returned from the
    function.
    """
    resolver = jsonschema.RefResolver(uri, spec)

    def _do_resolve(node):
        if isinstance(node, collections.Mapping) and '$ref' in node:
            with resolver.resolving(node['$ref']) as resolved:
                return resolved
        elif isinstance(node, collections.Mapping):
            for k, v in node.items():
                node[k] = _do_resolve(v)
        elif isinstance(node, (list, tuple)):
            for i in range(len(node)):
                node[i] = _do_resolve(node[i])
        return node

    return _do_resolve(spec)
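# A minimal usage sketch for _resolve_refs() above; the spec dict and the
# URI are invented for illustration. The local "#/definitions/Pet" reference
# is replaced in-place by the object it points to.
_spec = {
    "definitions": {"Pet": {"type": "object"}},
    "paths": {
        "/pets": {
            "get": {"responses": {"200": {"schema": {"$ref": "#/definitions/Pet"}}}}
        }
    },
}
_resolved = _resolve_refs("file:///tmp/openapi.json", _spec)
assert (_resolved["paths"]["/pets"]["get"]["responses"]["200"]["schema"]
        == {"type": "object"})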
def setup_class(cls):
    """Set up the test class."""
    cls.schema = json.load(open(SKILL_CONFIGURATION_SCHEMA))
    cls.resolver = jsonschema.RefResolver(
        "file://{}/".format(Path(CONFIGURATION_SCHEMA_DIR).absolute()),
        cls.schema)
    cls.validator = Draft4Validator(cls.schema, resolver=cls.resolver)
def setup_class(cls):
    """Set the test up."""
    cls.schema = json.load(open(AGENT_CONFIGURATION_SCHEMA))
    cls.resolver = jsonschema.RefResolver(
        "file://{}/".format(Path(CONFIGURATION_SCHEMA_DIR).absolute()),
        cls.schema)
    cls.validator = Draft4Validator(cls.schema, resolver=cls.resolver)

    cls.cwd = os.getcwd()
    cls.runner = CliRunner()
    cls.t = tempfile.mkdtemp()
    os.chdir(cls.t)
    result = cls.runner.invoke(
        cli, [*CLI_LOG_OPTION, "init", "--local", "--author", AUTHOR])
    assert result.exit_code == 0
    result = cls.runner.invoke(
        cli,
        [*CLI_LOG_OPTION, "create", "--local", "myagent"],
        standalone_mode=False,
    )
    assert result.exit_code == 0

    Path(cls.t, "packages", "agents").mkdir(parents=True)
    shutil.copytree(Path(cls.t, "myagent"),
                    Path(cls.t, "packages", "agents", "myagent"))

    os.chdir(Path(cls.t, "myagent"))
    cls.result = cls.runner.invoke(
        cli, [*CLI_LOG_OPTION, "search", "--local", "agents"],
        standalone_mode=False)
def get_schema_validator(self, schema_name):
    """
    Had to remove the id property from map.json or it uses URLs for
    validation. See various issues at
    https://github.com/Julian/jsonschema/pull/306
    """
    if schema_name not in self.schemas:
        schema_file = self.get_schema_file(schema_name)
        with open(schema_file) as f:
            try:
                jsn_schema = json.load(f)
            except ValueError as ex:
                log.error("Could not load %s", schema_file)
                raise ex

        schemas_folder = self.get_schemas_folder()
        root_schema_path = self.get_schema_path(schemas_folder)
        resolver = jsonschema.RefResolver(root_schema_path, None)
        # cache the schema for future use
        self.schemas[schema_name] = (jsn_schema, resolver)
    else:
        jsn_schema, resolver = self.schemas[schema_name]

    validator = jsonschema.Draft4Validator(schema=jsn_schema, resolver=resolver)
    # validator.check_schema(jsn_schema)  # check schema is valid
    return validator
def validate_json(name, json):
    schema = get_schemas()[name]
    resolver = jsonschema.RefResolver(
        base_uri="file://{}/{}".format(BASE_SCHEMA_DIR, name),
        referrer=schema,
    )
    jsonschema.validate(schema=schema, resolver=resolver, instance=json)
def expand_with_schema(name, attrs):
    if 'schema' in attrs:  # Schema specified by name
        schema_file = '{}.json'.format(attrs['schema'])
    elif 'schema_file' in attrs:
        schema_file = attrs['schema_file']
        del attrs['schema_file']
    else:
        return attrs

    if '/' not in schema_file:
        thisdir = os.path.dirname(os.path.realpath(__file__))
        schema_file = os.path.join(thisdir, 'schemas', schema_file)
    schema_path = 'file://' + schema_file

    with open(schema_file) as f:
        schema = json.load(f)

    resolver = jsonschema.RefResolver(schema_path, schema)

    attrs['@type'] = "".join((name[0].lower(), name[1:]))
    attrs['_schema_file'] = schema_file
    attrs['schema'] = schema
    attrs['_validator'] = jsonschema.Draft4Validator(schema, resolver=resolver)

    schema_defaults = BaseMeta.get_defaults(attrs['schema'])
    attrs.update(schema_defaults)

    return attrs
def _create_validator() -> jsonschema.Draft4Validator:
    """Create a JSON validator instance from dcmqi schema files.

    In order to allow offline usage, the required schemas are pre-loaded
    from the dcmqi repository located at `pydicom_seg/externals/dcmqi`.

    Returns:
        A `jsonschema.Draft4Validator` with a pre-loaded schema store.
    """
    # Load both common and segmentation schema files
    schemas_dir = os.path.join(os.path.dirname(__file__), "schemas")
    seg_schema_path = os.path.join(schemas_dir, "seg-schema.json")
    with open(seg_schema_path) as ifile:
        seg_schema = json.load(ifile)
    with open(os.path.join(schemas_dir, "common-schema.json")) as ifile:
        common_schema = json.load(ifile)

    # Create validator with pre-loaded schema store
    return jsonschema.Draft4Validator(
        seg_schema,
        resolver=jsonschema.RefResolver(
            base_uri="file://" + seg_schema_path,
            referrer=seg_schema,
            store={
                seg_schema["id"]: seg_schema,
                common_schema["id"]: common_schema,
            },
        ),
    )
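# A small, self-contained sketch of the pre-loaded store technique used in
# _create_validator() above; the two in-memory schemas are invented for
# illustration. Because both documents are registered in the store under
# their "id", the cross-document "$ref" resolves without touching the
# filesystem or the network.
import jsonschema

_common = {"id": "common.json", "definitions": {"name": {"type": "string"}}}
_seg = {
    "id": "seg.json",
    "type": "object",
    "properties": {"label": {"$ref": "common.json#/definitions/name"}},
}
_validator = jsonschema.Draft4Validator(
    _seg,
    resolver=jsonschema.RefResolver(
        base_uri="seg.json",
        referrer=_seg,
        store={_common["id"]: _common, _seg["id"]: _seg},
    ),
)
_validator.validate({"label": "liver"})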
def add_spec(self, spec, transforms):
    info = spec.get('info', {})
    LOG.debug('Adding API: %s %s', info.get('title', 'untitled'),
              info.get('version', '0.0.0'))

    self.spec = spec
    self.spec_resolver = jsonschema.RefResolver('', self.spec)

    validate(copy.deepcopy(self.spec))

    for filter in transforms:
        for (path, methods) in six.iteritems(spec['paths']):
            if not re.search(filter, path):
                continue
            for (method, endpoint) in six.iteritems(methods):
                conditions = {'method': [method.upper()]}
                connect_kw = {}
                if 'x-requirements' in endpoint:
                    connect_kw['requirements'] = endpoint['x-requirements']

                m = self.routes.submapper(_api_path=path, _api_method=method,
                                          conditions=conditions)
                for transform in transforms[filter]:
                    m.connect(None, re.sub(filter, transform, path),
                              **connect_kw)

                module_name = endpoint['operationId'].split(':', 1)[0]
                __import__(module_name)

    for route in sorted(self.routes.matchlist, key=lambda r: r.routepath):
        LOG.debug('Route registered: %+6s %s',
                  route.conditions['method'][0], route.routepath)
def check_config(config, logger=None):
    """
    Function to check that a configuration abides by the Daijin schema.

    :param config: the dictionary to validate
    :param logger: optional logger. If none is provided, one will be created
    :return:
    """
    if logger is None:
        logger = create_default_logger("daijin_validator")

    try:
        resolver = jsonschema.RefResolver(
            "file:///{}".format(
                resource_filename("Mikado.configuration", "configuration")),
            None)
        with io.TextIOWrapper(resource_stream(__name__,
                                              "daijin_schema.json")) as blue:
            blue_print = json.load(blue)

        validator = jsonschema.Draft4Validator(blue_print, resolver=resolver)
        validator.validate(config)
    except Exception as exc:
        logger.exception(exc)
        sys.exit(1)