def __init__(self, **kwargs):
    """Initializer for ActionJsonSchema instance.

    Args:
        schema (str or object): Schema to validate values against.
        with_meta (bool): Whether to include metadata (def.=True).

    Raises:
        ValueError: If a parameter is invalid.
        jsonschema.exceptions.SchemaError: If the schema is invalid.
    """
    if "schema" in kwargs:
        # Public construction path: caller supplied a schema.
        _check_unknown_kwargs(kwargs, {"schema", "with_meta"})
        schema = kwargs["schema"]
        if isinstance(schema, str):
            try:
                # String schemas are parsed as YAML (JSON is a YAML subset).
                schema = yaml.safe_load(schema)
            except Exception as ex:
                # Re-raise the same exception type with added context.
                raise type(ex)("Problems parsing schema :: " + str(ex))
        jsonvalidator.check_schema(schema)
        self._validator = self._extend_jsonvalidator_with_default(
            jsonvalidator)(schema)
        self._with_meta = kwargs.get("with_meta", True)
    elif "_validator" not in kwargs:
        raise ValueError("Expected schema keyword argument.")
    else:
        # Internal construction path: reuse an already-built validator
        # (presumably used when the action is re-instantiated — TODO confirm
        # against the caller that passes `_validator`).
        self._validator = kwargs.pop("_validator")
        self._with_meta = kwargs.pop("_with_meta")
    # Remaining kwargs are forwarded to the argparse Action base class;
    # values always arrive as strings and are validated later.
    kwargs["type"] = str
    super().__init__(**kwargs)
def get_schema_store(validate_schema: bool = False, schema_search_path: str = None) -> dict:
    """Return a schema_store as a dict mapping each schema's ``$id`` to the schema.

    The default returns the schema_store of the default schemas found in this
    package (the ``schemas`` directory next to this module).

    Args:
        validate_schema: When True, additionally check every collected schema
            against the Draft 7 meta-schema.
        schema_search_path: Directory tree to scan; falls back to the packaged
            ``schemas`` directory when empty or None.

    Returns:
        dict: ``{schema['$id']: schema}`` for every ``*.json`` file that
        declares an ``$id``.

    Raises:
        json.JSONDecodeError: If a ``.json`` file is not valid JSON.
        SchemaError: If ``validate_schema`` is True and a schema is invalid.
    """
    if not schema_search_path:
        schema_search_path = path.join(path.dirname(__file__), 'schemas')
    schemastore = {}
    for dirpath, _dirnames, filenames in os.walk(schema_search_path):
        for fname in filenames:
            fpath = path.join(dirpath, fname)
            # endswith() is clearer and less fragile than slicing fpath[-5:].
            if fpath.endswith('.json'):
                with open(fpath, 'r') as schema_fd:
                    schema = json.load(schema_fd)
                # Only schemas that declare an $id can be indexed.
                if '$id' in schema:
                    schemastore[schema['$id']] = schema
    if validate_schema:
        # Iterate values directly; the key was unused in the original .items().
        for schema in schemastore.values():
            Draft7Validator.check_schema(schema)
    # NOTE: the original wrapped everything in a try/except that immediately
    # re-raised SchemaError/JSONDecodeError unchanged; that was a no-op and
    # has been removed — the same exceptions still propagate.
    return schemastore
def get_validator(filename, base_uri=''):
    # Adapted from https://www.programcreek.com/python/example/83374/jsonschema.RefResolver
    # referencing code from HumanCellAtlas Apache License
    """Load schema from JSON file; check whether it's a valid schema; return a
    Draft7Validator object.

    Optionally specify a base URI for relative path resolution of JSON
    pointers (this is especially useful for local resolution via base_uri of
    form file://{some_path}/).

    Args:
        filename: Path of the JSON schema file.
        base_uri: Optional base URI for relative ``$ref`` resolution.

    Returns:
        Draft7Validator: Validator bound to the loaded schema.

    Raises:
        SystemExit: If the file does not contain a valid Draft 7 schema.
    """
    def get_json_from_file(fname):
        # Read the whole file as text, then parse as JSON.
        with open(fname, 'rt') as f:
            return json.loads(f.read())

    schema = get_json_from_file(filename)
    try:
        # Check schema via class method call. Works, despite IDE complaining.
        Draft7Validator.check_schema(schema)
    except SchemaError as err:
        # The original did ``raise sys.exit(1)``: sys.exit() raises
        # SystemExit itself, so the ``raise`` was unreachable and the actual
        # schema error was discarded. Exit with a useful message instead.
        sys.exit("Schema %s is invalid: %s" % (filename, err))
    if base_uri:
        resolver = RefResolver(base_uri=base_uri, referrer=filename)
    else:
        resolver = None
    return Draft7Validator(schema=schema, resolver=resolver)
def check(definition):
    """Return True when *definition* is a valid schema, False otherwise.

    Any failure (including non-Exception BaseExceptions raised by the
    checker) is treated as "invalid" rather than propagated.
    """
    try:
        Validator.check_schema(definition)
    except BaseException:
        return False
    return True
def validate(data, schema=None):
    # Validate *data* against *schema* (generated on demand when omitted).
    # Returns [] / the uniqueness-check result when valid, otherwise a
    # two-element [error, policy_name] list, falling back to the raw
    # jsonschema errors if the friendly-error path blows up.
    if schema is None:
        schema = generate()
        JsonSchemaValidator.check_schema(schema)
    validator = JsonSchemaValidator(schema)
    errors = list(validator.iter_errors(data))
    if not errors:
        # Structurally valid; still enforce the uniqueness constraint.
        return check_unique(data) or []
    try:
        # Try to narrow the first error down to a specific policy/scope.
        resp = policy_error_scope(specific_error(errors[0]), data)
        # Pull the offending policy's name when the instance is a dict;
        # 'unknown' otherwise (old-style and/or trick — relies on truthiness).
        name = isinstance(
            errors[0].instance, dict) and errors[0].instance.get(
            'name', 'unknown') or 'unknown'
        return [resp, name]
    except Exception:
        logging.exception(
            "specific_error failed, traceback, followed by fallback")
        # Fallback: raw first error plus jsonschema's best match, with
        # any None entries filtered out.
        return list(filter(None, [
            errors[0],
            best_match(validator.iter_errors(data)),
        ]))
def report_printer(entry):
    """Build the top layer of the report for one entry and run validation.

    Looks up the schema for ``entry['event']`` (retrying with spaces stripped
    from the event name), validates ``entry['data']`` against it, and appends
    the outcome to the report via ``report_extender``.
    """
    try:
        if entry["data"] is None:  # fix: identity check instead of == None
            # f-prefix dropped below where there are no placeholders;
            # runtime strings are unchanged.
            report_extender("<ul><b>No entries found</b></ul>")
        elif entry["event"] in schem_dir:
            validate_json(entry["data"], schem_dir[entry["event"]])
            Draft7Validator.check_schema(schem_dir[entry["event"]])
        elif entry["event"].replace(" ", "") in schem_dir:
            # Event matched only after stripping spaces: flag the format issue
            # but still validate against the matching schema.
            report_extender(
                "<ul><h5>JSON errors:</h5> <li>Incorrect event name format</li></ul>"
            )
            validate_json(
                entry["data"], schem_dir[entry["event"].replace(" ", "")]
            )
            Draft7Validator.check_schema(
                schem_dir[entry["event"].replace(" ", "")]
            )
        else:
            report_extender(
                f"<ul>Schema for <u>{entry['event']}</u> not found, unable to confirm</ul>"
            )
    except (TypeError, KeyError):
        # Entry is missing expected keys or has the wrong shape.
        report_extender("<ul><b>Empty entry</b></ul>")
    except SchemaError:
        report_extender(
            f"<ul><b>Schema for <u>{entry['event']}</u> is broken, unable to confirm</b></ul>"
        )
def check_history_json(json_content):
    """
    Checks if some keywords added or changed after Draft4 are contained in the
    json_content (dictionary).

    :param json_content: The json content to check.
    :return: The added keywords and incompatible keywords found and the schema
        tag, or None (implicitly) when the document is out of scope or nothing
        was found.
    """
    # File must be valid to Draft 4, 6 and 7 in order to search for keywords,
    # because only documents valid to these drafts are used. This script
    # should result only the numbers for the keywords; the filtering based on
    # drafts is done in schema_drafts.
    try:
        Draft4Validator.check_schema(json_content)
        Draft6Validator.check_schema(json_content)
        Draft7Validator.check_schema(json_content)
    except Exception as e:
        # Not valid against all three drafts -> out of scope, return None.
        return
    schema_tag = util.schema_tag(json_content)
    if schema_tag is not None and "/draft-04/" in schema_tag:
        # Draft-04 documents doesn't include keywords for Draft 6/7, because
        # they are Draft4...
        return
    draft4_to_draft7_added = [
    ]  # All keywords added from draft 4 until draft 7
    draft4_to_draft7_added.extend(draft4_to_draft6_added)
    draft4_to_draft7_added.extend(draft6_to_draft7_added)
    addeds = []
    for keyword_not_in_draft4 in draft4_to_draft7_added:
        findings = _find_recursive_or_manually(json_content, keyword_not_in_draft4)
        if len(findings) > 0:
            # Found some new keyword
            for f in findings:
                addeds.append((keyword_not_in_draft4, f))
    # Filter "if" keywords when no "then" or "else" is present
    added_keywords = set(map(lambda data: data[0], addeds))
    if "if" in added_keywords and not ("then" in added_keywords or "else" in added_keywords):
        # "if" is present but no "then" or "else" - remove "if" from list
        # because the new "if then else" construct introduced in draft 7 is
        # not used, because otherwise "then" or "else" would also be present
        addeds = list(filter(lambda data: data[0] != "if", addeds))
    draft4_to_draft7_incompatibles = [
    ]  # All keywords made incompatible from draft 4 until draft 7
    draft4_to_draft7_incompatibles.extend(draft4_to_draft6_incompatible)
    incompatibles = []
    for keyword_incompatible_to_draft4 in draft4_to_draft7_incompatibles:
        # Search for incompatible keywords
        findings = _find_recursive_or_manually(json_content, keyword_incompatible_to_draft4)
        if len(findings) > 0:
            # Found incompatible keywords
            for f in findings:
                incompatibles.append((keyword_incompatible_to_draft4, f))
    # Return only a result if something was found.
    if len(addeds) > 0 or len(incompatibles) > 0:
        return addeds, incompatibles, schema_tag
def test_valid_schema_with_validator(self):
    """The stored JSON schema is valid and accepts the document on disk."""
    with open(self.filename) as schema_file:
        document = json.load(schema_file)
    Draft7Validator.check_schema(self.json_schema)
    validator = Draft7Validator(self.json_schema)
    self.assertTrue(validator.is_valid(document))
def test_systemProperties(testdoc, schemas, resolvers):
    """systemProperties of each test document must be a valid Draft 7 schema."""
    s, _, systemProperties = testdoc
    try:
        Draft7Validator.check_schema(systemProperties)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed by the test.
        assert False, 'Invalid schema definition in systemProperties in: "%s"!' % s
def test_validate_schemas(self):
    """Check that our schemas are in fact JSONSchemas."""
    for interval_type, schema in SCHEMAS.items():
        try:
            Validator.check_schema(schema)
        except JSONValidationError:
            # Fixed typo in the failure message: "assoctiated" -> "associated".
            msg = f'JSON schema associated with {interval_type} interval is incorrect.'
            self.fail(msg)
def __init__(self, schema_path: Optional[str] = None, indent: int = 2):
    """Load and check the validation schema, then build a Draft7Validator.

    Args:
        schema_path: Path to a YAML schema file; defaults to ``schema.yaml``
            next to this module.
        indent: Indentation width stored for later formatting.

    Raises:
        jsonschema.exceptions.SchemaError: If the schema is not a valid
            Draft 7 schema.
    """
    if schema_path is None:
        schema_path = join(dirname(__file__), "schema.yaml")
    # Use a context manager: the original open().read() leaked the handle.
    with open(schema_path, 'r') as schema_file:
        schema_str = schema_file.read()
    schema = self._load_yaml_object(schema_str)
    Draft7Validator.check_schema(schema)
    self.validator = Draft7Validator(schema)
    self.indent = indent
def create_validator(schema):
    """Load *schema* from the ``external_menus`` directory and return a
    Draft7Validator for it."""
    schema_path = os.path.join(os.path.dirname(__file__), 'external_menus', schema)
    with open(schema_path) as handle:
        loaded = json.load(handle)
    Draft7Validator.check_schema(loaded)
    return Draft7Validator(loaded)
def main():
    """Check every ``*.schema.json`` under the parent directory against the
    Draft 7 meta-schema and print a PASSED/failed line per file."""
    for filename in Path('..').glob('**/*.schema.json'):
        with open(filename, 'r') as f:
            try:
                # json.load(f) instead of json.loads(f.read()).
                Draft7Validator.check_schema(json.load(f))
                print(str(filename) + ": PASSED")
            except Exception:
                # Narrowed from a bare ``except:`` (which also caught
                # KeyboardInterrupt); behavior for JSON/schema errors is the same.
                print(str(filename) + " failed validation")
def _get_correct_schema(json_file, schemata):
    """Get correct validation schema based on filename.

    The schema key is the file's basename without extension, reduced to its
    alphabetic characters only.
    """
    stem = os.path.basename(os.path.splitext(json_file)[0])
    schema_name = "".join(ch for ch in stem if ch.isalpha())
    Draft7Validator.check_schema(schemata[schema_name])
    return schemata[schema_name]
def test_check_schema(self):
    """Check all Schemas."""
    for schema in PUBLISHED:
        label = 'Checking Schema "{}-v{}"'.format(
            schema.schema_name, schema.schema_version)
        with self.subTest(msg=label):
            Draft7Validator.check_schema(schema.schema_definition)
def test_schema(schema_file, convert_yaml_schemas_to_json, load_schema):
    """The schema file and every entry under its $defs must be valid Draft 7."""
    schema = load_schema(schema_file)
    Draft7Validator.check_schema(schema)
    # everything defined under $defs is not automatically validated unless we
    # explicitly do so
    for definition in schema["$defs"].values():
        Draft7Validator.check_schema(definition)
def validate_schema(cls, path: Path) -> bool:
    """Return True when *path* contains a valid Draft 7 JSON schema.

    Schema errors are logged as warnings and reported as False; a file that
    is not valid JSON still raises, matching the original behavior.
    """
    # read_text() closes the file; the original open(...).read() leaked
    # the handle. JSON decoding stays outside the try, as before.
    schema = json.loads(path.read_text())
    try:
        Draft7Validator.check_schema(schema)
    except Exception as e:
        # Lazy %-formatting instead of eager "%s" % e.
        logger.warning("%s", e)
        return False
    return True
def test_cost_func_schema_is_valid(self):
    """COST_FUNC_SCHEMA must itself be a valid Draft 7 schema."""
    schema_error = None
    try:
        Draft7Validator.check_schema(COST_FUNC_SCHEMA)
    except SchemaError as e:
        schema_error = e
    # Fails with the captured SchemaError as the message, like the original.
    self.assertFalse(schema_error is not None, schema_error or '')
def test_price_response_schema_is_valid(self):
    """PRICE_RESPONSE_SCHEMA must itself be a valid Draft 7 schema."""
    schema_error = None
    try:
        Draft7Validator.check_schema(PRICE_RESPONSE_SCHEMA)
    except SchemaError as e:
        schema_error = e
    # Fails with the captured SchemaError as the message, like the original.
    self.assertFalse(schema_error is not None, schema_error or '')
def test_schemas(self):
    """Every registered schema must be a valid Draft 7 schema."""
    for schema_type, schemas_by_name in self.validator.schemas.items():
        # TODO allow path schema validation when we receive valid ones
        if schema_type == 'path':
            continue
        for schema in schemas_by_name.values():
            Draft7Validator.check_schema(schema)
def _check_and_build_local_schemas(working_dir):
    """Load every dataset schema file under *working_dir*, verify each is a
    valid Draft 7 schema, and return them indexed by their ``$id``."""
    store = {}
    for dataset_file in _files_in_working_dir(working_dir):
        with open(dataset_file) as handle:
            schema = json.load(handle)
        Draft7Validator.check_schema(schema)
        store[schema['$id']] = schema
    return store
def test_bundle_request_schema_is_valid(self):
    """BUNDLE_REQUEST_SCHEMA must itself be a valid Draft 7 schema."""
    schema_error = None
    try:
        Draft7Validator.check_schema(BUNDLE_REQUEST_SCHEMA)
    except SchemaError as e:
        schema_error = e
    # Fails with the captured SchemaError as the message, like the original.
    self.assertFalse(schema_error is not None, schema_error or '')
def test_is_business_schema_valid(schema_filename):
    """Assert that the Schema is a valid Draft7 JSONSchema."""
    schema = get_schema(schema_filename)
    try:
        Draft7Validator.check_schema(schema)
    except SchemaError as error:
        print(error)
        assert False
    else:
        assert True
def test_bid_result_schema_is_valid(self):
    """BID_RESULT_SCHEMA must itself be a valid Draft 7 schema."""
    schema_error = None
    try:
        Draft7Validator.check_schema(BID_RESULT_SCHEMA)
    except SchemaError as e:
        schema_error = e
    # Fails with the captured SchemaError as the message, like the original.
    self.assertFalse(schema_error is not None, schema_error or '')
def parse_jsonschema_definition(schema_definition: str) -> Draft7Validator:
    """
    Parses and validates `schema_definition`.

    Raises:
        SchemaError: If `schema_definition` is not a valid Draft7 schema.
    """
    parsed = json.loads(schema_definition)
    Draft7Validator.check_schema(parsed)
    return Draft7Validator(parsed)
def test_interface_schemas():
    """Each data interface must expose valid source and conversion-options
    schemas."""
    for data_interface in interface_list:
        # check validity of source schema
        Draft7Validator.check_schema(data_interface.get_source_schema())
        # check validity of conversion options schema
        Draft7Validator.check_schema(data_interface.get_conversion_options_schema())
def validate_response_expectations(expectations: Dict[str, Any]):
    """Validate the structure of a response-expectations mapping.

    Checks the content type, the optional ``json_schema`` block, and the
    optional ``grpc`` block (delegated to private helpers).

    Raises:
        ExpectationsValidationException: On any inconsistent or missing field.
    """
    # `x not in d` replaces the original `not x in d.keys()`.
    if expectations["content_type"] not in CONTENT_TO_ATTR:
        raise ExpectationsValidationException(
            f"response.content_type should be one of {CONTENT_TO_ATTR.keys()}")
    if "expected" in expectations and "json_schema" in expectations:
        raise ExpectationsValidationException(
            "expected and json_schema are mutually exclusive")
    if "json_schema" in expectations:
        _validate_json_schema_expectations(expectations)
    if "grpc" in expectations:
        _validate_grpc_expectations(expectations["grpc"])


def _validate_json_schema_expectations(expectations: Dict[str, Any]):
    """Check that json_schema accompanies JSON content and is a valid Draft 7
    schema."""
    if expectations["content_type"] != "json":
        raise ExpectationsValidationException(
            "json_schema is only valid when content_type is set to json")
    try:
        Draft7Validator.check_schema(schema=expectations["json_schema"])
    except Exception as e:
        raise ExpectationsValidationException(
            "json_schema is invalid") from e


def _validate_grpc_expectations(grpc: Dict[str, Any]):
    """Check the grpc expectations block: required fields, module locations,
    and input/output spec fields."""
    required_fields = [
        "proto_module_pb2",
        "proto_module_pb2_grpc",
        "stub_service_name",
        "input_spec",
        "output_spec",
    ]
    for required_field in required_fields:
        if required_field not in grpc:
            raise ExpectationsValidationException(
                f"missing grpc.{required_field} field")
    # Both generated modules must live in the same directory.
    p1 = str(pathlib.Path(grpc["proto_module_pb2"]).parent)
    p2 = str(pathlib.Path(grpc["proto_module_pb2_grpc"]).parent)
    if p1 != p2:
        raise ExpectationsValidationException(
            "the parent directories of proto_module_pb2 and proto_module_pb2_grpc don't match"
        )
    input_spec = grpc["input_spec"]
    if "class_name" not in input_spec:
        raise ExpectationsValidationException(
            "missing grpc.input_spec.class_name field")
    if "input" not in input_spec:
        raise ExpectationsValidationException(
            "missing grpc.input_spec.input field")
    output_spec = grpc["output_spec"]
    if "class_name" not in output_spec:
        raise ExpectationsValidationException(
            "missing grpc.output_spec.class_name field")
    if "stream" not in output_spec:
        raise ExpectationsValidationException(
            "missing grpc.output_spec.stream field")
def post_deserialize(self, value, attr, obj, **kwargs):
    """Reject schemas containing falsy values, stamp the ``$schema`` key,
    and verify the result is a valid Draft 7 schema.

    Any failure — including the deliberate BaseException used as a sentinel
    for falsy values — is converted into the field's validator_failed error.
    """
    try:
        if not all(value.values()):
            raise BaseException
        value["$schema"] = "http://json-schema.org/schema#"
        Draft7Validator.check_schema(value)
        return value
    except BaseException:
        raise self.make_error("validator_failed")
def test_discover_v2(test_config_v2):
    """Every stream schema discovered by SourceChargebee must be a valid
    Draft 7 schema."""
    source = SourceChargebee()
    logger_mock = MagicMock()
    discovered = source.discover(logger_mock, test_config_v2)
    message = AirbyteMessage(type=Type.CATALOG, catalog=discovered).dict(exclude_unset=True)
    for stream in message["catalog"]["streams"]:
        Draft7Validator.check_schema(stream["json_schema"])
def valid_schema(filename: str):
    """Return True when *filename* contains valid JSON that is also a valid
    Draft 7 schema, False otherwise.

    Schema errors are logged via the module logger before returning False.
    """
    with open(filename, 'r') as schema_file:
        if not valid_json(filename):
            # Fix: the original fell through and returned None here;
            # return an explicit False for a consistent bool result.
            return False
        try:
            schema = json.load(schema_file)
            Draft7Validator.check_schema(schema)
            return True
        except exc.SchemaError:
            # No placeholders, so the f-prefix was a no-op and is dropped.
            error = 'Invalid schema file (unknown)<br>'
            logger.info(error)
            return False