def test_schema_datatype_validations(self, setup):
    self.logger.info("Test case test_schema_datatype_validations started")
    schema = {
        "type": "object",
        "properties": {
            "quota_max": {"type": "number"},
            "items": {
                "type": "array",
                "maxItems": 1,  # Check max items in array
                "items": {
                    "type": "object",
                    "properties": {
                        "badge_type": {"type": "string"},
                        "award_count": {"type": "number"},
                        "badge_id": {"type": "number"},
                    },
                    "required": ["badge_id"]
                }
            }
        },
        "required": ["quota_max"]
    }
    Draft6Validator.check_schema(schema)
    self.logger.info("Test case test_schema_datatype_validations finished")
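# A hypothetical instance that satisfies the schema above (the payload values
# are invented for illustration, not taken from the test suite):
valid_payload = {
    "quota_max": 10000,
    "items": [
        {"badge_type": "named", "award_count": 42, "badge_id": 9},
    ],
}
assert Draft6Validator(schema).is_valid(valid_payload)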
def check_history_json(json_content):
    """
    Checks whether keywords added or changed after Draft 4 are contained in
    json_content (a dictionary).

    :param json_content: The JSON content to check.
    :return: The added keywords, the incompatible keywords found, and the schema tag.
    """
    # The file must be valid against Draft 4, 6 and 7 before searching for keywords,
    # because only documents valid against these drafts are used. This function should
    # only report the keyword counts; the filtering based on drafts is done in
    # schema_drafts.
    try:
        Draft4Validator.check_schema(json_content)
        Draft6Validator.check_schema(json_content)
        Draft7Validator.check_schema(json_content)
    except Exception:
        return

    schema_tag = util.schema_tag(json_content)
    if schema_tag is not None and "/draft-04/" in schema_tag:
        # Draft-04 documents don't include keywords for Draft 6/7, because they are Draft 4...
        return

    draft4_to_draft7_added = []  # All keywords added from Draft 4 up to Draft 7
    draft4_to_draft7_added.extend(draft4_to_draft6_added)
    draft4_to_draft7_added.extend(draft6_to_draft7_added)

    addeds = []
    for keyword_not_in_draft4 in draft4_to_draft7_added:
        findings = _find_recursive_or_manually(json_content, keyword_not_in_draft4)
        if len(findings) > 0:  # Found some new keyword
            for f in findings:
                addeds.append((keyword_not_in_draft4, f))

    # Filter "if" keywords when no "then" or "else" is present
    added_keywords = set(map(lambda data: data[0], addeds))
    if "if" in added_keywords and not ("then" in added_keywords or "else" in added_keywords):
        # "if" is present without "then" or "else" - remove "if" from the list, because
        # the "if"/"then"/"else" construct introduced in Draft 7 is not actually in use;
        # otherwise "then" or "else" would also be present.
        addeds = list(filter(lambda data: data[0] != "if", addeds))

    draft4_to_draft7_incompatibles = []  # All keywords made incompatible from Draft 4 up to Draft 7
    draft4_to_draft7_incompatibles.extend(draft4_to_draft6_incompatible)

    incompatibles = []
    for keyword_incompatible_to_draft4 in draft4_to_draft7_incompatibles:
        # Search for incompatible keywords
        findings = _find_recursive_or_manually(json_content, keyword_incompatible_to_draft4)
        if len(findings) > 0:  # Found incompatible keywords
            for f in findings:
                incompatibles.append((keyword_incompatible_to_draft4, f))

    # Return a result only if something was found.
    if len(addeds) > 0 or len(incompatibles) > 0:
        return addeds, incompatibles, schema_tag
def validate_schema(schemas_bundle, filename, schema_data):
    kind = ValidatedFileKind.SCHEMA

    logging.info('validating schema: {}'.format(filename))

    try:
        meta_schema_url = schema_data[u'$schema']
    except KeyError as e:
        return ValidationError(kind, filename, "MISSING_SCHEMA_URL", e)

    if meta_schema_url in schemas_bundle:
        meta_schema = schemas_bundle[meta_schema_url]
    else:
        meta_schema = fetch_schema(meta_schema_url)

    resolver = jsonschema.RefResolver(
        filename,
        schema_data,
        handlers=get_handlers(schemas_bundle)
    )

    try:
        jsonschema_validator.check_schema(schema_data)
        validator = jsonschema_validator(meta_schema, resolver=resolver)
        validator.validate(schema_data)
    except jsonschema.ValidationError as e:
        return ValidationError(kind, filename, "VALIDATION_ERROR", e,
                               meta_schema_url=meta_schema_url)
    except (jsonschema.SchemaError,
            jsonschema.exceptions.RefResolutionError) as e:
        return ValidationError(kind, filename, "SCHEMA_ERROR", e,
                               meta_schema_url=meta_schema_url)

    return ValidationOK(kind, filename, meta_schema_url)
def __try_reading_schema(self) -> Draft6Validator:
    try:
        schema = self._read_schema_file()
        Draft6Validator.check_schema(schema)
        return Draft6Validator(schema)
    except (FileNotFoundError, OSError, SchemaError, JSONDecodeError):
        raise InvalidSchemaError(
            'JSON schema validation error - please raise issue on github!')
def test_dantimeta_datacite(schema, additional_meta, datacite_checks):
    """
    Checks datacite objects for specific metadata dictionaries,
    posts the datacite object and checks the status code.
    """
    dandi_id = f"DANDI:000{random.randrange(100, 999)}"

    # metadata without doi, datePublished and publishedBy
    meta_dict = {
        "identifier": dandi_id,
        "id": f"{dandi_id}/draft",
        "name": "testing dataset",
        "description": "testing",
        "contributor": [
            {
                "name": "A_last, A_first",
                "roleName": [RoleType("dandi:ContactPerson")],
            }
        ],
        "license": [LicenseType("spdx:CC-BY-4.0")],
    }
    meta_dict.update(_basic_publishmeta(dandi_id=dandi_id))
    meta_dict.update(additional_meta)

    # creating PublishedDandisetMeta from the dictionary
    meta = PublishedDandisetMeta(**meta_dict)

    # creating and validating datacite objects
    datacite = to_datacite(meta)
    Draft6Validator.check_schema(schema)
    validator = Draft6Validator(schema)
    validator.validate(datacite["data"]["attributes"])

    # checking some datacite fields
    attr = datacite["data"]["attributes"]
    for key, el in datacite_checks.items():
        el_len, el_flds = el
        if el_len:
            # checking the length and some fields of the first element
            assert len(attr[key]) == el_len
            for k, v in el_flds.items():
                assert attr[key][0][k] == v
        else:
            if isinstance(el_flds, dict):
                for k, v in el_flds.items():
                    assert attr[key][k] == v
            else:
                assert attr[key] == el_flds

    # trying to post datacite
    datacite_post(datacite, meta.doi)
def load_and_validate_schema(self):
    verbose = self.verbose
    if self.schema:
        return self.schema
    if not self.sch_file:
        try:
            sch = requests.get(MDFSCHEMA_URL)
            sch.raise_for_status()
            self.sch_file = sch.text
        except Exception as e:
            if self.verbose:
                print("Error in fetching mdf-schema.yml: \n{e}".format(e=e))
            raise e
    elif isinstance(self.sch_file, str):
        try:
            self.sch_file = open(self.sch_file, "r")
        except IOError as e:
            raise e
    else:
        pass
    try:
        print("Checking schema YAML =====")
        self.schema = yaml.load(self.sch_file, Loader=self.yloader)
    except ConstructorError as ce:
        if verbose:
            print("YAML error in MDF Schema '{fn}':\n{e}".format(fn=self.sch_file.name, e=ce))
        return ce
    except ParserError as e:
        if verbose:
            print("YAML error in MDF Schema '{fn}':\n{e}".format(fn=self.sch_file.name, e=e))
        return e
    except Exception as e:
        if verbose:
            print("Exception in loading MDF Schema yaml: {}".format(e))
        return e
    print("Checking as a JSON schema =====")
    try:
        d6.check_schema(self.schema)
    except SchemaError as se:
        if verbose:
            print("MDF Schema error: {}".format(se))
        raise se
    except Exception as e:
        if verbose:
            print("Exception in checking MDF Schema: {}".format(e))
        raise e
    return self.schema
def _jsonschema_errors(self):
    from django.conf import settings

    errors = []
    schemas = settings.SIMPLE_JSONSCHEMA
    for url, schema in schemas.items():
        try:
            Draft6Validator.check_schema(schema)
        except SchemaError as e:
            errors.append({
                'url': url,
                'error': e,
                'schema': json.dumps(schema, indent=4, sort_keys=True)
            })
    return errors
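# A minimal sketch of what this check iterates over: SIMPLE_JSONSCHEMA is
# assumed to be a settings dict mapping an endpoint URL to a JSON-schema dict
# (the URL key and schema below are hypothetical, for illustration only).
SIMPLE_JSONSCHEMA = {
    '/api/users/': {
        'type': 'object',
        'properties': {
            'name': {'type': 'string'},
            'age': {'type': 'number'},
        },
        'required': ['name'],
    },
}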
def validate(self):
    print("Checking schema =====")
    try:
        d6.check_schema(self.schema)
    except SchemaError as se:
        print(se)
        return se
    if self.instance:
        print("Checking instance ===")
        try:
            validate(instance=self.instance.as_dict(), schema=self.schema)
        except RefResolutionError as re:
            print(re)
            return re
        except ValidationError as ve:
            for e in d6(self.schema).iter_errors(self.instance.as_dict()):
                print(e)
            return ve
    return None
def validate_json(json: dict, schema: dict) -> bool:
    """Validate json against schema.

    :param json: ``dict`` json to be verified.
    :param schema: ``dict`` json schema draft 6.
    :returns: ``bool`` whether the json is valid against the schema.
    """
    try:
        Draft6Validator.check_schema(schema)
    except SchemaError as err:
        log.debug(f'Schema does not conform to json schema draft 6: {err}')
        return False
    try:
        validate(instance=json, schema=schema)
    except ValidationError as err:
        log.debug(f'JSON does not conform to schema: {err}')
        return False
    return True
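# A hypothetical usage sketch for validate_json; the schema and payloads are
# invented for illustration:
schema = {
    'type': 'object',
    'properties': {'id': {'type': 'number'}},
    'required': ['id'],
}
assert validate_json({'id': 1}, schema) is True
assert validate_json({'id': 'one'}, schema) is False  # wrong type: logged and rejected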
def test_datacite(dandi_id, schema):
    """Checks to_datacite for specific datasets."""
    # reading metadata taken from exemplary dandisets and saved in json files
    with (Path(__file__).with_name("data") / "metadata" / f"meta_{dandi_id}.json").open() as f:
        meta_js = json.load(f)

    # updating with basic fields required for PublishedDandisetMeta
    meta_js.update(
        _basic_publishmeta(
            dandi_id.replace("000", str(random.randrange(100, 999)))))
    meta = PublishedDandisetMeta(**meta_js)

    datacite = to_datacite(meta=meta)
    Draft6Validator.check_schema(schema)
    validator = Draft6Validator(schema)
    validator.validate(datacite["data"]["attributes"])

    # trying to post datacite
    datacite_post(datacite, meta.doi)
def yparse(ycontent):
    schema = yaml.full_load(ycontent)
    Draft6Validator.check_schema(schema)
    return schema
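# A minimal usage sketch for yparse, assuming `import yaml` and
# `from jsonschema import Draft6Validator` at module level; the YAML document
# below is invented for illustration:
schema = yparse("""
type: object
properties:
  name:
    type: string
required:
  - name
""")
assert schema["type"] == "object"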
def test_api_class_schema(self):
    with open('schema/api-class.schema.json') as f:
        schema = json.load(f)
    Draft6Validator.check_schema(schema)

def test_api_service_schema(self):
    with open('schema/api-service.schema.json') as f:
        schema = json.load(f)
    Draft6Validator.check_schema(schema)

def test_api_field_schema(self):
    with open('schema/api-field.schema.json') as f:
        schema = json.load(f)
    Draft6Validator.check_schema(schema)
def check_schema(schema):
    return Draft6Validator.check_schema(schema)
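# Draft6Validator.check_schema raises jsonschema.SchemaError on an invalid
# schema and returns None otherwise, so this wrapper is useful mainly for
# failing early. A minimal sketch (the invalid schema is invented for
# illustration):
from jsonschema import SchemaError

try:
    check_schema({"type": "not-a-real-type"})
except SchemaError as err:
    print(f"invalid schema: {err.message}")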