def check_history_json(json_content):
    """
    Checks whether keywords added or changed after Draft 4 are contained in the json_content (dictionary).

    :param json_content: The json content to check.
    :return: The added keywords, the incompatible keywords found and the schema tag.
    """
    # The file must be valid against Draft 4, 6 and 7 in order to search for keywords, because only documents
    # valid against these drafts are used. This script should only report the numbers for the keywords; the
    # filtering based on drafts is done in schema_drafts.
    try:
        Draft4Validator.check_schema(json_content)
        Draft6Validator.check_schema(json_content)
        Draft7Validator.check_schema(json_content)
    except Exception:
        return

    schema_tag = util.schema_tag(json_content)
    if schema_tag is not None and "/draft-04/" in schema_tag:
        # Draft-04 documents don't include keywords for Draft 6/7, because they are Draft 4...
        return

    draft4_to_draft7_added = []  # All keywords added from Draft 4 up to Draft 7
    draft4_to_draft7_added.extend(draft4_to_draft6_added)
    draft4_to_draft7_added.extend(draft6_to_draft7_added)

    addeds = []
    for keyword_not_in_draft4 in draft4_to_draft7_added:
        findings = _find_recursive_or_manually(json_content, keyword_not_in_draft4)
        if len(findings) > 0:
            # Found some new keyword
            for f in findings:
                addeds.append((keyword_not_in_draft4, f))

    # Filter "if" keywords when no "then" or "else" is present
    added_keywords = set(map(lambda data: data[0], addeds))
    if "if" in added_keywords and not ("then" in added_keywords or "else" in added_keywords):
        # "if" is present but no "then" or "else" - remove "if" from the list because the "if then else"
        # construct introduced in Draft 7 is not used; otherwise "then" or "else" would also be present.
        addeds = list(filter(lambda data: data[0] != "if", addeds))

    draft4_to_draft7_incompatibles = []  # All keywords made incompatible from Draft 4 up to Draft 7
    draft4_to_draft7_incompatibles.extend(draft4_to_draft6_incompatible)

    incompatibles = []
    for keyword_incompatible_to_draft4 in draft4_to_draft7_incompatibles:
        # Search for incompatible keywords
        findings = _find_recursive_or_manually(json_content, keyword_incompatible_to_draft4)
        if len(findings) > 0:
            # Found incompatible keywords
            for f in findings:
                incompatibles.append((keyword_incompatible_to_draft4, f))

    # Return a result only if something was found.
    if len(addeds) > 0 or len(incompatibles) > 0:
        return addeds, incompatibles, schema_tag
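# A minimal sketch of the module-level keyword lists the function above relies on. Their exact
# contents are an assumption based on the JSON Schema draft changelogs, not taken from the
# original module; they illustrate the shape the function expects (plain lists of keyword names).
draft4_to_draft6_added = ["const", "contains", "propertyNames", "examples"]
draft6_to_draft7_added = ["if", "then", "else", "contentMediaType", "contentEncoding",
                          "readOnly", "writeOnly"]
# Keywords whose meaning or spelling changed between Draft 4 and Draft 6
# (e.g. exclusiveMinimum/exclusiveMaximum switched from booleans to numbers, "id" became "$id").
draft4_to_draft6_incompatible = ["exclusiveMinimum", "exclusiveMaximum", "id"]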
def test_schema_datatype_validations(self, setup):
    self.logger.info("Test case test_schema_datatype_validations started")
    schema = {
        "type": "object",
        "properties": {
            "quota_max": {"type": "number"},
            "items": {
                "type": "array",
                "maxItems": 1,  # Check max items in array
                "items": {
                    "type": "object",
                    "properties": {
                        "badge_type": {"type": "string"},
                        "award_count": {"type": "number"},
                        "badge_id": {"type": "number"},
                    },
                    "required": ["badge_id"]
                }
            }
        },
        "required": ["quota_max"]
    }
    Draft6Validator.check_schema(schema)
    self.logger.info("Test case test_schema_datatype_validations finished")
def validate_schema(schemas_bundle, filename, schema_data):
    kind = ValidatedFileKind.SCHEMA

    logging.info('validating schema: {}'.format(filename))

    try:
        meta_schema_url = schema_data[u'$schema']
    except KeyError as e:
        return ValidationError(kind, filename, "MISSING_SCHEMA_URL", e)

    if meta_schema_url in schemas_bundle:
        meta_schema = schemas_bundle[meta_schema_url]
    else:
        meta_schema = fetch_schema(meta_schema_url)

    resolver = jsonschema.RefResolver(
        filename,
        schema_data,
        handlers=get_handlers(schemas_bundle)
    )

    try:
        jsonschema_validator.check_schema(schema_data)
        validator = jsonschema_validator(meta_schema, resolver=resolver)
        validator.validate(schema_data)
    except jsonschema.ValidationError as e:
        return ValidationError(kind, filename, "VALIDATION_ERROR", e,
                               meta_schema_url=meta_schema_url)
    except (jsonschema.SchemaError, jsonschema.exceptions.RefResolutionError) as e:
        return ValidationError(kind, filename, "SCHEMA_ERROR", e,
                               meta_schema_url=meta_schema_url)

    return ValidationOK(kind, filename, meta_schema_url)
def __try_reading_schema(self) -> Draft6Validator:
    try:
        schema = self._read_schema_file()
        Draft6Validator.check_schema(schema)
        return Draft6Validator(schema)
    except (FileNotFoundError, OSError, SchemaError, JSONDecodeError):
        raise InvalidSchemaError(
            'JSON schema validation error - please raise issue on github!')
def set_schemas(self, simple_jsonschema):
    self._schemas = {}
    for key, schema in simple_jsonschema.items():
        methods, view_name = key
        if isinstance(methods, tuple):
            for method in methods:
                schema_id = method.upper() + ':' + view_name
                self._schemas[schema_id] = Draft6Validator(schema)
        elif isinstance(methods, str):
            schema_id = methods.upper() + ':' + view_name
            self._schemas[schema_id] = Draft6Validator(schema)
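# A minimal sketch of the mapping set_schemas expects: keys are (HTTP method or tuple of
# methods, view name) and values are JSON schemas. The view names and schema bodies below are
# hypothetical, chosen only to show both the tuple and the string key variants.
simple_jsonschema = {
    (('post', 'put'), 'users:detail'): {
        'type': 'object',
        'properties': {'name': {'type': 'string'}},
        'required': ['name'],
    },
    ('get', 'users:list'): {
        'type': 'object',
        'properties': {'page': {'type': 'integer'}},
    },
}
# After set_schemas(simple_jsonschema), validators are addressable by "<METHOD>:<view_name>",
# e.g. self._schemas['POST:users:detail'].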
def test_dantimeta_datacite(schema, additional_meta, datacite_checks):
    """
    Checking datacite objects for specific metadata dictionaries,
    posting the datacite object and checking the status code.
    """
    dandi_id = f"DANDI:000{random.randrange(100, 999)}"

    # metadata without doi, datePublished and publishedBy
    meta_dict = {
        "identifier": dandi_id,
        "id": f"{dandi_id}/draft",
        "name": "testing dataset",
        "description": "testing",
        "contributor": [
            {
                "name": "A_last, A_first",
                "roleName": [RoleType("dandi:ContactPerson")],
            }
        ],
        "license": [LicenseType("spdx:CC-BY-4.0")],
    }
    meta_dict.update(_basic_publishmeta(dandi_id=dandi_id))
    meta_dict.update(additional_meta)

    # creating PublishedDandisetMeta from the dictionary
    meta = PublishedDandisetMeta(**meta_dict)

    # creating and validating datacite objects
    datacite = to_datacite(meta)
    Draft6Validator.check_schema(schema)
    validator = Draft6Validator(schema)
    validator.validate(datacite["data"]["attributes"])

    # checking some datacite fields
    attr = datacite["data"]["attributes"]
    for key, el in datacite_checks.items():
        el_len, el_flds = el
        if el_len:
            # checking length and some fields from the first element
            assert len(attr[key]) == el_len
            for k, v in el_flds.items():
                assert attr[key][0][k] == v
        else:
            if isinstance(el_flds, dict):
                for k, v in el_flds.items():
                    assert attr[key][k] == v
            else:
                assert attr[key] == el_flds

    # trying to post datacite
    datacite_post(datacite, meta.doi)
def homeloc():
    if request.method == "PUT":
        if request.is_json:
            schema = TD["properties"]["homeLoc"]
            valid_input = Draft6Validator(schema).is_valid(request.json)
            print(valid_input)
            if valid_input:
                data = request.get_data()
                json_data = json.loads(data)
                Robot.home_pos_x = json_data['x']
                Robot.home_pos_y = json_data['y']
                Robot.home_pos_z = json_data['z']
                return ("", 204)
            else:
                abort(400)
        else:
            abort(415)
    else:
        return_object = {
            "x": Robot.home_pos_x,
            "y": Robot.home_pos_y,
            "z": Robot.home_pos_z
        }
        return json.dumps(return_object), {'Content-Type': 'application/json'}
def __init__(self, filenames=None, schema=CfgSchema(), include_sections=None):
    super(EvkitConfigurations, self).__init__(description='Example: %(prog)s -s')
    if filenames is None:
        filenames = self.ROKIX_FILE_LOCATIONS

    if include_sections is None:
        include_sections = self.BASE_SECTIONS
    else:
        include_sections.extend(self.BASE_SECTIONS)

    self.schema = schema
    self.arg_validator = Draft6Validator(schema)
    self._schema_properties = self.schema['properties']
    self.evkit_config = ConfigParser(inline_comment_prefixes=(';', '#'),
                                     allow_no_value=True,
                                     delimiters=('='),
                                     dict_type=dict)
    self.config_file = self.evkit_config.read(filenames)
    self._config_options = []
    self._sections = self.evkit_config.sections()

    assert self.evkit_config.getint(
        'root', 'version'
    ) == 2, 'Invalid cfg file version on file %s' % self.config_file

    # Add base sections to cmd args
    for section in self._sections:
        if section in include_sections:
            self.add_section_args(section)
def setPixel():
    if request.is_json:
        schema = td["actions"]["setPixel"]["input"]
        valid_input = Draft6Validator(schema).is_valid(request.json)
        if valid_input:
            x = 5
            y = 5
            try:
                x = int(request.json["x"])
            except Exception as e:
                print(e)
            try:
                y = int(request.json["y"])
            except Exception as e:
                print(e)
            try:
                bright = float(request.json["brightness"])
                scrollphathd.clear()
                scrollphathd.show()
                scrollphathd.set_pixel(x, y, bright)
                scrollphathd.show()
                return "", 204
            except Exception as e:
                print(e)
                abort(400)
        else:
            abort(400)
    else:
        abort(415)  # Wrong media type.
def validator(data, raise_params_error):
    try:
        if draft_version == 4:
            Draft4Validator(schema, format_checker=format_checker).validate(data)
        elif draft_version == 6:
            Draft6Validator(schema, format_checker=format_checker).validate(data)
    except jsonschema.exceptions.ValidationError as e:
        if settings.DEBUG:
            print(json.dumps(data, ensure_ascii=False, indent=2))
        if raise_params_error:
            if e.context:
                errs = [
                    FieldErrorInfo(
                        '{}:{}'.format(format_path_queue(x.absolute_path),
                                       format_path_queue(x.schema_path)),
                        x.message)
                    for x in e.context
                ]
                raise InvalidParams(errs)
            else:
                error_path = format_path_queue(e.absolute_path)
                if error_path:
                    msg = '{}: {}'.format(error_path, e.message)
                else:
                    msg = e.message
                raise InvalidParams(msg)
        else:
            raise
    return data
def is_valid(self, adict, schema, messages=None, draft=4):
    def trace_error_value(error):
        if len(error.path) != 0:
            return (error.path[-1], error.message)
        return ('keyError', error.message)

    if draft == 4:
        self.__errors = dict(
            trace_error_value(e)
            for e in sorted(Draft4Validator(schema).iter_errors(adict),
                            key=exceptions.by_relevance()))
    if draft == 6:
        self.__errors = dict(
            trace_error_value(e)
            for e in sorted(Draft6Validator(schema).iter_errors(adict),
                            key=exceptions.by_relevance()))
    if draft == 7:
        self.__errors = dict(
            trace_error_value(e)
            for e in sorted(Draft7Validator(schema).iter_errors(adict),
                            key=exceptions.by_relevance()))

    if len(self.__errors) > 0 and messages:
        self.__errors = self.remap_error_message(self.__errors, messages)
    self.__data = adict if len(self.__errors) == 0 else []
    return len(self.__errors) == 0
def clearArea():
    if request.is_json:
        schema = td["actions"]["clearRect"]["input"]
        valid_input = Draft6Validator(schema).is_valid(request.json)
        if valid_input:
            x = 0
            y = 0
            w = 17
            h = 6
            try:
                x = int(request.json["x"])
            except Exception as e:
                print(e)
            try:
                y = int(request.json["y"])
            except Exception as e:
                print(e)
            try:
                w = request.json["width"]
            except Exception as e:
                print(e)
            try:
                h = request.json["height"]
            except Exception as e:
                print(e)
            scrollphathd.clear_rect(x, y, w, h)
            scrollphathd.show()
            return "", 204
        else:
            print("wrong input")  # log before aborting; abort() raises immediately
            abort(400)
    else:
        abort(415)  # Wrong media type.
def load_and_validate_schema(self):
    verbose = self.verbose
    if self.schema:
        return self.schema
    if not self.sch_file:
        try:
            sch = requests.get(MDFSCHEMA_URL)
            sch.raise_for_status()
            self.sch_file = sch.text
        except Exception as e:
            if self.verbose:
                print("Error in fetching mdf-schema.yml: \n{e}".format(e=e))
            raise e
    elif isinstance(self.sch_file, str):
        try:
            self.sch_file = open(self.sch_file, "r")
        except IOError as e:
            raise e
    else:
        pass

    try:
        print("Checking schema YAML =====")
        self.schema = yaml.load(self.sch_file, Loader=self.yloader)
    except ConstructorError as ce:
        if verbose:
            print("YAML error in MDF Schema '{fn}':\n{e}".format(fn=self.sch_file.name, e=ce))
        return ce
    except ParserError as e:
        if verbose:
            print("YAML error in MDF Schema '{fn}':\n{e}".format(fn=self.sch_file.name, e=e))
        return e
    except Exception as e:
        if verbose:
            print("Exception in loading MDF Schema yaml: {}".format(e))
        return e

    print("Checking as a JSON schema =====")
    try:
        d6.check_schema(self.schema)
    except SchemaError as se:
        if verbose:
            print("MDF Schema error: {}".format(se))
        raise se
    except Exception as e:
        if verbose:
            print("Exception in checking MDF Schema: {}".format(e))
        raise e

    return self.schema
def _jsonschema_errors(self):
    from django.conf import settings
    errors = []
    schemas = settings.SIMPLE_JSONSCHEMA
    for url, schema in schemas.items():
        try:
            Draft6Validator.check_schema(schema)
        except SchemaError as e:
            errors.append({
                'url': url,
                'error': e,
                'schema': json.dumps(schema, indent=4, sort_keys=True)
            })
    return errors
def validate(self):
    print("Checking schema =====")
    try:
        d6.check_schema(self.schema)
    except SchemaError as se:
        print(se)
        return se
    if self.instance:
        print("Checking instance ===")
        try:
            validate(instance=self.instance.as_dict(), schema=self.schema)
        except RefResolutionError as re:
            print(re)
            return re
        except ValidationError as ve:
            for e in d6(self.schema).iter_errors(self.instance.as_dict()):
                print(e)
            return ve
    return None
def validate_json(json: dict, schema: dict) -> bool:
    """Validate json against schema.

    :param json: ``dict`` json to be verified.
    :param schema: ``dict`` json schema draft 6.
    :returns: ``bool`` whether json is valid against the schema.
    """
    try:
        Draft6Validator.check_schema(schema)
    except SchemaError as err:
        log.debug(f'Schema does not conform to json schema draft 6: {err}')
        return False
    try:
        validate(instance=json, schema=schema)
    except ValidationError as err:
        log.debug(f'JSON does not conform to schema: {err}')
        return False
    return True
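# A short usage sketch for validate_json above; the schema and payloads are made up purely
# for illustration and are not part of the original module.
person_schema = {
    "$schema": "http://json-schema.org/draft-06/schema#",
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "age": {"type": "integer", "minimum": 0},
    },
    "required": ["name"],
}

assert validate_json({"name": "Ada", "age": 36}, person_schema) is True
assert validate_json({"age": -1}, person_schema) is False  # missing "name", negative "age"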
def _assert_schema(self, schema, reality):
    try:
        if "draft-06" in self.schema['$schema']:
            validator = Draft6Validator(schema, format_checker=FormatChecker())
        else:
            validator = Draft4Validator(schema, format_checker=FormatChecker())
        validator.validate(reality)
    except ValidationError as e:
        raise AssertionError(e)
def init_instrument_subscription(instrument_id: str, ws: WebSocket):
    instrument_data = {"type": "instrument", "id": instrument_id}
    ws.send(f'sub 1 {json.dumps(instrument_data)}')
    response = ws.recv()
    print(response)

    index = response.split(" ", 1)[0].strip()
    assert 1 == int(index)

    code = response.split(" ", 2)[1].strip()
    assert 'A' == code

    body = json.loads(response.split(" ", 2)[2])
    validator = Draft6Validator(instrument.INSTRUMENT_TOPIC_SCHEMA)
    validator.validate(body)
def beepwithtime():
    if request.is_json:
        schema = TD["actions"]["beepWithTime"]["input"]
        valid_input = Draft6Validator(schema).is_valid(request.json)
        if valid_input:
            ROS_beepwithtime(request.json)
            return ("", 204)
        else:
            abort(400)
    else:
        abort(415)
def test_datacite(dandi_id, schema):
    """Checking to_datacite for specific datasets."""
    # reading metadata taken from exemplary dandisets and saved in json files
    with (Path(__file__).with_name("data") / "metadata" / f"meta_{dandi_id}.json").open() as f:
        meta_js = json.load(f)

    # updating with basic fields required for PublishedDandisetMeta
    meta_js.update(
        _basic_publishmeta(
            dandi_id.replace("000", str(random.randrange(100, 999)))))
    meta = PublishedDandisetMeta(**meta_js)

    datacite = to_datacite(meta=meta)
    Draft6Validator.check_schema(schema)
    validator = Draft6Validator(schema)
    validator.validate(datacite["data"]["attributes"])

    # trying to post datacite
    datacite_post(datacite, meta.doi)
def turnright():
    [x, y, z] = ROS_getlocation()
    y_new = y - 1
    schema = TD["actions"]["turnRight"]["input"]["properties"]["y"]
    valid_input = Draft6Validator(schema).is_valid(y_new)
    if valid_input:
        # ROS
        ROS_turnright()
        return ("", 204)
    else:
        print("Reached Max Limit")
        abort(400)
def writeString():
    if request.is_json:
        schema = td["actions"]["writeString"]["input"]
        valid_input = Draft6Validator(schema).is_valid(request.json)
        if valid_input:
            dur = 5
            count = 0
            try:
                try:
                    dur = int(request.json["time"])
                except Exception as e:
                    print(e)
                Str = " " + str(request.json["string"])
                print(Str)
                x = int(request.json["x"])
                print(x)
                y = int(request.json["y"])
                print(y)
                bright = float(request.json["brightness"])
                scrollphathd.clear()
                scrollphathd.show()
                scrollphathd.write_string(Str, x, y, font=None, letter_spacing=1,
                                          brightness=bright, monospaced=True,
                                          fill_background=False)
                scrollphathd.flip(x=True, y=True)
                while count < dur * 10:
                    scrollphathd.show()
                    scrollphathd.scroll(1)
                    time.sleep(0.05)
                    count = count + 1
                scrollphathd.clear()
                scrollphathd.show()
                return "", 204
            except Exception as e:
                print(e)
                abort(400)
        else:
            print("wrong input")  # log before aborting; abort() raises immediately
            abort(400)
    else:
        abort(415)  # Wrong media type.
def _assert_schema(self, schema, reality):
    try:
        schema_version = self.schema['version']
        if schema_version == 'draft04':
            validator = Draft4Validator(schema, format_checker=FormatChecker())
        elif schema_version == 'draft06':
            validator = Draft6Validator(schema, format_checker=FormatChecker())
        else:
            raise RuntimeError("Unknown JSON Schema version "
                               "was given:\n%s" % (schema_version))
        validator.validate(reality)
    except ValidationError as e:
        raise AssertionError(e)
def validate_json(self):
    config_file_path = self.config_file
    s = schema.get_schema()
    errors = None
    if config_file_path is not None:
        with open(config_file_path) as json_file:
            try:
                instance = json.load(json_file)
            except json.JSONDecodeError:
                return ['file is not a JSON file']
            v = Draft6Validator(s)
            validation_errors = sorted(v.iter_errors(instance), key=str)
            errors = self._parse_errors(validation_errors)
        return errors
    else:
        AppLogging.error('No configuration file defined')
        raise Exception('No configuration file defined')
def gowithspeed():
    if request.is_json:
        schema = TD["actions"]["goWithSpeed"]["input"]
        valid_input = Draft6Validator(schema).is_valid(request.json)
        if valid_input:
            msg = {'x': 0, 'y': 0, 'z': 0}
            data = request.get_data()
            json_data = json.loads(data)
            msg['x'] = json_data['x']
            msg['y'] = json_data['y']
            msg['z'] = json_data['z']
            msg['speed'] = json_data['speed']
            ROS_gowithspeed(msg)
            return ("", 204)
        else:
            abort(400)
    else:
        abort(415)
def writeChar():
    if request.is_json:
        schema = td["actions"]["writeChar"]["input"]
        valid_input = Draft6Validator(schema).is_valid(request.json)
        if valid_input:
            o_x = 5
            o_y = 0
            Char = str(request.json["char"])
            try:
                o_x = int(request.json["o_x"])
            except Exception as e:
                print(e)
            try:
                o_y = int(request.json["o_y"])
            except Exception as e:
                print(e)
            bright = float(request.json["brightness"])
            scrollphathd.clear()
            scrollphathd.show()
            scrollphathd.draw_char(o_x, o_y, Char, font=None, brightness=bright, monospaced=True)
            scrollphathd.flip(x=True, y=True)
            scrollphathd.show()
            time.sleep(5)
            scrollphathd.clear()
            scrollphathd.show()
            return "", 204
        else:
            print("wrong input")  # log before aborting; abort() raises immediately
            abort(400)
    else:
        abort(415)  # Wrong media type.
def fillArea():
    if request.is_json:
        schema = td["actions"]["fill"]["input"]
        valid_input = Draft6Validator(schema).is_valid(request.json)
        if valid_input:
            x = 0
            y = 0
            w = 17
            h = 7
            try:
                x = int(request.json["x"])
            except Exception as e:
                print(e)
            try:
                y = int(request.json["y"])
            except Exception as e:
                print(e)
            try:
                w = request.json["width"]
            except Exception as e:
                print(e)
            bright = float(request.json["brightness"])
            try:
                h = request.json["height"]
            except Exception as e:
                print(e)
            scrollphathd.clear()
            scrollphathd.show()
            scrollphathd.fill(brightness=bright, x=x, y=y, width=w, height=h)
            scrollphathd.show()
            return "", 204
        else:
            print("wrong input")  # log before aborting; abort() raises immediately
            abort(400)
    else:
        abort(415)  # Wrong media type.
def test_api_field_schema(self):
    with open('schema/api-field.schema.json') as f:
        schema = json.load(f)
        Draft6Validator.check_schema(schema)
def test_api_class_schema(self):
    with open('schema/api-class.schema.json') as f:
        schema = json.load(f)
        Draft6Validator.check_schema(schema)
LOG = logging.getLogger(__name__)

DEFAULT_ENDPOINT = "http://127.0.0.1:3001"
DEFAULT_FUNCTION = "TestEntrypoint"
DEFAULT_REGION = "us-east-1"

OVERRIDES_VALIDATOR = Draft6Validator({
    "properties": {
        "CREATE": {"type": "object"},
        "UPDATE": {"type": "object"},
    },
    "anyOf": [{"required": ["CREATE"]}, {"required": ["UPDATE"]}],
    "additionalProperties": False,
})


def empty_override():
    return {"CREATE": {}}


@contextmanager
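# A hypothetical sanity check for the validator above, using the standard jsonschema
# is_valid() API; the payloads are invented for illustration and not from the original module.
assert OVERRIDES_VALIDATOR.is_valid({"CREATE": {"Name": "test"}})
assert not OVERRIDES_VALIDATOR.is_valid({"DELETE": {}})  # rejected by additionalProperties/anyOf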
DEFAULT_ENDPOINT = "http://127.0.0.1:3001"
DEFAULT_FUNCTION = "TypeFunction"
DEFAULT_REGION = "us-east-1"
DEFAULT_TIMEOUT = "30"
INPUTS = "inputs"

RESOURCE_OVERRIDES_VALIDATOR = Draft6Validator({
    "properties": {
        "CREATE": {"type": "object"},
        "UPDATE": {"type": "object"},
    },
    "anyOf": [{"required": ["CREATE"]}, {"required": ["UPDATE"]}],
    "additionalProperties": False,
})

HOOK_OVERRIDES_VALIDATOR = Draft6Validator({
    "properties": {
        "CREATE_PRE_PROVISION": {"type": "object"},
        "UPDATE_PRE_PROVISION": {
}

SETTINGS_VALIDATOR = Draft6Validator({
    "properties": {
        "language": {"type": "string"},
        "typeName": {"type": "string", "pattern": TYPE_NAME_REGEX},
        "runtime": {"type": "string", "enum": list(LAMBDA_RUNTIMES)},
        "entrypoint": {"type": ["string", "null"]},
        "testEntrypoint": {"type": ["string", "null"]},
        "settings": {"type": "object"},
    },
    "required": ["language", "typeName", "runtime", "entrypoint"],
    "additionalProperties": False,
})

BASIC_TYPE_MAPPINGS = {
def test_api_service_schema(self):
    with open('schema/api-service.schema.json') as f:
        schema = json.load(f)
        Draft6Validator.check_schema(schema)