def __init__(self, train_set):
    """Load and validate a training set stored as a .json file.

    Parameters:
        train_set (str): path to an existing ``.json`` file whose
            top-level object must contain a ``data`` array.

    Raises:
        TypeError: if ``train_set`` is not a string.
        ValueError: if the path does not end in ``.json``, does not
            exist, or the file's contents fail schema validation.
    """
    if not isinstance(train_set, str):
        raise TypeError(
            'Train set has to be path to .json file, '
            'not {}'.format(train_set)
        )
    if not train_set.endswith('.json') or not isfile(train_set):
        raise ValueError(
            'Train set has to be path to .json file, '
            'not {}'.format(train_set)
        )
    # Item-level schema kept on the instance (used elsewhere in the class).
    self.schema = {
        'type': 'array',
        'items': {
            'type': 'string'
        }
    }
    # Top-level shape the train file itself must have.
    schema = {
        'type': 'object',
        'required': ['data'],
        'properties': {
            'data': {'type': 'array'}
        }
    }
    with open(train_set, 'r', encoding='utf-8') as train_file:
        train_data = load(train_file)
    validator = Draft3Validator(schema)
    if not validator.is_valid(train_data):
        # BUG FIX: the error loop previously iterated errors from
        # self.schema (a different schema than the one that failed),
        # so the reported messages did not match the failure.  Reuse
        # the same validator for both the check and the report, and
        # raise with a readable message instead of a raw list.
        exception_data = ['Validation error in: {}'.format(train_data)]
        for error in sorted(validator.iter_errors(train_data), key=str):
            exception_data.append(error.message)
        raise ValueError('\n'.join(exception_data))
    self.train_set = train_set
def validate(self, schema=None):
    """Check that this object conforms to its JSON schema.

    Raises a `ScrapeValueError` listing every violation when the
    instance is invalid.  Falls back to ``self._schema`` when no schema
    is given.  The schemas are JSON Schema v3, where omitting
    ``required`` marks a field optional (see upstream TODO/FIXME).
    """
    active_schema = self._schema if schema is None else schema
    checker = Draft3Validator(
        active_schema,
        types={'datetime': (datetime.date, datetime.datetime)},
        format_checker=FormatChecker())
    problems = [str(err) for err in checker.iter_errors(self.as_dict())]
    if problems:
        raise ScrapeValueError('validation of {} {} failed: {}'.format(
            self.__class__.__name__, self._id,
            '\n\t' + '\n\t'.join(problems)))
def updateALL_product(uuid):
    """Replace (PUT) the product stored in redis under ``uuid``.

    Flow: decode the stored JWT, check the Authorization header against
    it, validate the request body against schema.txt, then write the
    new document back keyed by ``uuid`` with ETag handling.
    """
    # Debug: dump incoming request headers.
    for header in request.headers:
        print(header)
    data = request.get_json()
    # Round-trip through dumps/loads normalizes the payload to plain
    # JSON-compatible types (presumably defensive; a no-op for dicts).
    data = json.dumps(data)
    data = json.loads(data)
    # Stored record for this uuid — assumed to be a JSON string in redis.
    data2 = conn.get(uuid)
    data2 = json.loads(data2)
    print(data2)
    print()
    print(data)
    print()
    # Verify the stored token is still valid before allowing an update.
    try:
        jwt.decode(data2["token"], 'secret', leeway=10,
                   algorithms=['HS256'], verify=True)
    except jwt.ExpiredSignatureError:
        return 'Signature expired. Please log in again.'
    except jwt.InvalidTokenError:
        return 'Invalid token. Please log in again.'
    # except Exception as e:
    #     print("Token Expired")
    #     return jsonify({'Token Expired'})
    # The caller must present exactly the stored bearer token.
    encoded = 'Bearer ' + data2["token"]
    if (request.headers['Authorization'] != encoded):
        return "Authorization Error"
    # print(data)
    # print(type(data))
    # Loading Schema
    with open(r'''schema.txt''') as json_schema:
        schema = json.load(json_schema)
    #validating data against schema
    myJSONValidation = Draft3Validator(schema).is_valid(data)
    if (myJSONValidation == True):
        # Carry the immutable identity fields over from the stored copy.
        uniqueId = data2['uuid']
        uniqueId = str(uniqueId)
        data['uuid'] = uniqueId
        only_token = data2["token"]
        only_token = str(only_token)
        data['token'] = only_token
        data3 = json.dumps(data)
        if request.method == 'PUT':
            old_etag = request.headers.get('If-None-Match', '')
            # Generate hash
            # data_n = json.dumps(data2)
            # ETag is the MD5 of the serialized replacement document.
            new_etag = md5(data3.encode('utf-8')).hexdigest()
            if new_etag == old_etag:
                # Resource has not changed
                return '', 304
            else:
                conn.set(uuid, data3)
                return jsonify({'product': data}), 200, {'ETag': new_etag}
    # Fallback for non-PUT methods or failed validation.
    # NOTE(review): returning the literal 'test' looks like leftover
    # debug output — confirm intended response for this path.
    return 'test'
def test_rudimentary_ref_support(self):
    """$ref entries that point into a preloaded schema store resolve."""
    base = "http://www.example.com/schemas"
    schema = {
        "type": "object",
        "properties": {
            "nn": {"$ref": base + "#/neatoNumber"},
            "ss": {"$ref": base + "#/superString"},
        },
    }
    store = {
        base: {
            "neatoNumber": {"type": "number"},
            "superString": {"type": "string"},
        },
    }
    checker = Draft3Validator(schema, schema_store=store)
    # Matching types validate cleanly.
    checker.validate({"nn": 1})
    checker.validate({"ss": "hello"})
    # Mismatched types raise.
    with self.assertRaises(ValidationError):
        checker.validate({"nn": "hello"})
    with self.assertRaises(ValidationError):
        checker.validate({"ss": 1})
def verify(df, table_name):
    """Validate every row of ``df`` against the JSON schema registered
    for ``table_name`` in the module-level ``schemas`` mapping.

    BUG FIX (docs): the previous docstring described comparing row
    counts against a SQL Server table and claimed to return nothing;
    the code actually performs jsonschema validation per record and
    returns a bool.

    Parameters:
        df (pandas.DataFrame): data to check; NaNs are replaced with
            None first so the schema sees proper JSON nulls.
        table_name (str): schema key, also used to name the error file.

    Returns:
        bool: True when all rows validate; False otherwise, with the
        distinct errors written to
        /tmp/<table_name>_validation_errors.txt.
    """
    print("Verifying schema...")
    df = df.where(pd.notnull(df), None)
    v = Draft3Validator(schemas[table_name])
    # A set deduplicates identical error messages across rows.
    errors = set()
    for row in df.to_dict(orient='records'):
        for error in sorted(v.iter_errors(row), key=str):
            errors.add(str(error))
    if errors:
        print('Validation errors when running schema check on {}'.format(
            table_name))
        with open("/tmp/{}_validation_errors.txt".format(table_name),
                  'w+') as fp:
            for error in errors:
                fp.write("{}\n\n\n".format(error))
        return False
    return True
def validate_data(path_schema, path_event, path_log):
    """Validate every event file against every schema file.

    Loads all schemas from ``path_schema``, checks each JSON file in
    ``path_event`` against each schema, prints a README-style table of
    failures and also writes one line per failure to ``path_log``.
    """
    schemas = []
    with os.scandir(path_schema) as entries:
        for entry in entries:
            with open(os.path.join(path_schema, entry.name)) as file:
                loaded = json.load(file)
                schemas.append(dict(schema_file=entry.name, schema=loaded))
    log = []
    for item in schemas:
        with os.scandir(path_event) as entries:
            for entry in entries:
                with open(os.path.join(path_event, entry.name)) as f:
                    payload = json.load(f)
                    try:
                        Draft3Validator(item['schema']).validate(payload)
                    except jsonschema.exceptions.ValidationError as err:
                        log.append(dict(file=entry.name,
                                        schema_file=item['schema_file'],
                                        err=err.message))
    # Table for the README (column headers are user-facing, kept as-is).
    print('Файл данных | Файл схемы | Ошибки')
    print('----------- | ---------- | ------')
    for line in log:
        print(f"{line['file']} | {line['schema_file']} | {line['err']}")
    # Raw log entries, one per line.
    with open(path_log, 'w+') as file:
        for line in log:
            file.write(str(line) + '\n')
def validate_input(input, schema):
    """Return True if ``input`` validates against ``schema``, else False.

    Any exception raised by the validator (including schema errors) is
    printed and swallowed, yielding False.
    """
    try:
        # validate() raises on failure and returns None on success, so
        # reaching the return means the input is valid.  BUG FIX: the
        # previous version used Python 2 print-statement syntax (a
        # SyntaxError under Python 3) and stored validate()'s None
        # return in an unused variable.
        Draft3Validator(schema).validate(input)
        return True
    except Exception as e:
        print(str(e))
        return False
def post(self):
    """Treat the request body as a JSON schema and report whether an
    empty object fails to validate against it."""
    checker = Draft3Validator(json.loads(self.request.body))
    # any() short-circuits on the first error, matching the old
    # for/break pattern.
    has_error = any(True for _ in checker.iter_errors({}))
    self.write({"status": "WORKING", "errors": has_error})
def test_it_delegates_to_a_ref_resolver(self):
    """A supplied resolver is consulted exactly once for a $ref schema."""
    ref_resolver = mock.Mock()
    ref_resolver.resolve.return_value = {"type": "integer"}
    ref = mock.Mock()
    schema = {"$ref": ref}
    # None is not an integer, so resolution must happen and fail.
    with self.assertRaises(ValidationError):
        Draft3Validator(schema, resolver=ref_resolver).validate(None)
    ref_resolver.resolve.assert_called_once_with(schema, ref)
def is_rule_valid(rule):
    """Return True when ``rule`` satisfies ``rules_schema``.

    Any failure of the validation machinery itself is logged into the
    tracing span and reported as False; the span is always closed.
    """
    span_ctx = tracer.get_context(request_id=str(uuid4()), log_level="INFO")
    span_ctx.start_span(component=__name__)
    try:
        valid = Draft3Validator(rules_schema).is_valid(rule)
        return valid
    # TODO raise specific exception
    except Exception as exc:
        span_ctx.log(message=str(exc), obj={"tb": traceback.format_exc()})
        return False
    finally:
        span_ctx.end_span()
def validate_config(config):
    """Validate the JSON config identified by ``config`` against the
    module-level ``schema``, printing each parse or validation error
    prefixed by the filename.

    Parameters:
        config: identifier resolved to a path via get_config_path().
    """
    filename = get_config_path(config)
    try:
        # BUG FIX: the file handle was previously created inline
        # (json.load(open(filename))) and never closed; the
        # with-statement guarantees release.
        with open(filename) as fp:
            to_validate = json.load(fp)
    except ValueError as e:
        print('{}: {}'.format(filename, e))
        return
    validator = Draft3Validator(schema)
    for error in sorted(validator.iter_errors(to_validate), key=str):
        print('{}: {}'.format(filename, error))
def verifySchema(schema, instance):
    """Validate ``instance`` against ``schema``, logging context before
    re-raising any failure.

    Raises:
        ValidationError: when the data does not conform to the schema.
        SchemaError, UnknownType, TypeError: when the schema itself is
            malformed.
    """
    # BUG FIX: this block used Python 2 print statements, which are a
    # SyntaxError under Python 3; converted to print() calls.
    try:
        validator = Draft3Validator(schema)
        validator.validate(instance)
    except ValidationError as e:
        print('Data did not comply with jsonschema. Schema: "' +
              str(schema) + '"' +
              ' Response: "' + str(instance) + '"')
        raise e
    except (SchemaError, UnknownType, TypeError) as e:
        print('Error in the jsonschema. Schema: "' + str(schema) + '"')
        raise e
def hello_world():
    """Prompt for a JSON form file, validate it against the module-level
    ``schema`` (printing any violations), then generate the JS, DB and
    HTML artifacts from it."""
    text = input("Json File Name:")  # form1.json form2.json
    # BUG FIX: the input file was opened but never closed; a context
    # manager releases the handle even if parsing fails.
    with open(text, "r") as inp:
        data = json.loads(inp.read())
    instance = data
    v = Draft3Validator(schema)
    errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
    for error in errors:
        print(error.message)
    createjavascript(data)
    intiliazedbfile(data)
    createhtmlfile(data)
    # Prints a blank line and returns None (print() returns None) —
    # preserved from the original.
    return print()
def main():
    """Validate a directory of OCDS JSON files against the appropriate
    schema, embedding a 'validationErrors' string into each file in
    place (files are rewritten with the errors appended)."""
    usage = 'Usage: %prog [ --all --cont ]'
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('-f', '--filepath', action='store', default=None,
                      help='Path to files, e.g. paraguay/sample')
    parser.add_option('-v', '--version', action='store', default='1.1.0',
                      help='Version, e.g. 1.1.0')
    parser.add_option('-V', '--verbose', action='store_true', default=False,
                      help='Print verbose output')
    parser.add_option(
        '-t', '--type', action='store', default='release',
        help='File type: release-package, record-package or release')
    (options, args) = parser.parse_args()
    if not options.filepath:
        parser.error('You must supply a filepath, using the -f argument')
    schema = get_schema('%s-schema.json' % options.type, options.version)
    if options.type == 'record-package' and options.version == '1.1.0':
        # Fix v1.1 schema error - wrong item is required.
        schema['required'].remove('releases')
        schema['required'].append('records')
    count = 0
    files = glob.glob('%s/*.json' % options.filepath)
    for filename in files:
        count += 1
        # Progress marker every 1000 files.
        if not count % 1000:
            print('Validating file %s of %s' % (count, len(files)))
        if not filename.endswith('.json'):
            print('Skipping non-JSON file %s' % filename)
            continue
        with open(filename, 'r') as file:
            if options.verbose:
                print('\nValidating %s' % filename)
            try:
                data = json.load(file)
            except Exception as e:
                # Unparseable file: report and move on.
                print('Problem loading', filename)
                print(e)
                continue
            v = Draft3Validator(schema)
            errors = sorted(v.iter_errors(data), key=str)
            # Accumulate all errors into a single string stored in the
            # document itself.
            data['validationErrors'] = ''
            for error in errors:
                location = '/'.join(error.absolute_schema_path)
                message = "%s: %s\n" % (location, error.message)
                data['validationErrors'] += message
                if options.verbose:
                    print(message)
        # Rewrite the file with the embedded validation results.
        with open(filename, 'w') as writefile:
            writefile.write(json.dumps(data, indent=2))
def validator(self, data, schema):
    """Validate each key/value of ``data`` individually against ``schema``.

    Each entry is checked as its own one-key object, so a failure in
    one entry does not stop the others from being checked.

    Returns:
        ``(False, messages)`` when any item failed, where ``messages``
        lists the error strings; bare ``True`` when everything passed.
        NOTE(review): the success and failure paths return different
        shapes (bool vs tuple) — callers must handle both.
    """
    messages = []
    is_valid = True
    validator = Draft3Validator(schema)
    for i in data:
        try:
            validator.validate({str(i): data[i]})
        except Exception as e:
            # NOTE(review): e.message exists on jsonschema's
            # ValidationError, but this catches all exceptions — any
            # other type would raise AttributeError here; confirm.
            print(e.message)
            messages.append('ERROR: ' + e.message)
    if messages:
        is_valid = False
        return (is_valid, messages)
    return is_valid
def validate_json(event, schema):
    """Validate a message against a JSON schema.

    Arguments:
        event {str} -- Whole message content
        schema {dict} -- The json schema to use for validation (passed
            as a JSON string and parsed here)

    Raises:
        ValidationError: re-raised after logging when validation fails.
    """
    logger.debug("Validating message {}".format(event))
    parsed_schema = json.loads(schema)
    try:
        Draft3Validator(parsed_schema).validate(event)
    except ValidationError:
        logger.exception("Error in JSON data")
        raise
def importConfigJson(fullPath, pathToSchema=None):
    """
    Reads JSON formatted master configuration file

    :param fullPath: full path of the JSON file to load
    :type fullPath: str
    :param pathToSchema: optional path to an alternate schema module;
        when None the bundled appNetworkConfigSchema is used
    :raises: jsonschema.exceptions.ValidationError
    :returns: dictionary of master configuration information
    """
    # BUG FIX: the file handle leaked when validate() raised (close()
    # came after validation); read inside a context manager instead.
    with open(fullPath, 'r') as file:
        configDict = json.loads(file.read())
    if pathToSchema is None:
        validator = Draft3Validator(appNetworkConfigSchema.schema)
    else:
        schema = separatePathAndModule(pathToSchema)
        validator = Draft3Validator(schema)
    # this will raise jsonschema.exceptions.ValidationError exception
    validator.validate(configDict)
    return configDict
def validate(self, json, schema):
    """Validate a catalog entry via JSON Schema and collect the error
    paths (each reversed in place) rather than raising.

    .. seealso:: http://json-schema.org/
    .. seealso:: http://tools.ietf.org/html/draft-zyp-json-schema-03
    .. seealso:: https://github.com/json-schema/json-schema
    """
    checker = Draft3Validator(schema)
    collected = []
    for err in checker.iter_errors(json):
        err.path.reverse()
        collected.append(err.path)
    return collected
def validate_params(params, instance, action):
    """Validate ``params`` for ``action`` on ``instance`` against the
    application's API schema, if one is attached to the cherrypy root.

    Raises:
        InvalidParameter: wrapping the schema's error template when the
            request does not validate.
    """
    root = cherrypy.request.app.root
    if hasattr(root, 'api_schema'):
        api_schema = root.api_schema
    else:
        # No schema registered: nothing to validate.
        return
    operation = model_fn(instance, action)
    validator = Draft3Validator(api_schema, format_checker=FormatChecker())
    request = {operation: params}
    try:
        validator.validate(request)
    # BUG FIX: 'except ValidationError, e:' is Python 2-only syntax and
    # a SyntaxError under Python 3.
    except ValidationError as e:
        raise InvalidParameter(e.schema['error'], {'value': str(e.instance)})
async def post(self):
    """Create a posting; the incoming payload is validated with
    jsonschema before being inserted into the database."""
    payload = await self.request.json()
    checker = Draft3Validator(schema, format_checker=FormatChecker())
    # Guard clause: reject invalid payloads with per-field messages.
    if not checker.is_valid(payload):
        failures = {
            err.path.pop(): err.message
            for err in checker.iter_errors(payload)
        }
        return web.HTTPBadRequest(
            body=json.dumps({'errors': failures}),
            content_type='application/json')
    async with self.request.app['db'].acquire() as conn:
        cursor = await conn.execute(db.posts.insert().values(**payload))
        post_id = await cursor.fetchone()
        return web.HTTPCreated(body=json.dumps({'id': post_id[0]}),
                               content_type='application/json')
def validate_json(json_data, f):
    """Validate ``json_data`` against the schema named by its
    'SchemaName' key.

    REF: https://json-schema.org/

    Returns:
        (ok, message, error_text) where error_text holds one
        "path: message" line per violation.
    """
    checker = Draft3Validator(get_schema(json_data['SchemaName']))
    error_lines = []
    for err in sorted(checker.iter_errors(json_data), key=str):
        location = ''.join([str(elem) + " " for elem in err.path])
        error_lines.append(location + ": " + err.message + "\n")
    error_text = ''.join(error_lines)
    if error_lines:
        return False, "Given JSON data is InValid", error_text
    return True, "Given JSON data is Valid", error_text
def validate_received_responses(schema_loc, response_loc, array=False):
    """Validate saved API response files against a swagger-derived JSON
    schema, writing any non-null-related errors to validation_errors/.

    Parameters:
        schema_loc: key into the module-level ``definitions`` schema map.
        response_loc: glob pattern matching response JSON files.
        array: when True, validate only the first element of each file
            (or {} when the file holds an empty list).
    """
    # assuming 10to1 will do tests to validate api output against swagger defined json schema,
    # TODO it turns out they don't, so I'll do it myself
    for response in glob(response_loc):
        test_name = splitext(basename(response))[0]
        # BUG FIX: open()'s third positional argument is 'buffering',
        # so open(path, "r", "utf-8") raised TypeError; the encoding
        # must be passed by keyword.
        with open(response, "r", encoding="utf-8") as f:
            v = Draft3Validator(definitions[schema_loc])
            full_doc = load(f)
            if array:
                doc = full_doc[0] if len(full_doc) > 0 else {}
            else:
                doc = full_doc
            validation_errors = sorted(v.iter_errors(doc), key=str)
            # Nullable fields are expected to fail the type check;
            # drop those and keep everything else.
            filtered_errors = []
            for error in validation_errors:
                if "None is not of type" not in error.message:
                    filtered_errors.append(str(error))
            if len(filtered_errors) > 0:
                with open("validation_errors/" + test_name + ".txt", "w",
                          encoding="utf-8") as out:
                    out.write(
                        "\n\n--------------------------------------\n\n".join(
                            [error for error in filtered_errors]))
def test_property_get(self):
    """Print (rather than assert) any validation errors for a sample
    rpc_get_node_info payload against SCHEMA_OUTGOING_RPC."""
    endpoint = 'tcp://127.0.0.1:1337/0'
    sample = {
        'successor': {
            'node_id': 78,
            'node_address': endpoint,
        },
        'predecessor': {
            'node_id': 116,
            'node_address': 'tcp://127.0.0.1:1338/0',
        },
        'node_id': 78,
        'node_address': endpoint,
    }
    checker = Draft3Validator(SCHEMA_OUTGOING_RPC["rpc_get_node_info"])
    for problem in sorted(checker.iter_errors(sample),
                          key=lambda e: e.path):
        print(problem.message)
def upload_file():
    """Create a product document from the request JSON.

    Validates the payload against usecase_schema.json, flattens nested
    dicts (and dicts inside lists) into separate redis entries keyed by
    fresh UUIDs, mints a JWT for the record, and stores the top-level
    document under its own UUID.
    """
    # Debug: dump incoming request headers.
    for header in request.headers:
        print(header)
    data = request.get_json()
    with open(r'''usecase_schema.json''') as json_schema:
        schema = json.load(json_schema)
    myJSONValidation = Draft3Validator(schema).is_valid(data)
    if (myJSONValidation == True):
        # Replace nested structures with UUID references; the nested
        # objects themselves are stored separately in redis.
        for key, value in data.items():
            if type(value) is dict:
                dUId = uuid.uuid4()
                dUId = str(dUId)
                value['uuid'] = dUId
                value2 = json.dumps(value)
                conn.set(dUId, value2)
                print('Dict', key)
                print(conn.get(dUId))
                print()
                #conn.set(key, dUId)
                data[key] = dUId
                # print(key)
                # print(dUId)
            elif type(value) is list:
                print("Inside list", key)
                for i, x in enumerate(value):
                    if (type(x) is dict):
                        sUId = uuid.uuid4()
                        sUId = str(sUId)
                        x['uuid'] = sUId
                        x2 = json.dumps(x)
                        conn.set(sUId, x2)
                        print('Dict', i)
                        print(conn.get(sUId))
                        print()
                        value[i] = sUId
            else:
                # Scalars are mirrored into redis under their own key.
                conn.set(key, value)
        #
        #
        #
        uniqueId = uuid.uuid4()
        # Token valid for 10 hours (36000 seconds).
        encoded = jwt.encode(
            {
                'exp': datetime.datetime.utcnow() +
                datetime.timedelta(seconds=36000)
            }, 'secret')
        #encoded=str(encoded)
        token = encoded.decode('utf-8')
        print(type(token))
        response = make_response(jsonify(data), 200)
        # NOTE(review): this hashes the literal string "data", not the
        # payload — the ETag is therefore constant; confirm intent.
        response.headers["ETag"] = str(
            hashlib.sha256("data".encode('utf-8')).hexdigest())
        response.headers["Cache-Control"] = "private, max-age=300"
        print('etag', response.headers["ETag"])
        #return response
        uniqueId = str(uniqueId)
        data['token'] = token
        data['uuid'] = uniqueId
        #data['etag'] = response.headers["ETag"]
        data2 = json.dumps(data)
        conn.set(uniqueId, data2)
        print()
        print(uniqueId, ":")
        print('token', '\n', token)
        print(conn.get(uniqueId))
        # Returns the serialized document (a JSON string) under 'product'.
        return jsonify({"product": data2})
    else:
        return "JSON was not validated by the schema"
def updateProduct(myuuid):
    """Update (PUT) the product stored in redis under ``myuuid``.

    Flow: verify the stored JWT and the Authorization header, validate
    the request body against usecase_schema.json, re-inflate the
    stored document's UUID references from redis, merge/re-flatten the
    incoming nested structures back into per-UUID redis entries, then
    write the updated top-level document with ETag handling.
    """
    data = request.get_json()
    # Round-trip normalizes the payload to plain JSON types.
    data = json.dumps(data)
    data = json.loads(data)
    data2 = conn.get(myuuid)
    data2 = json.loads(data2)
    print(data2, type(data2))
    # Verify the stored token is still valid before allowing an update.
    try:
        jwt.decode(data2["token"], 'secret', leeway=10,
                   algorithms=['HS256'], verify=True)
    except jwt.ExpiredSignatureError:
        return 'Signature expired. Please log in again.'
    except jwt.InvalidTokenError:
        return 'Invalid token. Please log in again.'
    # except Exception as e:
    #     print("Token Expired")
    #     return jsonify({'Token Expired'})
    encoded = 'Bearer ' + data2["token"]
    print("Encoded ", encoded)
    print(type(encoded))
    print(request.headers["Authorization"])
    print(type(request.headers["Authorization"]))
    if (request.headers['Authorization'] != encoded):
        return "Authorization Error"
    print(data)
    print(type(data))
    #Loading Schema
    with open(r'''usecase_schema.json''') as json_schema:
        schema = json.load(json_schema)
    #validating data against schema
    myJSONValidation = Draft3Validator(schema).is_valid(data)
    if (myJSONValidation == True):
        # Re-inflate UUID references in the stored document back into
        # their nested objects so shapes match the incoming payload.
        for key, value in data2.items():
            #print(value)
            if conn.exists(value) and str(key) not in ['uuid']:
                #print("Value exists in redis")
                redisValue = conn.get(value)
                redisValue = json.loads(redisValue)
                data2[key] = redisValue
            elif type(value) is list:
                #print("Inside list",key)
                for i, x in enumerate(value):
                    #print(x)
                    # NOTE(review): 'key is not "uuid"' is an identity
                    # comparison with a string literal — its result is
                    # implementation-dependent; '!=' was almost
                    # certainly intended.
                    if (conn.exists(x) and key is not 'uuid'):
                        #print("Dict exists in redis")
                        redisValue = conn.get(x)
                        redisValue = str(redisValue)
                        value[i] = redisValue
            else:
                conn.set(key, value)
        #
        #
        #
        #conn.hmset(uuid, data)
        #print()
        #print(uuid , ":")
        #print(conn.hgetall(uuid))
        data2 = json.dumps(data2)
        data2 = json.loads(data2)
        data = json.dumps(data)
        data = json.loads(data)
        print()
        print("Data after get", data2)
        print()
        keyCount = 0
        # Walk incoming and stored documents in lockstep, re-flattening
        # nested dicts into redis and replacing them with UUID refs.
        # NOTE(review): zip over two dicts assumes identical key order —
        # true only if both documents share insertion order; confirm.
        for (keyData, valueData), (keyRedis, valueRedis) in zip(
                data.items(), data2.items()):
            if type(valueData) is dict and keyData in data2:
                # Existing nested object: reuse its stored UUID.
                dUId = valueRedis['uuid']
                dUId = str(dUId)
                valueData['uuid'] = dUId
                value2 = json.dumps(valueData)
                conn.set(dUId, value2)
                print('Dict', keyData)
                print(conn.get(dUId))
                print()
                #conn.set(key, dUId)
                data[keyData] = dUId
            elif type(valueData) is dict and keyData not in data:
                # New nested object: mint a fresh UUID.
                dUId = uuid.uuid4()
                dUId = str(dUId)
                valueData['uuid'] = dUId
                value2 = json.dumps(valueData)
                conn.set(dUId, value2)
                print('Dict', keyData)
                print(conn.get(dUId))
                print()
                #conn.set(key, dUId)
                data[keyData] = dUId
            elif type(valueData) is list and type(valueRedis) is list:
                for (iData, xData), (iRedis, xRedis) in zip(
                        enumerate(valueData), enumerate(valueRedis)):
                    #print(xRedis)
                    # Stored list items are stringified dicts; parse
                    # them back into Python objects.
                    xRedis = ast.literal_eval(xRedis)
                    if (type(xData) is dict and
                            len(xRedis) - len(xData) == 1):
                        print("Same Data")
                        sUId = xRedis['uuid']
                        sUId = str(sUId)
                        xData['uuid'] = sUId
                        x2 = json.dumps(xData)
                        conn.set(sUId, x2)
                        print('Dict', iData)
                        print(conn.get(sUId))
                        print()
                        valueData[iData] = sUId
                        keyCount = keyCount + 1
                    else:
                        print("Different Data")
                        sUId = uuid.uuid4()
                        sUId = str(sUId)
                        valueData[keyCount]['uuid'] = sUId
                        x2 = json.dumps(xData)
                        conn.set(sUId, x2)
                        print('Dict', iData)
                        print(conn.get(sUId))
                        print()
                        valueData[keyCount] = sUId
        # Carry the immutable identity fields over from the stored copy.
        uniqueId = data2['uuid']
        uniqueId = str(uniqueId)
        data['uuid'] = uniqueId
        only_token = data2["token"]
        only_token = str(only_token)
        data['token'] = only_token
        data3 = json.dumps(data)
        if request.method == 'PUT':
            old_etag = request.headers.get('If-None-Match', '')
            # Generate hash
            #data = json.dumps(data3)
            new_etag = md5(data3.encode('utf-8')).hexdigest()
            if new_etag == old_etag:
                # Resource has not changed
                return '', 304
            else:
                conn.set(uniqueId, data3)
                print()
                #print(uniqueId , ":")
                print("Unique ID", uniqueId)
                print("Data after update:")
                print(data3)
                # Resource has changed, send new ETag value
                return jsonify({'product': data3}), 200, {'ETag': new_etag}
                # return jsonify(data)
    else:
        return "JSON was not validated by the schema"
# Fragment of a larger loop (the surrounding iteration over file_schema is
# outside this view): validate every *.json file against the current
# *.schema file and append failures to README.md.
if file_schema.find(".schema") > 0:  # next file from the pile of *.schema files
    with open(file_schema) as f:
        cur_schema = json.load(f)  # open it and keep as the current JSON schema
    for file_json in file_list:
        if file_json.find(".json") > 0:  # look for *.json in the same pile
            with open(file_json) as f:
                cur_json = json.load(f)  # open it to run validity checks against the current schema
            instance = cur_json
            v = Draft3Validator(cur_schema)
            errors = sorted(
                v.iter_errors(instance),
                key=lambda e: e.path)  # collect validity errors, if any
            for error in errors:
                # NOTE(review): the log file is reopened per error in
                # append mode and never closed — consider one
                # with-statement around the loop.
                log_file = open('README.md', 'a')
                # Log only the basename portion after the last backslash
                # (Windows-style paths assumed).
                log_file.write(
                    file_json[file_json.rfind("\\"):len(file_json)] +
                    " ---> ")
                log_file.write(
                    file_schema[file_schema.
                                rfind("\\"):len(file_schema)] +
                    ":\n\n")
                log_file.write("\tError :" + error.message + "\n\n\n")
def validate(self):
    """Check ``self.fields`` against the module-level schema; the
    validator raises on failure and returns None on success."""
    checker = Draft3Validator(schema)
    checker.validate(self.fields)
def validate(blob):
    """Validate ``blob`` against the module-level schema.

    Returns:
        (ok, errors): ok is True when no violations were found; errors
        is the list of violation message strings, sorted for stability.
    """
    checker = Draft3Validator(schema)
    messages = [
        '{}'.format(err.message)
        for err in sorted(checker.iter_errors(blob), key=str)
    ]
    return (not bool(messages)), messages
from jsonschema import Draft3Validator, RefResolver
from urllib.parse import urljoin
from urllib.request import pathname2url


def path2url(path):
    """Convert a filesystem path into a file:// URL."""
    return urljoin('file:', pathname2url(path))


# Command-line entry: validate sys.argv[1] (data) against sys.argv[2]
# (schema); exits 1 on any failure, 0 on success.
print('-- validate json file')
jsonFileName = sys.argv[1]
schemaFileName = sys.argv[2]
try:
    with open(schemaFileName) as schemaFile:
        with open(jsonFileName) as jsonFile:
            schema = json.load(schemaFile)
            # Resolve relative $refs against the schema/ directory that
            # sits next to the schema file.
            uri = path2url('%s/schema/' %
                           path.abspath(path.dirname(schemaFileName)))
            resolver = RefResolver(uri, referrer=schema)
            instance = json.load(jsonFile)
            Draft3Validator(schema, resolver=resolver).validate(instance)
except Exception as e:
    # Broad catch is deliberate at this top-level boundary: any I/O,
    # parse, or validation failure yields a message and exit code 1.
    print('validation error: ' + jsonFileName + ' ' + schemaFileName +
          ' (' + str(e) + ')')
    sys.exit(1)
sys.exit(0)
def soft_validate_jsonschema(response, schema):
    """Collect (rather than raise) every schema violation in ``response``.

    Returns the list of validation error objects; empty when valid.
    """
    return list(Draft3Validator(schema).iter_errors(response))