def collate(self):
    """Combine the output from all runs associated with the current app.

    Scans every ENCODED run of the active app, asks the decoder whether the
    simulation has finished, parses the output of finished runs (optionally
    validating it against the app's 'decoderspec' schema) and stores the
    results in the campaign database.

    Returns
    -------
    int
        The number of runs collated in this pass.

    Raises
    ------
    RuntimeError
        If a decoder output fails schema validation.
    """
    app_id = self._active_app['id']
    decoder = self._active_app_decoder
    processed_run_IDs = []
    processed_run_results = []
    for run_id, run_info in self.campaign_db.runs(
            status=constants.Status.ENCODED, app_id=app_id):
        # use decoder to check if run has completed (in general application-specific)
        if decoder.sim_complete(run_info=run_info):
            # get the output of the simulation from the decoder
            run_data = decoder.parse_sim_output(run_info=run_info)
            if self._active_app['decoderspec'] is not None:
                v = Validator()
                v.schema = self._active_app['decoderspec']
                if not v.validate(run_data):
                    # BUG FIX: corrected message typo ("of he decoder").
                    raise RuntimeError(
                        "the output of the decoder failed to validate: {}".
                        format(run_data))
            processed_run_IDs.append(run_id)
            processed_run_results.append(run_data)
    # update run statuses to "collated"
    # NOTE(review): this call is commented out in the original, so runs are
    # never marked COLLATED here — confirm whether that is intentional.
    # self.campaign_db.set_run_statuses(processed_run_IDs, constants.Status.COLLATED)
    # add the results to the database
    self.campaign_db.store_results(
        self._active_app_name,
        zip(processed_run_IDs, processed_run_results))
    return len(processed_run_IDs)
def get_data(**args):
    """Fetch India statewise COVID-19 numbers, validate every record, and
    push a CSV snapshot to HDFS via Spark.

    NOTE(review): ``args`` is accepted but never used; the function relies
    on module-level ``today``, ``spark`` and ``pd`` — confirm with caller.
    """
    with request.urlopen('https://api.covid19india.org/data.json') as response:
        source = response.read()
        data = json.loads(source)

    statewise_dict = data['statewise']

    # Every record returned by the API must carry these string fields.
    record_rules = {
        'active': {'required': True, 'type': 'string'},
        'confirmed': {'required': True, 'type': 'string'},
        'deaths': {'required': True, 'type': 'string'},
        'recovered': {'required': True, 'type': 'string'},
        'deltaconfirmed': {'required': True, 'type': 'string'},
        'deltadeaths': {'required': True, 'type': 'string'},
        'deltarecovered': {'required': True, 'type': 'string'},
        'lastupdatedtime': {'required': True, 'type': 'string'},
        'migratedother': {'required': True, 'type': 'string'},
        'statecode': {'required': True, 'type': 'string'},
        'statenotes': {'required': True, 'type': 'string'},
        'state': {'required': True, 'type': 'string'},
    }
    checker = Validator()
    checker.schema = record_rules

    for record in statewise_dict:
        if not checker.validate(record):
            print(checker.errors)
            raise ValueError('API Data Not Valid')
    print('API Data is valid')

    frame = pd.DataFrame(
        statewise_dict,
        columns=['active', 'confirmed', 'deaths', 'recovered', 'state'])
    # Stamp every row with today's date.
    frame['date'] = [today] * len(frame.index)

    sdf = spark.createDataFrame(frame)
    sdf.write.mode("overwrite").csv(
        "hdfs://localhost:9000/user/nineleaps/covid_data.csv")
    print("Covid Statewise Data CSV is uploaded to HDFS")
def validate_disciplinas_path(data):
    """Check that *data* carries a 'disc_ids' field (string or list).

    Returns None when valid, otherwise a (json-error, 400) tuple.
    """
    checker = Validator()
    checker.schema = {
        'disc_ids': {'required': True, 'type': ['string', 'list']}
    }
    if checker.validate(data):
        return None
    return jsonify(error=checker.errors), 400
def add_posts_like_list():
    """Add a 'like' to the first media item and verify that the API reply
    contains an integer 'likes' counter under 'response'."""
    query = {
        "type": f"{media_types[0]}",
        "owner_id": f"{owner_id}",
        "item_id": f"{item_id[0]}",
        "filter": "likes",
        "friends_only": "1",
        "offset": "100",
        "count": "100",
        "access_token": f"{acces_token}",
        "v": "5.103"
    }
    response = Client.post(add_likes, query)
    check_status_code_200(response)
    print(response.json())

    shape = Validator()
    shape.schema = {
        "response": {
            "type": "dict",
            "schema": {
                "likes": {
                    "type": "integer"
                }
            }
        }
    }
    if shape.validate(response.json()):
        print("valid data")
    else:
        print('invalid data')
        print(shape.errors)
def _response_invalid_body(body, schema):
    """Assert that *body* does NOT conform to *schema*.

    Prints the Cerberus error report for diagnostics.
    """
    v = Validator()
    v.schema = schema
    # BUG FIX: the original called v.validate(body) twice back-to-back and
    # then asserted "invalid data" != "valid data", which is always true and
    # therefore verified nothing. Validate once and assert invalidity.
    is_valid = v.validate(body)
    print(v.errors)
    assert not is_valid, "expected the body to fail schema validation"
def validate(data):
    """Check that *data* provides the required 'nome' and 'conteudo' strings.

    Returns None when valid, otherwise a (json-error, 400) tuple.
    """
    rules = {
        'nome': {'required': True, 'type': 'string'},
        'conteudo': {'required': True, 'type': 'string'},
    }
    checker = Validator()
    checker.schema = rules
    if checker.validate(data):
        return None
    return jsonify(error=checker.errors), 400
def validator(self, schema=None, data=None):
    """Validate *data* against *schema* with Cerberus.

    Note the quirk preserved from the original contract: when both
    arguments are supplied, the second tuple element is the error dict
    (empty when valid); when either is missing it is the literal True.
    """
    if not (schema and data):
        return data, True
    checker = Validator()
    checker.schema = schema
    checker.validate(data)
    return data, checker.errors
def assertResponseStructure(
    self,
    http_code,
    method=None,
    headers=None,
    payload=None,
    content="application/json",
    parameters_url=None,
):
    """Issue an HTTP request and assert both the status code and the
    schema conformance of the request payload and the JSON response.

    Fails the test when the content/response schemas are not defined in
    ``self.structure`` or when either document fails validation.
    Returns the ``requests`` response object.
    """
    # BUG FIX: the original used mutable default arguments ({}), which are
    # shared across calls; use None sentinels and materialize fresh dicts.
    payload = {} if payload is None else payload
    parameters_url = {} if parameters_url is None else parameters_url
    contentValidator = Validator()
    try:
        contentValidator.schema = self.structure.getContentValidator(content)
    except Exception as ex:
        self.fail(
            "Not defined content for %s - in file %s\nError: %s"
            % (content, self.structure.name_file, ex)
        )
    http_verb = self.http_verb(method)
    # Dispatch to requests.get/post/... according to the resolved verb.
    r = getattr(requests, http_verb)(
        url=self.url(parameters_url), headers=headers, json=payload
    )
    self.assertEqual(http_code, r.status_code)
    responseValidator = Validator()
    try:
        responseValidator.schema = self.structure.getResponseValidator(
            http_verb, http_code
        )
    except Exception as ex:
        self.fail(
            "Not defined the schema for %s - %d in file %s\nError: %s"
            % (http_verb, http_code, self.structure.name_file, ex)
        )
    j = r.json()
    # An empty schema skips that side of the check entirely.
    if (responseValidator.schema and not responseValidator.validate(j)) or (
        contentValidator.schema and not contentValidator.validate(payload)
    ):
        self.fail(
            "content_errors:%s\nresponse_errors:%s"
            % (contentValidator.errors, responseValidator.errors,)
        )
    return r
def _response_body_check(body, schema):
    """Assert that *body* conforms to *schema*.

    Prints the Cerberus error report for diagnostics.
    """
    v = Validator()
    v.schema = schema
    # BUG FIX: the original called v.validate(body) twice back-to-back;
    # validate once and reuse the verdict.
    is_valid = v.validate(body)
    print(v.errors)
    if is_valid:
        a = "valid data"
        assert a == "valid data"
    else:
        b = "invalid data"
        assert b == "valid data"
def simulate():
    """Compute the next payment for the JSON document in the 'payload'
    form field and return a JSON success envelope.

    Raises payLoadIsMissing when the field is absent and malformedJson
    when it is not valid JSON.
    """
    # NOTE(review): this validator is configured but never consulted —
    # sibling endpoints call ``v(payload)`` before processing; confirm
    # whether validation was intended here as well.
    v = Validator()
    v.schema = payload_input_schema
    payload = request.form.get('payload', None)
    if not payload:
        raise payLoadIsMissing('There is no payload', status_code=500)
    try:
        payload = json.loads(payload)
    except ValueError:
        # BUG FIX: narrowed from a bare ``except:`` (which also swallowed
        # SystemExit/KeyboardInterrupt); json.loads raises ValueError.
        raise malformedJson("Payload present but malformed: {}".format(payload))
    res = next_payment.next(payload)
    result = dict(success=True, payload=res)
    result = json.dumps(result)
    return result
def validate_all(data):
    """Validate a full course document: name, workload split (pratica /
    teoria), semester, syllabus ('ementa') and bibliography fields.

    Returns None when valid, otherwise a (json-error, 400) tuple.
    """
    rules = {
        'nome': {'required': True, 'type': 'string'},
        'pratica': {
            'required': True,
            'type': 'integer',
            'allowed': [0, 20, 40, 80],
        },
        'teoria': {
            'required': True,
            'type': 'integer',
            'allowed': [0, 20, 40, 80],
        },
        'semestre': {'required': True, 'type': 'integer'},
        'ementa': {
            'required': True,
            'type': 'dict',
            'schema': {
                'descricao': {'type': 'string'},
                'conteudo': {'type': ['string', 'list']},
                'competencias': {'type': ['string', 'list']},
                'objetivos': {'type': ['string', 'list']},
            },
        },
        'basica': {'type': ['string', 'list']},
        'complementar': {'type': ['string', 'list']},
    }
    checker = Validator()
    checker.schema = rules
    if checker.validate(data):
        return None
    return jsonify(error=checker.errors), 400
def simulate():
    """Build a project diagram from the JSON document in the 'payload'
    form field and return a JSON success envelope.

    Raises payLoadIsMissing / malformedJson / payloadNotMatchingSchema on
    the corresponding failure.
    """
    v = Validator()
    v.schema = payload_input_schema
    payload = request.form.get('payload', None)
    #LOG.console(payload)
    if not payload:
        raise payLoadIsMissing('There is no payload', status_code=500)
    try:
        payload = json.loads(payload)
    except ValueError:
        # BUG FIX: narrowed from a bare ``except:`` (which also swallowed
        # SystemExit/KeyboardInterrupt); json.loads raises ValueError.
        raise malformedJson("Payload present but malformed: {}".format(payload))
    # Calling the validator object validates against its .schema.
    if v(payload):
        res = project_diagram.diagram(payload)
        res = dict(success=True,payload=res)
        return json.dumps(res)
    else:
        raise payloadNotMatchingSchema("Payload didn't match schema ({}\n{})".format(payload_input_schema, v.errors))
def validate_user_create(req, res, resource=None, params=None):
    """Validate the user-creation payload in ``req.context['data']``.

    Raises InvalidParameterError carrying the field errors when validation
    fails, or a generic message when Cerberus raises ValidationError.
    """
    schema = {
        'username': FIELDS['username'],
        'email': FIELDS['email'],
        'password': FIELDS['password'],
        'role': FIELDS['role'],
        'phone': FIELDS['phone'],
        'is_active': FIELDS['is_active']
    }
    v = Validator(schema)
    # BUG FIX: the original immediately reassigned ``v.schema = {}``, wiping
    # the schema built above so every payload validated successfully.
    v.allow_unknown = True
    try:
        if not v.validate(req.context['data']):
            raise InvalidParameterError(v.errors)
    except ValidationError:
        raise InvalidParameterError('Invalid Request %s' % req.context)
def get(self):
    """Parse the 'inputs' argument, validate it against the module-level
    schema and, when valid, run the sizer over a canned default input."""
    checker = Validator()
    checker.schema = schema
    args = self.parser.parse_args()
    # The request carries the document as a Python-literal string.
    document = ast.literal_eval(args["inputs"])
    print(document)
    if not checker.validate(document, schema):
        return {"description": checker.errors}, 400
    # NOTE(review): the validated document is not used — the sizer always
    # runs on this default configuration; confirm that is intentional.
    default_input = {
        "applianceModelFull": "SG6060 (58x10TB FIPS)",
        "storageNeededInTb": 5000,
        "avgObjectSize": 4,
        "smallObjectIngestRateObjps": 1800,
        "largeObjectIngestThrouhputInMbps": 900,
        "hwLevelDataProtection": "DDP8",
        "iLMRuleApplied": "2-site: 2 replicas"
    }
    sizer = Sizer(default_input)
    return sizer.size()
def simulate(indicator):
    """Look up an economic *indicator* ('interest' or 'inflation') for the
    country named in the 'payload' form field.

    Raises payLoadIsMissing / malformedJson / indicatorNotPresent /
    payloadNotMatchingSchema on the corresponding failure; otherwise
    returns a JSON success envelope.
    """
    v = Validator()
    v.schema = payload_input_schema
    payload = request.form.get('payload', None)
    LOG.console(payload)
    if not payload:
        raise payLoadIsMissing('There is no payload', status_code=500)
    try:
        payload = json.loads(payload)
    except ValueError:
        # BUG FIX: narrowed from a bare ``except:`` (which also swallowed
        # SystemExit/KeyboardInterrupt); json.loads raises ValueError.
        raise malformedJson("Payload present but malformed: {}".format(payload))
    # Calling the validator object validates against its .schema.
    if v(payload):
        if indicator == 'interest':
            data = indicators.interest_rate(payload['country'])
        elif indicator == 'inflation':
            data = indicators.inflation(payload['country'])
        else:
            raise indicatorNotPresent("Didn't find indicator '{}'".format(indicator))
        res = dict(success=True,payload=data)
        return json.dumps(res)
    else:
        raise payloadNotMatchingSchema("Payload didn't match schema ({}\n{})".format(payload_input_schema, v.errors))
# Validate docker-compose.yml against every test_* schema file in the
# current directory, printing a PASSED/FAILED line per schema.
with open('docker-compose.yml', 'r') as compose_handle:
    compose_data = yaml.load(compose_handle, Loader=yaml.FullLoader)

for entry in [f for f in os.listdir('.') if os.path.isfile(f)]:
    with open(entry) as candidate:
        if "test_" not in candidate.name:
            continue
        strTestFileName = candidate.name
        with open(strTestFileName, 'r') as schema_handle:
            schema_dict = yaml.load(schema_handle, Loader=yaml.FullLoader)
        checker = Validator()
        checker.schema = schema_dict
        if checker.validate(compose_data):
            print('+ ' + strTestFileName + ' PASSED ' + u'\u2713')
        else:
            print('+ ' + strTestFileName + ' FAILED ' + u'\u2717')
            print(
                '\n *********************************************************'
            )
            print(" Error Details: " + json.dumps(checker.errors))
            print(
                ' *********************************************************\n'
            )
        print()
print(json.dumps(data, indent=2))

### SCHEMA - CHECK ONLY LISTED DATA, IGNORE UNKNOWN #######
# Fields not listed in the schema are accepted because allow_unknown=True.
v = Validator({}, allow_unknown=True)
# FIX: regex values are raw strings now — '\.' (and '\:') inside a plain
# string literal is an invalid escape sequence (DeprecationWarning, and a
# SyntaxWarning on newer Pythons). The pattern characters are unchanged.
v.schema = {
    'vlan_id': {
        'required': True,
        'type': 'string'
    },
    'peer_address': {
        'required': True,
        "type": "string",
        "regex": r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$"
    },
    'rd': {
        'required': True,
        "type": "string",
        "regex": r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+\:[0-9]+$"
    },
    'circuit_id': {
        'required': True,
        "type": "string",
        "regex": "^[A-Z]+[0-9]+$"
    }
}
#v.allow_unknown = {'type': 'string'}
### v.validate({'an_unknown_field': 'john'})
from cerberus import Validator
from datetime import datetime


def to_date(s):
    """Parse a 'YYYY-MM-DD' string into a datetime instance."""
    return datetime.strptime(s, "%Y-%m-%d")


v = Validator()
v.schema = {"start_date": {"type": "datetime", "coerce": to_date}}

# One well-formed and one malformed date: the coercion callable raises for
# the second, which Cerberus reports as a validation failure.
for document in ({"start_date": "2019-12-11"}, {"start_date": "2019/12/11"}):
    if v.validate(document):
        print("valid data")
    else:
        print("invalid data")
        print(v.errors)
#!/usr/bin/env python3

from cerberus import Validator
import yaml

# Every entry under 'cities' must be a string.
v = Validator()
v.schema = {'cities': {'type': 'list', 'schema': {'type': 'string'}}}

with open('./cities.yaml') as handle:
    loaded = yaml.load(handle, Loader=yaml.FullLoader)

print(loaded)

if v.validate({'cities': loaded['cities']}):
    print('valid data')
else:
    print('invalid data')
    print(v.errors)
#!/usr/bin/env python3

from cerberus import Validator
from datetime import datetime


def to_date(s):
    """Coerce a 'YYYY-MM-DD' string to a datetime instance."""
    return datetime.strptime(s, '%Y-%m-%d')


v = Validator()
v.schema = {'start_date': {'type': 'datetime', 'coerce': to_date}}


def report(document):
    """Print the validation verdict (and the errors, when invalid)."""
    if v.validate(document):
        print('valid data')
    else:
        print('invalid data')
        print(v.errors)


# A well-formed date, then one the coercion callable cannot parse.
report({'start_date': '2019-12-11'})
report({'start_date': '2019/12/11'})
def validate_input(schema, input_file):
    """Validate *input_file* against *schema*.

    Returns the normalized Cerberus document on success; raises Exception
    carrying the error report otherwise.
    """
    checker = Validator()
    checker.schema = schema
    if checker.validate(input_file):
        return checker.document
    raise Exception("Error in input file: ", checker.errors)
from cerberus import Validator

# Schema covers 'name' and 'sex'; extra fields (like 'age') are tolerated
# because allow_unknown is switched on below.
person_rules = {
    "name": {
        "required": True,
        "type": "string"
    },
    "sex": {
        "required": True,
        "type": "string",
        "regex": "[MF]"
    },
}

v = Validator()
v.schema = person_rules
v.allow_unknown = True

data = {"name": "Austin", "sex": "M", "age": 23}
if v.validate(data):
    print("valid data")
else:
    print("invalid data")
    print(v.errors)

print(v.document)
#!/usr/bin/env python3

from cerberus import Validator

# 'name' is mandatory, so a document carrying only 'age' fails validation.
v = Validator()
v.schema = {
    'name': {'required': True, 'type': 'string'},
    'age': {'type': 'integer'},
}

if not v.validate({'age': 34}):
    print('invalid data')
    print(v.errors)
else:
    print('valid data')
#!/usr/bin/python

from cerberus import Validator

# Constrain the minimum name length and the allowed age range.
rules = {
    'name': {'type': 'string', 'minlength': 2},
    'age': {'type': 'integer', 'min': 18, 'max': 65},
}

v = Validator()
v.schema = rules

candidate = {'name': 'J', 'age': 4}
if not v.validate(candidate):
    print('invalid data')
    print(v.errors)
else:
    print('valid data')
def __init__(self):
    """Exercise a series of Cerberus features: basic validation, per-call
    schemas, unknown-field handling, and the 'allowed' and 'dependencies'
    rules, printing the outcome of each check."""
    print("Test Cerberus!!")

    # Basic test
    schema = {'name': {'type': 'string'}}
    v = Validator(schema)
    response = {'name': 'Shalini Arora'}
    print("Basic Test: ", v.validate(response))

    # Adding more constraints to a field
    schema = {'name': {'type': 'string', 'maxlength': 10}}
    print("\nValidating values: ",
          v.validate({'name': 'ShaliniArora12344'}, schema))
    print(v.errors)

    # Testing extras
    # BUG FIX: in the original, ``schema`` was passed as an extra argument
    # to print() instead of to validate() in the four calls below, so the
    # updated schema was never applied to those documents.
    print("\nTesting Extras: ",
          v.validate({'name': 'Shalini', 'age': 30}, schema))
    print(v.errors)

    # Testing extras + error
    print("\nTesting value errors and extras: ",
          v.validate({'name': 'Shalini1234567890', 'age': 30}, schema))
    print(v.errors)

    # Allowing the unknown
    v.allow_unknown = True
    print("\nAllowing the unknowns: ",
          v.validate({'name': 'Shalini', 'age': 30}, schema))

    # allowing the unknown with specific type
    v.allow_unknown = {'type': 'integer'}
    print("\nAllowing the unknowns with specific type: ",
          v.validate({'name': 'Shalini', 'age': 30}, schema))

    # checking the 'allowed' rule
    v.schema = {
        'role': {
            'type': 'list',
            'allowed': ['developer', 'tester', 'engineer']
        }
    }
    print('\nchecking tha \'allowed\' thing: ', v.validate({'role': ['abc']}))
    print(v.errors)

    # Dependencies: 'experience' is only satisfiable when 'profession' has
    # one of the listed values.
    schema1 = {
        'profession': {
            'type': 'string',
            'required': False
        },
        'experience': {
            'required': True,
            'dependencies': {
                'profession': ['developer', 'tester']
            }
        }
    }
    v1 = Validator(schema1)
    print('\n Testing dependencies: ', v1.validate({'profession': 'tester'}))
    print(v1.errors)
return False v = Validator() v.schema = { "name": { "type": "string", "minlength": 1, "maxlength": 256, "regex": "[A-Za-z0-9 ]+", "required": True }, "type": { "type": "string", "minlength": 1, "maxlength": 256, "regex": "[A-Za-z0-9 ]+", "required": True }, "length": { "type": "integer", "min": 1, "max": 10000, "required": True } } patchv = Validator() patchv.schema = { "name": {
#!/usr/bin/python

from cerberus import Validator

# 'words' may be either a single string or a list.
v = Validator()
v.schema = {'words': {'type': ['string', 'list']}}

for sample in ({'words': 'falcon'}, {'words': ['falcon', 'sky', 'cloud']}):
    print('valid data' if v.validate(sample) else 'invalid data')
#!/usr/bin/env python3

from cerberus import Validator
import yaml
import json

#v.schema = {'cities': {'type': 'list', 'schema': {'type': 'string'}}}

print()

# The schema lives on disk as JSON rather than inline.
with open('schema.json', 'r') as handle:
    schema = handle.read()

v = Validator()
v.schema = json.loads(schema)
print(" *DEBUG* Schema: " + schema)

with open('cities.yaml') as handle:
    data = yaml.load(handle, Loader=yaml.FullLoader)

print(" *DEBUG* Yaml: " + data['cities'][0])

if v.validate({'cities': data['cities']}):
    print('valid data')
else:
    print('invalid data')
    print(v.errors)

print()
"type": "string", }, "stateflower": { "type": "string" }, }, } v = Validator() v.require_all = True v.schema = { "states": { "type": "dict", "schema": { "utah": state_schema, "texas": state_schema, "illinois": state_schema, "missouri": state_schema, }, }, } with open("cities.yaml") as f: data = yaml.load(f, Loader=yaml.FullLoader) # print(data) if v.validate(data): print("valid data") else: print("invalid data")
from cerberus import Validator

# --- round 1: single integer field, schema supplied at construction ---
v = Validator({"numbers": {"type": "integer"}})
print("Data is valid" if v.validate({"numbers": 5}) else "Data is invalid")

# --- round 2: swap in a schema with a required field ---
v.schema = {"name": {"required": True, "type": "string"}, "age": {"type": "integer"}}
if v.validate({"age": 34}):
    print("valid data")
else:
    print("invalid data")
    print(v.errors)

# --- round 3: length and range constraints ---
v.schema = {
    "name": {"type": "string", "minlength": 5},
    "age": {"type": "integer", "min": 18, "max": 65},
}
if v.validate({"name": "VJ", "age": 16}):
    print("Data is valid")
else:
    print("Data is invalid")
    print(v.errors)