def validate(data, schema=None):
    if schema is None:
        schema = generate()
        Validator.check_schema(schema)
    validator = Validator(schema)
    errors = list(validator.iter_errors(data))
    if not errors:
        counter = Counter([p['name'] for p in data.get('policies', [])])
        dupes = []
        for k, v in counter.items():
            if v > 1:
                dupes.append(k)
        if dupes:
            return [ValueError(
                "Only one policy with a given name allowed, duplicates: %s" % (
                    ", ".join(dupes)))]
        return []
    try:
        resp = specific_error(errors[0])
        name = isinstance(errors[0].instance, dict) and errors[0].instance.get(
            'name', 'unknown') or 'unknown'
        return [resp, name]
    except Exception:
        logging.exception(
            "specific_error failed, traceback, followed by fallback")
    # wrap in list() so callers get a sequence on Python 3 as well
    return list(filter(None, [
        errors[0],
        best_match(validator.iter_errors(data)),
    ]))
def load_validator(name):
    """Load the JSON Schema Draft 4 validator with the given name from the
    local schema directory.
    """
    with open(os.path.join(SCHEMA_PATH, name)) as fh:
        schema = json.load(fh)
    Draft4Validator.check_schema(schema)
    return Draft4Validator(schema, format_checker=checker)
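# Usage sketch for load_validator (the file name 'user.json' is hypothetical;
# assumes SCHEMA_PATH and `checker` are defined as in the surrounding module).
# iter_errors() collects every violation, whereas validate() raises on the
# first one.
validator = load_validator('user.json')
document = {"email": 42}
for err in sorted(validator.iter_errors(document), key=str):
    print('/'.join(str(p) for p in err.path), '-', err.message)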
def validate(self):
    self.log.info("Checking schemas for validity")
    for application in self.applications.values():
        self.log.info("+ " + application.slug)
        for collection in application.collections:
            self.log.info('--- ' + collection.slug)
            Draft4Validator.check_schema(collection.schema)
def __init__(self, schema=DEFAULT_LTM_SCHEMA):
    """Choose schema and initialize extended Draft4Validator.

    Raises:
        F5CcclSchemaError: Failed to read or validate the CCCL API schema
        file.
    """
    try:
        self.schema = read_yaml_or_json(schema)
    except json.JSONDecodeError as error:
        LOGGER.error("%s", error)
        raise cccl_exc.F5CcclSchemaError(
            'CCCL API schema could not be decoded.')
    except IOError as error:
        LOGGER.error("%s", error)
        raise cccl_exc.F5CcclSchemaError(
            'CCCL API schema could not be read.')

    try:
        Draft4Validator.check_schema(self.schema)
        self.validate_properties = Draft4Validator.VALIDATORS["properties"]
        validator_with_defaults = validators.extend(
            Draft4Validator,
            {"properties": self.__set_defaults})
        self.validator = validator_with_defaults(self.schema)
    except jsonschema.SchemaError as error:
        LOGGER.error("%s", error)
        raise cccl_exc.F5CcclSchemaError("Invalid API schema")
def validate(data, schema=None):
    if schema is None:
        schema = generate()
        Validator.check_schema(schema)
    validator = Validator(schema)
    errors = list(validator.iter_errors(data))
    if not errors:
        return check_unique(data) or []
    try:
        resp = policy_error_scope(specific_error(errors[0]), data)
        name = isinstance(
            errors[0].instance,
            dict) and errors[0].instance.get('name', 'unknown') or 'unknown'
        return [resp, name]
    except Exception:
        logging.exception(
            "specific_error failed, traceback, followed by fallback")
    return list(filter(None, [
        errors[0],
        best_match(validator.iter_errors(data)),
    ]))
def test_select_all(self, testapi):
    """Select all link relations and check them for valid JSON schema."""
    for rel_id in testapi.get(
            url_for('v1.LinkRelationsView:index')).json.keys():
        resp = testapi.get(url_for('v1.LinkRelationsView:get', id=rel_id))
        Draft4Validator.check_schema(resp.json).should.be(None)
def serialize(self):
    """Serialize the schema to a pure Python data structure.

    After serializing the schema once, it's not possible to mutate
    self._schema any more, since these changes would not be reflected in
    the serialized output.
    """
    # Order keys before serializing.
    # This is to get a stable sort order when dumping schemas, and a
    # convenient API at the same time (no need to pass in OrderedDicts
    # all the time). This keeps calling code more readable.
    self._schema = order_dict(self._schema, SCHEMA_KEYS_ORDER)
    if 'properties' in self._schema:
        for prop_name, prop_def in self._schema['properties'].items():
            self._schema['properties'][prop_name] = order_dict(
                prop_def, PROPERTY_KEYS_ORDER)

    schema = deepcopy(self._schema)
    Draft4Validator.check_schema(schema)
    # Prevent access to self._schema after serialization in order to avoid
    # gotchas where mutations to self._schema don't take effect any more
    del self._schema
    return schema
def __init__(self, json_data, strict=False, live_schema=None):
    self.live_schema = live_schema
    if not hasattr(json_data, '__getitem__'):
        raise TypeError('json_data must be a dict.')
    if (not self.schema) and (live_schema is None):
        raise NotImplementedError('schema not implemented!')
    if live_schema is not None:
        if not self.schema:
            self.schema = live_schema
        else:
            self.schema['properties'].update(live_schema['properties'])
            if "required" in self.schema and "required" in live_schema:
                self.schema['required'] = list(
                    set(self.schema['required']) | set(live_schema["required"])
                )
    Draft4Validator.check_schema(self.schema)
    self.data = {}
    if not strict:
        self._filter_data(json_data, self.schema['properties'], self.data)
    else:
        self.data = json_data
    self.validator = Draft4Validator(self.schema)
    self.errors = None
def _validate(self):
    # Draft4Validator accepts empty JSON, but we don't want to accept it.
    if not self.json:
        raise ValueError('Schema is invalid.')
    try:
        Draft4Validator.check_schema(self.json)
    except (SchemaError, ValidationError):
        raise ValueError('Schema is invalid.')
def test_schemas_are_valid():
    root_dir = os.path.join(
        'inspirehep', 'modules', 'records', 'jsonschemas', 'records')
    for schemas_dir, _, schemas in os.walk(root_dir):
        schemas_path = os.path.sep.join(schemas_dir.split(os.path.sep)[1:])
        for schema in schemas:
            schema_path = os.path.join(schemas_path, schema)
            Draft4Validator.check_schema(fetch_schema(schema_path))
def validate(schema_filename, data):
    with open(schema_filename) as f:
        schema = json.load(f)  # read the JSON Schema directly from the file
    Validator.check_schema(schema)  # checks the schema or raises an exception
    base_uri = 'file://' + os.path.dirname(schema_filename) + '/'
    resolver = RefResolver(base_uri, schema)
    validator = Validator(schema, resolver=resolver)
    return validator.iter_errors(data)  # yields the errors one by one
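# Minimal self-contained illustration of the RefResolver wiring above, using
# an in-memory store instead of files on disk:
from jsonschema import Draft4Validator, RefResolver

address = {"type": "object", "required": ["city"]}
schema = {"properties": {"addr": {"$ref": "address.json"}}}
resolver = RefResolver(base_uri="", referrer=schema,
                       store={"address.json": address})
validator = Draft4Validator(schema, resolver=resolver)
print([e.message for e in validator.iter_errors({"addr": {}})])
# ["'city' is a required property"]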
def json_schema_validator(value):
    """ raises ValidationError if value is not a valid json schema """
    try:
        Draft4Validator.check_schema(value)
    except SchemaError as e:
        raise ValidationError(_('Schema is invalid: %(msg)s'),
                              params={"msg": str(e.message)})
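# Illustration (not part of the original module): check_schema raises
# SchemaError when the schema *itself* is malformed, e.g. a non-integer
# "minLength" value.
from jsonschema import Draft4Validator
from jsonschema.exceptions import SchemaError

try:
    Draft4Validator.check_schema({"type": "string", "minLength": "three"})
except SchemaError as e:
    print(e.message)  # "'three' is not of type 'integer'"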
def test_schema_validity():
    for name in ("schema_base.json", "schema_data.json", "schema_node.json",
                 "schema_prov_exe.json", "schema_workflow.json"):
        with open(os.path.join(sch_pth, name), 'r') as f:
            schema = json.load(f)
        Draft4Validator.check_schema(schema)
def test():
    """Tests all included schemata against the Draft4Validator"""
    from jsonschema import Draft4Validator

    for schemaname, schemadata in schemastore.items():
        hfoslog("[SCHEMATA] Validating schema ", schemaname)
        Draft4Validator.check_schema(schemadata['schema'])
        if 'uuid' not in schemadata['schema']:
            hfoslog("[SCHEMATA] Schema without uuid encountered: ",
                    schemaname, lvl=debug)
def schema(self, schema):
    """sets the stream's schema. An empty schema is "{}". The schemas allow
    you to set a specific data type. Both python dicts and strings are
    accepted."""
    if isinstance(schema, basestring):
        strschema = schema
        schema = json.loads(schema)
    else:
        strschema = json.dumps(schema)
    Draft4Validator.check_schema(schema)
    self.set({"schema": strschema})
def test_schema_handler_with_default_uri_normalization(self):
    response = self.fetch('/person/Gender/_schema')
    self.assertEqual(response.code, 200)
    schema = json.loads(response.body)
    self.assertEqual(schema['id'],
                     u'http://semantica.globo.com/person/Gender')
    self.assertEqual(schema['$schema'],
                     'http://json-schema.org/draft-04/schema#')
    try:
        Draft4Validator.check_schema(schema)
    except SchemaError as ex:
        self.fail("Json-schema for class {0} is not valid. "
                  "Failed for {1:s}".format('person:Gender', ex))
def load_schema(schema):
    """Validates the given schema and returns an associated schema validator
    object that can check other objects' conformance to the schema.

    :param schema: The JSON schema object.
    :returns: A Draft4Validator for the given schema.
    """
    Draft4Validator.check_schema(schema)
    return Draft4Validator(schema, format_checker=FormatChecker())
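# Hedged example of the FormatChecker wiring above: with a format checker
# attached, draft-4 "format" keywords such as "email" are enforced; without
# one they are silently ignored.
v = load_schema({
    "type": "object",
    "properties": {"contact": {"type": "string", "format": "email"}},
})
print(v.is_valid({"contact": "user@example.org"}))  # True
print(v.is_valid({"contact": "no-at-sign"}))        # False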
def validateSchemasInFolder(folder):
    path = os.path.abspath(folder)
    files = [f for f in listdir(path) if isfile(join(path, f))]
    for schemaFile in files:
        if schemaFile.endswith('.json'):
            print("Validating schema ", schemaFile, "...")
            schema = json.load(open(join(path, schemaFile)))
            Draft4Validator.check_schema(schema)
            print("done.")
def test_valid_schema(self):
    schema_path = "../schema/phenopacket-schema.json"
    schema_fh = open(
        os.path.join(os.path.dirname(__file__), schema_path), "r")
    schema = json.load(schema_fh)
    schema_fh.close()
    # call validator
    Draft4Validator.check_schema(schema)
def test_1_generate_json_schema(self):
    meta_json = SQLJSON()
    print()
    f = open(
        os.path.join(Test_Resource_Dir, "../../../schema/", "sql.json"), "w")
    _schema = meta_json.generate_schema()
    json.dump(obj=_schema, fp=f, sort_keys=True, indent=4)
    f.close()
    Draft4Validator.check_schema(_schema)
def __parse_schema(self):
    if ('$schema' not in self.schema
            or self.schema['$schema'].find('draft-03') == -1):
        Draft4Validator.check_schema(self.schema)
    else:
        raise ValueError("Draft-03 schema is not supported currently.")
    self.object_defines['root'] = self.schema
    if 'id' in self.schema:
        self.base_uri = self.schema['id']
    self.__parse_object('root', self.schema)
def create(self, schema="{}", **kwargs):
    """Creates a stream given an optional JSON schema encoded as a python
    dict. You can also add other properties of the stream, such as the icon,
    datatype or description. Create accepts both a string schema and a
    dict-encoded schema."""
    if isinstance(schema, basestring):
        strschema = schema
        schema = json.loads(schema)
    else:
        strschema = json.dumps(schema)
    Draft4Validator.check_schema(schema)
    kwargs["schema"] = strschema
    self.metadata = self.db.create(self.path, kwargs).json()
def derive_invocation_schema(manifest):
    """
    Creates an invocation schema from a gear manifest.
    This can be used to validate the files and configuration offered to run
    a gear.
    """
    validate_manifest(manifest)

    result = {
        'title': 'Invocation manifest for ' + manifest['label'],
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'properties': {
            'config': {
                'type': 'object',
                'properties': {},
                'required': []
            },
            'inputs': {
                'type': 'object',
                'properties': {},
                'required': []
            }
        },
        'required': ['config', 'inputs']
    }

    # Copy over constraints from manifest
    for kind in ['config', 'inputs']:
        for key in manifest[kind]:
            # Copy constraints, removing 'base' keyword which is not a
            # constraint
            val = copy.deepcopy(manifest[kind][key])
            val.pop('base', None)

            # The config map holds scalars, while the inputs map holds objects.
            if kind == 'config':
                result['properties'][kind]['properties'][key] = val
            else:
                result['properties'][kind]['properties'][key] = {}
                result['properties'][kind]['properties'][key]['properties'] = val
                result['properties'][kind]['properties'][key]['type'] = 'object'

            # Require the key be present.
            result['properties'][kind]['required'].append(key)

        # After handling each key, remove required array if none are present.
        # Required by jsonschema (minItems 1).
        if len(result['properties'][kind]['required']) == 0:
            result['properties'][kind].pop('required', None)

    # Important: check our work - the result must be a valid schema.
    Draft4Validator.check_schema(result)
    return result
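# Worked example (hypothetical manifest, assuming validate_manifest accepts
# it) showing the shape transformation performed above: a config scalar is
# copied through minus its 'base' key, while an input is wrapped in a nested
# object schema.
example_manifest = {
    'label': 'Example Gear',
    'config': {'speed': {'type': 'integer', 'base': 'config'}},
    'inputs': {'dicom': {'base': 'file'}},
}
inv = derive_invocation_schema(example_manifest)
# inv['properties']['config'] ==
#     {'type': 'object',
#      'properties': {'speed': {'type': 'integer'}},
#      'required': ['speed']}
# inv['properties']['inputs']['properties']['dicom'] ==
#     {'properties': {}, 'type': 'object'}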
def generate_models(self, files):
    loader = JmgLoader()
    for fname in (f for fileGlob in files for f in glob.glob(fileGlob)):
        if self.root_name:
            scope = [self.root_name]
        else:
            base_name = os.path.basename(fname)
            base_uri = os.path.splitext(base_name)[0]
            base_uri = base_uri.replace('.schema', '')
            scope = [base_uri]

        with open(fname) as jsonFile:
            print("%s" % fname)
            # root_schema = json.load(jsonFile)
            # base_uri = 'file://' + os.path.split(os.path.realpath(f))[0]
            base_uri = 'file://' + os.path.realpath(fname)
            root_schema = jsonref.load(
                jsonFile,
                base_uri=base_uri,
                jsonschema=False,  # resolve references relative to local tree, not against "id" URIs
                loader=loader,
                object_pairs_hook=collections.OrderedDict)

            # import json
            # print(json.dumps(root_schema, indent=4, separators=(',', ': ')))

            if self.validate:
                # TODO: Add exception handling
                try:
                    Draft4Validator.check_schema(root_schema)
                except SchemaError as e:
                    print(e)
                    sys.exit(-1)

            assert isinstance(root_schema, dict)
            if JsonSchema2Model.SCHEMA_URI not in root_schema:
                root_schema[JsonSchema2Model.SCHEMA_URI] = fname

            self.create_model(root_schema, scope)

    self.render_models()
    self.copy_static_files()
    if self.include_dependencies:
        self.copy_dependencies()
def validate_schema(path, schemaFile):
    logger.info("Validating schema %s", schemaFile)
    # A context manager guarantees the handle is closed; the original
    # try/finally could hit a NameError if open() itself failed, and its
    # final logger.info("done.") was unreachable after the returns.
    with open(join(path, schemaFile)) as schema_file:
        schema = json.load(schema_file)
    try:
        Draft4Validator.check_schema(schema)
        logger.info("done.")
        return True
    except Exception as e:
        logger.error(e)
        return False
def _jsonschema_errors(self):
    from django.conf import settings
    errors = []
    schemas = settings.SIMPLE_JSONSCHEMA
    for url, schema in schemas.items():
        try:
            Draft4Validator.check_schema(schema)
        except SchemaError as e:
            errors.append({
                'url': url,
                'error': e,
                'schema': json.dumps(schema, indent=4, sort_keys=True)
            })
    return errors
def setUp(self):
    self.test_request = {
        "elements": "K-Ag",
        "classes": "iodide",
        "props": "heat capacity",
        "lattices": "cubic"
    }
    network = httplib2.Http()
    response, content = network.request(
        'http://developer.mpds.io/mpds.schema.json')
    assert response.status == 200
    self.schema = json.loads(content)
    Draft4Validator.check_schema(self.schema)
def isolate_file_invocation(invocation, input_name):
    """
    Given an invocation schema, isolate just a specific file.
    Useful to validate a single input.
    """
    inv = copy.deepcopy(invocation)
    fis = inv['properties']['inputs']['properties'][input_name]
    fis['title'] = 'Input invocation manifest for ' + input_name
    fis['$schema'] = 'http://json-schema.org/draft-04/schema#'
    fis['type'] = 'object'

    # Important: check our work - the result must be a valid schema.
    Draft4Validator.check_schema(fis)
    return fis
def validate(self, obj, check_schema=False):
    '''Validate obj against the schema and check reference constraints.

    Returns a dictionary where each key is a tuple to the path of the error
    and each value is a list of errors which occurred at that path. On
    successful validation, None is returned.
    '''
    # Validate object against schema
    schema = self.schema
    add_instance_constraints(schema, obj)
    if check_schema:
        Draft4Validator.check_schema(schema)
    validator = Draft4Validator(schema)
    try:
        validator.validate(obj)
    except exceptions.ValidationError as e:
        return {tuple(e.path): [e.message]}
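# Generic illustration (not from this project) of the tuple(e.path) error-map
# idea used above, with a plain Draft4Validator and iter_errors() so that
# every violation is reported, not just the first:
from jsonschema import Draft4Validator

def error_map(schema, obj):
    validator = Draft4Validator(schema)
    errs = {}
    for e in validator.iter_errors(obj):
        errs.setdefault(tuple(e.path), []).append(e.message)
    return errs or None

print(error_map({"properties": {"n": {"type": "number"}}}, {"n": "x"}))
# {('n',): ["'x' is not of type 'number'"]}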
def isolate_config_invocation(invocation):
    """
    Given an invocation schema, isolate just the config portion.
    Useful to validate configuration options separately from files.
    """
    inv = copy.deepcopy(invocation)
    fis = inv['properties']['config']
    fis['title'] = 'Config invocation manifest'
    fis['$schema'] = 'http://json-schema.org/draft-04/schema#'
    fis['type'] = 'object'

    # Important: check our work - the result must be a valid schema.
    Draft4Validator.check_schema(fis)
    return fis
def iter_errors(self, doc):
    # Note: whenever gdcdictionary uses a newer version of jsonschema,
    # we need to update the Validator.
    validator = Draft4Validator(self.schemas.schema[doc['type']])
    return validator.iter_errors(doc)
def validate(j, s):
    errors = sorted(Draft4Validator(s).iter_errors(j), key=lambda e: e.path)
    return [x.message for x in errors]
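# Example run of the two-liner above: it returns human-readable messages,
# or an empty list when the instance is valid.
schema = {"type": "object", "required": ["name"]}
print(validate({}, schema))             # ["'name' is a required property"]
print(validate({"name": "x"}, schema))  # []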
def test_valid_bods(valid_bods, bods_schema):
    '''Test if fixture is valid BODS JSON.'''
    v = Draft4Validator(bods_schema)
    for error in sorted(v.iter_errors(valid_bods), key=str):
        print(error.message)
    assert v.is_valid(valid_bods)
def parse(filename):
    try:
        schema = json.loads(open("pl.schema").read())
        schema = Draft4Validator(schema, format_checker=FormatChecker())
    except ValueError as e:
        post_error("pl.schema - " + str(e))
        return

    try:
        pl = json.loads(open(filename).read())
    except ValueError as e:
        post_error(filename + " - " + str(e))
        return

    for error in schema.iter_errors(pl):
        post_error(error.message)

    foldernames = []
    displaynames = []
    repositories = []

    if os.path.exists("./" + bitness_from_input):
        shutil.rmtree("./" + bitness_from_input, True)
    os.mkdir("./" + bitness_from_input)

    for plugin in pl["npp-plugins"]:
        print(plugin["display-name"])
        try:
            response = requests.get(plugin["repository"])
        except requests.exceptions.RequestException as e:
            post_error(str(e))
            continue

        if response.status_code != 200:
            post_error(
                f'{plugin["display-name"]}: failed to download plugin. '
                f'Returned code {response.status_code}')
            continue

        # Hash it and make sure it's what is expected
        hash = sha256(response.content).hexdigest()
        if plugin["id"].lower() != hash.lower():
            post_error(
                f'{plugin["display-name"]}: Invalid hash. '
                f'Got {hash.lower()} but expected {plugin["id"]}')
            continue

        # Make sure it's a valid zip file
        try:
            zip = zipfile.ZipFile(io.BytesIO(response.content))
        except zipfile.BadZipFile:
            post_error(f'{plugin["display-name"]}: Invalid zip file')
            continue

        # The expected DLL name
        dll_name = f'{plugin["folder-name"]}.dll'.lower()

        # Notepad++ is not case sensitive, but extracting files from the zip
        # is, so find the exact file name to use
        for file in zip.namelist():
            if dll_name == file.lower():
                dll_name = file
                break
        else:
            post_error(
                f'{plugin["display-name"]}: Zip file does not contain '
                f'{plugin["folder-name"]}.dll')
            continue

        with zip.open(dll_name) as dll_file, open(
                "./" + bitness_from_input + "/" + dll_name, 'wb') as f:
            f.write(dll_file.read())

        version = plugin["version"]
        # Fill in any of the missing numbers as zeros
        version = version + (3 - version.count('.')) * ".0"

        try:
            dll_version = get_version_number(
                "./" + bitness_from_input + "/" + dll_name)
        except win32api.error:
            post_error(
                f'{plugin["display-name"]}: Does not contain any version '
                f'information')
            continue

        if dll_version != version:
            post_error(
                f'{plugin["display-name"]}: Unexpected DLL version. '
                f'DLL is {dll_version} but expected {version}')
            continue

        # Check uniqueness of the JSON folder-name, display-name and
        # repository entries
        if plugin["display-name"] in displaynames:
            post_error(
                f'{plugin["display-name"]}: non unique display-name entry')
        else:
            displaynames.append(plugin["display-name"])

        if plugin["folder-name"] in foldernames:
            post_error(
                f'{plugin["folder-name"]}: non unique folder-name entry')
        else:
            foldernames.append(plugin["folder-name"])

        if plugin["repository"] in repositories:
            post_error(
                f'{plugin["repository"]}: non unique repository entry')
        else:
            repositories.append(plugin["repository"])
def test_no_errors(self):
    validator = Draft4Validator({})
    self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
def validate(data):
    _, schema = resolver.resolve('entity.json')
    validator = Draft4Validator(schema, resolver=resolver,
                                format_checker=format_checker)
    return validator.validate(data, schema)
def _validate_user_remove_schema(self, values):
    schema = self.schema['components']['schemas']['project-user-remove']
    try:
        Draft4Validator(schema=schema).validate(values)
    except ValidationError as e:
        raise FacadeInvalidSchema(e.message)
def __init__(self, *args, **kwargs):
    self.schema = kwargs.pop('schema')
    Draft4Validator.check_schema(self.schema)
    super(JSONSchemaForm, self).__init__(*args, **kwargs)
import re

from jsonschema import Draft4Validator

from newslynx.models import SousChef
from newslynx.lib.serialize import yaml_to_obj
from newslynx.lib.serialize import obj_to_json, json_to_obj
from newslynx.constants import SOUS_CHEF_RESERVED_FIELDS
from newslynx.util import here, update_nested_dict

# load sous chef schema + validator.
SOUS_CHEF_JSON_SCHEMA = yaml_to_obj(
    open(here(__file__, 'sous_chef.yaml')).read())

# these are default options that all sous chefs have.
SOUS_CHEF_DEFAULT_OPTIONS = yaml_to_obj(
    open(here(__file__, 'sous_chef_default_options.yaml')).read())

# a json-schema validator for a sous chef.
SOUS_CHEF_VALIDATOR = Draft4Validator(SOUS_CHEF_JSON_SCHEMA)

# a regex for validating option + metric names
re_opt_name = re.compile(r'^[a-z][a-z_]+[a-z]$')


def validate(sc):
    """
    Validate a sous chef schema:
    First check against the canonical json schema.
    Then check if the `runs` field is a valid python module or an executable
    script that exists where it has been declared.
    Then check special metrics options and merge the default sous-chef
    options with the provided ones.
    """
def parse_params(form, method='GET', data_format='FORM',
                 error_handler=response_http_404, parse_ua=False):
    if isinstance(form, dict):
        if data_format == 'FORM':
            form = FormValidator(form)
        else:
            form = Draft4Validator(form)

    def _parse_params(func):
        @wraps(func)
        def _func(*args, **kwargs):
            if request.method != method:
                log.warning('view_method_error|url=%s,method=%s',
                            get_request_url().encode('utf-8'), request.method)
                return error_handler()
            if isinstance(form, FormValidator):
                if method == 'GET':
                    formdata = request.args
                else:
                    formdata = request.form
                try:
                    data = form.normalize(formdata)
                except Exception as ex:
                    log.warning(
                        'view_params_error|format=form,url=%s,error=%s,body=%s',
                        get_request_url().encode('utf-8'), ex,
                        request.get_data())
                    return error_handler()
            elif data_format == 'JSON':
                request_body = request.get_data()
                try:
                    data = jsonutils.from_json(request_body)
                except Exception as ex:
                    log.warning(
                        'view_params_error|format=json,url=%s,error=%s,body=%s',
                        get_request_url().encode('utf-8'), ex, request_body)
                    # return error_handler()
                    data = {}
                if form is not None:
                    params_errors = [
                        e.message for e in form.iter_errors(data)
                    ]
                    if params_errors:
                        log.warning(
                            'view_params_error|format=json,url=%s,'
                            'error=json_validator:%s,body=%s',
                            get_request_url().encode('utf-8'),
                            ';'.join(params_errors), request_body)
                        return error_handler()
            else:
                data = request.values
            data['request_ip'] = get_request_ip(request)
            if parse_ua:
                data['request_ua'] = UserAgent(
                    request.headers.get('User-Agent', ''))
            return func(data, *args, **kwargs)
        return _func
    return _parse_params
def validate_global_template(self, data, **kwargs):
    try:
        Draft4Validator.check_schema(data["schema"])
    except (SchemaError, JSONSchemaValidationError) as e:
        raise MarshmallowValidationError(str(e))
def validate_configuration(configuration):
    """
    Validate a provided configuration.

    :param dict configuration: A desired configuration.
    :raises: jsonschema.ValidationError if the configuration is invalid.
    """
    schema = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "type": "object",
        "required": ["scenarios", "operations", "metrics"],
        "properties": {
            "scenarios": {
                "type": "array",
                "minItems": 1,
                "items": {
                    "type": "object",
                    "required": ["name", "type"],
                    "properties": {
                        "name": {"type": "string"},
                        "type": {"type": "string"},
                    },
                    # must be a boolean, not the string "true"
                    "additionalProperties": True,
                },
            },
            "operations": {
                "type": "array",
                "minItems": 1,
                "items": {
                    "type": "object",
                    "required": ["name", "type"],
                    "properties": {
                        "name": {"type": "string"},
                        "type": {"type": "string"},
                    },
                    "additionalProperties": True,
                },
            },
            "metrics": {
                "type": "array",
                "minItems": 1,
                "items": {
                    "type": "object",
                    "required": ["name", "type"],
                    "properties": {
                        "name": {"type": "string"},
                        "type": {"type": "string"},
                    },
                    "additionalProperties": True,
                },
            },
        },
    }
    v = Draft4Validator(schema, format_checker=FormatChecker())
    v.validate(configuration)
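# Hypothetical smoke test for validate_configuration: valid input passes
# silently, while an empty "scenarios" array trips the minItems constraint.
from jsonschema import ValidationError

good = {
    "scenarios": [{"name": "s", "type": "t"}],
    "operations": [{"name": "o", "type": "t"}],
    "metrics": [{"name": "m", "type": "t"}],
}
validate_configuration(good)  # no exception

try:
    validate_configuration(dict(good, scenarios=[]))
except ValidationError as e:
    print(e.message)  # "[] is too short"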
def validate(data, schema):
    resolver = get_loom_config().resolver
    _, schema = resolver.resolve(schema)
    validator = Draft4Validator(schema, resolver=resolver,
                                format_checker=format_checker)
    return validator.validate(data, schema)
def __init__(self, schema):
    self.validator = Draft4Validator(schema)
        return errorsMap

    @staticmethod
    def getSchema(inputtype):
        errorLog = list()
        validSchema = None
        if inputtype == 'NAMED_ENTITY':
            try:
                request_schema = requests.get(
                    'http://europepmc.org/docs/ne_annotation_schema.json')
                ne_schemaObj = request_schema.json()
                validSchema = Draft4Validator(ne_schemaObj,
                                              format_checker=FormatChecker())
            except Exception, nescherr:  # Python 2 except syntax
                errorLog.append(repr(nescherr))
        elif inputtype == 'SENTENCE':
            try:
                request_schema = requests.get(
                    'http://europepmc.org/docs/sentence_annotation_schema.json')
                sent_schemaObj = request_schema.json()
                validSchema = Draft4Validator(sent_schemaObj,
                                              format_checker=FormatChecker())
            except Exception, sentscherr:  # Python 2 except syntax
                errorLog.append(repr(sentscherr))
        return (validSchema, errorLog)
def __testValidateOpts(self, databaseNameD, inputPathList=None,
                       schemaLevel="full", mergeContentTypeD=None):
    #
    eCount = 0
    for databaseName in databaseNameD:
        mergeContentTypes = mergeContentTypeD[databaseName] \
            if databaseName in mergeContentTypeD else None
        _ = self.__schP.makeSchemaDef(databaseName, dataTyping="ANY",
                                      saveSchema=True)
        pthList = inputPathList if inputPathList else \
            self.__rpP.getLocatorObjList(databaseName,
                                         mergeContentTypes=mergeContentTypes)
        for collectionName in databaseNameD[databaseName]:
            cD = self.__schP.makeSchema(databaseName, collectionName,
                                        encodingType="JSON",
                                        level=schemaLevel,
                                        saveSchema=True, extraOpts=None)
            #
            dL, cnL = self.__testPrepDocumentsFromContainers(
                pthList, databaseName, collectionName,
                styleType="rowwise_by_name_with_cardinality",
                mergeContentTypes=mergeContentTypes)
            # Raises exceptions for schema compliance.
            try:
                Draft4Validator.check_schema(cD)
            except Exception as e:
                logger.error("%s %s schema validation fails with %s",
                             databaseName, collectionName, str(e))
            #
            valInfo = Draft4Validator(cD, format_checker=FormatChecker())
            logger.info("Validating %d documents from %s %s",
                        len(dL), databaseName, collectionName)
            for ii, dD in enumerate(dL):
                logger.debug("Schema %s collection %s document %d",
                             databaseName, collectionName, ii)
                try:
                    cCount = 0
                    for error in sorted(valInfo.iter_errors(dD), key=str):
                        logger.info(
                            "schema %s collection %s (%s) path %s error: %s",
                            databaseName, collectionName, cnL[ii],
                            error.path, error.message)
                        logger.debug("Failing document %d : %r",
                                     ii, list(dD.items()))
                        eCount += 1
                        cCount += 1
                    if cCount > 0:
                        logger.info(
                            "schema %s collection %s container %s error count %d",
                            databaseName, collectionName, cnL[ii], cCount)
                except Exception as e:
                    logger.exception("Validation processing error %s", str(e))
    return eCount
def validate(self, value, model_instance):
    # decrypt secret values so we can validate their contents (i.e.,
    # ssh_key_data format)
    if not isinstance(value, dict):
        return super(CredentialInputField, self).validate(
            value, model_instance)

    # Backwards compatibility: in prior versions, if you submit `null` for
    # a credential field value, it just considers the value an empty string
    for unset in [key for key, v in model_instance.inputs.items() if not v]:
        default_value = model_instance.credential_type.default_for_field(unset)
        if default_value is not None:
            model_instance.inputs[unset] = default_value

    decrypted_values = {}
    for k, v in value.items():
        if all([
            k in model_instance.credential_type.secret_fields,
            v != '$encrypted$',
            model_instance.pk
        ]):
            if not isinstance(getattr(model_instance, k), six.string_types):
                raise django_exceptions.ValidationError(
                    _('secret values must be of type string, not {}').format(
                        type(v).__name__),
                    code='invalid',
                    params={'value': v},
                )
            decrypted_values[k] = utils.decrypt_field(model_instance, k)
        else:
            decrypted_values[k] = v

    super(JSONSchemaField, self).validate(decrypted_values, model_instance)
    errors = {}
    for error in Draft4Validator(
            self.schema(model_instance),
            format_checker=self.format_checker).iter_errors(decrypted_values):
        if error.validator == 'pattern' and 'error' in error.schema:
            error.message = six.text_type(
                error.schema['error']).format(instance=error.instance)
        if error.validator == 'dependencies':
            # replace the default error messaging w/ a better i18n string
            # I wish there was a better way to determine the parameters of
            # this validation failure, but the exception jsonschema raises
            # doesn't include them as attributes (just a hard-coded error
            # string)
            match = re.search(
                # 'foo' is a dependency of 'bar'
                "'"          # apostrophe
                "([^']+)"    # one or more non-apostrophes (first group)
                "'[\\w ]+'"  # one or more words/spaces
                "([^']+)",   # second group
                error.message,
            )
            if match:
                label, extraneous = match.groups()
                if error.schema['properties'].get(label):
                    label = error.schema['properties'][label]['label']
                errors[extraneous] = [
                    _('cannot be set unless "%s" is set') % label
                ]
                continue
        if 'id' not in error.schema:
            # If the error is not for a specific field, it's specific to
            # `inputs` in general
            raise django_exceptions.ValidationError(
                error.message,
                code='invalid',
                params={'value': value},
            )
        errors[error.schema['id']] = [error.message]

    inputs = model_instance.credential_type.inputs
    for field in inputs.get('required', []):
        if not value.get(field, None):
            errors[field] = [
                _('required for %s') % model_instance.credential_type.name
            ]

    # `ssh_key_unlock` requirements are very specific and can't be
    # represented without complicated JSON schema
    if (model_instance.credential_type.managed_by_tower is True and
            'ssh_key_unlock' in model_instance.credential_type.defined_fields):

        # in order to properly test the necessity of `ssh_key_unlock`, we
        # need to know the real value of `ssh_key_data`; for a payload like:
        # {
        #     'ssh_key_data': '$encrypted$',
        #     'ssh_key_unlock': 'do-you-need-me?',
        # }
        # ...we have to fetch the actual key value from the database
        if model_instance.pk and model_instance.ssh_key_data == '$encrypted$':
            model_instance.ssh_key_data = model_instance.__class__.objects.get(
                pk=model_instance.pk).ssh_key_data

        if model_instance.has_encrypted_ssh_key_data and \
                not value.get('ssh_key_unlock'):
            errors['ssh_key_unlock'] = [
                _('must be set when SSH key is encrypted.')
            ]
        if all([
            model_instance.ssh_key_data,
            value.get('ssh_key_unlock'),
            not model_instance.has_encrypted_ssh_key_data
        ]):
            errors['ssh_key_unlock'] = [
                _('should not be set when SSH key is not encrypted.')
            ]

    if errors:
        raise serializers.ValidationError({'inputs': errors})
def create_build(self, yml: Dict[Any, Any], build: Build):
    from piper_core.model.stages.stage import Stage
    from piper_core.model.jobs.job import Job
    from piper_core.model.jobs.environment import Environment
    from piper_core.model.jobs.command import Command, CommandType

    hook_schema = self.schema['components']['schemas']['piper-yml']
    try:
        Draft4Validator(schema=hook_schema).validate(yml)
    except ValidationError as e:
        raise Exception(e.message)

    stages = dict()
    for idx, s in enumerate(yml['stages']):
        stage = Stage()
        stage.name = s
        stage.order = idx
        stage.build = build
        stages[s] = stage

    jobs = list()
    environments = list()
    commands = list()
    for job_name, job_def in yml['jobs'].items():
        if job_def['stage'] not in stages:
            raise Exception
        job = Job()
        job.stage = stages[job_def['stage']]
        jobs.append(job)
        if 'when' in job_def:
            job.only = job_def['when']
        if 'runner' in job_def:
            # FIXME check if group exists
            job.group = job_def['runner']
        job.image = job_def['image']

        if 'env' in job_def:
            for env_name, env_value in job_def['env'].items():
                env = Environment()
                env.name = env_name
                env.value = env_value
                env.job = job
                environments.append(env)

        for idx, command_cmd in enumerate(job_def['commands']):
            command = Command()
            command.order = idx
            command.cmd = command_cmd
            command.job = job
            command.type = CommandType.NORMAL
            commands.append(command)

        if 'after_failure' in job_def:
            for idx, command_cmd in enumerate(job_def['after_failure']):
                command = Command()
                command.order = idx
                command.cmd = command_cmd
                command.job = job
                command.type = CommandType.AFTER_FAILURE
                commands.append(command)

    with database_proxy.atomic():
        build.save()
        for _, stage in stages.items():
            stage.save()
        for job in jobs:
            job.save()
        for env in environments:
            env.save()
        for command in commands:
            command.save()
parser.add_argument("-v", "--verbose", dest="verbose_count",
                    action="count", default=0,
                    help="increases log verbosity for each occurrence.")
arguments = parser.parse_args()
logger = colorlog.getLogger()
# Start off at Error, reduce by one level for each -v argument
logger.setLevel(max(4 - arguments.verbose_count, 0) * 10)
handler = colorlog.StreamHandler()
handler.setFormatter(colorlog.ColoredFormatter())
logger.addHandler(handler)

schema = json.load(io.open('schema.json', encoding='utf-8'))
seen_ids = set()
resolver = RefResolver('', None)
validator = Draft4Validator(schema, resolver=resolver)
borkenbuild = False
spacesave = 0

for filename in arguments.path:
    try:
        # dict_raise_on_duplicates raises an error on duplicate keys in
        # the geojson source
        source = json.load(io.open(filename, encoding='utf-8'),
                           object_pairs_hook=dict_raise_on_duplicates)
        # jsonschema validate
        validator.validate(source, schema)
        sourceid = source['properties']['id']
        if sourceid in seen_ids:
            raise ValidationError('Id %s used multiple times' % sourceid)
class InstructorView(View):

    schema = Draft4Validator({
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'properties': {
            'name': {
                'oneOf': [
                    {'type': 'boolean', 'enum': [False]},
                    {'type': 'string', 'minLength': 1}
                ]
            },
            'email-address': {
                'oneOf': [
                    {'type': 'boolean', 'enum': [False]},
                    {'type': 'string', 'format': 'email', 'minLength': 1}
                ]
            },
            'picture': {
                'oneOf': [
                    {'type': 'boolean', 'enum': [False]},
                    {'type': 'string', 'minLength': 1}
                ]
            },
            'subjects': {
                'type': 'array',
                'uniqueItems': True,
                'additionalItems': True,
                'items': {
                    'type': 'object',
                    'properties': {
                        'code': {'type': 'string'},
                        'action': {
                            'type': 'string',
                            'enum': ['add', 'rem', 'chg']
                        },
                        'confidence': {
                            'type': 'number',
                            'minimum': 0.0,
                            'maximum': 1.0,
                            'exclusiveMinimum': False,
                            'exclusiveMaximum': False
                        }
                    },
                    'required': ['code', 'action']
                }
            }
        },
        'required': ['name', 'email-address', 'picture', 'subjects']
    })

    @method_decorator(csrf_protect)
    @method_decorator(login_required)
    # @method_decorator(role_required('administrator'))
    def get(self, request, id=''):
        try:
            instructor = Instructor.objects.select_subclasses().get_active(
                user_id=id)
        except Instructor.DoesNotExist:
            return JsonResponse({'version': '0.1.0', 'status': 404},
                                status=404)
        else:
            # Serialize the instructor's full data and send the response
            return JsonResponse({
                'version': '0.1.0',
                'status': 200,
                'instructor': {
                    'id': instructor.user_id,
                    'name': instructor.full_name,
                    'email': instructor.email_address,
                    'picture': instructor.picture.url,
                    'created': mktime(
                        instructor.date_registered.utctimetuple()),
                    'last-login': mktime(
                        instructor.last_login.utctimetuple())
                    if instructor.last_login else False,
                    'specialties': [{
                        'id': s.subject.id,
                        'code': s.subject.code,
                        'name': s.subject.name,
                        'confidence': s.confidence
                    } for s in instructor.specialties.all().filter(
                        active=True)]
                }
            })

    @method_decorator(csrf_protect)
    @method_decorator(login_required)
    # @method_decorator(role_required('administrator'))
    def post(self, request, id=''):
        data = loads(request.body)
        try:
            InstructorView.schema.validate(data)
            instructor = Instructor.objects.select_subclasses().get_active(
                user_id=id)
            # Apply the changes in a transaction since we may need to
            # perform several updates at once
            with atomic():
                # Apply each update available
                if data['name'] is not False:
                    instructor.full_name = data['name']
                if data['email-address'] is not False:
                    instructor.email_address = data['email-address']
                if data['picture'] is not False:
                    # In this case, "picture" points to the name of the form
                    # element which held the file (for flexibility at
                    # frontend)
                    if data['picture'] not in request.FILES:
                        return JsonResponse(
                            {'version': '0.1.0', 'status': 409}, status=409)
                    instructor.picture = request.FILES[data['picture']]
                try:
                    for entry in data['subjects']:
                        # Specialties are not as straightforward, so we
                        # update manually - these can be edited as well
                        subject, action = Subject.objects.get_active(
                            code__iexact=entry['code']).id, entry['action']
                        if action == 'add':
                            Specialty.objects.create(
                                subject_id=subject,
                                instructor_id=instructor.id,
                                confidence=entry.get('confidence', 1.0))
                        elif action == 'chg':
                            Specialty.objects.active(
                                subject_id=subject,
                                instructor_id=instructor.id).update(
                                    confidence=entry.get('confidence', 1.0))
                        elif action == 'rem':
                            Specialty.objects.get_active(
                                subject_id=subject,
                                instructor_id=instructor.id).delete(soft=True)
                # Catch both exceptions as a tuple; the original
                # `except A, B:` form would have bound B as the error name
                except (Specialty.DoesNotExist, Subject.DoesNotExist):
                    return JsonResponse({'version': '0.1.0', 'status': 404},
                                        status=404)
                # Commit the changes now, then serialize the updated
                # instructor
                instructor.save()
            return JsonResponse({
                'version': '0.1.0',
                'status': 200,
                'instructor': {
                    'id': instructor.user_id,
                    'name': instructor.full_name,
                    'email': instructor.email_address,
                    'picture': instructor.picture.url,
                    'created': mktime(
                        instructor.date_registered.utctimetuple()),
                    'last-login': mktime(
                        instructor.last_login.utctimetuple())
                    if instructor.last_login else False,
                    'specialties': [{
                        'id': s.subject.id,
                        'code': s.subject.code,
                        'name': s.subject.name,
                        'confidence': s.confidence
                    } for s in Specialty.objects.active(
                        instructor_id=instructor.id)]
                }
            })
        except Instructor.DoesNotExist:
            return JsonResponse({'version': '0.1.0', 'status': 404},
                                status=404)
        except ValidationError:
            return JsonResponse({'version': '0.1.0', 'status': 403},
                                status=403)

    @method_decorator(csrf_protect)
    @method_decorator(login_required)
    # @method_decorator(role_required('administrator'))
    def delete(self, request, id=''):
        try:
            instructor = Instructor.objects.select_subclasses().get_active(
                user_id=id)
        except Instructor.DoesNotExist:
            return JsonResponse({'version': '0.1.0', 'status': 404},
                                status=404)
        else:
            # We use soft-delete here, so no need to worry
            instructor.delete(soft=True)
            return JsonResponse({'version': '0.1.0', 'status': 200})
def __init__(self, tgt, top_level_dirs=None, app_config=None):
    """Construct a mbed configuration

    Positional arguments:
    target - the name of the mbed target used for this configuration
             instance

    Keyword arguments:
    top_level_dirs - a list of top level source directories (where
                     mbed_app_config.json could be found)
    app_config - location of a chosen mbed_app.json file

    NOTE: Construction of a Config object will look for the application
    configuration file in top_level_dirs. If found once, it'll parse it.
    top_level_dirs may be None (in this case, the constructor will not
    search for a configuration file).
    """
    config_errors = []
    self.app_config_location = app_config
    if self.app_config_location is None and top_level_dirs:
        self.app_config_location = self.find_app_config(top_level_dirs)
    try:
        self.app_config_data = json_file_to_dict(self.app_config_location) \
            if self.app_config_location else {}
    except ValueError:
        self.app_config_data = {}
        config_errors.append(
            ConfigException(
                "Could not parse mbed app configuration from %s" %
                self.app_config_location))

    if self.app_config_location is not None:
        # Validate the format of the JSON file based on schema_app.json
        schema_root = os.path.dirname(os.path.abspath(__file__))
        schema_path = os.path.join(schema_root, "schema_app.json")
        schema = json_file_to_dict(schema_path)

        url = moves.urllib.request.pathname2url(schema_path)
        uri = moves.urllib_parse.urljoin("file://", url)

        resolver = RefResolver(uri, schema)
        validator = Draft4Validator(schema, resolver=resolver)

        errors = sorted(validator.iter_errors(self.app_config_data))
        if errors:
            raise ConfigException(",".join(x.message for x in errors))

    # Update the list of targets with the ones defined in the application
    # config, if applicable
    self.lib_config_data = {}
    # Make sure that each config is processed only once
    self.processed_configs = {}
    if isinstance(tgt, Target):
        self.target = tgt
    else:
        if tgt in TARGET_MAP:
            self.target = TARGET_MAP[tgt]
        else:
            self.target = generate_py_target(
                self.app_config_data.get("custom_targets", {}), tgt)
    self.target = deepcopy(self.target)
    self.target_labels = self.target.labels
    for override in BOOTLOADER_OVERRIDES:
        _, attr = override.split(".")
        setattr(self.target, attr, None)

    self.cumulative_overrides = {
        key: ConfigCumulativeOverride(key)
        for key in CUMULATIVE_ATTRIBUTES
    }

    self._process_config_and_overrides(self.app_config_data, {}, "app",
                                       "application")
    self.config_errors = config_errors
class InstructorCreateView(View):

    schema = Draft4Validator({
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'properties': {
            'id': {'type': 'string'},
            'general': {
                'type': 'object',
                'properties': {
                    'name': {'type': 'string'},
                    'email-address': {'type': 'string', 'format': 'email'},
                    'title': {'type': 'string'}
                },
                'required': ['name', 'email-address']
            },
            'subjects': {
                'type': 'array',
                'uniqueItems': True,
                'minItems': 1,
                'items': {'type': 'string'}
            }
        },
        'required': ['id', 'general', 'subjects']
    })

    @method_decorator(csrf_protect)
    @method_decorator(login_required)
    # @method_decorator(role_required('administrator'))
    def put(self, request):
        data = loads(request.body)
        try:
            # First, validate the input data through a JSON schema
            InstructorCreateView.schema.validate(data)
            # Check if this user is not a duplicate (enroll ID is ab-so-lute)
            if Instructor.objects.active(user_id=data['id']).exists():
                return JsonResponse({'version': '0.1.0', 'status': 409},
                                    status=409)
            # We need to process many queries and related stuff, so we wrap
            # everything in a transaction
            with atomic():
                # Create the instructor object
                instructor = Instructor(
                    user_id=data['id'],
                    full_name=data['general']['name'],
                    email_address=data['general']['email-address'],
                    availability=Availability.objects.create(
                        expiry=now() + timedelta(days=15)),
                    role=Role.objects.get(codename='instructor'))
                # Create a randomized password for the instructor for now
                # (we will let them change it later on)
                password = User.objects.make_random_password(
                    12,
                    allowed_chars='abcdefghijklmnopqrstuvwxyz'
                                  'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
                instructor.set_password(password)
                instructor.save()
                # Connect with each specialty subject - assign full
                # confidence as default value
                for code in data['subjects']:
                    Specialty.objects.create(
                        subject=Subject.objects.get(code__iexact=code),
                        instructor=instructor,
                        confidence=1.0)
                # Send an email to the user with his/her password, as it was
                # randomly-generated
                email = TemplateEmailMessage(
                    subject=_('Your password for Project Rhea'),
                    from_email='noreply@project_rhea.com',
                    to=[instructor.email_address],
                    context=RequestContext(request, {
                        'instructor': instructor,
                        'password': password
                    }))
                email.attach_from_template('rhea/email/new-instructor.txt',
                                           'text/plain')
                # Send exactly once; the comparison below doubles as the
                # delivery report (the original code called send() twice)
                return JsonResponse({
                    'version': '0.1.0',
                    'status': 201,
                    'email': email.send(fail_silently=True) == 1,
                    'instructor': {
                        'id': instructor.user_id,
                        'name': instructor.full_name,
                        'email': instructor.email_address,
                        'created': mktime(
                            instructor.date_registered.utctimetuple())
                    }
                }, status=201)
        except ValidationError:
            return JsonResponse({'version': '0.1.0', 'status': 403},
                                status=403)
def persist_lines(config, lines) -> None:
    state = None
    flushed_state = None
    schemas = {}
    key_properties = {}
    validators = {}
    records_to_load = {}
    csv_files_to_load = {}
    row_count = {}
    stream_to_sync = {}
    total_row_count = {}
    batch_size_rows = config.get('batch_size_rows', DEFAULT_BATCH_SIZE_ROWS)

    # Loop over lines from stdin
    for line in lines:
        try:
            o = json.loads(line)
        except json.decoder.JSONDecodeError:
            logger.error("Unable to parse:\n{}".format(line))
            raise

        if 'type' not in o:
            raise Exception(
                "Line is missing required key 'type': {}".format(line))
        t = o['type']

        if t == 'RECORD':
            if 'stream' not in o:
                raise Exception(
                    "Line is missing required key 'stream': {}".format(line))
            if o['stream'] not in schemas:
                raise Exception(
                    "A record for stream {} was encountered before a "
                    "corresponding schema".format(o['stream']))

            # Get schema for this record's stream
            stream = o['stream']

            adjust_timestamps_in_record(o['record'], schemas[stream])

            # Validate record
            try:
                validators[stream].validate(float_to_decimal(o['record']))
            except Exception as ex:
                if type(ex).__name__ == "InvalidOperation":
                    logger.error(
                        "Data validation failed and cannot load to "
                        "destination. RECORD: {}\n'multipleOf' validations "
                        "that allow long precisions are not supported "
                        "(i.e. with 15 digits or more). Try removing "
                        "'multipleOf' methods from JSON schema."
                        .format(o['record']))
                raise ex

            primary_key_string = stream_to_sync[
                stream].record_primary_key_string(o['record'])
            if not primary_key_string:
                primary_key_string = 'RID-{}'.format(total_row_count[stream])

            if stream not in records_to_load:
                records_to_load[stream] = {}

            # increment row count only when a new PK is encountered in the
            # current batch
            if primary_key_string not in records_to_load[stream]:
                row_count[stream] += 1
                total_row_count[stream] += 1

            # append record
            if config.get('add_metadata_columns') or config.get('hard_delete'):
                records_to_load[stream][primary_key_string] = \
                    add_metadata_values_to_record(o, stream_to_sync[stream])
            else:
                records_to_load[stream][primary_key_string] = o['record']

            if row_count[stream] >= batch_size_rows:
                # flush all streams, delete records if needed, reset counts
                # and then emit current state
                if config.get('flush_all_streams'):
                    filter_streams = None
                else:
                    filter_streams = [stream]

                # Flush and return a new state dict with new positions only
                # for the flushed streams
                flushed_state = flush_streams(records_to_load,
                                              row_count,
                                              stream_to_sync,
                                              config,
                                              state,
                                              flushed_state,
                                              filter_streams=filter_streams)
                # emit last encountered state
                emit_state(copy.deepcopy(flushed_state))

        elif t == 'SCHEMA':
            if 'stream' not in o:
                raise Exception(
                    "Line is missing required key 'stream': {}".format(line))

            stream = o['stream']
            schemas[stream] = float_to_decimal(o['schema'])
            validators[stream] = Draft4Validator(
                schemas[stream], format_checker=FormatChecker())

            # flush records from previous stream SCHEMA
            # if the same stream has been encountered again, it means the
            # schema might have been altered, so previous records need to be
            # flushed
            if row_count.get(stream, 0) > 0:
                flushed_state = flush_streams(records_to_load, row_count,
                                              stream_to_sync, config, state,
                                              flushed_state)
                # emit latest encountered state
                emit_state(flushed_state)

            # key_properties key must be available in the SCHEMA message.
            if 'key_properties' not in o:
                raise Exception("key_properties field is required")

            # Log based and Incremental replications on tables with no
            # Primary Key cause duplicates when merging UPDATE events.
            # Stop loading data by default if no Primary Key.
            #
            # If you want to load tables with no Primary Key:
            #   1) Set `'primary_key_required': false` in the target-bigquery
            #      config.json
            #   or
            #   2) Use fastsync [postgres-to-bigquery, mysql-to-bigquery, etc.]
            if config.get('primary_key_required', True) and \
                    len(o['key_properties']) == 0:
                logger.critical(
                    "Primary key is set to mandatory but not defined in "
                    "the [{}] stream".format(stream))
                raise Exception("key_properties field is required")

            key_properties[stream] = o['key_properties']

            if config.get('add_metadata_columns') or config.get('hard_delete'):
                stream_to_sync[stream] = DbSync(
                    config, add_metadata_columns_to_schema(o))
            else:
                stream_to_sync[stream] = DbSync(config, o)

            try:
                stream_to_sync[stream].create_schema_if_not_exists()
                stream_to_sync[stream].sync_table()
            except Exception as e:
                logger.error("""
                    Cannot sync table structure in BigQuery schema: {} .
                """.format(stream_to_sync[stream].schema_name))
                raise e

            row_count[stream] = 0
            total_row_count[stream] = 0
            csv_files_to_load[stream] = NamedTemporaryFile(mode='w+b')

        elif t == 'ACTIVATE_VERSION':
            logger.debug('ACTIVATE_VERSION message')

        elif t == 'STATE':
            logger.debug('Setting state to {}'.format(o['value']))
            state = o['value']
            # Initially set flushed state
            if not flushed_state:
                flushed_state = copy.deepcopy(state)

        else:
            raise Exception("Unknown message type {} in message {}".format(
                o['type'], o))

    # if some bucket has records that need to be flushed but haven't reached
    # batch size, then flush all buckets.
    if sum(row_count.values()) > 0:
        # flush all streams one last time, delete records if needed, reset
        # counts and then emit current state
        flushed_state = flush_streams(records_to_load, row_count,
                                      stream_to_sync, config, state,
                                      flushed_state)

    # emit latest state
    emit_state(copy.deepcopy(flushed_state))
def _validate_schema(completions):
    # Ensure completions-1.json file adheres to a JSON schema.
    validator = Draft4Validator(COMPLETIONS_SCHEMA)
    errors = list(e.message for e in validator.iter_errors(completions))
    if errors:
        raise AssertionError('\n'.join(errors))
from zerorobot import service_collection as scol
from zerorobot import template_collection as tcol
from zerorobot import blueprint
from zerorobot.service_collection import ServiceConflictError
from zerorobot.template.exceptions import BadActionArgumentError
from zerorobot.template_collection import TemplateConflictError, \
    TemplateNameError, TemplateNotFoundError
from zerorobot.template_uid import TemplateUID
from zerorobot.server import auth

from .views import task_view, service_view

dir_path = os.path.dirname(os.path.realpath(__file__))
Blueprint_schema = JSON.load(open(dir_path + "/schema/Blueprint_schema.json"))
Blueprint_schema_resolver = jsonschema.RefResolver(
    "file://" + dir_path + "/schema/", Blueprint_schema)
Blueprint_schema_validator = Draft4Validator(
    Blueprint_schema, resolver=Blueprint_schema_resolver)


@auth.admin_user.login_required
def ExecuteBlueprintHandler():
    """
    Execute a blueprint on the ZeroRobot
    It is the handler for POST /blueprints
    """
    inputs = request.get_json()
    try:
        Blueprint_schema_validator.validate(inputs)
    except jsonschema.ValidationError as err:
        return jsonify(code=400, message=str(err)), 400

    try:
def _validate_command(command_config):
    options = command_config.keys()
    assert 'label' in options
    assert 'schema' in options
    assert 'callable' in options
    # check_schema actually validates the schema document; merely
    # instantiating Draft4Validator(...) would silently accept a
    # malformed schema.
    Draft4Validator.check_schema(command_config['schema'])
def _validate_schema(self):
    """Validate via JSON schema."""
    try:
        Draft4Validator(yaml_load(self.SCHEMA_FILE)).validate(self._content)
    except ValidationError as e:
        raise TankTestCaseError(
            'Failed to validate testcase {}'.format(self._filename), e)
"parent_header": { "type": "object" }, "metadata": { "type": "object" }, "content": { "type": "object" }, # Checked separately "buffers": { "type": "array" } }, "required": ["header", "parent_header", "metadata", "content"], } msg_structure_validator = Draft4Validator(msg_schema) def get_error_reply_validator(version_minor): return Draft4Validator({ "$schema": "http://json-schema.org/draft-04/schema#", "description": "Jupyter 'error' reply schema", "type": "object", "properties": { "status": { "const": "error" }, "ename": {
def validate_schema(schema):
    Draft4Validator.check_schema(schema)
def __init__(self, schema):
    self.validator = Draft4Validator(schema, resolver=resolver)