def __init__(self, site_host=None):
    """Initialise the client and fetch the JSON schemas used for validation.

    Arguments:
        site_host(str): The site host to post the data to, by default
            http://openconnectome.me.

    Returns:
        None

    Raises:
        OSError: if the dataset, channel or project schema cannot be
            fetched from SCHEMA_BASE.
    """
    self.channels = {}
    self.dataset = []
    self.project = []
    self.metadata = ''
    if site_host is not None:
        self.oo = nd(site_host)
    else:
        self.oo = nd()
    # BUG FIX: the project-schema failure branch raised the undefined name
    # ``Value`` (a NameError at runtime). All three failures now raise
    # OSError consistently, via one shared helper.
    self.DATASET_SCHEMA = self._fetch_schema('dataset_schema.json', 'Dataset')
    self.CHANNEL_SCHEMA = self._fetch_schema('channel_schema.json', 'Channel')
    self.PROJECT_SCHEMA = self._fetch_schema('project_schema.json', 'Project')

def _fetch_schema(self, filename, label):
    """Download *filename* from SCHEMA_BASE and return a compiled validator.

    Raises:
        OSError: when the HTTP response status is not a success (>= 300).
    """
    response = requests.get('{}/{}'.format(SCHEMA_BASE, filename))
    if response.status_code < 300:
        # SECURITY: eval() on a remote HTTP body executes arbitrary code.
        # Kept for behavioural compatibility with the original; prefer
        # json.loads(response.text) -- TODO confirm the payload is plain JSON.
        return load(eval(str(response.text)))
    raise OSError("{} schema not available".format(label))
def test_errors_object(document, expected, reason):
    """Validate *document* and check the flattened error report.

    ``schema`` is presumably a module-level fixture -- it is not a
    parameter of this test.
    """
    try:
        load(schema).validate(document)
    except ValidationError as error:
        flattened = error.flatten()
        assert flattened == expected, (reason, expected, flattened)
    else:
        assert False, 'error expected: {}'.format(reason)
def test_common(schema, description, data, valid, src):
    """Run one draft-03 suite case: *data* must (in)validate per *valid*."""
    try:
        validator = load(schema, provider=provider,
                         spec='http://json-schema.org/draft-03/schema#')
        validator.validate(data)
    except (ValidationError, CompilationError) as error:
        if valid:
            logger.exception(error)
            assert False, description
    else:
        if not valid:
            assert False, description
def test_common(schema, description, data, valid, src):
    """Run one shared suite case against the default spec."""
    try:
        load(schema, provider=provider).validate(data)
    except (ValidationError, CompilationError) as error:
        if not valid:
            return  # invalid document correctly rejected
        logger.exception(error)
        assert False, description
    else:
        if not valid:
            assert False, description
async def test_devices():
    """devices_data() must expose a mapping of mount paths to device info."""
    data = await system_grafts.devices_data()
    assert data.namespace == 'devices'
    usage_schema = {
        'properties': {
            'free': {'type': 'integer'},
            'percent': {'type': 'number'},
            'size': {'type': 'integer'},
            'used': {'type': 'integer'}
        }
    }
    device_schema = {
        'type': 'object',
        'properties': {
            'device': {'type': 'string'},
            'fstype': {'type': 'string'},
            'mountpoint': {'type': 'string'},
            'opts': {'type': 'string'},
            'usage': usage_schema
        }
    }
    schema = {
        'type': 'object',
        'patternProperties': {'(/[^/]+)+$': device_schema}
    }
    assert load(schema).validate(data.value)
async def test_memory():
    """memory_data() must report virtual and swap sections."""
    data = await system_grafts.memory_data()
    assert data.namespace == 'memory'

    def section():
        # Both memory sections share the same shape.
        return {
            'type': 'object',
            'properties': {
                'free': {'type': 'integer'},
                'percent': {'type': 'number'},
                'total': {'type': 'integer'}
            },
            'required': ['free', 'percent', 'total']
        }

    schema = {
        'type': 'object',
        'properties': {'virtual': section(), 'swap': section()},
        'required': ['virtual', 'swap']
    }
    assert load(schema).validate(data.value)
async def test_os():
    """os_info() must describe path, shell and uname, with no namespace."""
    data = await system_grafts.os_info()
    assert data.namespace is None
    uname_fields = ['machine', 'node', 'processor', 'release', 'system',
                    'version']
    schema = {
        'type': 'object',
        'properties': {
            'path': {
                'type': 'array',
                'items': {'type': 'string', 'format': 'facts:path'}
            },
            'shell': {'type': 'string'},
            'uname': {
                'type': 'object',
                'properties': {name: {'type': 'string'}
                               for name in uname_fields},
                'required': uname_fields
            }
        },
        'required': ['path', 'shell', 'uname']
    }
    assert load(schema).validate(data.value)
def test_issue4():
    """Array items under oneOf accept strings/numbers, reject the rest."""
    validator = load({
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'properties': {
            'props': {
                'type': 'array',
                'items': {
                    'oneOf': [{'type': 'string'}, {'type': 'number'}]
                }
            }
        }
    })
    for good in (['hello'], [42, 'you']):
        assert {'props': good} == validator.validate({'props': good})
    for bad in ([None], None, 'hello', 42):
        with pytest.raises(ValidationError):
            validator.validate({'props': bad})
def json_validate(json_file):
    """Load a JSON schema from *json_file* and return a compiled validator."""
    with open(json_file) as handle:
        schema = json.load(handle)
    return load(schema)
def test_tuple():
    """Tuples must validate as fixed-position (address, interface) arrays."""
    doc = {
        "default": {"ipv4": ("10.140.65.1", "en4")},
        "ipv4": [("10.140.65.1", "en4", True), ("10.140.66.1", "en0", False)],
    }

    def pair(fmt):
        # Positional schema: [address in format *fmt*, interface name].
        return [{"type": "string", "format": fmt}, {"type": "string"}]

    flag = {"type": "boolean"}
    schema = {
        "type": "object",
        "properties": {
            "default": {
                "type": "object",
                "properties": {
                    "ipv4": {"type": "array", "items": pair("ipv4")},
                    "ipv6": {"type": "array", "items": pair("ipv6")},
                },
            },
            "ipv4": {
                "type": "array",
                "items": {"type": "array", "items": pair("ipv4") + [flag]},
            },
            "ipv6": {
                "type": "array",
                "items": {"type": "array", "items": pair("ipv6") + [flag]},
            },
        },
        "required": ["default"],
    }
    assert load(schema).validate(doc)
async def test_mac_addr():
    """mac_addr_info() must return a value containing a 'mac' string."""
    data = await system_grafts.mac_addr_info()
    assert data.namespace is None
    schema = {
        'type': 'object',
        'properties': {'mac': {'type': 'string'}},
        'required': ['mac']
    }
    assert load(schema).validate(data.value)
def validateResult(result):
    """Validate *result* against the metadata schema stored on disk.

    Raises whatever the jsonspec validator raises on mismatch.
    """
    with open('metadata_model_no_uri.json') as data_file:
        schema = json.load(data_file)
    # Compile with jsonspec and check the document in one pass.
    load(schema).validate(result)
async def test_cpu():
    """cpu_info() must report integer 'count' and 'logical' fields."""
    data = await system_grafts.cpu_info()
    assert data.namespace == 'cpu'
    schema = {
        'type': 'object',
        'properties': {
            'count': {'type': 'integer'},
            'logical': {'type': 'integer'}
        },
        'required': ['count', 'logical']
    }
    assert load(schema).validate(data.value)
async def test_uptime_data():
    """uptime_data() must expose a numeric uptime and a boottime field."""
    data = await system_grafts.uptime_data()
    schema = {
        'type': 'object',
        'properties': {
            'uptime': {'type': 'number'},
            'boottime': {},  # any type accepted
        },
        'required': ['boottime', 'uptime']
    }
    assert load(schema).validate(data.value)
async def test_network():
    """network_info() must report hostname plus nullable ipv4/ipv6 fields."""
    data = await system_grafts.network_info()
    assert data.namespace is None
    nullable_string = {'type': ['string', 'null']}
    schema = {
        'type': 'object',
        'properties': {
            'hostname': {'type': 'string'},
            'ipv4': nullable_string,
            'ipv6': nullable_string,
        },
        'required': ['hostname', 'ipv4', 'ipv6']
    }
    assert load(schema).validate(data.value)
def test_check():
    """five.schema.json must reject a creditcard entry with only a provider.

    BUG FIX: the fall-through previously called ``self.fail`` in a plain
    module-level function that has no ``self``, raising NameError instead
    of a test failure.
    """
    validator = load(fixture('five.schema.json'))
    try:
        validator.validate({
            'creditcard': {
                'provider': 'visa'
            }
        })
    except ValidationError:
        pass
    else:
        assert False, "shouldn't happen"
def test_check(self):
    """five.schema.json must reject a creditcard entry with only a provider."""
    validator = load(fixture('five.schema.json'))
    document = {
        'creditcard': {
            'provider': 'visa'
        }
    }
    try:
        validator.validate(document)
    except ValidationError:
        return  # expected rejection
    self.fail("shouldn't happen")
async def test_locale():
    """locale_info() must provide nullable language and encoding strings."""
    data = await system_grafts.locale_info()
    assert data.namespace == 'locale'
    assert 'language' in data.value
    assert 'encoding' in data.value
    nullable_string = {'type': ['string', 'null']}
    schema = {
        'type': 'object',
        'properties': {
            'encoding': nullable_string,
            'language': nullable_string
        },
        'required': ['encoding', 'language']
    }
    assert load(schema).validate(data.value)
def test_check_2():
    """A mastercard entry with a securitycode must still fail validation.

    BUG FIX: the fall-through previously called ``self.fail`` in a plain
    module-level function that has no ``self``, raising NameError instead
    of a test failure.
    """
    validator = load(fixture('five.schema.json'))
    try:
        validator.validate({
            'creditcard': {
                'provider': 'mastercard',
                'securitycode': 123
            }
        })
    except ValidationError:
        pass
    else:
        assert False, "shouldn't happen"
def test_check_2(self):
    """A mastercard entry with a securitycode must still fail validation."""
    validator = load(fixture('five.schema.json'))
    try:
        # The return value was previously bound to an unused local
        # ``response``; dropped.
        validator.validate({
            'creditcard': {
                'provider': 'mastercard',
                'securitycode': 123
            }
        })
    except ValidationError:
        pass
    else:
        self.fail("shouldn't happen")
async def test_gateways():
    """gateways_info() must map gateways to (addr, iface[, default]) tuples."""
    data = await system_grafts.gateways_info()
    assert data.namespace == 'gateways'

    def addr_pair(fmt):
        # Positional schema: [gateway address in format *fmt*, interface name].
        return [{'type': 'string', 'format': fmt}, {'type': 'string'}]

    is_default = {'type': 'boolean'}
    schema = {
        'type': 'object',
        'properties': {
            'default': {
                'type': 'object',
                'properties': {
                    'ipv4': {'type': 'array', 'items': addr_pair('ipv4')},
                    'ipv6': {'type': 'array', 'items': addr_pair('ipv6')}
                }
            },
            'ipv4': {
                'type': 'array',
                'items': {
                    'type': 'array',
                    'items': addr_pair('ipv4') + [is_default]
                }
            },
            'ipv6': {
                'type': 'array',
                'items': {
                    'type': 'array',
                    'items': addr_pair('ipv6') + [is_default]
                }
            }
        },
        'required': ['default']
    }
    assert load(schema).validate(data.value)
async def test_facts():
    """facts_info() must include the facts version and graft directories."""
    data = await system_grafts.facts_info()
    assert data.namespace is None
    schema = {
        'type': 'object',
        'properties': {
            'facts_version': {'type': 'string'},
            'grafts_dirs': {
                'type': 'array',
                'items': {'type': 'string', 'format': 'facts:path'}
            }
        },
        'required': ['facts_version', 'grafts_dirs']
    }
    assert load(schema).validate(data.value)
def run(self, args):
    """Validate ``args.document`` against ``args.schema``.

    Returns the serialised validated document on success; raises
    ``Exception`` with a flattened per-pointer report on failure.
    """
    parse_document(args)
    parse_schema(args)
    from jsonspec.validators import load
    from jsonspec.validators import ValidationError
    try:
        validated = load(args.schema).validate(args.document)
        return driver.dumps(validated, indent=args.indent)
    except ValidationError as error:
        # Assemble the report as a list of fragments, then join once.
        report = ['document does not validate with schema.\n\n']
        for pointer, reasons in error.flatten().items():
            report.append(' {}\n'.format(pointer))
            for reason in reasons:
                report.append(' - reason {}\n'.format(reason))
            report.append('\n')
        raise Exception(''.join(report))
async def test_python():
    """python_info() must describe interpreter version, executable and path."""
    data = await system_grafts.python_info()
    assert data.namespace == 'python'
    schema = {
        'type': 'object',
        'properties': {
            'version': {'type': 'string'},
            'executable': {'type': 'string'},
            'path': {
                'type': 'array',
                'items': {'type': 'string', 'format': 'facts:path'}
            }
        },
        'required': ['version', 'executable', 'path']
    }
    assert load(schema).validate(data.value)
def run(self, args):
    """Validate ``args.document`` against ``args.schema``.

    Returns the serialised validated document on success; raises
    ``Exception`` with a flattened per-pointer report on failure.
    """
    parse_document(args)
    parse_schema(args)
    from jsonspec.validators import load
    from jsonspec.validators import ValidationError
    try:
        validated = load(args.schema).validate(args.document)
        return driver.dumps(validated, indent=args.indent)
    except ValidationError as error:
        msg = 'document does not validate with schema.\n\n'
        # BUG FIX: ``error.flatten`` is a method; it was previously used
        # without calling it, so ``.items()`` raised AttributeError on the
        # error-reporting path (compare the sibling implementation).
        for pointer, reasons in error.flatten().items():
            msg += ' {}\n'.format(pointer)
            for reason in reasons:
                msg += ' - reason {}\n'.format(reason)
            msg += '\n'
        raise Exception(msg)
def test_issue5():
    """$ref resolution must not depend on the platform path separator."""
    import os
    saved_sep = os.sep
    try:
        os.sep = '\\'  # simulate a Windows-style separator
        validator = load({
            '$schema': 'http://json-schema.org/draft-04/schema#',
            'type': 'object',
            'definitions': {
                'test': {
                    'type': 'object',
                    'properties': {'foo': {'type': 'string'}},
                    'additionalProperties': False
                }
            },
            'properties': {'bar': {'$ref': '#/definitions/test'}}
        })
        document = {'bar': {'foo': 'test'}}
        assert document == validator.validate(document)
        with pytest.raises(ValidationError):
            validator.validate({'bar': {'foo': 'test', 'more': 2}})
    finally:
        os.sep = saved_sep
def test_issue4():
    """oneOf item schemas: strings/numbers pass, everything else fails."""
    schema = {
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'properties': {
            'props': {
                'type': 'array',
                'items': {
                    'oneOf': [{'type': 'string'}, {'type': 'number'}]
                }
            }
        }
    }
    validator = load(schema)
    assert validator.validate({'props': ['hello']}) == {'props': ['hello']}
    assert validator.validate({'props': [42, 'you']}) == {'props': [42, 'you']}
    invalid_documents = ({'props': [None]}, {'props': None},
                         {'props': 'hello'}, {'props': 42})
    for document in invalid_documents:
        with pytest.raises(ValidationError):
            validator.validate(document)
async def test_interfaces():
    """interfaces_info() must map interface names to their address lists."""
    data = await system_grafts.interfaces_info()
    assert data.namespace == 'interfaces'
    assert load({
        'type': 'object',
        'patternProperties': {
            # BUG FIX: raw string -- '\w' in a plain literal is an invalid
            # escape sequence (DeprecationWarning now, SyntaxError in
            # future Python versions). The regex value is unchanged.
            r'^\w+$': {
                'type': 'object',
                'properties': {
                    'ipv6': {
                        'type': 'array',
                        'items': {
                            'type': 'object',
                            'properties': {
                                'addr': {'type': 'string'},
                                'netmask': {'type': 'string'},
                            }
                        }
                    }
                }
            }
        }
    }).validate(data.value)
# Compiled jsonspec validator for Channel ingest JSON documents.
# NOTE(review): "required" lists "scalinglevels", which is not declared among
# this schema's properties (it is a dataset-level field; the other channel
# schema requires file_format/file_type here instead) -- looks like a
# copy/paste slip; confirm against the ingest service.
CHANNEL_SCHEMA = load({
    "$schema": "http://json-schema.org/draft-04/schema#",
    "title": "Channel",
    "description": "Schema for Channel JSON object for ingest",
    "type": "object",
    "properties": {
        "channel_name": {
            "description": "Channel name for the channel",
            "type": "string"
        },
        "datatype": {
            "description": "Datatype of the channel",
            "enum": ["uint8", "uint16", "uint32", "uint64", "float32"]
        },
        # NOTE(review): this description looks copy/pasted from a scaling
        # field; the enum actually lists channel kinds, not scaling modes.
        "channel_type": {
            "description": "Type of Scaling - Isotropic(1) or Normal(0)",
            "enum": ["image", "annotation", "probmap", "timeseries"]
        },
        "exceptions": {
            "description": "Enable exceptions - Yes(1) or No(0) (for annotation data)",
            "type": "integer"
        },
        "resolution": {
            "description": "Start Resolution (for annotation data)",
            "type": "integer"
        },
        "windowrange": {
            "description": "Window clamp function for 16-bit channels with low max value of pixels",
            "type": "array"
        },
        "readonly": {
            "description": "Read-only Channel(1) or Not(0). You can remotely post to channel if it is not readonly and overwrite data",
            "type": "integer"
        },
        "data_url": {
            "description": "This url points to the root directory of the files. Dropbox is not an acceptable HTTP Server.",
            "type": "string"
        },
        "file_format": {
            "description": "This is the file format type. For now we support only Slice stacks and CATMAID tiles.",
            "enum": ["SLICE", "CATMAID"]
        },
        "file_type": {
            "description": "This the file type the data is stored in",
            "enum": ["tif", "png", "tiff"]
        },
    },
    "required": ["channel_name", "channel_type", "data_url", "datatype", "scalinglevels"]
})
from jsonspec.validators import load

# Schema for a person record: firstName/lastName are mandatory; age is
# optional but must be a non-negative integer when present.
schema = {
    'title': 'Example Schema',
    'type': 'object',
    'properties': {
        'age': {
            'description': 'Age in years',
            'minimum': 0,
            'type': 'integer'
        },
        'firstName': {'type': 'string'},
        'lastName': {'type': 'string'}
    },
    'required': ['firstName', 'lastName']
}

validator = load(schema)

# This document satisfies the schema, so validate() returns normally.
validator.validate({
    'firstName': 'John',
    'lastName': 'Noone',
    'age': 33,
})
def test_tuple():
    """Positional array items must validate tuples of (addr, iface[, flag])."""
    doc = {
        'default': {'ipv4': ('10.140.65.1', 'en4')},
        'ipv4': [('10.140.65.1', 'en4', True), ('10.140.66.1', 'en0', False)]
    }
    # Shared positional sub-schemas: [address, interface-name].
    ipv4_pair = [{'type': 'string', 'format': 'ipv4'}, {'type': 'string'}]
    ipv6_pair = [{'type': 'string', 'format': 'ipv6'}, {'type': 'string'}]
    flag = {'type': 'boolean'}
    schema = {
        'type': 'object',
        'properties': {
            'default': {
                'type': 'object',
                'properties': {
                    'ipv4': {'type': 'array', 'items': ipv4_pair},
                    'ipv6': {'type': 'array', 'items': ipv6_pair}
                }
            },
            'ipv4': {
                'type': 'array',
                'items': {'type': 'array', 'items': ipv4_pair + [flag]}
            },
            'ipv6': {
                'type': 'array',
                'items': {'type': 'array', 'items': ipv6_pair + [flag]}
            }
        },
        'required': ['default']
    }
    assert load(schema).validate(doc)
# Compiled jsonspec validator for Dataset ingest JSON documents.
# Only dataset_name, imagesize and voxelres are mandatory; the remaining
# fields (offset, timerange, scaling levels) are optional.
DATASET_SCHEMA = load(
    {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "Schema for Dataset JSON object for ingest",
        "type": "object",
        "properties": {
            "dataset_name": {
                "description": "The name of the dataset",
                "type": "string"
            },
            "imagesize": {
                "description": "The image dimensions of the dataset",
                "type": "array",
            },
            "voxelres": {
                "description": "The voxel resolutoin of the data",
                "type": "array",
            },
            "offset": {
                "type": "array",
                "description": "The dimensions offset from origin",
            },
            "timerange": {
                "description": "The timerange of the data",
                "type": "array",
            },
            "scalinglevels": {
                "description": "Required Scaling levels/ Zoom out levels",
                "type": "integer"
            },
            "scaling": {
                "description": "Type of Scaling - Isotropic(1) or Normal(0)",
                "type": "integer"
            },
        },
        "required": ["dataset_name", "imagesize", "voxelres"]
    }
)
def _validateSchema(self):
    """Validate ``self._schema`` (the user-supplied pipeline graph).

    Checks pipeline-name uniqueness and child references by hand, then
    validates the overall shape with jsonspec.

    Raises:
        PipelineSchemaValidationError: on any structural problem.
    """
    # TODO: add fields for default values for inputs if there are no parents
    try:
        jobs = self._schema["pipelines"]
    except KeyError as e:
        raise PipelineSchemaValidationError("There was a problem getting the list of pipelines from the specification: {reason}".format(reason=e))
    jobNames = []
    for job in jobs:
        jobNames.append(job["name"])
    if len(jobNames) != len(set(jobNames)):
        raise PipelineSchemaValidationError("Pipeline names must be unique")
    # Every declared child must itself be a declared pipeline.
    for job in jobs:
        if "children" in job.keys() and "parents" not in job.keys():
            for child in job["children"]:
                if child not in jobNames:
                    raise PipelineSchemaValidationError("job '{jobName}' specifies a child that doesn't exist".format(jobName=job["name"]))
    pipelineSchema = {
        "description": "Pipeline Graph Schema",
        "type": "object",
        "properties": {
            "pipelines": {
                "type": "array",
                "items": {
                    "type": "object",
                    "additionalProperties": {"$ref": "#/definitions/pipeline"},
                }
            }
        },
        "definitions": {
            "pipeline": {
                "name": {
                    "description": "The name of the pipeline to run on an input file",
                    "type": "string",
                    "required": True
                },
                "tag": {
                    "description": "An arbitrary identifier for the pipeline",
                    "type": "string",
                    "required": True
                },
                "children": {
                    "description": "The names of the child pipelines, if there are any -- must exist in the 'pipelines' array",
                    "type": "array",
                    "items": {"type": "string"},
                    "required": False
                },
                "request": {
                    "description": "The Google Genomics Pipelines API request object",
                    "type": "object"  # TODO: schema validation for the request object
                }
            }
        }
    }
    validator = load(pipelineSchema)
    # BUG FIX: validate() was previously called once *outside* the try block
    # (letting ValidationError escape unhandled) and then again inside it.
    # A single guarded call now.
    try:
        validator.validate(self._schema)
    except ValidationError as e:
        raise PipelineSchemaValidationError("Couldn't validate the pipeline schema: {reason}".format(reason=e))
from django.http import HttpResponseForbidden, HttpResponse from django.contrib.auth import authenticate from jsonspec.validators import load import logging logger=logging.getLogger("neurodata") USER_SCHEMA=load({ "type": "object", "properties": { "user": { "type": "string", "pattern": "(?=^[^$&+,:;=?@#|'<>.^*()%!-]+$)(?=^[a-zA-Z0-9_]*$)" }, "password": { "type": "string", "pattern": "(?=^[^$&+,:;=?@#|'<>.^*()%!-]+$)(?=^[a-zA-Z0-9]*$)" }, "secret": { "type": "string", "pattern": "(?=^[^$&+,:;=?@#|'<>.^*()%!-]+$)(?=^[a-zA-Z0-9]*$)" }, }, "required": ["user","password","secret"] }) # Create your views here. @api_view(['GET']) @permission_classes([AllowAny,]) def validate(request, webargs): """Restful URL to Validate User Credentials""" try:
def _validateSchema( self ): # TODO: add fields for default values for inputs if there are no parents try: jobs = self._schema["pipelines"] except KeyError as e: print "There was a problem getting the list of pipelines from the specification" exit(-1) jobNames = [] for job in jobs: jobNames.append(job["name"]) if len(jobNames) != len(set(jobNames)): print "ERROR: pipeline names must be unique" exit(-1) for job in jobs: if "children" in job.keys() and "parents" not in job.keys(): for child in job["children"]: if child not in jobNames: print "ERROR: job '{jobName}' specifies a child that doesn't exist".format( jobName=job["name"]) exit(-1) pipelineSchema = { "description": "Pipeline Graph Schema", "type": "object", "properties": { "pipelines": { "type": "array", "items": { "type": "object", "additionalProperties": { "$ref": "#/definitions/pipeline" }, } } }, "definitions": { "pipeline": { "name": { "description": "The name of the pipeline to run on an input file", "type": "string", "required": True }, "tag": { "description": "An arbitrary identifier for the pipeline", "type": "string", "required": True }, "children": { "description": "The names of the child pipelines, if there are any -- must exist in the 'pipelines' array", "type": "array", "items": { "type": "string" }, "required": False }, "request": { "description": "The Google Genomics Pipelines API request object", "type": "object" # TODO: schema validation for the request object } } } } validator = load(pipelineSchema) validator.validate(self._schema) try: validator.validate(self._schema) except: # what kind of exception? exit(-1)
from django.http import HttpResponseForbidden, HttpResponse from django.contrib.auth import authenticate from jsonspec.validators import load import logging logger = logging.getLogger("neurodata") USER_SCHEMA = load({ "type": "object", "properties": { "user": { "type": "string", "pattern": "(?=^[^$&+,:;=?@#|'<>.^*()%!-]+$)(?=^[a-zA-Z0-9_]*$)" }, "password": { "type": "string", "pattern": "(?=^[^$&+,:;=?@#|'<>.^*()%!-]+$)(?=^[a-zA-Z0-9]*$)" }, "secret": { "type": "string", "pattern": "(?=^[^$&+,:;=?@#|'<>.^*()%!-]+$)(?=^[a-zA-Z0-9]*$)" }, }, "required": ["user", "password", "secret"] }) # Create your views here. @api_view(['GET']) @permission_classes([ AllowAny, ])
config = fastjsonschema.Config(meta_schema='draft4') fastjsonschema_validate = fastjsonschema.compile(JSON_SCHEMA, config=config) fast_compiled = lambda value, _: fastjsonschema_validate(value) fast_not_compiled = lambda value, json_schema: fastjsonschema.compile( json_schema, config=config)(value) name, code = fastjsonschema.compile_to_code(JSON_SCHEMA, config=config) with open('temp/performance.py', 'w') as f: f.write(code) from temp.performance import validate fast_file = lambda value, _: validate(value) jsonspec = load(JSON_SCHEMA) jsonschema_validator = jsonschema.Draft4Validator(JSON_SCHEMA) jsonschema_compiled = lambda value, _: jsonschema_validator.validate(value) def t(func, valid_values=True): module = func.split('.')[0] setup = """from __main__ import ( JSON_SCHEMA, VALUES_OK, VALUES_BAD, validictory, jsonschema_compiled, jsonschema, jsonspec,
# Compiled jsonspec validator for Channel ingest JSON documents (full
# version: name/enum patterns included, file_format/file_type required).
# NOTE(review): "pattern" alongside "enum" or on "type": "array" fields is
# unusual -- in JSON Schema, "pattern" constrains string instances only, so
# on non-string fields it is expected to be ignored by validators; confirm
# whether these were intended as documentation or enforcement.
CHANNEL_SCHEMA = load(
    {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "Schema for Channel JSON object for ingest",
        "type": "object",
        "properties": {
            "channel_name": {
                "description": "Channel name for the channel",
                "type": "string",
                "pattern": "^[^$&+,:;=?@#|'<>.^*()%!-]+$"
            },
            "datatype": {
                "description": "Datatype of the channel",
                "enum": ["uint8", "uint16", "uint32", "uint64", "float32"],
                "pattern": "^(uint8|uint16|uint32|uint64|float32)$"
            },
            "channel_type": {
                "description": "Type of Scaling - Isotropic(1) or Normal(0)",
                "enum": ["image", "annotation", "probmap", "timeseries"],
                "pattern": "^(image|annotation|probmap|timeseries)$"
            },
            "exceptions": {
                "description": "Enable exceptions - Yes(1) or No(0) (for annotation data)",
                "type": "integer"
            },
            "resolution": {
                "description": "Start Resolution (for annotation data)",
                "type": "integer"
            },
            "windowrange": {
                "description": "Window clamp function for 16-bit channels with low max value of pixels",
                "type": "array",
                "pattern": "^\\([0-9]+,[0-9]+\\)$"
            },
            "readonly": {
                "description": "Read-only Channel(1) or Not(0). You can remotely post to channel if it is not readonly and overwrite data",
                "type": "integer"
            },
            "data_url": {
                "description": "This url points to the root directory of the files. Dropbox is not an acceptable HTTP Server.",
                "type": "string",
                # NOTE(review): '\/' in a non-raw string is an invalid escape
                # (deprecated); the value is unchanged ('/') but a raw string
                # would be cleaner.
                "pattern": "^http:\/\/.*\/"
            },
            "file_format": {
                "description": "This is the file format type. For now we support only Slice stacks and CATMAID tiles.",
                "enum": ["SLICE", "CATMAID"],
                "pattern": "^(SLICE|CATMAID)$"
            },
            "file_type": {
                "description": "This the file type the data is stored in",
                "enum": ["tif", "png", "tiff"],
                "pattern": "^(tif|png|tiff)$"
            },
        },
        "required": ["channel_name", "channel_type", "data_url", "datatype", "file_format", "file_type"]
    }
)