def test_telemedical_parser(self):
    """Validate the laboratory parser output against the telemedical schema.

    Registers the custom Cerberus types the schema relies on, then checks
    the whole parser output in a single validation pass.
    """
    parsed = parsers.laboratory.parser(self.params)
    # Custom types referenced by the JSON schema.
    Validator.types_mapping['equipment_group'] = TypeDefinition(
        name='equipment_group',
        included_types=(models.EquipmentGroup,),
        excluded_types=(),
    )
    Validator.types_mapping['image_field'] = TypeDefinition(
        name='image_field',
        included_types=(ImageFieldFile,),
        excluded_types=(),
    )
    schema_file = settings.VALIDATOR_PATH / "parsers" / "telemedical.json"
    validator = Validator(json.loads(schema_file.read_text()))
    is_valid = validator.validate({"data": parsed})
    if not is_valid:
        log.error(validator.errors)
        log.debug(parsed)
    self.assertTrue(is_valid)
def test_create_molecules(self):
    """Validate every created molecule item, then the mapping's key types."""
    required = rescue_bag.get_required(self.params)
    output = rescue_bag.create_molecules(
        required["molecules"].keys(), required["molecules"])
    Validator.types_mapping['image_field'] = TypeDefinition(
        name='image_field', included_types=(ImageFieldFile, ), excluded_types=())
    schema_file = settings.VALIDATOR_PATH.joinpath(
        "parsers", "rescue_bag", "single_item.json")
    validator = Validator(json.loads(schema_file.read_text()))
    # Step 1: each value must match the single-item schema.
    for key, item in output.items():
        is_valid = validator.validate(item)
        if not is_valid:
            log.error(validator.errors)
            log.debug(item)
        self.assertTrue(is_valid)
    # Step 2: all dictionary keys must be integers.
    key_schema = {"data": {"type": "dict", "keysrules": {"type": "integer"}}}
    self.assertTrue(Validator(key_schema).validate({"data": output}))
class IPV6Address(Type):
    """Field type storing an ``ipaddress.IPv6Address``.

    Registers the ``ipv6address`` Cerberus type and converts values to and
    from the database representation (UTF-8 encoded bytes).
    """

    schema = {'type': 'ipv6address'}
    types_mapping = {
        'ipv6address': TypeDefinition('ipv6address', (IPv6Address, ), ())
    }

    # PEP 8 (E731): use ``def`` instead of binding lambdas to names.
    def to_db(self, value):
        """Serialize the address to its canonical string form."""
        return str(value)

    def from_db(self, value):
        """Rebuild an ``IPv6Address`` from UTF-8 encoded bytes."""
        return IPv6Address(value.decode('utf-8'))
def test_merge_bags(self):
    """Validate the merged-bags structure against its JSON schema."""
    required = rescue_bag.get_required(self.params)
    equipments = rescue_bag.get_articles(
        self.params, required["equipments"], [110, 111])
    molecules = rescue_bag.get_medicines(
        self.params, required["molecules"], [110, 111])
    merged = rescue_bag.merge_bags(
        models.RescueBag.objects.all(), molecules, equipments)
    Validator.types_mapping['image_field'] = TypeDefinition(
        name='image_field', included_types=(ImageFieldFile, ), excluded_types=())
    schema_file = settings.VALIDATOR_PATH.joinpath(
        "parsers", "rescue_bag", "merged_bags.json")
    validator = Validator(json.loads(schema_file.read_text()))
    is_valid = validator.validate({"data": merged})
    if not is_valid:
        log.error(validator.errors)
        log.debug(merged)
    self.assertTrue(is_valid)
class Validator(DefaultValidator):
    """Cerberus validator extended with a ``filestorage`` type.

    ``filestorage`` validates Werkzeug ``FileStorage`` upload objects.
    """

    # Copy the parent mapping so the extra type stays local to this subclass.
    types_mapping = dict(DefaultValidator.types_mapping)
    types_mapping['filestorage'] = TypeDefinition(
        'filestorage', (WerkzeugFileStorage, ), ()
    )
class SequencingRun:
    """An uploadable sequencing run: a project list plus run metadata."""

    # Register Project as a Cerberus type so the schema below can use it.
    _project_type = TypeDefinition('project', (Project,), ())
    Validator.types_mapping['project'] = _project_type

    uploadable_schema = {
        '_project_list': {
            'type': 'list',
            'empty': False,  # must have at least 1 project
            'schema': {'type': 'project'}
        },
        '_metadata': {
            'type': 'dict',
            'schema': {
                'layoutType': {
                    'type': 'string',
                    'required': True,
                    'allowed': ['PAIRED_END', 'SINGLE_END']
                }
            }
        },
        '_sequencing_run_type': {
            'type': 'string',
            'required': True
        }
    }

    def __init__(self, metadata, project_list, sequencing_run_type):
        self._project_list = project_list
        self._metadata = metadata
        self._sequencing_run_type = sequencing_run_type

    @property
    def metadata(self):
        return self._metadata

    @metadata.setter
    def metadata(self, metadata_dict):
        self._metadata = metadata_dict

    @property
    def project_list(self):
        return self._project_list

    @project_list.setter
    def project_list(self, p_list):
        self._project_list = p_list

    @property
    def sequencing_run_type(self):
        return self._sequencing_run_type

    @sequencing_run_type.setter
    def sequencing_run_type(self, sequencing_run_type):
        # Bug fix: this setter was named ``upload_route_string`` (copy-paste),
        # which left ``sequencing_run_type`` read-only and exposed a stray
        # ``upload_route_string`` property instead.
        self._sequencing_run_type = sequencing_run_type

    def get_dict(self):
        """Return the instance attributes as a dict (for serialization)."""
        return self.__dict__
class Validator(_Validator):
    """Base validator extends standard *Cerberus* validator with more types."""

    types_mapping = _Validator.types_mapping.copy()
    types_mapping['callable'] = TypeDefinition('callable', (Callable, ), ())
    # Bug fix: ``(Iterable)`` was a parenthesized expression, not a tuple.
    # It worked only because ``isinstance`` also accepts a bare class.
    types_mapping['iterable'] = TypeDefinition('iterable', (Iterable, ), ())
    types_mapping['sequence'] = TypeDefinition('sequence', (Sequence, ), ())
    types_mapping['mapping'] = TypeDefinition('mapping', (Mapping, ), ())

    def __init__(self, *args, allow_unknown=True, purge_unknown=False, **kwds):
        """Initialization method.

        Unknown document keys are allowed (not purged) by default.

        See Also
        --------
        cerberus.Validator : `Validator` class and its `__init__` method
        """
        kwds.update(allow_unknown=allow_unknown, purge_unknown=purge_unknown)
        super().__init__(*args, **kwds)
def register_custom_types(cls):
    """Register IP-address types on the shared validator and Redis rules.

    NOTE(review): ``cls`` is accepted but never used — presumably kept for
    classmethod/hook compatibility; confirm against the caller.
    """
    new_types = {
        'ipaddress': TypeDefinition('ipaddress', (IPv4Address, IPv6Address), ()),
        'ipv4address': TypeDefinition('ipv4address', (IPv4Address, ), ()),
        'ipv6address': TypeDefinition('ipv6address', (IPv6Address, ), ()),
    }
    Validator.types_mapping.update(new_types)
    # Per type name: [serialize to bytes, deserialize from bytes].
    new_rules = {
        'ipaddress': [lambda x: str(x), lambda x: ip_address(x.decode('utf-8'))],
        'ipv4address': [lambda x: str(x), lambda x: IPv4Address(x.decode('utf-8'))],
        'ipv6address': [lambda x: str(x), lambda x: IPv6Address(x.decode('utf-8'))],
    }
    CerbeRedis.rules.update(new_rules)
class IPV6Address(FieldType):
    """Field type mapping ``ipaddress.IPv6Address`` to and from stored bytes."""

    schema = {'type': 'ipv6address'}
    types_mapping = {
        'ipv6address': TypeDefinition('ipv6address', (IPv6Address, ), ())
    }
    # dictionary of: <cerberus type name>: [to bytes, from bytes]
    # ``str`` replaces the redundant ``lambda x: str(x)`` wrapper.
    rules = {
        'ipv6address': [str, lambda x: IPv6Address(x.decode('utf-8'))]
    }
def register_type(cls, name: str) -> None:
    """Register `name` as a type to validate as an instance of class `cls`.

    Args:
        cls: a class
        name (str): the name to be registered.
    """
    Validator.types_mapping[name] = TypeDefinition(name, (cls, ), ())
def register_quantity(quantity: pq.Quantity, name: str) -> None:
    """Register `name` as a type to validate as an instance of the class
    of `quantity`.

    Args:
        quantity (pq.Quantity): a quantity.
        name (str): the name to be registered.
    """
    Validator.types_mapping[name] = TypeDefinition(
        name, (quantity.__class__, ), ())
class IPAddress(FieldType):
    """Field type mapping IPv4/IPv6 addresses to and from stored bytes."""

    schema = {'type': 'ipaddress'}
    types_mapping = {
        'ipaddress': TypeDefinition('ipaddress', (IPv4Address, IPv6Address), ())
    }
    # dictionary of: <cerberus type name>: [to bytes, from bytes]
    # ``str`` replaces the redundant ``lambda x: str(x)`` wrapper.
    rules = {
        'ipaddress': [str, lambda x: ip_address(x.decode('utf-8'))]
    }
def test_dynamic_types():
    """Custom types work both per-instance and via a Validator subclass."""
    decimal_type = TypeDefinition("decimal", (Decimal,), ())
    schema = {"measurement": {"type": "decimal"}}
    document = {"measurement": Decimal(0)}

    # Register on an existing validator instance.
    runtime_validator = Validator()
    runtime_validator.types_mapping["decimal"] = decimal_type
    assert_success(document, schema, runtime_validator)

    # Register through a subclass with its own mapping copy.
    class MyValidator(Validator):
        types_mapping = Validator.types_mapping.copy()
        types_mapping["decimal"] = decimal_type

    assert_success(document, schema, MyValidator())
def test_dynamic_types():
    """Custom types work both per-instance and via a Validator subclass."""
    decimal_type = TypeDefinition('decimal', (Decimal, ), ())
    document = {'measurement': Decimal(0)}
    schema = {'measurement': {'type': 'decimal'}}

    # Per-instance registration.
    instance_validator = Validator()
    instance_validator.types_mapping['decimal'] = decimal_type
    assert_success(document, schema, instance_validator)

    # Subclass registration with a copied mapping.
    class MyValidator(Validator):
        types_mapping = Validator.types_mapping.copy()
        types_mapping['decimal'] = decimal_type

    assert_success(document, schema, MyValidator())
def test_parser(self):
    """Validate the full rescue-bag parser output against its schema."""
    parsed = rescue_bag.parser(self.params)
    Validator.types_mapping['image_field'] = TypeDefinition(
        name='image_field', included_types=(ImageFieldFile, ), excluded_types=())
    schema_file = settings.VALIDATOR_PATH.joinpath(
        "parsers", "rescue_bag", "rescue_bag.json")
    validator = Validator(json.loads(schema_file.read_text()))
    is_valid = validator.validate({"data": parsed})
    if not is_valid:
        log.error(validator.errors)
        log.debug(parsed)
    self.assertTrue(is_valid)
def test_parser(self):
    """Validate each first-aid kit produced by the parser."""
    kits = models.FirstAidKit.objects.all()
    parsed = first_aid.parser(self.params, kits)
    self.assertIsInstance(parsed, list)
    Validator.types_mapping['image_field'] = TypeDefinition(
        name='image_field', included_types=(ImageFieldFile, ), excluded_types=())
    schema_file = settings.VALIDATOR_PATH.joinpath(
        "parsers", "first_aid", "first_aid_kit.json")
    validator = Validator(json.loads(schema_file.read_text()))
    for kit in parsed:
        is_valid = validator.validate(kit)
        if not is_valid:
            log.error(validator.errors)
            log.debug(kit)
        self.assertTrue(is_valid)
def test_medicine_parser(self):
    """Check conformity of parser's output.

    Proceed in two steps:
    1. Check the conformity of output items
    2. Check the conformity of output keys

    Use Cerberus for checking conformity.
    """
    parsed = parsers.medicines.parser(self.params)
    Validator.types_mapping['molecule_group'] = TypeDefinition(
        name='molecule_group',
        included_types=(models.MoleculeGroup,),
        excluded_types=(),
    )
    schema_file = settings.VALIDATOR_PATH / "parsers" / "medicines.json"
    validator = Validator(json.loads(schema_file.read_text()))
    # Step 1: every value must match the medicines schema.
    for group, value in parsed.items():
        is_valid = validator.validate({"data": value})
        if not is_valid:
            log.error(validator.errors)
            log.debug(value)
        self.assertTrue(is_valid)
    # Step 2: every key must be a MoleculeGroup instance.
    key_schema = {
        "data": {
            "type": "dict",
            "keysrules": {
                "type": "molecule_group"
            }
        }
    }
    self.assertTrue(Validator(key_schema).validate({"data": parsed}))
def test_create_equipment(self):
    """Validate a single created equipment item against the item schema."""
    required = rescue_bag.get_required(self.params)
    equipment = models.Equipment.objects.get(id=2)
    created = rescue_bag.create_equipment(equipment, required["equipments"])
    Validator.types_mapping['image_field'] = TypeDefinition(
        name='image_field', included_types=(ImageFieldFile, ), excluded_types=())
    schema_file = settings.VALIDATOR_PATH.joinpath(
        "parsers", "rescue_bag", "single_item.json")
    validator = Validator(json.loads(schema_file.read_text()))
    is_valid = validator.validate(created)
    if not is_valid:
        log.error(validator.errors)
        log.debug(created)
    self.assertTrue(is_valid)
def test_create_molecule(self):
    """Validate a single created molecule item against the item schema."""
    required = first_aid.get_required(self.params)
    molecule = models.Molecule.objects.get(id=3)
    created = first_aid.create_molecule(
        item=molecule,
        content_type=self.params.content_types["molecule"],
        required=required["molecules"])
    Validator.types_mapping['image_field'] = TypeDefinition(
        name='image_field', included_types=(ImageFieldFile, ), excluded_types=())
    schema_file = settings.VALIDATOR_PATH.joinpath(
        "parsers", "first_aid", "single_item.json")
    validator = Validator(json.loads(schema_file.read_text()))
    is_valid = validator.validate(created)
    if not is_valid:
        log.error(validator.errors)
        log.debug(created)
    self.assertTrue(is_valid)
from functools import wraps

from apscheduler.schedulers.base import BaseScheduler
from cerberus import TypeDefinition, Validator

from src.exceptions import InvalidRequestException

# Let schemas validate APScheduler scheduler instances.
Validator.types_mapping["scheduler"] = TypeDefinition("scheduler", (BaseScheduler, ), ())


def validate_input(schema):
    """Decorator validating a handler's keyword arguments against `schema`.

    All schema fields are required. Raises InvalidRequestException with the
    Cerberus error details when validation fails.
    """
    def decorator(func):
        @wraps(func)
        def decorated_func(*args, **kwargs):
            validator = Validator(schema, require_all=True)
            if not validator.validate(kwargs):
                raise InvalidRequestException(validator.errors)
            return func(*args, **kwargs)
        return decorated_func
    return decorator


UUID_REGEX = (
    "[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}"
)
EMAIL_REGEX = "^.+@.+$"
#pylint: disable=invalid-name
import sys
import logging
import types

from cerberus import Validator, TypeDefinition
from colorama import init, Fore, Style
import sympy

from .elements.base import Element
from .boundary import BoundaryMethod
from .algorithm import BaseAlgorithm

log = logging.getLogger(__name__) #pylint: disable=invalid-name
init(autoreset=True)

# Teach Cerberus about the sympy/solver objects used in option schemas.
symbol_type = TypeDefinition('symbol', (sympy.Symbol, sympy.IndexedBase), ())
Validator.types_mapping['symbol'] = symbol_type
expr_type = TypeDefinition('expr', (sympy.Expr, ), ())
Validator.types_mapping['expr'] = expr_type
matrix_type = TypeDefinition('matrix', (sympy.Matrix, ), ())
Validator.types_mapping['matrix'] = matrix_type
element_type = TypeDefinition('element', (Element, ), ())
Validator.types_mapping['element'] = element_type
function_type = TypeDefinition('function', (types.FunctionType, ), ())
Validator.types_mapping['function'] = function_type


# PEP 8 (E731): ``def`` instead of a lambda bound to a name.
def valid_prompt(indent):
    """Return the tree-drawing prefix used when printing nested options."""
    return ' | ' + ' ' * indent
# -*- coding: utf-8 -*- from openelevationservice.server.api import api_exceptions from openelevationservice.server.utils import logger from cerberus import Validator, TypeDefinition log = logger.get_logger(__name__) object_type = TypeDefinition("object", (object, ), ()) Validator.types_mapping['object'] = object_type v = Validator() schema_post = { 'geometry': { 'anyof_type': ['object', 'list', 'string'], 'required': True }, 'format_in': { 'type': 'string', 'allowed': ['geojson', 'point', 'encodedpolyline', 'polyline'], 'required': True }, 'format_out': { 'type': 'string', 'allowed': ['geojson', 'point', 'encodedpolyline', 'polyline'], 'default': 'geojson' }, 'dataset': { 'type': 'string', 'allowed': ['srtm'],
def register_type(cls, name):
    """Register `name` as a type to validate as an instance of class `cls`."""
    Validator.types_mapping[name] = TypeDefinition(name, (cls,), ())
def register_quantity(quantity, name):
    """Register `name` as a type validating instances of `quantity`'s class."""
    Validator.types_mapping[name] = TypeDefinition(
        name, (quantity.__class__,), ())
import os
from bson import ObjectId
from datetime import datetime
from json import JSONEncoder
from flask import Flask
from flask_cors import CORS
from cerberus import Validator, TypeDefinition
from ml_blink_api.resources.crawl.controllers import crawl
from ml_blink_api.resources.users.controllers import users
from ml_blink_api.resources.missions.controllers import missions
from ml_blink_api.resources.comments.controllers import comments
from ml_blink_api.resources.candidates.controllers import candidates
from ml_blink_api.resources.potential_anomalies.controllers import potential_anomalies

# Extend `cerberus` validator types with an ObjectId Type
Validator.types_mapping['object_id'] = TypeDefinition('object_id', (ObjectId, ), ())


class CustomJSONEncoder(JSONEncoder):
    '''Extend the JSONEncoder class to handle MongoDB ObjectID and timestamps'''

    def default(self, obj):
        # ObjectIds serialize as their hex string, datetimes as ISO-8601.
        if isinstance(obj, ObjectId):
            return str(obj)
        if isinstance(obj, datetime):
            return obj.isoformat()
        # Anything else falls through to the base class (raises TypeError).
        return JSONEncoder.default(self, obj)


# Configure the Flask application
app = Flask(__name__)
app.json_encoder = CustomJSONEncoder
self.name = name # Classes must be imported from the right modules. Some class names do not match the module name and need to be set explicitly. module = "aminer.events" if name in ("EventHandlerInterface", "EventSourceInterface"): module += ".EventInterfaces" elif name == "VolatileLogarithmicBackoffEventHistory": module += ".Utils" else: module += "." + name self.func = getattr(__import__(module, fromlist=[name]), name) def __str__(self): return self.name parser_type = TypeDefinition("parsermodel", (ParserModelType, str), ()) analysis_type = TypeDefinition("analysistype", (AnalysisType, str), ()) event_handler_type = TypeDefinition("eventhandlertype", (EventHandlerType, str), ()) class ConfigValidator(Validator): """Validates values from the configs.""" def _validate_has_start(self, has_start, field, value): """ Test if there is a key named "has_start". The rule's arguments are validated against this schema: {'type': 'boolean'} """ seen_start = False for var in value:
class CustomValidator(Validator):
    """Validator with an extra ``decimal`` type.

    See https://docs.python-cerberus.org/en/stable/customize.html#custom-data-types
    """

    # Copy the parent mapping so the custom type stays local to this subclass.
    types_mapping = Validator.types_mapping.copy()
    types_mapping['decimal'] = TypeDefinition('decimal', (Decimal, ), ())
class Sample:
    """An uploadable sample: name, metadata and an attached sequence file."""

    # Register SequenceFile as a Cerberus type for the schema below.
    _sample_type = TypeDefinition('sequence_file', (SequenceFile, ), ())
    Validator.types_mapping['sequence_file'] = _sample_type

    uploadable_schema = {
        '_sequence_file': {
            'type': 'sequence_file',
            'nullable': False,
            'required': True,
        },
        '_sample_name': {
            'type': 'string',
            'nullable': False,
            'required': True,
            'minlength': 3  # Minimum sample name length is 3
        },
        '_description': {
            'type': 'string',
            'nullable': True,
            'required': False
        },
        '_sample_number': {
            'anyof_type': ['string', 'integer'],
            'nullable': True,
            'required': False
        },
        '_sample_id': {
            'type': 'integer',
            'nullable': True,
            'required': False
        },
        '_skip': {
            'type': 'boolean',
            'nullable': True,
            'required': False
        }
    }

    def __init__(self, sample_name, description='', sample_number=None,
                 samp_dict=None, sample_id=None):
        """
        :param sample_name: string: displayed sample name on IRIDA
        :param description: string:
        :param sample_number: string or int: used during parsing step for some
            parsers that define their own numbers for samples
        :param samp_dict: dictionary of additional values
        :param sample_id: int: unique identifier defined by irida
        """
        self._sample_name = sample_name
        self._description = description
        self._sample_number = sample_number
        self._sample_id = sample_id
        if samp_dict is None:
            samp_dict = {}
        self._sample_dict = dict(samp_dict)
        self._sequence_file = None
        self._skip = False

    @property
    def sample_name(self):
        return self._sample_name

    @property
    def description(self):
        return self._description

    @property
    def sample_number(self):
        return self._sample_number

    @property
    def sample_id(self):
        return self._sample_id

    @property
    def sequence_file(self):
        return self._sequence_file

    @sequence_file.setter
    def sequence_file(self, sq):
        self._sequence_file = sq

    @property
    def skip(self):
        return self._skip

    @skip.setter
    def skip(self, skip):
        self._skip = skip

    def get_uploadable_dict(self):
        # formatting for sending to irida when creating a project
        uploadable_dict = deepcopy(self._sample_dict)
        uploadable_dict['sampleName'] = self.sample_name
        uploadable_dict['description'] = self.description
        return uploadable_dict

    def __getitem__(self, key):
        if key in self._sample_dict:
            return self._sample_dict[key]
        return None

    def get(self, key):
        return self.__getitem__(key)

    def __str__(self):
        # Bug fix: ``get_uploadable_dict`` was referenced without calling it,
        # so the bound-method repr was stringified instead of the dict.
        return str(self.get_uploadable_dict()) + str(self.sequence_file)

    def get_dict(self):
        return self.__dict__
def __init__(self, *args: Any, **kwargs: Any):
    """Configure the validator: purge unknown fields, flat error reporting,
    and a ``path`` type for ``pathlib.Path`` values."""
    super().__init__(*args, **kwargs)
    self.error_handler = self.FlatErrorHandler()
    self.types_mapping['path'] = TypeDefinition('path', (Path, ), ())
    self.purge_unknown = True
from datawald_abstract import Abstract from datetime import datetime import traceback import sys from time import sleep from decimal import Decimal from cerberus import Validator, errors, SchemaError, TypeDefinition Validator.types_mapping['decimal'] = TypeDefinition('decimal', (Decimal,), ()) class FrontEnd(Abstract): def __init__(self, logger=None, feApp=None, dataWald=None): """Return a new FrontEnd object. """ self.feApp = feApp self.dataWald = dataWald self.logger = logger def setRawData(self, **params): pass def feOrdersFt(self, cutDt): return ([], []) def feOrdersExtFt(self, orders, rawOrders): pass def getOrders(self, cutDt): """Gert orders from the fronted application. """ (orders, rawOrders) = self.feOrdersFt(cutDt) self.feOrdersExtFt(orders, rawOrders)