class ORTTrainerOptionsValidator(cerberus.Validator):
    """Cerberus validator that additionally understands the custom
    'lr_scheduler' and 'loss_scaler' types used by ORTTrainer options."""

    _LR_SCHEDULER = cerberus.TypeDefinition(
        'lr_scheduler', (lr_scheduler._LRScheduler,), ())
    _LOSS_SCALER = cerberus.TypeDefinition(
        'loss_scaler', (loss_scaler.LossScaler,), ())

    # Build the mapping from a copy so the stock Validator stays untouched.
    types_mapping = dict(cerberus.Validator.types_mapping,
                         lr_scheduler=_LR_SCHEDULER,
                         loss_scaler=_LOSS_SCALER)
def __init__(self, *args, **kwargs):
    """Initialize the validator and register project-specific cerberus types.

    NOTE(review): both registrations below mutate the *shared*
    ``cerberus.Validator.types_mapping`` in place, so every Validator in the
    process is affected, not just this subclass.
    """
    super(EasyVVUQValidator, self).__init__(*args, **kwargs)
    # Add numpy.int64 as an acceptable 'integer' type.
    integer_type = cerberus.TypeDefinition('integer', (int, numpy.int64), ())
    cerberus.Validator.types_mapping['integer'] = integer_type
    # Add 'fixture' type (for now, it's expected just to be a string).
    # BUG FIX: "(str)" is not a tuple — TypeDefinition's included_types is
    # expected to be a tuple of types, so use "(str,)".
    fixture_type = cerberus.TypeDefinition('fixture', (str,), ())
    cerberus.Validator.types_mapping['fixture'] = fixture_type
class ORTTrainerOptionsValidator(cerberus.Validator):
    """Cerberus validator extended with the custom option types used by
    ORTTrainer: 'lr_scheduler', 'loss_scaler' and 'session_options'."""

    _LR_SCHEDULER = cerberus.TypeDefinition(
        'lr_scheduler', (lr_scheduler._LRScheduler,), ())
    _LOSS_SCALER = cerberus.TypeDefinition(
        'loss_scaler', (loss_scaler.LossScaler,), ())
    _SESSION_OPTIONS = cerberus.TypeDefinition(
        'session_options', (ort.SessionOptions,), ())

    # Copy the base mapping, then register the custom types in one update.
    types_mapping = cerberus.Validator.types_mapping.copy()
    types_mapping.update({
        'lr_scheduler': _LR_SCHEDULER,
        'loss_scaler': _LOSS_SCALER,
        'session_options': _SESSION_OPTIONS,
    })
class ORTTrainerOptionsValidator(cerberus.Validator):
    """Cerberus validator extended with the custom option types used by
    ORTTrainer, including the cast-propagation strategy enum."""

    _LR_SCHEDULER = cerberus.TypeDefinition(
        "lr_scheduler", (lr_scheduler._LRScheduler,), ())
    _LOSS_SCALER = cerberus.TypeDefinition(
        "loss_scaler", (loss_scaler.LossScaler,), ())
    _SESSION_OPTIONS = cerberus.TypeDefinition(
        "session_options", (ort.SessionOptions,), ())
    _PROPAGATE_CAST_OPS_STRATEGY = cerberus.TypeDefinition(
        "propagate_cast_ops_strategy", (PropagateCastOpsStrategy,), ())

    # A dict-literal merge of the base mapping with the custom entries; the
    # base cerberus.Validator mapping is left untouched.
    types_mapping = {
        **cerberus.Validator.types_mapping,
        "lr_scheduler": _LR_SCHEDULER,
        "loss_scaler": _LOSS_SCALER,
        "session_options": _SESSION_OPTIONS,
        "propagate_cast_ops_strategy": _PROPAGATE_CAST_OPS_STRATEGY,
    }
class MyValidator(cerberus.Validator):
    """Validator with a custom 'number' type (ints only) and a
    'min_number' rule enforcing a lower bound."""

    types_mapping = dict(cerberus.Validator.types_mapping)
    types_mapping.update(
        number=cerberus.TypeDefinition('number', (int,), ()))

    def _validate_min_number(self, min_number, field, value):
        # NOTE: cerberus parses this docstring as the rule's constraint
        # schema, so it must stay exactly as written.
        """{'type': 'number'}"""
        if value >= min_number:
            return
        self._error(field, 'Below the min')
def _get_validator(cls) -> "cerberus.Validator":
    """Build a cerberus ``Validator`` that accepts project object types.

    Registers a custom cerberus type (keyed by the class name) for every
    subclass of ``ZeroRegistrableObject``, plus ``OrderedDict`` and
    ``defaultdict``, and transitively for any extra types a class
    advertises via a ``_get_extra_types`` callable.

    Returns:
        cerberus.Validator: validator with ``allow_unknown=True`` and the
        extended ``types_mapping``.
    """
    validator = cerberus.Validator(allow_unknown=True)
    classes = get_sub_classes(ZeroRegistrableObject) + [
        OrderedDict, defaultdict
    ]
    # Worklist loop: `classes` is deliberately extended while being iterated.
    # Python list iteration picks up items appended during the loop, so types
    # returned by _get_extra_types are themselves registered (and may in turn
    # contribute more types).
    # NOTE(review): assumes the _get_extra_types graph is acyclic/finite —
    # a cycle of mutually-referencing types would loop forever. Verify.
    for type_cls in classes:
        validator.types_mapping[
            type_cls.__name__] = cerberus.TypeDefinition(
                type_cls.__name__, (type_cls, ), ())
        _get_extra_types_method = getattr(type_cls, "_get_extra_types", None)
        if isinstance(_get_extra_types_method, Callable):
            classes += _get_extra_types_method()
    return validator
_schema_fname. Returns: dict: cis_interface YAML options. """ global _schema if fname is None: init_schema() out = _schema else: out = load_schema(fname) return out function_type = cerberus.TypeDefinition('function', types.FunctionType, ()) def str_to_function(value): r"""Convert a string to a function. Args: value (str, list): String or list of strings, specifying function(s). The format should be "<package.module>:<function>" so that <function> can be imported from <package>. Returns: func: Callable function. """ if isinstance(value, list):
password = get_config_setting(request, 'app.email.password') if username and password: smtp.login(username, password) smtp.sendmail(sender, recipient, email.as_string()) smtp.quit() except Exception as e: logging.getLogger("toja").error(str(e)) print(text) # noqa TODO: Remove else: logging.getLogger("toja").error( 'Could not send e-mail as "app.email.smtp_host" setting not specified' ) print(text) # noqa TODO: Remove fieldstorage_type = cerberus.TypeDefinition('fieldstorage', (FieldStorage, ), ()) class Validator(cerberus.Validator): """Extended Validator that can check whether two fields match.""" types_mapping = cerberus.Validator.types_mapping.copy() types_mapping['fieldstorage'] = fieldstorage_type def _validate_matches(self, other, field, value): if other not in self.document: self._error(field, 'You must provide a value.') if self.document[other] != value: self._error(field, 'The value does not match.')
from pprint import pprint as pp import re import cerberus RULE_LABELS = "description,action,source_ip,destination_ip,source_port,destination_port,protocol,comment".split( ',') decimal_type = cerberus.TypeDefinition('decimal', (str, ), ()) class RuleStoreValidator(cerberus.Validator): def __init__(self): super().__init__() self.schema = { "description": { "type": 'string' }, "action": { "type": 'string', "allowed": ["accept", "drop", ""] }, "source_ip": { "type": "string", "check_with": "ipaddr" }, "destination_ip": { "type": "string", "check_with": "ipaddr" }, "source_port": { "type": "string",
import cerberus
from bson import ObjectId

from .schemas import SCHEMAS


class DataValidationError(Exception):
    """ Custom Exception that provides validator errors as property """

    def __init__(self, errors):
        super().__init__()
        # Validation-error payload (presumably a cerberus validator.errors
        # dict — verify against callers), exposed for inspection.
        self.errors = errors


# Custom cerberus type accepting bson ObjectId values.
OBJECTID_TYPE = cerberus.TypeDefinition('objectid', (ObjectId, ), ())


class ApiValidator(cerberus.Validator):
    """ Custom Validator extending types_mapping """
    # Copy first so the stock cerberus.Validator mapping stays untouched.
    types_mapping = cerberus.Validator.types_mapping.copy()
    types_mapping['objectid'] = OBJECTID_TYPE


# NOTE(review): this class definition is cut off in this chunk — the
# docstring below is unterminated here and the body continues elsewhere.
class RequestDataValidator:
    """ Initialization. :param schema: schema name
import cerberus
from cerberus import Validator, errors
from cerberus.errors import BasicErrorHandler
from django.core.files.uploadedfile import InMemoryUploadedFile
from rest_framework.exceptions import ValidationError

# Custom cerberus type for Django in-memory upload objects.
file_type = cerberus.TypeDefinition('file', (InMemoryUploadedFile, ), ())


def check_validation(schema, **data):
    """Validate keyword data against *schema*.

    Raises rest_framework's ValidationError for the first failing field,
    otherwise returns the data unchanged.
    """
    validator = MyValidator(schema, error_handler=BasicErrorHandler)
    if validator.validate(data):
        return data
    # NOTE(review): raising inside the loop reports only the first error;
    # kept as-is to preserve behavior.
    for field, detail in validator.errors.items():
        raise ValidationError({'field': field, 'error_detail': detail})
    return data


class MyValidator(Validator):
    """Validator that understands the custom 'file' type."""

    types_mapping = dict(Validator.types_mapping)
    types_mapping['file'] = file_type
"""Module with functions for validating a config file. The validation is done with the cerberus package. For this, a schema needs to be defined. This is the structure that the config file needs to follow. The schema is specific to tha project. """ import json import itertools from json.decoder import JSONDecodeError import cerberus from cerberus.validator import Validator from get_logger import logger Validator.types_mapping["integer"] = cerberus.TypeDefinition( "integer", (int, ), (bool, )) def get_schema(): """Get the schema of the ceberus's schema of the config file. Args: None. Returns: dict: Config file. """ return { 'train': { 'type': 'boolean', 'default': True }, 'predict': { 'type': 'boolean', 'default': False },
import logging
from collections.abc import Mapping

import cerberus
import pkg_resources
from fipy import PhysicalField
from sympy import Symbol, SympifyError, sympify

from .yaml_setup import yaml

# TODO: Allow equation with no diffusion term

# Custom cerberus type accepting fipy PhysicalField quantities.
physical_unit_type = cerberus.TypeDefinition('physical_unit', (PhysicalField,), ())


class MicroBenthosSchemaValidator(cerberus.Validator):
    """
    A :mod:`cerberus` validator for schema.yml in MicroBenthos
    """
    # Class-level logger; NullHandler + propagate=False keeps the validator
    # silent unless a handler is attached explicitly.
    logger = logging.getLogger(__name__)
    logger.addHandler(logging.NullHandler())
    logger.propagate = False

    # Copy so the stock cerberus.Validator mapping stays untouched.
    types_mapping = cerberus.Validator.types_mapping.copy()
    types_mapping['physical_unit'] = physical_unit_type

    # def __init__(self, *args, **kwargs):
    #     # self.logger.propagate = False
    #     super(MicroBenthosSchemaValidator, self).__init__(*args, **kwargs)

    # NOTE(review): this method is cut off in this chunk — the docstring is
    # unterminated here and the body continues elsewhere.
    def _check_with_importpath(self, field, value):
        """
        Validates if the value is a usable import path for an entity class
import cerberus
from uuid import UUID

# Register a custom 'UUID' cerberus type accepting uuid.UUID instances.
# BUG FIX: "(UUID)" is not a tuple, it is just the UUID class itself —
# TypeDefinition's included_types is expected to be a tuple, so use "(UUID,)".
# NOTE(review): this mutates cerberus.Validator.types_mapping in place, which
# affects every Validator instance process-wide.
cerberus.Validator.types_mapping['UUID'] = cerberus.TypeDefinition('UUID', (UUID,), ())
import re
import copy
# NOTE(review): collections.Iterable was removed in Python 3.10 — this should
# be "from collections.abc import Iterable" on modern interpreters.
from collections import Iterable

import cerberus
from werkzeug.datastructures import FileStorage
from flask_babel import gettext, lazy_gettext

# when https://github.com/nicolaiarocci/cerberus/issues/174 will be solved, it can be rewritten
MIN_LENGTH_NOSTRIP = cerberus.errors.ErrorDefinition(0x4127, 'minlengthnostrip')
MAX_LENGTH_NOSTRIP = cerberus.errors.ErrorDefinition(0x4128, 'maxlengthnostrip')
DYNREGEX_MISMATCH = cerberus.errors.ErrorDefinition(0x4141, 'dynregex')

# Custom cerberus type for werkzeug file-upload objects.
file_type = cerberus.TypeDefinition('file', (FileStorage,), ())


class ValidationError(ValueError):
    """Raised when validation fails; carries the error dict on ``.errors``."""

    def __init__(self, errors):
        super().__init__()
        # NOTE: the attribute is assigned before the type check, so
        # self.errors is set even when the TypeError below fires.
        self.errors = errors
        if not isinstance(errors, dict):
            raise TypeError('errors must be dict')

    def __str__(self):  # pragma: no cover
        # Presumably strips a trailing ", " produced by the (unseen) dict
        # branch of _str_value — TODO confirm once the full method is visible.
        return self._str_value(self.errors)[:-2]

    def _str_value(self, e):
        if isinstance(e, list):
            return ', '.join(str(x) for x in e)
        # NOTE(review): method continues beyond this chunk (the non-list
        # branch, which __str__ relies on for dicts, is not visible here).
class PostgresValidator(Validator):
    """Cerberus validator mapped onto Postgres column types.

    Adds aliases for common Postgres scalar types, range/JSON types backed
    by psycopg2 objects, and coercion rules that build those objects from
    plain strings / two-element lists.
    """

    types_mapping = Validator.types_mapping.copy()
    types_mapping.update({
        'varchar': cerberus.TypeDefinition('varchar', (str, ), ()),
        'int4': cerberus.TypeDefinition('int4', (int, ), ()),
        'int': cerberus.TypeDefinition('int', (int, ), ()),
        'text': cerberus.TypeDefinition('text', (str, ), ()),
        'citext': cerberus.TypeDefinition('citext', (str, ), ()),
        'bool': cerberus.TypeDefinition('bool', (bool, ), ()),
        'json': cerberus.TypeDefinition('json', (Json, ), ()),
        'jsonb': cerberus.TypeDefinition('jsonb', (Json, ), ()),
    })
    # date/datetime are removed from the plain mapping because they are
    # re-implemented below as _validate_type_* methods.
    types_mapping.pop('date')
    types_mapping.pop('datetime')

    # BUG FIX: these two were decorated @staticmethod while taking `self`,
    # so `instance.all_types()` raised TypeError (self was never bound).
    # As plain methods, both `instance.all_types()` and the old explicit
    # `PostgresValidator.all_types(instance)` call forms work.
    def all_types(self):
        """Return all type names this validator accepts: mapping entries
        plus the method-defined (_validate_type_*) types."""
        simple_types = list(self.types_mapping.keys())
        complex_types = [
            x.replace('_validate_type_', '') for x in dir(self)
            if x.startswith('_validate_type_')
        ]
        return simple_types + complex_types

    def coerce_types(self):
        """Return the names of all coercion rules (_normalize_coerce_*)."""
        return [
            x.replace('_normalize_coerce_', '') for x in dir(self)
            if x.startswith('_normalize_coerce_')
        ]

    def _validate_type_int4range(self, value):
        # cerberus treats a falsy (None) return as "not this type".
        if isinstance(value, NumericRange):
            return True

    def _normalize_coerce_int4range(self, value):
        # NOTE(review): asserts are stripped under `python -O`; consider
        # raising ValueError instead (kept to preserve exception type).
        assert all(isinstance(x, int) for x in value)
        assert len(value) == 2
        return NumericRange(*value)

    def _validate_type_date(self, value):
        if isinstance(value, datetime.date):
            return True

    def _normalize_coerce_date(self, value):
        return datetime.datetime.strptime(value, '%Y-%m-%d').date()

    def _validate_type_datetime(self, value):
        if isinstance(value, datetime.datetime):
            return True

    def _normalize_coerce_datetime(self, value):
        return datetime.datetime.strptime(value, '%Y-%m-%d')

    def _validate_type_daterange(self, value):
        if isinstance(value, DateRange):
            return True

    def _normalize_coerce_daterange(self, value):
        # BUG FIX: `.date` was missing its call parentheses, so DateRange
        # received bound methods instead of datetime.date objects.
        dates = (
            datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S').date()
            for x in value
        )
        return DateRange(*dates)

    def _validate_type_tsrange(self, value):
        if isinstance(value, DateTimeRange):
            return True

    def _normalize_coerce_tsrange(self, value):
        datetimes = map(
            lambda x: datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S'),
            value)
        return DateTimeRange(*datetimes)

    def _normalize_coerce_json(self, value):
        return Json(value)

    def _normalize_coerce_jsonb(self, value):
        return Json(value)