def test_noneof_repr():
    """repr() of NoneOf shows the iterable and the (default or custom) error."""
    default_repr = repr(validate.NoneOf(iterable=[1, 2, 3], error=None))
    assert default_repr == "<NoneOf(iterable=[1, 2, 3], error='Invalid input.')>"
    custom_repr = repr(validate.NoneOf(iterable=[1, 2, 3], error='foo'))
    assert custom_repr == "<NoneOf(iterable=[1, 2, 3], error='foo')>"
def test_noneof_custom_message():
    """A custom error template is honored, including {input}/{values} interpolation."""
    plain = validate.NoneOf([1, 2], error="<not valid>")
    with pytest.raises(ValidationError, match="<not valid>"):
        plain(1)
    templated = validate.NoneOf([1, 2], error="{input} cannot be one of {values}")
    with pytest.raises(ValidationError, match="1 cannot be one of 1, 2"):
        templated(1)
def test_noneof_repr():
    """NoneOf repr embeds the iterable and the error message for both variants."""
    for error_arg, shown in ((None, "Invalid input."), ("foo", "foo")):
        validator = validate.NoneOf(iterable=[1, 2, 3], error=error_arg)
        assert repr(validator) == "<NoneOf(iterable=[1, 2, 3], error={!r})>".format(shown)
def test_noneof_custom_message():
    """A custom error template is used; {input} and {values} are interpolated.

    Fix: assert against ``str(excinfo.value)`` — ``str(excinfo)`` stringifies
    the ExceptionInfo wrapper (pytest does not guarantee its content), not the
    raised exception's message.
    """
    with pytest.raises(ValidationError) as excinfo:
        validate.NoneOf([1, 2], error="<not valid>")(1)
    assert "<not valid>" in str(excinfo.value)

    none_of = validate.NoneOf([1, 2], error="{input} cannot be one of {values}")
    with pytest.raises(ValidationError) as excinfo:
        none_of(1)
    assert "1 cannot be one of 1, 2" in str(excinfo.value)
def test_noneof_custom_message():
    """A custom error template is used; {input} and {values} are interpolated.

    Fix: assert against ``str(excinfo.value)`` — ``str(excinfo)`` stringifies
    the ExceptionInfo wrapper, not the exception message.
    """
    with pytest.raises(ValidationError) as excinfo:
        validate.NoneOf([1, 2], error='<not valid>')(1)
    assert '<not valid>' in str(excinfo.value)

    none_of = validate.NoneOf([1, 2], error='{input} cannot be one of {values}')
    with pytest.raises(ValidationError) as excinfo:
        none_of(1)
    assert '1 cannot be one of 1, 2' in str(excinfo.value)
class ProductDeserializeSchema(ProductSchema):
    """Deserialization schema for Product: resolves raw ids/values to models."""

    class Meta:
        model = models.Product
        fields = ('name', 'description', 'price', 'category', 'status', 'tags')

    # query.get returns None for unknown ids; NoneOf([None]) rejects those.
    category = ma.Function(deserialize=lambda v: models.Category.query.get(
        v), required=True, validate=[validate.NoneOf([None])])
    # Enum lookup; presumably .find returns None for unknown values — TODO confirm.
    status = ma.Function(deserialize=models.ProductStatusEnum.find,
                         required=True, validate=[validate.NoneOf([None])])
    tags = ma.List(ma.String(), required=True)
class FeatureRequestCreateUpdateSchema(ma.ModelSchema):
    """Schema for creating/updating a FeatureRequest from raw ids."""

    class Meta:
        model = FeatureRequest
        fields = ('title', 'description', 'priority', 'target_date', 'client',
                  'area', 'is_archived')

    # load_from_id presumably yields None for unknown ids — NoneOf([None])
    # rejects them; TODO confirm against load_from_id's contract.
    client = ma.Function(deserialize=load_from_id(Client), required=True,
                         validate=[validate.NoneOf([None])])
    area = ma.Function(deserialize=load_from_id(Area), required=True,
                       validate=[validate.NoneOf([None])])
class IOSchema(BaseCamelSchema):
    """Schema for an IO definition (camelCase keys via BaseCamelSchema)."""

    # Name must avoid the reserved blacklist; custom error message on clash.
    name = fields.Str(required=True,
                      validate=validate.NoneOf(IO_NAME_BLACK_LIST,
                                               error=IO_NAME_ERROR))
    description = fields.Str(allow_none=True)
    # Serialized under the key "type"; restricted to the known type values.
    iotype = fields.Str(allow_none=True, data_key="type",
                        validate=validate.OneOf(types.VALUES))
    value = fields.Raw(allow_none=True)
    is_optional = fields.Bool(allow_none=True)
    is_list = fields.Bool(allow_none=True)
    is_flag = fields.Bool(allow_none=True)
    arg_format = fields.Str(allow_none=True)
    delay_validation = fields.Bool(allow_none=True)
    options = fields.List(fields.Raw(), allow_none=True)
    connection = fields.Str(allow_none=True)
    to_init = fields.Bool(allow_none=True)

    @staticmethod
    def schema_config():
        # Backing config object for this schema.
        return V1IO

    @validates_schema
    def validate_io(self, values, **kwargs):
        """Cross-field validation delegated to the module-level helper.

        Note: the call below resolves to the module-level ``validate_io``
        function, not this method (class scope is not visible inside methods).
        """
        validate_io(
            name=values.get("name"),
            iotype=values.get("iotype"),
            value=values.get("value"),
            is_list=values.get("is_list"),
            is_optional=values.get("is_optional"),
            is_flag=values.get("is_flag"),
            options=values.get("options"),
        )
class CustomerDeserializeSchema(CustomerSchema):
    """Deserialization schema for Customer: resolves country id to a model."""

    class Meta:
        model = models.Customer
        fields = ('email', 'firstname', 'lastname', 'country')

    # query.get returns None for unknown ids; NoneOf([None]) rejects those.
    country = ma.Function(deserialize=lambda v: models.Country.query.get(
        v), required=True, validate=[validate.NoneOf([None])])
class CommentSchema(ma.ModelSchema):
    """Comment model schema; text must be non-empty (NoneOf rejects '')."""

    text = ma.String(required=True, validate=validate.NoneOf(['']))
    post_id = ma.Integer(required=True)

    class Meta:
        model = Comment
        sqla_session = db.session
def test_noneof():
    """NoneOf passes values absent from the iterable and rejects members."""
    accepted = [
        (validate.NoneOf([1, 2, 3]), 4),
        (validate.NoneOf("abc"), "d"),
        (validate.NoneOf(""), []),
        (validate.NoneOf([]), ""),
        (validate.NoneOf([]), []),
        (validate.NoneOf([1, 2, 3]), None),
    ]
    for validator, value in accepted:
        assert validator(value) == value

    # Membership raises; the default message is "Invalid input.".
    with pytest.raises(ValidationError, match="Invalid input."):
        validate.NoneOf([1, 2, 3])(3)
    rejected = [
        (validate.NoneOf("abc"), "c"),
        (validate.NoneOf([1, 2, None]), None),
        (validate.NoneOf(""), ""),
    ]
    for validator, value in rejected:
        with pytest.raises(ValidationError):
            validator(value)
class PostSchema(ma.ModelSchema):
    """Post model schema; description must be non-empty (NoneOf rejects '')."""

    description = ma.String(required=True, validate=validate.NoneOf(['']))
    photo = ma.String()

    class Meta:
        model = Post
        sqla_session = db.session
class OrderUpdateSchema(ma.ModelSchema):
    """Update schema for Order: only the status can change."""

    class Meta:
        model = models.Order
        fields = ('status',)

    # Enum lookup; presumably .find returns None for unknown values, which
    # NoneOf([None]) then rejects — TODO confirm.
    status = ma.Function(deserialize=models.OrderStatusEnum.find,
                         required=True, validate=[validate.NoneOf([None])])
def test_noneof():
    """NoneOf accepts non-members, rejects members.

    Fix: assert against ``str(excinfo.value)`` — ``str(excinfo)`` stringifies
    the ExceptionInfo wrapper, whose content pytest does not guarantee.
    """
    assert validate.NoneOf([1, 2, 3])(4) == 4
    assert validate.NoneOf('abc')('d') == 'd'
    assert validate.NoneOf('')([]) == []
    assert validate.NoneOf([])('') == ''
    assert validate.NoneOf([])([]) == []
    assert validate.NoneOf([1, 2, 3])(None) is None
    with pytest.raises(ValidationError) as excinfo:
        validate.NoneOf([1, 2, 3])(3)
    assert 'Invalid input.' in str(excinfo.value)
    with pytest.raises(ValidationError):
        validate.NoneOf('abc')('c')
    with pytest.raises(ValidationError):
        validate.NoneOf([1, 2, None])(None)
    with pytest.raises(ValidationError):
        validate.NoneOf('')('')
def test_noneof():
    """NoneOf accepts non-members, rejects members.

    Fix: assert against ``str(excinfo.value)`` — ``str(excinfo)`` stringifies
    the ExceptionInfo wrapper, whose content pytest does not guarantee.
    """
    assert validate.NoneOf([1, 2, 3])(4) == 4
    assert validate.NoneOf("abc")("d") == "d"
    assert validate.NoneOf("")([]) == []
    assert validate.NoneOf([])("") == ""
    assert validate.NoneOf([])([]) == []
    assert validate.NoneOf([1, 2, 3])(None) is None
    with pytest.raises(ValidationError) as excinfo:
        validate.NoneOf([1, 2, 3])(3)
    assert "Invalid input." in str(excinfo.value)
    with pytest.raises(ValidationError):
        validate.NoneOf("abc")("c")
    with pytest.raises(ValidationError):
        validate.NoneOf([1, 2, None])(None)
    with pytest.raises(ValidationError):
        validate.NoneOf("")("")
class _TimeSeriesSpecSchema(_BaseSchema):
    """Schema for a time series query spec (id XOR external_id, optional aggregate)."""

    _default_spec = TimeSeriesSpec
    id = fields.Int()
    externalId = fields.Str(attribute="external_id")
    start = fields.Int(required=True)
    end = fields.Int(required=True)
    # Shorthand aggregate names are explicitly disallowed.
    aggregate = fields.Str(validate=validate.NoneOf(
        INVALID_AGGREGATE_FUNCTIONS,
        error="Not a valid aggregate function. Cannot use shorthand name."))
    granularity = fields.Str()
    includeOutsidePoints = fields.Bool(attribute="include_outside_points")

    @validates_schema(skip_on_field_errors=False)
    def validate_identifiers(self, data):
        """Require exactly one of id / external_id (not both, not neither)."""
        errors = {}
        if ("id" in data and "external_id" in data) or ("id" not in data
                                                        and "external_id" not in data):
            errors["external_id"] = [
                "Exactly one of id and external_id must be specified."
            ]
            errors["id"] = [
                "Exactly one of id and external_id must be specified."
            ]
        if errors:
            raise ValidationError(errors)

    @validates_schema(skip_on_field_errors=False)
    def validate_aggregate(self, data):
        """Enforce aggregate/granularity coupling and outside-points exclusion."""
        errors = {}
        if "aggregate" in data:
            # Aggregates need a granularity and cannot include outside points.
            if "granularity" not in data:
                errors["granularity"] = [
                    "granularity must be specified for aggregates."
                ]
            if "include_outside_points" in data and data[
                    "include_outside_points"]:
                errors["includeOutsidePoints"] = [
                    "Can't include outside points for aggregates."
                ]
        else:
            if "granularity" in data:
                errors["granularity"] = [
                    "granularity can only be specified for aggregates."
                ]
        if errors:
            raise ValidationError(errors)

    @validates("granularity")
    def validate_granularity(self, granularity):
        """Validate by attempting the granularity-to-milliseconds conversion."""
        try:
            granularity_to_ms(granularity)
        except ValueError as e:
            raise ValidationError(str(e)) from e
class OrderCreateSchema(ma.Schema):
    """Schema that builds an Order aggregate from a customer id and detail lines."""

    # query.get returns None for unknown ids; NoneOf([None]) rejects those.
    customer = ma.Function(deserialize=lambda v: models.Customer.query.get(
        v), required=True, validate=[validate.NoneOf([None])])
    detail = ma.Nested(OrderDetailCreateSchema, many=True)

    @post_load
    def create_order(self, data):
        """Construct the Order and attach each (product, quantity) line."""
        order = models.Order(customer=data['customer'])
        for detail in data['detail']:
            order.add_product(detail['product'], detail['quantity'])
        return order
def __init__(self, validate=None, requirement=None, **metadata):
    """Initialize the string field.

    Args:
        validate: optional validator; defaults to a max-length check of
            ``self.DEFAULT_MAX_LENGTH`` when omitted.
        requirement: used as the base field's ``required`` flag.
            NOTE(review): any non-None value also REPLACES ``validate`` with a
            NoneOf({'full_name'}) check, discarding both the default length
            validator and any caller-supplied validator — confirm intended.
        metadata: forwarded to the base field constructor.
    """
    if validate is None:
        validate = validate_.Length(max=self.DEFAULT_MAX_LENGTH)
    if requirement is not None:
        # Error message is Vietnamese for "Invalid input!".
        validate = validate_.NoneOf(error='Dau vao khong hop le!',
                                    iterable={'full_name'})
    super(FieldString, self).__init__(validate=validate,
                                      required=requirement, **metadata)
def __init__(self, **kwargs):
    """Dict-like field with fixed key/value types.

    The key and value field types are set here, so callers may not pass
    'keys' or 'values' themselves.
    """
    for forbidden in ("keys", "values"):
        if forbidden in kwargs:
            raise TypeError(
                f"The Notebooks field does not accept a '{forbidden}' argument."
            )
    # Keys reserved for the sensor machinery may not be used as notebook names.
    reserved_names = ["reference_date", "date_ranges", "flowapi_url"]
    super().__init__(
        keys=fields.String(validate=validate.NoneOf(reserved_names)),
        values=fields.Nested(NotebookSchema),
        **kwargs,
    )
class TransferSchemaDump(Schema):
    """Schema for a money transfer record."""

    # Empty string is rejected with the same message marshmallow uses for null.
    accountOrigin = fields.String(
        required=True, nullable=False,
        validate=[validate.NoneOf([''], error='Field may not be null.')])
    accountDestination = fields.String(
        required=True, nullable=False,
        validate=[validate.NoneOf([''], error='Field may not be null.')])
    # Strictly positive amount (min_inclusive=False excludes 0).
    value = fields.Float(required=True, validate=[
        validate.Range(
            min=0, min_inclusive=False, error="Value must be greater then 0.")
    ])
    # NOTE(review): missing=uuid.uuid4() is evaluated once at class definition,
    # so every record defaults to the SAME UUID — confirm intended (a callable
    # missing=uuid.uuid4 would generate one per load).
    _id = fields.UUID(missing=uuid.uuid4())
    status = fields.String(
        missing='in_queue',
        validate=[
            validate.OneOf(['in_queue', 'processing', 'confirmed', 'error'])
        ])
    message = fields.String()
class FeatureRequestListArgsSchema(ma.Schema):
    """Query-string arguments for listing feature requests."""

    # Pagination: limit in [1, 100], offset >= 0.
    limit = ma.Integer(required=False, validate=[validate.Range(1, 100)],
                       missing=20)
    offset = ma.Integer(required=False, validate=[validate.Range(0)],
                        missing=0)
    include_archived = ma.Boolean(required=False, missing=False)
    area = ma.Function(deserialize=load_from_id(Area), required=False)
    # Required client; load_from_id presumably yields None for unknown ids,
    # rejected by NoneOf([None]) — TODO confirm.
    client = ma.Function(deserialize=load_from_id(Client), required=True,
                         validate=[validate.NoneOf([None])])
    search = ma.String(required=False)
    sort = ma.String(
        required=False,
        validate=[validate.OneOf(['priority', 'id', 'target_date'])],
        missing='priority')
class OrderDetailCreateSchema(ma.Schema):
    """One order line: product id resolved to a model, plus a quantity >= 1."""

    # query.get returns None for unknown ids; NoneOf([None]) rejects those.
    product = ma.Function(deserialize=lambda v: models.Product.query.get(
        v), required=True, validate=[validate.NoneOf([None])])
    quantity = ma.Integer(required=True, validate=[validate.Range(1)])
from eidaws.utils.sncl import StreamEpoch


def _merge_fields(data, map_iterable):
    """For each (alt_key, key) pair, move alt_key's value onto key.

    Only moves when alt_key is present and key is not already set.
    """
    for alt_key, key in map_iterable:
        if alt_key in data and key not in data:
            data[key] = data[alt_key]
            # NOTE(review): pop placed inside the guard — the original layout
            # was flattened; an unconditional pop would KeyError whenever
            # alt_key is absent. Confirm against upstream.
            data.pop(alt_key)


# Reusable range/pattern validators for FDSN-style request parameters.
validate_percentage = validate.Range(min=0, max=100)
validate_latitude = validate.Range(min=-90.0, max=90)
validate_longitude = validate.Range(min=-180.0, max=180.0)
validate_radius = validate.Range(min=0.0, max=180.0)
validate_net_sta_cha = validate.Regexp(r"[A-Za-z0-9_*?]*$")
# Rejects both None and the empty string.
not_empty = validate.NoneOf([None, ""])


def NotEmptyField(field_type, **kwargs):
    """Return a field factory pre-bound with the not-empty validator."""
    return functools.partial(field_type, validate=not_empty, **kwargs)


# Pre-configured field factories.
Percentage = functools.partial(fields.Float, validate=validate_percentage)
NotEmptyString = NotEmptyField(fields.Str)
NotEmptyInt = NotEmptyField(fields.Int, as_string=True)
NotEmptyFloat = NotEmptyField(fields.Float, as_string=True)
Degree = functools.partial(fields.Float, as_string=True)
Latitude = functools.partial(Degree, validate=validate_latitude)
Longitude = functools.partial(Degree, validate=validate_longitude)
Radius = functools.partial(Degree, validate=validate_radius)
class TextFieldDataSchema(mm.Schema):
    """Config schema for a text field: optional min/max length bounds.

    A bound of 0 means "unset"; min_length additionally may not be exactly 1,
    since any non-empty value already satisfies it.
    """

    min_length = fields.Integer(load_default=0,
                                validate=validate.And(validate.Range(0),
                                                      validate.NoneOf((1,))))
    max_length = fields.Integer(load_default=0, validate=validate.Range(0))

    @validates_schema(skip_on_field_errors=True)
    def validate_min_max(self, data, **kwargs):
        """Reject configs where both bounds are set and min exceeds max.

        Fix: the previous error message had the comparison backwards
        ("Maximum value must be less than minimum value.") — it fired when
        min_length > max_length.
        """
        if data['min_length'] and data['max_length'] and data['min_length'] > data['max_length']:
            raise ValidationError(
                'Maximum value must be greater than or equal to the minimum value.',
                'max_length')
import datetime
import functools

from marshmallow import (Schema, fields, validate, ValidationError, pre_load,
                         post_load, post_dump, validates_schema)

from eidangservices import settings, utils
from eidangservices.utils import sncl

# Reusable range/pattern validators for FDSN-style request parameters.
validate_percentage = validate.Range(min=0, max=100)
validate_latitude = validate.Range(min=-90., max=90)
validate_longitude = validate.Range(min=-180., max=180.)
validate_radius = validate.Range(min=0., max=180.)
validate_net_sta_cha = validate.Regexp(r'[A-Za-z0-9_*?]*$')
# Rejects both None and the empty string.
not_empty = validate.NoneOf([None, ''])


def NotEmptyField(field_type, **kwargs):
    """Return a field factory pre-bound with the not-empty validator."""
    return functools.partial(field_type, validate=not_empty, **kwargs)


# Pre-configured field factories.
Percentage = functools.partial(fields.Float, validate=validate_percentage)
NotEmptyString = NotEmptyField(fields.Str)
NotEmptyInt = NotEmptyField(fields.Int, as_string=True)
NotEmptyFloat = NotEmptyField(fields.Float, as_string=True)
Degree = functools.partial(fields.Float, as_string=True)
Latitude = functools.partial(Degree, validate=validate_latitude)
Longitude = functools.partial(Degree, validate=validate_longitude)
Radius = functools.partial(Degree, validate=validate_radius)
class BusSchema(Schema):
    """Bus resource schema."""

    id = fields.String(dump_only=True)
    # The 'Sprinter' model is explicitly disallowed.
    model_ = fields.String(validate=validate.NoneOf(['Sprinter']))
    seats = fields.Int(validate=validate.Range(min=4, max=100))
class WorkflowConfigSchema(Schema):
    """
    Schema for parameters that the available dates sensor will use to run a workflow.

    Fields
    ------
    workflow_name : str
        Name of the workflow to run.
    parameters : dict, optional
        Parameters with which the workflow will run.
    earliest_date : date, optional
        Earliest date of CDR data for which the workflow should run.
    date_stencil : list of int, date and/or pairs of int/date, optional
        Date stencil describing a pattern of dates that must be available for
        the workflow to run.
    """

    # Parameter names that will always be passed to the workflow by the available dates sensor.
    # TODO: Allow automatic parameter names to be specified at schema initialisation, instead of hard-coded.
    _automatic_parameters = ("reference_date", "date_ranges")

    # Fields
    workflow_name = fields.String(required=True)
    # User-supplied parameter names may not collide with the automatic ones.
    parameters = fields.Dict(
        keys=fields.String(validate=validate.NoneOf(_automatic_parameters)),
        required=False,
    )
    earliest_date = DateField(required=False)
    date_stencil = DateStencilField(required=False)

    @validates("workflow_name")
    def validate_workflow(self, value):
        """
        Raise a ValidationError if the named workflow does not exist, or
        doesn't accept parameters 'reference_date' and 'date_ranges'.
        """
        # Check that workflow exists
        try:
            if value not in self.context["workflow_storage"]:
                raise ValidationError(
                    "Workflow does not exist in this storage.")
        except KeyError:
            # Missing context key, not a missing workflow.
            raise ValidationError(
                "'workflow_storage' was not provided in the context. Cannot check for workflow existence."
            )
        # Check that workflow accepts parameters that will automatically be passed when it runs
        workflow_parameter_names = {
            p.name
            for p in self.context["workflow_storage"].get_flow(
                value).parameters()
        }
        missing_automatic_parameters = set(
            self._automatic_parameters).difference(workflow_parameter_names)
        if missing_automatic_parameters:
            raise ValidationError(
                f"Workflow does not accept parameters {missing_automatic_parameters}."
            )

    @validates_schema
    def validate_workflow_parameters(self, data, **kwargs):
        """
        Raise a ValidationError if any required workflow parameters are not
        provided, or if any unexpected parameters are provided.
        """
        errors = {}
        # Parameters workflow expects
        workflow_parameters = (self.context["workflow_storage"].get_flow(
            data["workflow_name"]).parameters())
        parameter_names = {p.name for p in workflow_parameters}
        required_parameter_names = {
            p.name
            for p in workflow_parameters if p.required
        }
        # Parameters workflow will receive (user-provided plus automatic).
        provided_parameter_names = set(data.get(
            "parameters", {}).keys()).union(self._automatic_parameters)
        # Required parameters that are not provided
        missing_parameters = required_parameter_names.difference(
            provided_parameter_names)
        if missing_parameters:
            errors["parameters"] = errors.get("parameters", []) + [
                f"Missing required parameters {missing_parameters} for workflow '{data['workflow_name']}'."
            ]
        # Extra parameters that the workflow is not expecting
        unexpected_parameters = provided_parameter_names.difference(
            parameter_names)
        if unexpected_parameters:
            errors["parameters"] = errors.get("parameters", []) + [
                f"Unexpected parameters provided for workflow '{data['workflow_name']}': {unexpected_parameters}."
            ]
        if errors:
            raise ValidationError(errors)

    @post_load
    def make_workflow_config(self, data, **kwargs) -> WorkflowConfig:
        """
        Return the provided workflow config parameters in a WorkflowConfig namedtuple.
        """
        return WorkflowConfig(**data)
class LainYamlSchema(Schema):
    """Top-level schema for lain.yaml; proc clauses are collected in pre_load."""

    appname = fields.Str(required=True,
                         validate=validate.NoneOf(INVALID_APPNAMES))
    build = fields.Nested(BuildSchema)
    release = fields.Nested(ReleaseSchema, missing=ReleaseSchema().load({}))
    test = fields.Nested(TestSchema, missing=TestSchema().load({}))
    # this field is populated during pre_load
    # this field cannot be written directly in lain.yaml
    procs = fields.Dict(values=fields.Nested(ProcSchema), required=True,
                        error_messages={'required': 'missing proc definition'})

    @staticmethod
    def tell_proc_info(key):
        '''Split a lain.yaml top-level key into (proc_type, proc_name).

        >>> LainYamlSchema.tell_proc_info('web')
        ('web', 'web')
        >>> LainYamlSchema.tell_proc_info('web.shit')
        ('web', 'shit')
        >>> LainYamlSchema.tell_proc_info('worker.shit')
        ('worker', 'shit')
        >>> LainYamlSchema.tell_proc_info('proc.web')
        (None, 'web')
        >>> LainYamlSchema.tell_proc_info('proc.')
        Traceback (most recent call last):
            ...
        marshmallow.exceptions.ValidationError: bad split: proc.
        >>> LainYamlSchema.tell_proc_info('whatever')
        (None, None)
        '''
        if '.' not in key:
            # Bare prefix like 'web' names both the type and the proc.
            if key in VALID_PROC_CLAUSE_PREFIX:
                return key, key
            return None, None
        parts = key.split('.')
        if not all(parts):
            raise ValidationError(f'bad split: {key}')
        length = len(parts)
        if length > 2:
            raise ValidationError(f'weird proc key {key}')
        type_, name = parts
        if type_ not in VALID_PROC_CLAUSE_PREFIX:
            raise ValidationError(
                f'proc key prefix must be in {VALID_PROC_CLAUSE_PREFIX}, got {key}'
            )
        if type_ == 'proc':
            # Generic 'proc.' prefix carries no type information.
            return None, name
        return type_, name

    @pre_load
    def preprocess(self, data):
        """Parse raw yaml if needed and gather proc clauses under 'procs'."""
        if not isinstance(data, dict):
            data = yaml.load(data)
        # collect all proc clauses and put them in a single dict
        if 'procs' in data:
            raise ValidationError(
                'must not write procs in lain.yaml, its generated by program')
        procs = {}
        for key, clause in list(data.items()):
            type_, name = self.tell_proc_info(key)
            if not name:
                # Not a proc clause; leave it where it is.
                continue
            clause['name'] = name
            if not clause.get('type'):
                clause['type'] = type_
            if not clause['type']:
                raise ValidationError(
                    f'cannot infer proc type of {key}:{clause}')
            if name in procs:
                raise ValidationError(f'duplicate proc name: {name}')
            procs[name] = data.pop(key)
        data['procs'] = procs
        return data

    @staticmethod
    def complete_mountpoint(mountpoint, domains, main_entrance=False):
        '''Expand bare paths into full domain/path urls; drop bare paths.

        >>> LainYamlSchema.complete_mountpoint(['/foo', 'pornhub.com/bar'], ['baidu.com', 'google.com'])
        ['pornhub.com/bar', 'baidu.com/foo', 'google.com/foo']
        >>> LainYamlSchema.complete_mountpoint(['/foo', 'pornhub.com/bar'], ['baidu.com', 'google.com'], main_entrance=True)
        ['pornhub.com/bar', 'baidu.com/foo', 'google.com/foo', 'baidu.com', 'google.com']
        '''
        for path in mountpoint[:]:
            if path.startswith('/'):
                # we want full urls, not path
                full_paths = [f'{domain}{path}' for domain in domains]
                mountpoint.extend(full_paths)
        if main_entrance:
            mountpoint.extend(domains)
        return [path for path in mountpoint if not path.startswith('/')]

    @post_load
    def finalize(self, data):
        """Fill in derived proc attributes: image, pod_name, mountpoint, annotation."""
        appname = data['appname']
        meta_version = data['meta_version'] = self.context['meta_version']
        default_image = gen_image_name(appname, 'release',
                                       meta_version=meta_version,
                                       registry=self.context['registry'])
        for proc in itervalues(data['procs']):
            if not proc['image']:
                proc['image'] = default_image
            type_ = proc['type']
            name = proc['name']
            proc['pod_name'] = f'{appname}.{type_.name}.{name}'
            if type_ is ProcType.web:
                is_main = name == 'web'
                mountpoint = proc['mountpoint']
                domains = [
                    '%s.%s' % (appname, domain)
                    for domain in self.context.get('domains', [DOMAIN])
                ]
                domains.append('%s.lain' % (appname, ))
                if name == 'web' and not mountpoint:
                    # The main web proc gets the app domains for free.
                    proc['mountpoint'] = domains
                elif not mountpoint:
                    raise ValidationError(
                        f'you must define mountpoint for proc {name}, only proc named web will have free mountpoints'
                    )
                else:
                    proc['mountpoint'] = self.complete_mountpoint(
                        mountpoint, domains, main_entrance=is_main)
            # NOTE(review): original layout was flattened; annotation is placed
            # at loop level (every proc gets one), not only for web procs —
            # confirm against upstream.
            proc['annotation'] = json.dumps(proc, cls=RichEncoder)
        return data