def test_pydantic_validation_error(self):
    """A route raising a pydantic ValidationError responds 400 with serialized errors."""
    validation_error = ValidationError(
        [
            ErrorWrapper(NoneIsNotAllowedError(), "foo"),
            ErrorWrapper(BoolError(), "bar"),
        ],
        BaseModel,
    )
    self.add_route_raises_exception(
        ExceptionHandlingSpec(Exception, broad_exception_handler, validation_error)
    )

    resp = self.request()

    self.assertEqual(400, resp.status_code)
    expected_body = {
        "error": [
            {
                "loc": ["foo"],
                "msg": "none is not an allowed value",
                "type": "type_error.none.not_allowed",
            },
            {
                "loc": ["bar"],
                "msg": "value could not be parsed to a boolean",
                "type": "type_error.bool",
            },
        ]
    }
    self.assertDictEqual(expected_body, resp.json)
async def request_body_to_args(
    required_params: List[ModelField],
    received_body: Optional[Union[Dict[str, Any], FormData]],
) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
    """Validate a parsed request body against the declared body fields.

    Returns a ``(values, errors)`` pair: ``values`` maps field names to
    validated values; ``errors`` collects one ErrorWrapper per failure,
    each located at ``("body", <field alias>)``.
    """
    values = {}
    errors = []
    if required_params:
        field = required_params[0]
        field_info = get_field_info(field)
        embed = getattr(field_info, "embed", None)
        # A single non-embedded body parameter receives the whole body, so
        # wrap it under the field's alias to reuse the generic lookup below.
        if len(required_params) == 1 and not embed:
            received_body = {field.alias: received_body}
        for field in required_params:
            value: Any = None
            if received_body is not None:
                if field.shape in sequence_shapes and isinstance(
                        received_body, FormData):
                    # Form data may repeat a key; collect every occurrence.
                    value = received_body.getlist(field.alias)
                else:
                    value = received_body.get(field.alias)
            # Missing value: None, or (for forms) "" / an empty sequence.
            if (value is None or (isinstance(field_info, params.Form) and value == "")
                    or (isinstance(field_info, params.Form)
                        and field.shape in sequence_shapes and len(value) == 0)):
                if field.required:
                    if PYDANTIC_1:
                        errors.append(
                            ErrorWrapper(MissingError(), loc=("body", field.alias)))
                    else:  # pragma: nocover
                        # Older pydantic requires an explicit config argument.
                        errors.append(
                            ErrorWrapper(  # type: ignore
                                MissingError(),
                                loc=("body", field.alias),
                                config=BaseConfig,
                            ))
                else:
                    # deepcopy so callers can't mutate a shared default value.
                    values[field.name] = deepcopy(field.default)
                continue
            # Single uploaded file bound to a bytes field: read its content now.
            if (isinstance(field_info, params.File)
                    and lenient_issubclass(field.type_, bytes)
                    and isinstance(value, UploadFile)):
                value = await value.read()
            elif (field.shape in sequence_shapes
                  and isinstance(field_info, params.File)
                  and lenient_issubclass(field.type_, bytes)
                  and isinstance(value, sequence_types)):
                # Sequence of uploads: read them concurrently, then rebuild
                # the container matching the declared field shape.
                awaitables = [sub_value.read() for sub_value in value]
                contents = await asyncio.gather(*awaitables)
                value = sequence_shape_to_type[field.shape](contents)
            v_, errors_ = field.validate(value, values, loc=("body", field.alias))
            if isinstance(errors_, ErrorWrapper):
                errors.append(errors_)
            elif isinstance(errors_, list):
                errors.extend(errors_)
            else:
                values[field.name] = v_
    return values, errors
def test_from_request_validation_error():
    """from_request_validation_error maps wrapped errors onto a Problem."""
    wrapped_errors = [
        ErrorWrapper(ValueError('foo'), 'here'),
        ErrorWrapper(ValueError('bar'), 'there'),
    ]
    problem = middleware.from_request_validation_error(
        RequestValidationError(errors=wrapped_errors))

    expected = middleware.Problem(
        type='',
        title='Validation Error',
        status=400,
        detail='One or more user-provided parameters are invalid',
        errors=[
            {'loc': ('here', ), 'msg': 'foo', 'type': 'value_error'},
            {'loc': ('there', ), 'msg': 'bar', 'type': 'value_error'},
        ],
    )
    assert problem == expected
async def test_exception_handler_pydantic_validationerror_model():
    """validation_exception_handler renders a pydantic ValidationError as a 400 payload."""

    async def fake_receive():
        payload = json.dumps({"id": "str", "name": []}).encode("utf-8")
        return {"type": "http.request", "body": payload}

    request = Request(
        {"type": "http", "method": "GET", "path": "/"}, receive=fake_receive
    )

    async def run_and_check(error):
        # Invoke the handler and assert the serialized error envelope.
        raw_response = await validation_exception_handler(request, error)
        body = json.loads(raw_response.body.decode("utf-8"))
        assert body["code"] == 400
        assert body["detail"] == "Validation error"
        assert body["fields"] == [{"name": "hello", "message": "World: "}]

    # Case 1: the wrapped exception itself carries nested raw_errors.
    nested_exc = Exception()
    nested_exc.raw_errors = [ErrorWrapper(loc=("hello", "world"), exc=Exception())]
    await run_and_check(
        ValidationError([ErrorWrapper(loc=("hello", "world"), exc=nested_exc)])
    )

    # Case 2: a plain exception without raw_errors.
    plain_exc = Exception()
    await run_and_check(
        ValidationError([ErrorWrapper(loc=("hello", "world"), exc=plain_exc)])
    )
def parse_geometry_obj(obj) -> Geometry:
    """Return the pydantic Geometry model matching ``obj["type"]``.

    ``obj`` is an object that is supposed to represent a GeoJSON geometry.
    Reads the ``"type"`` field and dispatches to the matching model's
    ``parse_obj``.

    Raises:
        ValidationError: if the ``"type"`` field is missing or unknown.
    """
    if "type" not in obj:
        # Fix: the model name "Geometry" was previously placed *inside* the
        # error list instead of being passed as the model argument (compare
        # the "Unknown type" raise below).
        raise ValidationError(
            [ErrorWrapper(ValueError("Missing 'type' field in geometry"), "type")],
            "Geometry",
        )
    # Dispatch table instead of a long if/elif chain.
    geometry_models = {
        "Point": Point,
        "MultiPoint": MultiPoint,
        "LineString": LineString,
        "MultiLineString": MultiLineString,
        "Polygon": Polygon,
        "MultiPolygon": MultiPolygon,
    }
    model = geometry_models.get(obj["type"])
    if model is None:
        raise ValidationError(
            [ErrorWrapper(ValueError("Unknown type"), "type")], "Geometry"
        )
    return model.parse_obj(obj)
def validate_required_primitive_elements_1864(
    cls, values: typing.Dict[str, typing.Any]
) -> typing.Dict[str, typing.Any]:
    """https://www.hl7.org/fhir/extensibility.html#Special-Case
    In some cases, implementers might find that they do not have appropriate
    data for an element with minimum cardinality = 1. In this case, the
    element must be present, but unless the resource or a profile on it has
    made the actual value of the primitive data type mandatory, it is
    possible to provide an extension that explains why the primitive value
    is not present.
    """
    # Each required primitive field is paired with its "__ext" companion.
    required_fields = [
        ("latitude", "latitude__ext"),
        ("longitude", "longitude__ext"),
    ]
    # Sentinel distinguishing "key absent" from an explicit None value.
    _missing = object()

    def _fallback():
        # Default used when ext_value's class lacks get_resource_type.
        return ""

    errors: typing.List["ErrorWrapper"] = []
    for name, ext in required_fields:
        field = cls.__fields__[name]
        ext_field = cls.__fields__[ext]
        value = values.get(field.alias, _missing)
        if value not in (_missing, None):
            # A real primitive value was supplied — this pair is satisfied.
            continue
        ext_value = values.get(ext_field.alias, _missing)
        missing_ext = True
        if ext_value not in (_missing, None):
            if isinstance(ext_value, dict):
                # Raw dict form: present iff it carries at least one extension.
                missing_ext = len(ext_value.get("extension", [])) == 0
            elif (
                getattr(ext_value.__class__, "get_resource_type", _fallback)()
                == "FHIRPrimitiveExtension"
            ):
                if ext_value.extension and len(ext_value.extension) > 0:
                    missing_ext = False
            else:
                # Unknown type: run the ext field's validators to coerce it,
                # collecting any validation failures along the way.
                validate_pass = True
                for validator in ext_field.type_.__get_validators__():
                    try:
                        ext_value = validator(v=ext_value)
                    except ValidationError as exc:
                        errors.append(ErrorWrapper(exc, loc=ext_field.alias))
                        validate_pass = False
                if not validate_pass:
                    continue
                if ext_value.extension and len(ext_value.extension) > 0:
                    missing_ext = False
        if missing_ext:
            # Neither a primitive value nor an explaining extension was given:
            # distinguish "absent" (MissingError) from explicit None.
            if value is _missing:
                errors.append(ErrorWrapper(MissingError(), loc=field.alias))
            else:
                errors.append(
                    ErrorWrapper(NoneIsNotAllowedError(), loc=field.alias)
                )
    if len(errors) > 0:
        raise ValidationError(errors, cls)  # type: ignore
    return values
def get_missing_field_error(field_alias: str) -> ErrorWrapper:
    """Build the ErrorWrapper reported when a required body field is absent."""
    location = ("body", field_alias)
    if PYDANTIC_1:
        missing = ErrorWrapper(MissingError(), loc=location)
    else:  # pragma: no cover
        # Older pydantic requires an explicit config argument.
        missing = ErrorWrapper(  # type: ignore
            MissingError(),
            loc=location,
            config=BaseConfig,
        )
    return missing
def get_missing_field_error(loc: Tuple[str, ...]) -> ErrorWrapper:
    """Build the ErrorWrapper reported when a required field at ``loc`` is absent."""
    if PYDANTIC_1:
        missing = ErrorWrapper(MissingError(), loc=loc)
    else:  # pragma: no cover
        # Older pydantic requires an explicit config argument.
        missing = ErrorWrapper(  # type: ignore
            MissingError(),
            loc=loc,
            config=BaseConfig,
        )
    return missing
def get_organization_scope_from_slug(slug: str) -> SessionLocal:
    """Return a session scoped to the organization schema for ``slug``.

    Looks the organization up in the default schema; when found, returns a
    new session bound to an engine whose schema map targets the
    organization-specific schema.

    Raises:
        ValidationError: if no organization with the given slug exists.
    """
    db_session = SessionLocal()
    try:
        organization = organization_service.get_by_slug(db_session=db_session, slug=slug)
    finally:
        # Fix: always release the lookup session, even if the query raises;
        # previously an exception here leaked the session.
        db_session.close()

    if organization:
        schema_engine = engine.execution_options(
            schema_translate_map={
                None: f"dispatch_organization_{slug}",
            })
        return sessionmaker(bind=schema_engine)()

    raise ValidationError(
        [
            ErrorWrapper(
                NotFoundError(
                    msg=f"Organization slug '{slug}' not found. Check your spelling."
                ),
                loc="organization",
            )
        ],
        model=BaseModel,
    )
def create_organization(
    *,
    db_session: Session = Depends(get_db),
    organization_in: OrganizationCreate,
    current_user: DispatchUser = Depends(get_current_user),
):
    """Create a new organization."""
    try:
        organization = create(db_session=db_session, organization_in=organization_in)
    except IntegrityError:
        duplicate_name = ErrorWrapper(
            ExistsError(msg="An organization with this name already exists."),
            loc="name",
        )
        raise ValidationError([duplicate_name], model=OrganizationCreate)

    # The creator automatically becomes the organization owner.
    add_user(
        db_session=db_session,
        organization=organization,
        user=current_user,
        role=UserRoles.owner,
    )
    return organization
def get_by_name_or_raise(
    *, db_session, project_id: int,
    incident_priority_in=IncidentPriorityRead) -> IncidentPriority:
    """Returns the incident_priority specified or raises ValidationError."""
    incident_priority = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=incident_priority_in.name,
    )
    if incident_priority:
        return incident_priority

    not_found = ErrorWrapper(
        NotFoundError(
            msg="IncidentPriority not found.",
            incident_priority=incident_priority_in.name,
        ),
        loc="incident_priority",
    )
    raise ValidationError([not_found], model=IncidentPriorityRead)
def request_params_to_args(
    required_params: Sequence[ModelField],
    received_params: Union[Mapping[str, Any], QueryParams, Headers],
) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
    """Validate request parameters (query/header/etc.) against their fields.

    Returns (values, errors): validated values keyed by field name, plus one
    ErrorWrapper per validation failure.
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    for field in required_params:
        # Multi-value containers expose getlist(); an empty list falls back
        # to the field default via `or`.
        if is_scalar_sequence_field(field) and isinstance(
                received_params, (QueryParams, Headers)):
            value = received_params.getlist(field.alias) or field.default
        else:
            value = received_params.get(field.alias)

        field_info = field.field_info
        assert isinstance(field_info,
                          params.Param), "Params must be subclasses of Param"
        loc = (field_info.in_.value, field.alias)

        if value is None:
            if field.required:
                errors.append(ErrorWrapper(MissingError(), loc=loc))
            else:
                values[field.name] = deepcopy(field.default)
            continue

        validated, field_errors = field.validate(value, values, loc=loc)
        if isinstance(field_errors, ErrorWrapper):
            errors.append(field_errors)
        elif isinstance(field_errors, list):
            errors.extend(field_errors)
        else:
            values[field.name] = validated
    return values, errors
def _validate_feature_server_config(cls, values):
    """Normalize and validate the optional ``feature_server`` section."""
    # Having no feature server is the default.
    if "feature_server" not in values:
        return values

    # Only dict-style configuration needs normalization here.
    if not isinstance(values["feature_server"], Dict):
        return values

    # The provider drives the feature server defaults, so it must be set.
    if "provider" not in values:
        raise FeastProviderNotSetError()

    expected_type = FEATURE_SERVER_TYPE_FOR_PROVIDER.get(values["provider"])
    declared_type = values["feature_server"].get("type")

    # The type is provider-defined: it may be omitted, but not contradicted.
    if declared_type not in (None, expected_type):
        raise FeastFeatureServerTypeSetError(declared_type)
    values["feature_server"]["type"] = expected_type

    # Instantiate the concrete config class so one of the union types matches.
    try:
        config_class = get_feature_server_config_from_type(expected_type)
        config_class(**values["feature_server"])
    except ValidationError as e:
        raise ValidationError(
            [ErrorWrapper(e, loc="feature_server")],
            model=RepoConfig,
        )
    return values
def correct_length_mapping(cls, mapping):
    """Validates that each mapping contains only 2 values (original & changed)
    and at most 1000 values.

    Raises:
        ValidationError: if any entry does not have exactly 2 values.
        ValueError: if there are more than ``_MAX_MAPPING_CNT`` entries.
    """
    # if no mapping is provided (i.e. when updating only name), do nothing
    if not mapping:
        return mapping

    # Idiomatic comprehension replaces the manual append loop.
    invalid_value_idx = [
        idx for idx, elem in enumerate(mapping) if len(elem) != 2
    ]
    if invalid_value_idx:
        # Report every bad entry with its 1-based position.
        raise ValidationError(
            [
                ErrorWrapper(
                    ValueError(
                        f'This mapping contains {len(mapping[idx])} values instead of 2'
                    ),
                    str(idx + 1))
                for idx in invalid_value_idx
            ],
            cls,
        )
    if len(mapping) > _MAX_MAPPING_CNT:
        raise ValueError(
            f'There is {len(mapping)} mappings, but Husky only supports {_MAX_MAPPING_CNT} mappings.'
        )
    return mapping
def _validate_offline_store_config(cls, values):
    """Fill in defaults for and validate the ``offline_store`` section."""
    # Set empty offline_store config if it isn't set explicitly
    if "offline_store" not in values:
        values["offline_store"] = dict()

    # Skip if we aren't creating the configuration from a dict
    if not isinstance(values["offline_store"], Dict):
        return values

    # Make sure that the provider configuration is set. We need it to set the defaults
    assert "provider" in values

    # Default store type per provider. An unknown provider sets nothing, so
    # the lookup below raises KeyError exactly as the original chain did.
    provider_defaults = {"local": "file", "gcp": "bigquery", "aws": "redshift"}
    if "type" not in values["offline_store"]:
        provider = values["provider"]
        if provider in provider_defaults:
            values["offline_store"]["type"] = provider_defaults[provider]

    offline_store_type = values["offline_store"]["type"]

    # Validate the dict to ensure one of the union types match
    try:
        offline_config_class = get_offline_config_from_type(offline_store_type)
        offline_config_class(**values["offline_store"])
    except ValidationError as e:
        raise ValidationError(
            [ErrorWrapper(e, loc="offline_store")],
            model=RepoConfig,
        )
    return values
def get_class_by_tablename(table_fullname: str) -> Any:
    """Return class reference mapped to table."""

    def _lookup(name):
        # Scan the declarative registry for a class whose table name matches.
        for candidate in Base._decl_class_registry.values():
            if hasattr(candidate, "__table__"):
                if candidate.__table__.fullname.lower() == name.lower():
                    return candidate
        return None

    mapped_name = resolve_table_name(table_fullname)
    # Fall back to the 'dispatch_core' schema when the plain name misses.
    mapped_class = _lookup(mapped_name) or _lookup(f"dispatch_core.{mapped_name}")

    if mapped_class:
        return mapped_class

    raise ValidationError(
        [
            ErrorWrapper(
                NotFoundError(msg="Model not found. Check the name of your model."),
                loc="filter",
            )
        ],
        model=BaseModel,
    )
def get_by_name_or_raise(
    *, db_session, project_id,
    source_transport_in=SourceTransportRead) -> SourceTransportRead:
    """Returns the source transport specified or raises ValidationError."""
    source = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=source_transport_in.name,
    )
    if source:
        return source

    not_found = ErrorWrapper(
        NotFoundError(
            msg="SourceTransport not found.",
            source=source_transport_in.name,
        ),
        loc="source",
    )
    raise ValidationError([not_found], model=SourceTransportRead)
def create_vps_server(db: Session = Depends(get_db), *, vps_profile: VpsCreateSchema):
    """Create a VPS record and dispatch the provisioning task."""
    # validate the ISP reference before doing anything else
    isp = crud_isp.get(db_session=db, id=vps_profile.isp_id)
    if not isp:
        raise ValidationError(
            [ErrorWrapper(Exception('provider_name is not matched'), loc="isp_id")],
            model=VpsCreateSchema,
        )

    # resolve plan/os/region spec values from the redis cache
    spec_values = RedisPool().get_vps_spec_value(
        db_session=db,
        isp_id=vps_profile.isp_id,
        os_code=vps_profile.os_code,
        plan_code=vps_profile.plan_code,
        region_code=vps_profile.region_code,
    )

    # persist the base VPS record
    vps_obj = crud_vps.create(
        db_session=db,
        obj_in=dict(
            hostname=vps_profile.hostname,
            isp_id=vps_profile.isp_id,
            ssh_keys=vps_profile.ssh_keys,
            remark=vps_profile.remark,
            status=vps_profile.status,
            **spec_values,
        ),
        serializer=None,
    )

    # hand off the actual provisioning to the celery worker
    task = celery_app.send_task(
        "create_vps", args=[vps_profile.dict(), vps_obj.id]
    )
    return dict(result=task)
def get_by_name_or_raise(
    *, db_session, project_id,
    source_status_in=SourceStatusRead) -> SourceStatusRead:
    """Returns the status specified or raises ValidationError."""
    status = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=source_status_in.name,
    )
    if status:
        return status

    not_found = ErrorWrapper(
        NotFoundError(
            msg="SourceStatus not found.",
            status=source_status_in.name,
        ),
        loc="status",
    )
    raise ValidationError([not_found], model=SourceStatusRead)
def update_organization(
    *,
    db_session: Session = Depends(get_db),
    organization_id: PrimaryKey,
    organization_in: OrganizationUpdate,
):
    """Update an organization."""
    organization = get(db_session=db_session, organization_id=organization_id)
    if not organization:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "An organization with this id does not exist."}],
        )
    try:
        return update(
            db_session=db_session,
            organization=organization,
            organization_in=organization_in,
        )
    except IntegrityError:
        duplicate_name = ErrorWrapper(
            ExistsError(msg="An organization with this name already exists."),
            loc="name",
        )
        raise ValidationError([duplicate_name], model=OrganizationUpdate)
def create_service(
    *,
    db_session: Session = Depends(get_db),
    service_in: ServiceCreate = Body(
        ...,
        example={
            "name": "myService",
            "type": "pagerduty",
            "is_active": True,
            "external_id": "234234",
        },
    ),
):
    """Create a new service."""
    existing = get_by_external_id_and_project_name(
        db_session=db_session,
        external_id=service_in.external_id,
        project_name=service_in.project.name,
    )
    if existing:
        duplicate = ErrorWrapper(
            ExistsError(msg="A service with this external_id already exists."),
            loc="external_id",
        )
        raise ValidationError([duplicate], model=ServiceCreate)
    return create(db_session=db_session, service_in=service_in)
def get_by_name_or_raise(
    *, db_session, project_id,
    source_data_format_in=SourceDataFormatRead) -> SourceDataFormatRead:
    """Returns the source specified or raises ValidationError."""
    data_format = get_by_name(
        db_session=db_session,
        project_id=project_id,
        name=source_data_format_in.name,
    )
    if data_format:
        return data_format

    not_found = ErrorWrapper(
        NotFoundError(
            msg="SourceDataFormat not found.",
            source=source_data_format_in.name,
        ),
        loc="dataFormat",
    )
    raise ValidationError([not_found], model=SourceDataFormatRead)
def update_service(*, db_session: Session = Depends(get_db),
                   service_id: PrimaryKey, service_in: ServiceUpdate):
    """Update an existing service."""
    service = get(db_session=db_session, service_id=service_id)
    if not service:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A service with this id does not exist."}],
        )
    try:
        return update(db_session=db_session, service=service, service_in=service_in)
    except IntegrityError:
        duplicate_name = ErrorWrapper(
            ExistsError(msg="A service with this name already exists."),
            loc="name",
        )
        raise ValidationError([duplicate_name], model=ServiceUpdate)
def validate_joins_correct_taxons(cls, values):
    """Check list of fields in joins against all available taxons on model.

    Raises:
        ValidationError: if any join references taxons the model lacks.
    """
    if 'attributes' in values and 'joins' in values:
        attributes: List[FdqModelAttribute] = values['attributes']
        joins: List[FdqModelJoin] = values['joins']

        # get set of available taxon slugs
        available_taxon_slugs = set(
            cls._get_available_attrs_taxon_slugs(attributes))

        # for each join, verify that all its taxons are available in this model
        invalid_joins: Dict[int, Set[str]] = {}
        for idx, join in enumerate(joins):
            missing_taxons = set(join.taxons) - available_taxon_slugs
            # Idiomatic truthiness check instead of `if len(...)`.
            if missing_taxons:
                invalid_joins[idx] = missing_taxons

        if invalid_joins:
            # report invalid joins (1-based index in the message)
            raise ValidationError(
                [
                    ErrorWrapper(
                        ValueError(
                            f'Join {idx + 1} contains missing fields {",".join(taxon_slugs)}'
                        ), 'joins')
                    for idx, taxon_slugs in invalid_joins.items()
                ],
                cls,
            )
    return values
async def request_body_to_args(
        required_params: List[Field],
        received_body: Dict[str, Any]) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
    """Validate the parsed request body against the declared body fields.

    Returns (values, errors): validated values keyed by field name, plus one
    ErrorWrapper per validation failure located at ("body", <alias>).
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    if required_params:
        first_field = required_params[0]
        embed = getattr(first_field.schema, "embed", None)
        # A single non-embedded body param consumes the whole body, so wrap
        # it under the field's alias for the uniform lookup below.
        if len(required_params) == 1 and not embed:
            received_body = {first_field.alias: received_body}
        for field in required_params:
            value = received_body.get(field.alias)
            if value is None:
                if field.required:
                    errors.append(
                        ErrorWrapper(MissingError(),
                                     loc=("body", field.alias),
                                     config=BaseConfig))
                else:
                    values[field.name] = deepcopy(field.default)
                continue
            validated, field_errors = field.validate(
                value, values, loc=("body", field.alias))
            if isinstance(field_errors, ErrorWrapper):
                errors.append(field_errors)
            elif isinstance(field_errors, list):
                errors.extend(field_errors)
            else:
                values[field.name] = validated
    return values, errors
class MockException:
    # Test double mimicking an exception that carries pydantic raw_errors:
    # a single ValidationError for MockModel's 'name' field exceeding the
    # 10-character AnyStr max-length constraint.
    raw_errors = [
        ValidationError(
            errors=[ErrorWrapper(AnyStrMaxLengthError(limit_value=10), ('name',))],
            model=MockModel
        )
    ]
def validate(cls: Type["Range"], value: Any) -> "Range":
    """
    Validate the passed in value.

    Strings are parsed with RANGE_REGEX / SUBRANGE_REGEX into a Range
    instance; non-string values are delegated to the parent validator.

    Raises:
        ValidationError: if a string does not match the Range grammar.
    """
    if isinstance(value, str):
        match = RANGE_REGEX.fullmatch(value)
        if not match:
            raise ValidationError(
                [
                    # Fix: plain string literal — the f-prefix had no placeholders.
                    ErrorWrapper(ValueError("Unable to parse Range!"), loc=cls.__name__)
                ],
                cls,
            )
        match_groups = match.groupdict()
        return cls(
            unit=match_groups["unit"],
            ranges=[
                # Empty capture groups become None rather than int("").
                Subrange(**{
                    k: int(v) if v else None
                    for k, v in m.groupdict().items()
                })
                for m in SUBRANGE_REGEX.finditer(match_groups["ranges"])
            ],
        )
    return super().validate(value)
def update_tag_type(*, db_session: Session = Depends(get_db),
                    tag_type_id: PrimaryKey, tag_type_in: TagTypeUpdate):
    """Update a tag type."""
    tag_type = get(db_session=db_session, tag_type_id=tag_type_id)
    if not tag_type:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=[{"msg": "A tag type with this id does not exist."}],
        )
    try:
        return update(db_session=db_session, tag_type=tag_type, tag_type_in=tag_type_in)
    except IntegrityError:
        duplicate_name = ErrorWrapper(
            ExistsError(msg="A tag type with this name already exists."),
            loc="name",
        )
        raise ValidationError([duplicate_name], model=TagTypeUpdate)
def request_params_to_args(
    required_params: Sequence[Field],
    received_params: Union[Mapping[str, Any], QueryParams, Headers],
) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
    """Validate request parameters (query/header/etc.) against their fields.

    Returns (values, errors): validated values keyed by field name, plus one
    ErrorWrapper per validation failure.
    """
    values: Dict[str, Any] = {}
    errors: List[ErrorWrapper] = []
    for field in required_params:
        # Sequence-shaped params on multi-dict containers use getlist().
        if field.shape in sequence_shapes and isinstance(
                received_params, (QueryParams, Headers)):
            value = received_params.getlist(field.alias)
        else:
            value = received_params.get(field.alias)

        schema: params.Param = field.schema
        assert isinstance(schema, params.Param), "Params must be subclasses of Param"
        loc = (schema.in_.value, field.alias)

        if value is None:
            if field.required:
                errors.append(
                    ErrorWrapper(
                        MissingError(),
                        loc=loc,
                        config=BaseConfig,
                    ))
            else:
                values[field.name] = deepcopy(field.default)
            continue

        validated, field_errors = field.validate(value, values, loc=loc)
        if isinstance(field_errors, ErrorWrapper):
            errors.append(field_errors)
        elif isinstance(field_errors, list):
            errors.extend(field_errors)
        else:
            values[field.name] = validated
    return values, errors
def __init__(self, **data: Any) -> None:
    """Initializes a BaseModel with the provided data.

    On validation failure, augments list-valued input errors with a
    duplicated-key hint and, when an identifier can be derived from the
    data, re-wraps the error under that identifier's location.

    Raises:
        ValidationError: A validation error when the data is invalid.
    """
    try:
        super().__init__(**data)
    except ValidationError as e:
        # Give a special message for faulty list input
        for re in e.raw_errors:
            # Only single-loc wrappers expose _loc; when the offending input
            # value is a list, a duplicated key in the file is the likely cause.
            if (hasattr(re, "_loc") and hasattr(re.exc, "msg_template")
                    and isinstance(data.get(to_key(re._loc)), list)):
                re.exc.msg_template += (
                    f". The key {re._loc} might be duplicated in the input file."
                )
        # Update error with specific model location name
        identifier = self._get_identifier(data)
        if identifier is None:
            raise e
        else:
            # If there is an identifier, include this in the ValidationError messages.
            raise ValidationError([ErrorWrapper(e, loc=identifier)], self.__class__)