class LogRootConfig(FOCABaseConfig):
    """Model for root log configuration.

    Args:
        level: Numeric value of logging level.
        handlers: List of logging handlers by name.

    Attributes:
        level: Numeric value of logging level.
        handlers: List of logging handlers by name.

    Raises:
        pydantic.ValidationError: The class was instantiated with an illegal
            data type.

    Example:
        >>> LogRootConfig(
        ...     level=logging.INFO,
        ...     handlers=["console"],
        ... )
        LogRootConfig(level=20, handlers=['console'])
    """

    # Default level 10 == logging.DEBUG; default handler set is console only.
    level: int = 10
    handlers: Optional[List[str]] = ["console"]

    # Restrict `level` to the allowed logging-level choices.
    _validate_level = validator('level', allow_reuse=True)(validate_log_level_choices)
class ScrapeCommittee(BaseModel):
    """A scraped committee: identity, classification, membership, links and
    provenance sources."""

    name: str
    chamber: CommitteeChamber
    classification: CommitteeType = CommitteeType.COMMITTEE
    # Required iff classification == "subcommittee"; forbidden for committees
    # (enforced by the root validator below).
    parent: typing.Optional[str] = None
    sources: typing.List[Link] = []
    links: typing.List[Link] = []
    other_names: typing.List[OtherName] = []
    members: typing.List[Membership] = []
    extras: dict = {}

    # Committee names may not contain embedded newlines.
    _validate_strs = validator("name", allow_reuse=True)(validate_str_no_newline)

    @root_validator
    def validate_parent_and_classification(
        cls, values: dict[str, typing.Any]
    ) -> dict[str, typing.Any]:
        # Cross-field check: parent and classification must agree.
        if values.get("classification") == "subcommittee" and not values.get("parent"):
            raise ValueError("subcommittees must have a parent")
        if values.get("classification") == "committee" and values.get("parent"):
            raise ValueError(
                "committees may not have a parent, set classification=subcommittee"
            )
        return values

    def add_member(self, name: str, role: str = "member") -> None:
        """Append a membership entry with the given name and role."""
        self.members.append(Membership(name=name, role=role))

    def add_link(self, url: str, note: str = "") -> None:
        """Append an informational link."""
        self.links.append(Link(url=url, note=note))

    def add_source(self, url: str, note: str = "") -> None:
        """Append a provenance source link."""
        self.sources.append(Link(url=url, note=note))
class PinnedRecording(BaseModel):
    """Represents a pinned recording object.

    Args:
        user_id: the row id of the user in the DB
        row_id: the row id of the pinned_recording in the DB
        recording_mbid: the MusicBrainz ID of the recording
        blurb_content: (Optional) the custom text content of the pinned recording
        created: the datetime containing tzinfo representing when the pinned
            recording record was inserted into DB
        pinned_until: the datetime containing tzinfo representing when the
            pinned recording is set to expire/unpin

    Validates that pinned_until contains tzinfo() and is greater than created.
    """

    user_id: int
    user_name: Optional[str]
    row_id: int
    recording_msid: str
    # Optional made explicit; in pydantic v1 a default of None already implied it.
    recording_mbid: Optional[str] = None
    blurb_content: Optional[constr(max_length=MAX_BLURB_CONTENT_LENGTH)] = None
    created: datetime
    pinned_until: datetime

    # MSID/MBID values must be valid UUIDs; timestamps must carry tzinfo
    # (always=True so the tz checks run even for defaulted values).
    _validate_recording_msid: classmethod = validator(
        "recording_msid", allow_reuse=True)(check_rec_mbid_msid_is_valid_uuid)
    _validate_recording_mbid: classmethod = validator(
        "recording_mbid", allow_reuse=True)(check_rec_mbid_msid_is_valid_uuid)
    _validate_created_tzinfo: classmethod = validator(
        "created", always=True, allow_reuse=True)(check_datetime_has_tzinfo)
    _validate_pin_until_tzinfo: classmethod = validator(
        "pinned_until", always=True, allow_reuse=True)(check_datetime_has_tzinfo)

    # also must validate that pinned_until datetime greater than created
    @validator("pinned_until", always=True)
    def check_pin_until_greater_than_created(cls, pin_until, values):
        # NOTE(review): if `created` failed validation it is absent from
        # `values`, so the lookup raises KeyError, which is NOT caught by the
        # tuple below — confirm upstream validation guarantees its presence.
        try:
            if pin_until <= values["created"]:
                raise ValueError
            return pin_until
        except (ValueError, AttributeError):
            raise ValueError(
                """Pinned_until of returned PinnedRecording must be greater than created.
                See https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for acceptable formats."""
            )
class UserBase(BaseModel):
    """Base user payload: full name, Brazilian CPF document and birth date."""

    name: str
    document: str
    birth_date: date

    # Reusable project validators: non-empty fields, normalized name.
    _empty_name = validator("name", allow_reuse=True)(validators.check_field_not_empty)
    _empty_document = validator("document", allow_reuse=True)(
        validators.check_field_not_empty)
    _normalize_name = validator("name")(validators.normalize)

    @validator("name")
    def name_must_contain_spaces(cls, name: str):
        """Require at least a first and a second name."""
        name_split = name.split()
        if len(name_split) == 1:
            raise ValueError("must contain first and second name")
        return name

    @validator("document")
    def validate_document(cls, document: str):
        """Validate a CPF document: NNN.NNN.NNN-NN format plus both check digits.

        Raises:
            ValueError: "Invalid document" when the format, digit count, or
                either CPF verification digit is wrong.
        """
        import re

        # The original used bare `raise` inside a bare `except:`; that also
        # swallowed KeyboardInterrupt/SystemExit. Raise the same ValueError
        # explicitly instead — accept/reject behavior is unchanged.
        error = ValueError("Invalid document")
        if not re.match(r"\d{3}\.\d{3}\.\d{3}-\d{2}", document):
            raise error
        numbers = [int(digit) for digit in document if digit.isdigit()]
        # A CPF has exactly 11 digits and may not be a single repeated digit.
        if len(numbers) != 11 or len(set(numbers)) == 1:
            raise error
        # First check digit: weighted sum of the first 9 digits (weights 10..2).
        sum_of_products = sum(
            a * b for a, b in zip(numbers[0:9], range(10, 1, -1)))
        if numbers[9] != (sum_of_products * 10 % 11) % 10:
            raise error
        # Second check digit: weighted sum of the first 10 digits (weights 11..2).
        sum_of_products = sum(
            a * b for a, b in zip(numbers[0:10], range(11, 1, -1)))
        if numbers[10] != (sum_of_products * 10 % 11) % 10:
            raise error
        return document
class FaceOneSideModel(BaseConfig, DummyDataModel):
    """Data model holding quantities on one side of cell faces normal to one direction."""

    nx: conint(gt=0)
    ny: conint(gt=0)
    dtype: DummyDtype
    w: nplike.ndarray
    hu: nplike.ndarray
    hv: nplike.ndarray
    h: nplike.ndarray
    u: nplike.ndarray
    v: nplike.ndarray
    a: nplike.ndarray
    flux: WHUHVModel

    # validators: array checks plus NaN/Inf screening
    _val_arrays = validator("w", "hu", "hv", "h", "u", "v", "a", "flux",
                            allow_reuse=True)(_pydantic_val_arrays)
    _val_valid_numbers = validator("w", "hu", "hv", "h", "u", "v", "a",
                                   allow_reuse=True)(_pydantic_val_nan_inf)

    def __init__(self, nx, ny, dtype):
        """Allocate zero-filled (ny, nx) arrays for every per-face quantity."""
        dtype = DummyDtype.validator(dtype)
        # One zero array per scalar field; keyword construction below triggers
        # pydantic validation exactly as an explicit kwarg list would.
        zero_fields = {
            key: nplike.zeros((ny, nx), dtype=dtype)
            for key in ("w", "hu", "hv", "h", "u", "v", "a")
        }
        super().__init__(
            nx=nx, ny=ny, dtype=dtype,
            flux=WHUHVModel(nx, ny, dtype),
            **zero_fields)
class UserRecommendationsRecord(BaseModel):
    """ Each individual record for a user's recommendations. """

    # MusicBrainz recording ID plus the recommendation score.
    recording_mbid: constr(min_length=1)
    score: float

    # recording_mbid must also be a valid UUID.
    _validate_recording_mbid: classmethod = validator(
        "recording_mbid", allow_reuse=True)(check_valid_uuid)
class PortForwardRuleEdit(BaseModel):
    """Payload for editing a port-forward rule: the method plus its config dict."""

    method: MethodEnum
    config: t.Dict

    # Validate the raw config mapping before standard field validation
    # (pre=True) — see check_config.
    _config = validator("config", pre=True, allow_reuse=True)(check_config)

    class Config:
        # Allow construction from ORM objects.
        orm_mode = True
class WritablePlaylist(Playlist):
    """Playlist variant with relaxed (optional) fields, used for writing.

    Optional annotations made explicit; in pydantic v1 a default of None
    already implied them.
    """

    id: Optional[int] = None
    mbid: Optional[str]
    creator: Optional[str] = None
    recordings: List[PlaylistRecording] = []
    created: Optional[datetime.datetime] = None

    # mbid, when present, must be a valid UUID.
    _validate_mbid: classmethod = validator("mbid", allow_reuse=True)(check_valid_uuid)
class Category(BaseModel):
    """A category with optional nested subcategories."""

    # Optional annotations made explicit; pydantic v1 already inferred them
    # from the None defaults.
    id: typing.Optional[int] = None
    name: typing.Optional[str] = None
    description: typing.Optional[str] = None
    # Items may arrive as raw dicts; normalize_subcategories processes them.
    subcategories: typing.Optional[typing.List[typing.Union[dict, Subcategory]]] = None

    _normalize_subcategories = validator(
        "subcategories", allow_reuse=True)(normalize_subcategories)
class OrderItem(CoreModel):
    """A single order item: id, weight, destination region and delivery windows."""

    order_id: ORDER_ID
    # Strictly positive number; strict=True disables implicit type coercion.
    weight: Union[confloat(strict=True, gt=0.0), conint(strict=True, gt=0)]
    region: conint(strict=True, gt=0)
    delivery_hours: HOURS_LIST

    # Validate the delivery-hours list — see hours_validate.
    _normalize_delivery_hours = validator("delivery_hours", allow_reuse=True)(hours_validate)
class Operation(BaseModel):
    """An API operation description (OpenAPI-style Operation object)."""

    tags: Optional[List[str]]
    summary: Optional[str]
    description: Optional[str]
    externalDocs: Optional[ExternalDocumentation]
    operationId: Optional[str]
    parameters: Optional[List[Union[Reference, Parameter]]]
    requestBody: Optional[Union[Reference, RequestBody]]
    # The only required field.
    responses: Responses
    callbacks: Optional[Dict[str, Union[Reference, Callback]]]
    deprecated: bool = False
    security: Optional[List[SecurityRequirement]]
    servers: Optional[List[Server]]

    # Parameters must be unique; responses get extra checks — see helpers.
    _check_parameters = validator("parameters", allow_reuse=True)(check_unique)
    _check_responses = validator("responses", allow_reuse=True)(check_responses)
class M3PlacementNode(AstacusModel):
    """One node entry in an M3 placement."""

    # In Aiven-internal case, most of these are redundant fields (we
    # could derive node_id and hostname from endpoint); however, for
    # generic case, we configure all of them (and expect them to be
    # configured).
    # Every field must be non-empty and of sane length.
    node_id: str
    _validate_node_id = validator("node_id", allow_reuse=True)(non_empty_and_sane_length)
    endpoint: str
    _validate_endpoint = validator("endpoint", allow_reuse=True)(non_empty_and_sane_length)
    hostname: str
    _validate_hostname = validator("hostname", allow_reuse=True)(non_empty_and_sane_length)
class Record(_StagesConfig):
    """A stage record: a named piece of data at a file-system location."""

    record: str
    location: str
    mime: str = ""
    is_directory: bool = False
    smry_keys: Optional[List[str]] = None

    # mime is checked/derived against the `location` field — see ensure_mime.
    _ensure_record_mime = validator("mime", allow_reuse=True)(ensure_mime("location"))
class NewUser(BaseModel):
    """Registration payload for a new user."""

    user_id: str
    email: EmailStr
    password: str
    user_name: Optional[str] = None

    # user_id must conform to the allowed character set — see custom_charset.
    _validate_user_id_1 = validator("user_id", allow_reuse=True)(custom_charset)
def test_make_generic_validator(fields, result):
    """Check that make_generic_validator maps an arbitrary positional validator
    signature onto the canonical (cls, v, values, field, config) convention.

    The generated function simply joins its parameters with "," so the result
    reveals which canonical arguments were forwarded.
    """
    code = f"""def testing_function({', '.join(fields)}): return {' + "," + '.join(fields)}"""
    # Execute into an explicit namespace: relying on exec() mutating function
    # locals() is undefined behavior and no longer works under PEP 667
    # (Python 3.13+), where locals() in a function returns a snapshot proxy.
    namespace = {}
    exec(code, namespace)
    func = namespace['testing_function']
    validator = make_generic_validator(func)
    assert validator.__qualname__ == 'testing_function'
    assert validator.__name__ == 'testing_function'
    # args: cls, v, values, field, config
    assert validator('_cls_', '_v_', '_values_', '_field_', '_config_') == result
def test_make_generic_validator_cls_kwargs():
    """A (cls, v, **kwargs) validator must receive values/field/config as
    keyword arguments, in that order."""
    def test_validator(cls, v, **kwargs):
        return ', '.join(f'{k}: {v}' for k, v in kwargs.items())

    generic = make_generic_validator(test_validator)
    # The wrapper keeps the wrapped function's name.
    assert generic.__name__ == 'test_validator'
    expected = 'values: _vs_, field: _f_, config: _c_'
    assert generic('_cls_', '_v_', '_vs_', '_f_', '_c_') == expected
def treat_scalar_as_single_value_set(*fields):
    """Return a reusable pre-validator that treats a scalar value for any of
    the given fields as a single-value set."""
    # We need this as ckan coerces single value list as scalars 🤦
    # from:
    # ckan/views/resource.py:185
    # ckan/logic/__init__.py:150
    return pydantic.validator(*fields, pre=True, allow_reuse=True)(
        _treat_scalar_as_single_value_set
    )
class CacheSettings(_Settings):
    """Settings controlling the on-disk frame cache."""

    dir: DirectoryPath = Field(default=os.path.join(ROOTDIR, 'cache'),
                               title="cache directory")
    size_limit_gb: float = Field(default=4.0, title="cache size limit (GB)")
    do_cache: bool = Field(default=True, title="use the cache")
    resolve_frame_number: bool = Field(default=True,
                                       title="resolve to (nearest) cached frame numbers")
    block_timeout: float = Field(default=0.1, title="wait for blocked item (s)")

    # Pre-validate the directory path (pre=True runs before DirectoryPath
    # validation) — see _Settings._validate_directorypath.
    _validate_dir = validator('dir', allow_reuse=True, pre=True)(_Settings._validate_directorypath)
class Round(BaseModel):
    """A round, optionally carrying its board."""

    class_: RoundClass
    # NOTE(review): alias equals the field name, so it is redundant here —
    # confirm whether a different source key was intended.
    board: Optional[Board] = Field(alias="board")

    class Config:
        # Allow construction from ORM objects.
        orm_mode = True

    # `first` runs pre-validation on the raw board value — see first().
    _board = validator("board", pre=True, allow_reuse=True)(first)
class CourierUpdateResponseModel(CoreModel):
    """Response body returned after updating a courier."""

    courier_id: COURIER_ID
    courier_type: CourierType
    regions: REGIONS
    working_hours: HOURS_LIST

    # Validate the working-hours list — see hours_validate.
    _normalize_working_hours = validator("working_hours", allow_reuse=True)(hours_validate)
class UserInCreate(BaseModel):
    """Payload for creating a user, with server-side timestamp defaults."""

    email: EmailStr
    username: str
    password: str
    active: bool = True
    confirmed: bool = False
    permissions: List[str] = []
    info: list = []
    created_at: Optional[datetime] = None
    last_login: Optional[datetime] = None

    # always=True makes these run even when the fields are omitted, so the
    # setters can fill in defaults — see set_created_at / set_last_login.
    _created_at = validator("created_at", pre=True, always=True, allow_reuse=True)(
        set_created_at
    )
    _last_login = validator("last_login", pre=True, always=True, allow_reuse=True)(
        set_last_login
    )
class NoteEditing(pydantic.BaseModel):
    """Payload for editing a note: an optional rubric id and the note content."""

    rubric_id: Optional[int]
    # Content must be at least 3 characters long.
    content: str = pydantic.fields.Field(min_length=3)

    # validators
    # Presumably maps empty submitted values to None before int validation —
    # see convert_empty_value.
    _convert_empty_values = pydantic.validator('rubric_id', allow_reuse=True,
                                               pre=True)(convert_empty_value)
class TransportableCommand(_StagesConfig):
    """A command that can be transported: name, file location and mime type."""

    name: str
    location: FilePath
    mime: str = ""

    # mime is checked/derived against the `location` field — see ensure_mime.
    _ensure_transportablecommand_mime = validator("mime", allow_reuse=True)(
        ensure_mime("location")
    )
class ReleaseRecord(BaseModel):
    """ Each individual record for a user's release stats """

    artist_mbids: List[constr(min_length=1)] = []
    release_mbid: Optional[str]
    release_name: str
    listen_count: NonNegativeInt
    artist_name: str
    # to add empty fields to stats API response, for compatibility
    artist_msid: Optional[str]
    release_msid: Optional[str]

    # MBIDs must be valid UUIDs; artist_mbids entries are checked one by one
    # (each_item=True).
    _validate_uuids: classmethod = validator(
        "release_mbid", allow_reuse=True)(check_valid_uuid)
    _validate_artist_mbids: classmethod = validator(
        "artist_mbids", each_item=True, allow_reuse=True)(check_valid_uuid)
def __call__(self, schema: Schema, variant: ModelVariant) -> type:
    """Build (and memoize per (schema, variant)) a pydantic model for `schema`.

    LIST wraps the GET variant in a paginated envelope; the other variants
    combine entity-level fields (slug/name, plus id/deleted for GET) with one
    field per schema attribute definition.
    """
    key = (schema, variant)
    if key in self.__cache:
        return self.__cache[key]

    if variant is ModelVariant.LIST:
        # Paginated envelope around the GET model.
        entity_model = self(schema=schema, variant=ModelVariant.GET)
        model = create_model(
            self.clean_modelname(schema.slug, variant),
            total=(int, Field(
                description=
                'Total number of entities satisfying conditions')),
            entities=(List[entity_model],
                      Field(description='List of returned entities')))
        self.__cache[key] = model
        return model

    class Config:
        # Reject unknown keys in payloads.
        extra = 'forbid'

    # One field per attribute definition; required except in the CREATE variant.
    attr_fields = {
        self.clean_fieldname(i.attribute.name):
        self.fieldtype(i, variant != ModelVariant.CREATE)
        for i in schema.attr_defs
    }
    # slug/name are optional only for UPDATE payloads.
    entity_fields = {
        "slug": (Optional[str] if variant is ModelVariant.UPDATE else str,
                 Field(description='Slug of this entity')),
        "name": (Optional[str] if variant is ModelVariant.UPDATE else str,
                 Field(description='Name of this entity'))
    }
    if variant is ModelVariant.GET:
        entity_fields.update({
            'id': (int, Field(description='ID of this entity')),
            'deleted': (bool, Field(
                description='Indicates whether this entity is marked as '
                'deleted')),
        })
    if variant == ModelVariant.UPDATE:
        # NOTE(review): update() with no arguments is a no-op — looks like a
        # placeholder for UPDATE-only fields; confirm the intent.
        entity_fields.update()
    model = create_model(self.clean_modelname(schema.slug, variant),
                         **entity_fields,
                         **attr_fields,
                         __config__=Config,
                         __validators__={
                             'slug_validator':
                             validator('slug', allow_reuse=True)(validate_slug)
                         })
    self.__cache[key] = model
    return model
class TokenInfo(TokenBase):
    """Information about a token returned by the token-info endpoint.

    This is all the information about the token that's stored in the
    underlying database. It includes some fields not present in Redis.
    """

    token: str = Field(
        ...,
        title="Token key",
        example="5KVApqcVbSQWtO3VIRgOhQ",
        min_length=22,
        max_length=22,
    )
    token_name: Optional[str] = Field(
        None,
        title="User-given name of the token",
        example="laptop token",
        min_length=1,
        max_length=64,
    )
    service: Optional[str] = Field(
        None,
        title="Service",
        description=(
            "Service to which the token was delegated. Only present for"
            " internal tokens"),
        example="some-service",
        min_length=1,
        max_length=64,
    )
    last_used: Optional[datetime] = Field(
        None,
        title="Last used",
        description="When the token was last used in seconds since epoch",
        example=1614986130,
    )
    parent: Optional[str] = Field(
        None,
        title="Parent token",
        example="DGO1OnPohl0r3C7wqhzRgQ",
        min_length=22,
        max_length=22,
    )

    class Config:
        orm_mode = True
        # Serialize datetimes as integer seconds since epoch.
        json_encoders = {datetime: lambda v: int(v.timestamp())}

    # "created" and "expires" are presumably declared on TokenBase — the
    # validator normalizes all three datetime fields before validation.
    _normalize_created = validator("created", "last_used", "expires",
                                   allow_reuse=True, pre=True)(normalize_datetime)
class SingleTorsion(ResultsConfig):
    """
    A class used to mark torsions that will be driven for torsiondrive
    datasets.
    """

    torsion1: Tuple[int, int, int, int]
    scan_range1: Optional[Tuple[int, int]] = None

    # Canonicalize the atom ordering of the torsion and its scan range.
    _order_torsion1 = validator("torsion1", allow_reuse=True)(order_torsion)
    _order_scan_range1 = validator("scan_range1", allow_reuse=True)(order_scan_range)

    @property
    def central_bond(self) -> Tuple[int, int]:
        """Get the sorted index of the central bond."""
        inner_pair = self.torsion1[1:3]
        return tuple(sorted(inner_pair))

    @property
    def get_dihedrals(self) -> List[Tuple[int, int, int, int]]:
        """
        Get the formatted representation of the dihedrals to scan over.
        """
        return [self.torsion1]

    @property
    def get_scan_range(self) -> Optional[List[Tuple[int, int]]]:
        """
        Get the formatted representation of the dihedral scan ranges.
        """
        # None stays None; a concrete range is wrapped in a list.
        return [self.scan_range1] if self.scan_range1 is not None else self.scan_range1

    @property
    def get_atom_map(self) -> Dict[int, int]:
        """
        Create an atom map which will tag the correct dihedral atoms.
        """
        return {atom: index for index, atom in enumerate(self.torsion1)}
class CourierGetResponseModelNoRating(CoreModel):
    """Courier GET response variant without a rating field; includes earnings."""

    courier_id: COURIER_ID
    courier_type: CourierType
    regions: REGIONS
    working_hours: HOURS_LIST
    earnings: conint(ge=0)

    # Validate the working-hours list — see hours_validate.
    _normalize_working_hours = validator("working_hours", allow_reuse=True)(hours_validate)
def get_model(self) -> Type[BaseModel]:
    """Build a pydantic model whose single required ``__root__`` int field is
    range-checked by this instance's ``_validate_range``."""
    root_field = "__root__"
    # Attach the bound range check as the model's only validator.
    return create_model(
        self.name,
        __root__=(int, ...),
        __validators__={
            "root_validator": validator(root_field, allow_reuse=True)(self._validate_range)
        },
    )
class InterfaceIPv4Address(VendorIndependentBaseModel):
    """Vendor-independent model of an IPv4 address assigned to an interface."""

    _modelname = "interface_ipv4_address_model"

    address: ipaddress.IPv4Interface
    secondary: Optional[bool]

    # The address must be assignable to an interface — see ipv4_is_assignable.
    _validate_address = validator("address", allow_reuse=True)(ipv4_is_assignable)