class PageBase(BaseModel):
    """
    page: int      current page, defaults to 1
    pageSize: int  page size, defaults to 10
    """
    page: int = 1
    pageSize: conint(le=50) = 10  # capped at 50, defaults to 10
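# Illustrative usage sketch (an assumption, not part of the original snippet):
# requires pydantic v1 with BaseModel/conint imported as in the model above.
from pydantic import ValidationError

print(PageBase(page=3, pageSize=25).dict())   # {'page': 3, 'pageSize': 25}
try:
    PageBase(pageSize=100)                    # violates le=50
except ValidationError as exc:
    print(exc)                                # "ensure this value is less than or equal to 50"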
class TargetLevel(BaseModel):
    """
    The tier level to consider for the target.
    Level 0 has the best resolution (largest image, basis of the image pyramid).
    Maximum admissible tier level depends on image.
    """

    __root__: conint(ge=0)
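# Usage sketch for the custom root type (assumes pydantic v1 and the model above):
# parse_obj() accepts the bare integer and validates it against ge=0.
level = TargetLevel.parse_obj(2)
print(level.__root__)        # 2
# TargetLevel.parse_obj(-1)  # would raise ValidationError (value must be >= 0)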
class Page(BasePage[T], Generic[T]):
    page: conint(ge=0)  # type: ignore
    size: conint(gt=0)  # type: ignore

    @classmethod
    def create(cls, items: Sequence[T], total: int, params: AbstractParams) -> Page[T]:
        if not isinstance(params, PaginationParams):
            raise ValueError("Page should be used with PaginationParams")
        return cls(
            total=total,
            items=items,
            page=params.page,
            size=params.size,
        )
class TargetZoom(BaseModel):
    """
    The zoom level to consider for the target.
    Zoom 0 has the worst resolution (smallest image, top of the image pyramid).
    Maximum admissible zoom level depends on image.
    """

    __root__: conint(ge=0)
class AnnotationBgTransparency(BaseModel):
    """
    The background transparency. 100 means transparent background.
    When transparency is used, the target content type must be an image format
    supporting transparency.
    """

    __root__: conint(ge=0, le=100) = 0
async def new(
    cls,
    text: str,
    *,
    size: conint(  # type:ignore
        strict=True,
        gt=Config["qrcode"]["min-size"].as_number(),  # noqa:F821
        lt=Config["qrcode"]["max-size"].as_number(),  # noqa:F821
    ) = 200,
    logo: Optional[HostUrl] = None,
    level: QRCodeLevel = QRCodeLevel.M,
    bgcolor: Color = Color("FFFFFF"),
    fgcolor: Color = Color("000000"),
):
    icon_stream = None
    if logo is not None:
        async with BaseNetClient() as client:
            response = await client.get(
                logo, headers={"user-agent": "HibiAPI@GitHub"}, timeout=6
            )
            response.raise_for_status()
            icon_stream = BytesIO(response.content)
    return cls(
        data=text,
        logo=logo,
        level=level,
        size=size,
        path=await cls._generate(
            text,
            size=size,
            level=level,
            icon_stream=icon_stream,
            bgcolor=bgcolor.as_hex(),
            fgcolor=fgcolor.as_hex(),
        ),
    )
def query_all(db: Session, *, page: int = 1, page_size: conint(le=50) = 10) -> dict:
    """
    Query the category list.
    :param db: database session
    :param page: current page number
    :param page_size: number of rows per page
    :return: dict with the paginated items and the total count
    """
    temp_page = (page - 1) * page_size
    # total count
    total = db.query(func.count(MallCategory.id)).filter(
        MallCategory.is_delete == 0,
        MallCategory.parent_id.is_(None)).scalar()
    # result set
    query_obj = db.query(MallCategory).filter(
        MallCategory.is_delete == 0,
        MallCategory.parent_id.is_(None)).offset(temp_page).limit(page_size).all()
    items = [{
        "id": obj.id,
        "create_time": obj.create_time.strftime('%Y-%m-%d %H:%M:%S'),
        "name": obj.name,
        "front_desc": obj.front_desc,
        "sort_order": obj.sort_order,
        "icon_url": obj.icon_url,
        "enabled": obj.enabled
    } for obj in query_obj]
    return {"items": items, "total": total}
class LoggingConfig(pydantic.BaseModel):
    version: pydantic.conint(ge=1, le=1) = 1
    disable_existing_loggers: bool = True
    incremental: bool = False
    formatters: Dict[str, Dict[str, str]] = {
        "access": {
            "()": "uvicorn.logging.AccessFormatter",
            "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
        }
    }
    loggers: Dict[str, dict] = {"uvicorn.access": {"handlers": ["access"]}}
    handlers: Dict[str, Dict[str, str]] = {
        "default": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
        "access": {
            "formatter": "access",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
    }
    root: dict = {"level": "DEBUG", "handlers": ["default"]}
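# Plausible way to apply the validated config (an assumption, not taken from the
# original project): hand the model's dict to the stdlib dictConfig. Requires
# uvicorn, since the "access" formatter factory is uvicorn.logging.AccessFormatter.
import logging.config

logging.config.dictConfig(LoggingConfig().dict())
access_logger = logging.getLogger("uvicorn.access")  # now routed to the "access" handler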
async def get_transactions(
    db: DB = Depends(db),
    user: Account = Depends(get_current_user),
    dt_from: Optional[datetime] = None,
    dt_to: Optional[datetime] = None,
    limit: Optional[conint(le=1000)] = 100,
    order: Optional[Ordering] = Ordering.desc,
) -> list[Transaction]:
    """
    Get transaction history of the authenticated account
    """
    if dt_from is None and dt_to is None:
        dt_to = datetime.utcnow().replace(microsecond=0)
        dt_from = dt_to.replace(hour=0, minute=0, second=0)

    error = None
    if dt_from is not None and dt_to is not None and dt_from > dt_to:
        error = "dt_from must be earlier than dt_to"
    elif order is Ordering.desc and dt_to is None:
        error = "Specify dt_to with descending order"
    elif order is Ordering.asc and dt_from is None:
        error = "Specify dt_from with ascending order"
    if error:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail={"msg": error},
        )

    return await db.get_transactions(user.id, dt_from, dt_to, limit, order)
class FilterByMoleFractionSchema(CurationComponentSchema):
    type: Literal["FilterByMoleFraction"] = "FilterByMoleFraction"

    mole_fraction_ranges: Dict[conint(gt=1), List[List[MoleFractionRange]]] = Field(
        ...,
        description="The ranges of mole fractions to retain. Each key in the "
        "dictionary corresponds to a number of components in the system. Each value "
        "is a list of the allowed mole fraction ranges for all but one of the "
        "components, i.e for a binary system, the allowed mole fraction for only the "
        "first component must be specified.",
    )

    @validator("mole_fraction_ranges")
    def _validate_ranges(cls, value: Dict[int, List[List[MoleFractionRange]]]):
        for n_components, ranges in value.items():
            assert len(ranges) == n_components - 1
            assert all(
                mole_fraction_range[0] < mole_fraction_range[1]
                for component_ranges in ranges
                for mole_fraction_range in component_ranges
            )
        return value
class ConvertExcessDensityDataSchema(CurationComponentSchema):
    type: Literal["ConvertExcessDensityDataSchema"] = "ConvertExcessDensityDataSchema"

    temperature_precision: conint(ge=0) = Field(
        2,
        description="The number of decimal places to compare temperatures (K) to "
        "within when attempting to identify compatible pure and binary data.",
    )
    pressure_precision: conint(ge=0) = Field(
        1,
        description="The number of decimal places to compare pressures (kPa) to "
        "within when attempting to identify compatible pure and binary data.",
    )
class Problem(BaseModel):
    detail: Optional[str] = Field(
        None,
        description='A human readable explanation specific to this occurrence of the\n'
        'problem. You MUST NOT expose internal informations, personal\n'
        'data or implementation details through this field.\n',
        example='Request took too long to complete.',
    )
    instance: Optional[AnyUrl] = Field(
        None,
        description='An absolute URI that identifies the specific occurrence of the problem.\n'
        'It may or may not yield further information if dereferenced.\n',
    )
    status: Optional[conint(ge=100, lt=600)] = Field(
        None,
        description='The HTTP status code generated by the origin server for this occurrence\n'
        'of the problem.\n',
        example=503,
    )
    title: Optional[str] = Field(
        None,
        description='A short, summary of the problem type. Written in english and readable\n'
        'for engineers (usually not suited for non technical stakeholders and\n'
        'not localized); example: Service Unavailable\n',
    )
    type: Optional[AnyUrl] = Field(
        'about:blank',
        description='An absolute URI that identifies the problem type. When dereferenced,\n'
        'it SHOULD provide human-readable documentation for the problem type\n'
        '(e.g., using HTML).\n',
        example='https://tools.ietf.org/html/rfc7231#section-6.6.4',
    )
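# Serialization sketch (an assumption; uses the generated model above with pydantic v1).
# exclude_none keeps the RFC 7807 payload minimal:
problem = Problem(status=503, title="Service Unavailable",
                  detail="Request took too long to complete.")
print(problem.dict(exclude_none=True))
# {'detail': 'Request took too long to complete.', 'status': 503,
#  'title': 'Service Unavailable', 'type': 'about:blank'}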
class CuentaFisica(Cuenta):
    """
    Based on:
    https://stpmex.zendesk.com/hc/es/articles/360038242071-Registro-de-Cuentas-de-Personas-f%C3%ADsicas
    """

    _endpoint: ClassVar[str] = Cuenta._base_endpoint + '/fisica'
    _lote_endpoint: ClassVar[str] = Cuenta._base_endpoint + '/fisicas'

    nombre: truncated_stp_str(50)
    apellidoPaterno: truncated_stp_str(50)
    paisNacimiento: Pais
    fechaNacimiento: dt.date

    apellidoMaterno: Optional[truncated_stp_str(50)] = None
    genero: Optional[Genero] = None
    # Waiting for STP to add "Nacido en el Extranjero" (born abroad)
    entidadFederativa: Optional[EntidadFederativa] = None
    actividadEconomica: Optional[conint(ge=28, le=74)] = None
    calle: Optional[truncated_stp_str(60)] = None
    numeroExterior: Optional[truncated_stp_str(10)] = None
    numeroInterior: Optional[truncated_stp_str(5)] = None
    colonia: Optional[truncated_stp_str(50)] = None
    alcaldiaMunicipio: Optional[truncated_stp_str(50)] = None
    cp: Optional[digits(5, 5)] = None
    email: Optional[constr(max_length=150)] = None
    idIdentificacion: Optional[digits(max_length=20)] = None
    telefono: Optional[MxPhoneNumber] = None
class RMGOptions(BaseModel):
    """
    A class for validating input.RMG.options arguments
    """
    seed_name: str = 'Seed'
    save_edge: bool = True
    save_html: bool = False
    generate_seed_each_iteration: bool = True
    save_seed_to_database: bool = False
    units: str = 'si'
    generate_plots: bool = False
    save_simulation_profiles: bool = False
    verbose_comments: bool = False
    keep_irreversible: bool = False
    trimolecular_product_reversible: bool = True
    save_seed_modulus: conint(ge=-1) = -1

    class Config:
        extra = "forbid"

    @validator('units', always=True)
    def check_units(cls, value):
        """RMGOptions.units validator"""
        # compare case-insensitively; value.lower() is never None
        if value is not None and value.lower() != 'si':
            raise ValueError(
                f'Currently RMG only supports SI units, got "{value}"')
        return value.lower()
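# Quick check of the units validator (an assumption for illustration; uses the class
# above, and pydantic wraps the ValueError in a ValidationError):
from pydantic import ValidationError

print(RMGOptions(units='SI').units)   # 'si' -- accepted and lower-cased
try:
    RMGOptions(units='cgs')
except ValidationError as exc:
    print(exc)                        # Currently RMG only supports SI units, got "cgs"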
class InterfaceLagMemberConfig(VendorIndependentBaseModel):

    _modelname = "interface_lag_member_config"

    group: conint(ge=1)
    protocol: Optional[Literal["lacp", "pagp"]]
    mode: LAG_MODE
class Person(BaseModel):
    first_name: str = Field(..., description="The person's first name.")
    last_name: str = Field(..., description="The person's last name.")
    age: Optional[conint(ge=0)] = Field(None, description='Age in years.')
    pets: Optional[List[Pet]] = None
    comment: Optional[Any] = None
    drink: Optional[List[Union[Coffee, Tea]]] = None
class FaceOneSideModel(BaseConfig, DummyDataModel):
    """Data model holding quantities on one side of cell faces normal to one direction."""
    nx: conint(gt=0)
    ny: conint(gt=0)
    dtype: DummyDtype
    w: nplike.ndarray
    hu: nplike.ndarray
    hv: nplike.ndarray
    h: nplike.ndarray
    u: nplike.ndarray
    v: nplike.ndarray
    a: nplike.ndarray
    flux: WHUHVModel

    # validators
    _val_arrays = validator(
        "w", "hu", "hv", "h", "u", "v", "a", "flux", allow_reuse=True
    )(_pydantic_val_arrays)
    _val_valid_numbers = validator(
        "w", "hu", "hv", "h", "u", "v", "a", allow_reuse=True
    )(_pydantic_val_nan_inf)

    def __init__(self, nx, ny, dtype):
        dtype = DummyDtype.validator(dtype)
        super().__init__(  # trigger pydantic validation
            nx=nx, ny=ny, dtype=dtype,
            w=nplike.zeros((ny, nx), dtype=dtype),
            hu=nplike.zeros((ny, nx), dtype=dtype),
            hv=nplike.zeros((ny, nx), dtype=dtype),
            h=nplike.zeros((ny, nx), dtype=dtype),
            u=nplike.zeros((ny, nx), dtype=dtype),
            v=nplike.zeros((ny, nx), dtype=dtype),
            a=nplike.zeros((ny, nx), dtype=dtype),
            flux=WHUHVModel(nx, ny, dtype))
async def get_posts(
    skip: conint(gt=-1) = 0,
    limit: conint(le=100) = 20,
    _=Depends(get_user_by_apikey),
):
    cursor = (
        database.posts.aggregate(
            [
                {"$match": {"closed_date": {"$eq": None}}},
                {"$skip": skip},
                {"$limit": limit},
                {
                    "$lookup": {
                        "from": "votes",
                        "let": {"id": "$_id"},
                        "pipeline": [
                            {"$match": {"$expr": {"$eq": ["$$id", "$post_id"]}}},
                            {"$group": {"_id": "$vote_type", "count": {"$sum": 1.0}}},
                            {"$project": {"_id": 0.0, "k": "$_id", "v": "$count"}},
                            {
                                "$group": {
                                    "_id": None,
                                    "data": {"$push": {"k": "$k", "v": "$v"}},
                                }
                            },
                            {"$replaceRoot": {"newRoot": {"$arrayToObject": "$data"}}},
                        ],
                        "as": "votes",
                    }
                },
                {
                    "$project": {
                        "title": "$title",
                        "body": "$body",
                        "date": "$date",
                        "username": "******",
                        "closed_date": "$closed_date",
                        "updated_at": "$updated_at",
                        "votes": {"$arrayElemAt": ["$votes", 0]},
                    }
                },
            ]
        )
        # database.posts.find()
        # .sort("date", -1)
        # .skip(skip)
        # .limit(limit)
    )
    return [doc async for doc in cursor]
class PostgresSettings(BaseSettings):
    # entrypoint
    host: str
    port: PortInt = 5432

    # auth
    user: str
    password: SecretStr

    # database
    db: str

    # pool connection limits
    minsize: conint(ge=1) = 1
    maxsize: conint(ge=1) = 50

    dsn: Optional[PostgresDsn] = Field(None, description="Database Source Name")

    @validator("maxsize")
    @classmethod
    def check_size(cls, v, values):
        if not (values["minsize"] <= v):
            raise ValueError(f"assert minsize={values['minsize']} <= maxsize={v}")
        return v

    @validator("dsn", pre=True, always=True)
    @classmethod
    def autofill_dsn(cls, v, values):
        if not v and all(
            key in values for key in ["user", "password", "host", "port", "db"]
        ):
            return PostgresDsn.build(
                scheme="postgresql",
                user=values["user"],
                password=values["password"].get_secret_value(),
                host=values["host"],
                port=f"{values['port']}",
                path=f"/{values['db']}",
            )
        return v

    class Config:
        case_sensitive = False
        env_prefix = "POSTGRES_"
        extra = Extra.forbid
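# Hedged sketch of how env_prefix and the dsn autofill interact (assumes pydantic v1
# BaseSettings; the environment values below are purely illustrative):
import os

os.environ.update({
    "POSTGRES_HOST": "localhost",
    "POSTGRES_USER": "scu",
    "POSTGRES_PASSWORD": "secret",
    "POSTGRES_DB": "simcoredb",
})
settings = PostgresSettings()
print(settings.dsn)   # postgresql://scu:secret@localhost:5432/simcoredb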
class UInt32DoocsChannel(DoocsChannel):
    value: conint(strict=True, ge=0, le=np.iinfo(np.uint32).max) = 0

    class Config:
        @staticmethod
        def schema_extra(schema, model):
            schema['properties']['value']['type'] = '<u4'
class Biome(BaseModel):
    id: conint(ge=0) = Field(..., description="The unique identifier for a biome")
    name: str = Field(..., description="The name of a biome")
    category: str = Field(..., description="The category of a biome")
    temperature: confloat(ge=-1.0, le=2.0) = Field(
        ..., description="An indicator for the temperature in a biome")
    precipitation: str = Field(
        ..., description="The type of precipitation: none, rain or snow")
    depth: float = Field(..., description="The depth of a biome")
    dimension: str = Field(
        ..., description="The dimension of a biome: overworld, nether or end")
    displayName: constr(regex=r"\S+") = Field(
        ..., description="The display name of a biome")
    color: conint(ge=0) = Field(..., description="The color in a biome")
    rainfall: confloat(ge=0.0, le=1.0) = Field(
        ..., description="How much rain there is in a biome")
class Effect(BaseModel):
    id: conint(ge=0) = Field(..., description="The unique identifier for an effect")
    displayName: str = Field(..., description="The display name of an effect")
    name: constr(regex=r"\S+") = Field(..., description="The name of an effect")
    type: EffectType = Field(
        ..., description="Whether an effect is positive or negative")
class DnsRequestCreateForm(BaseModel):
    name: constr(min_length=4, max_length=2048)
    source_address: constr(min_length=7, max_length=15)
    source_port: conint(ge=0, le=65535)
    type: constr(min_length=1, max_length=32)
    protocol: constr(min_length=3, max_length=5)
    dns_server_name: constr(regex="^[a-zA-Z0-9-_]+$", min_length=4, max_length=254)
    raw_request: constr(min_length=1, max_length=16384)
class Meeting(BaseModel):
    days: Optional[conlist(conint(ge=0, le=8), max_items=10)]
    start: Optional[constr(max_length=10)]
    end: Optional[constr(max_length=10)]
    f_time: constr(max_length=100)
    bldg: constr(max_length=100)  # Can be 'TBA'
    rm: Optional[constr(max_length=100)]
    rm_l: Optional[constr(max_length=200)]
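# Small sketch of the conlist constraint (an assumption; uses the model above with
# pydantic v1): each day must be an int in [0, 8] and at most 10 entries are allowed.
from pydantic import ValidationError

Meeting(days=[1, 3, 5], f_time="MWF 9:00-9:50", bldg="TBA")   # ok
try:
    Meeting(days=[9], f_time="TBA", bldg="TBA")               # 9 violates le=8
except ValidationError as exc:
    print(exc)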
class CharacterSettings(SettingsBaseModel):
    # cosmetic
    color: Optional[conint(ge=0, le=0xffffff)] = None
    embed_image: bool = True
    # gameplay
    crit_on: conint(ge=1, le=20) = 20
    extra_crit_dice: int = 0
    ignore_crit: bool = False
    reroll: Optional[conint(ge=1, le=20)] = None
    talent: bool = False
    srslots: bool = False
    # character sync
    sync_outbound: bool = True  # avrae to upstream
    sync_inbound: bool = True  # upstream to avrae

    @classmethod
    def from_old_csettings(cls, d):
        """Returns a new CharacterSettings instance with all default options, updated by legacy csettings options."""
        # for each key, get it from old or fall back to class default
        old_settings = d.get('options', {})
        return cls(color=old_settings.get('color', None),
                   embed_image=old_settings.get('embedimage', True),  # .get() with a default preserves a stored False
                   crit_on=old_settings.get('criton') or 20,
                   extra_crit_dice=old_settings.get('critdice') or 0,
                   ignore_crit=old_settings.get('ignorecrit') or False,
                   reroll=old_settings.get('reroll', None),
                   talent=old_settings.get('talent') or False,
                   srslots=old_settings.get('srslots') or False)

    async def commit(self, mdb, character):
        """Commits the settings to the database for a given character."""
        await mdb.characters.update_one(
            {"owner": character.owner, "upstream": character.upstream},
            {
                "$set": {"options_v2": self.dict()},
                "$unset": {"options": True}  # delete any old options - they should have been converted by now
            }
        )
class Entity(BaseModel):
    id: conint(ge=0) = Field(..., description="The unique identifier for an entity")
    internalId: Optional[conint(ge=0)] = Field(
        None,
        description="The internal id of an entity : used in eggs metadata for example",
    )
    displayName: str = Field(..., description="The display name of an entity")
    name: constr(regex=r"\S+") = Field(..., description="The name of an entity")
    type: str = Field(..., description="The type of an entity")
    width: Optional[float] = Field(..., description="The width of the entity")
    height: Optional[float] = Field(..., description="The height of the entity")
    category: Optional[str] = Field(
        None, description="The category of an entity : a semantic category")
class RemoteClientSettings(BaseSettings):
    token: str
    threads_num: conint(ge=1) = 2  # type: ignore
    read_timeout: int = 5
    poll_timeout: int = 60

    class Config:
        env_prefix = 'REMOTE_'
class ColumnType(pydantic.BaseModel):
    """ColumnType."""

    name: str
    max_bytesize: Optional[pydantic.conint(ge=1)]
    nullable: bool = True
    default: Optional[str] = None
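# Usage sketch (an assumption; pydantic v1): max_bytesize is optional but,
# when given, must be at least 1.
ColumnType(name="title", max_bytesize=255)   # ok
ColumnType(name="note")                      # max_bytesize defaults to None
# ColumnType(name="flag", max_bytesize=0)    # would raise ValidationError (ge=1)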
def query_collection(
    query: Query,
    limit: Optional[conint(ge=1, le=100)] = 20,
    offset: Optional[conint(ge=0)] = 0,
    user: Optional[Auth0User] = Security(guest_auth.get_user),
):
    mongo_items = (
        mongo.db[collection_name]
        .find(query.dict(exclude_none=True))
        .skip(offset)
        .limit(limit)
    )
    return [
        MainModel(**censor_item(mongo_item, user)) for mongo_item in mongo_items
    ]
class SettingsSchema(BaseModel):
    suggested_bpm: Optional[conint(gt=60, lt=300)] = None
    drift: Optional[conint(ge=0, lt=200)] = 15

    @property
    def max_bpm(self) -> int:
        if self.suggested_bpm is not None:
            return self.suggested_bpm + self.drift
        else:
            return 300

    @property
    def min_bpm(self) -> int:
        if self.suggested_bpm is not None:
            return self.suggested_bpm - self.drift
        else:
            return 60
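# The derived bounds in action (an assumption for illustration; uses the schema above):
s = SettingsSchema(suggested_bpm=120)   # drift defaults to 15
print(s.min_bpm, s.max_bpm)             # 105 135
print(SettingsSchema().min_bpm, SettingsSchema().max_bpm)   # 60 300 (hard limits)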
"""**keepluggable** contains reusable code that stores files and images.""" from pydantic import BaseModel, conint, constr AtLeastOneChar: constr = constr(min_length=1, strip_whitespace=True) ZeroOrMore: conint = conint(gt=-1) class Pydantic(BaseModel): """Base class for our validation models.""" class Config: """Controls the behaviour of pydantic.""" anystr_strip_whitespace = True min_anystr_length = 1