class NorthPoleCredential(BaseModel):
    """Credential record with per-field validation (passport-style data)."""

    byr: conint(ge=1920, le=2002)  # birth year
    iyr: conint(ge=2010, le=2020)  # issue year
    eyr: conint(ge=2020, le=2030)  # expiration year
    # Bug fix: pydantic v1 constr(regex=...) uses re.match(), which only
    # anchors at the start — "150cmXX" used to pass the pattern and then slip
    # through the validator (endswith matched neither unit). Anchor both ends.
    hgt: constr(regex=r'^[0-9]+(cm|in)$')
    hcl: constr(regex=r'^#([0-9]|[a-f]){6}$')
    ecl: Literal['amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth']
    pid: constr(regex=r'^[0-9]{9}$')
    cid: Optional[str]

    @validator('hgt')
    def height_validation(cls, v: str):
        """Check the height value is within the allowed range for its unit.

        Raises:
            ValueError: if the numeric part is outside the per-unit bounds.
        """
        # Per-unit bounds: suffix -> (lower_bound, upper_bound).
        bounds = {'cm': (150, 193), 'in': (59, 76)}
        for unit, (lower_bound, upper_bound) in bounds.items():
            if v.endswith(unit):
                height = int(v[:-2])
                if not lower_bound <= height <= upper_bound:
                    # Bug fix: the original message printed "cm" even for
                    # inch values; report the actual unit.
                    raise ValueError(
                        f'Height of {height}{unit} has to be >= {lower_bound}{unit}'
                        f' and <= {upper_bound}{unit}.'
                    )
        return v
class ExchangeRequest(BaseModel):
    """ Validates the data of an exchange request. """

    # Amount to exchange; must be strictly positive.
    amount: PositiveFloat
    # Currency codes; both constrained by the module-level CURRENCY_REGEX.
    origin_currency: constr(regex=CURRENCY_REGEX)
    target_currency: constr(regex=CURRENCY_REGEX)
class FormFieldSchemaUpdate(RWModel):
    """Partial-update payload for a form-field schema; every field is optional."""

    # NOTE(review): min_length=0 is a no-op constraint — presumably meant to
    # permit empty identifiers; confirm intent.
    identifier: Optional[constr(strip_whitespace=True, min_length=0)] = ""
    # NOTE(review): default "" conflicts with min_length=1; pydantic does not
    # validate defaults unless configured to, so this only bites on explicit "".
    title: Optional[constr(strip_whitespace=True, min_length=1)] = ""
    subtitle: Optional[constr(strip_whitespace=True)] = ""
    placeholder: Optional[str]
    published: Optional[bool]
    keywords: Optional[List[str]]
    # Nested field properties; see FormFieldScheProperties.
    properties: Optional[List[FormFieldScheProperties]]
def indicators(
        indicator_type: constr(
            regex=r"^(ipv4|ipv6|uri|email|fqdn|md5|sha1|sha256)$"),
        last: constr(regex=r'^\d+[yMwdhms]?$') = "90d",
        args: argparse.Namespace = Depends(parse_args)) -> PlainTextResponse:
    """ Download indicators

    Allowed indicator types:

    * ipv4
    * ipv6
    * uri
    * email
    * fqdn
    * md5
    * sha1
    * sha256

    You can also specify the maximum age of the document you want
    to get indicators from with the `last` argument (default=90d).

    The format should be either be <NUM><TIME UNIT>, where TIME UNIT can be one of:

        y (year)
        M (month)
        w (week)
        d (day)
        h (hour)
        m (minute)
        s (second)

    OR <EPOC> (only digits) where the EPOC is a unix timestamp in milliseconds
    """

    if not args.elasticsearch_client:
        raise HTTPException(status_code=412, detail="Elasticsearch is not configured")

    # A purely numeric `last` is an epoch timestamp used verbatim; anything
    # else is a relative age expression for Elasticsearch date math.
    start = last if re.search(r"^\d+$", last) else f"now-{last}"

    term = f"indicators.{indicator_type}.keyword"

    rows = act.scio.es.aggregation(
        args.elasticsearch_client,
        terms=[term],
        start=start,
        end="now",
    )

    # One indicator value per line.
    return PlainTextResponse("\n".join(entry[0].get(term) for entry in rows))
class UserBase(CamelModel):
    """Base user schema shared by the user read/write models."""

    username: Optional[str]
    full_name: Optional[str] = None
    # Normalised e-mail: lower-cased and stripped of surrounding whitespace.
    email: constr(to_lower=True, strip_whitespace=True)
    admin: bool
    group: Optional[str]
    favorite_recipes: Optional[list[str]] = []

    class Config:
        orm_mode = True

        @classmethod
        def getter_dict(cls, name_orm: User):
            # Flatten the ORM relationship: expose the group by its name
            # instead of the related ORM object.
            return {
                **GetterDict(name_orm),
                "group": name_orm.group.name,
            }

        schema_extra = {
            "username": "******",
            "fullName": "Change Me",
            "email": "*****@*****.**",
            "group": settings.DEFAULT_GROUP,
            "admin": "false",
        }
class FieldModel(BaseModel):
    """A single named, typed field belonging to a schema definition."""

    uuid: UUID = Field(
        default_factory=uuid4,
        description="Unique identity for the field. Automatically generated.")
    name: constr(strip_whitespace=True, to_lower=True) = Field(
        ...,
        description=
        "Machine-readable term to uniquely address this field. Cannot have spaces. CamelCase or snake_case.",
    )
    title: Optional[str] = Field(
        None, description="A human-readable version of the field name.")
    description: Optional[str] = Field(
        None, description="A description for the field.")
    type_field: FieldType = Field(
        ...,
        alias="type",
        description="A field must contain values of a specific type.")
    constraints: Optional[ConstraintsModel] = Field(
        None, description="A set of optional constraints to define the field.")
    missing: Optional[Any] = Field(
        default=None, description="Default to be used for missing values.")
    foreignKey: Optional[bool] = Field(
        default=False,
        description=
        "Set `foreignKey` `true` if the field is to be treated as an immutable value."
    )

    class Config:
        use_enum_values = True
        anystr_strip_whitespace = True
        validate_assignment = True

    @validator("name")
    def name_space(cls, v):
        # Replace spaces with underscores; the constr's to_lower=True already
        # lower-cases, so .lower() here is a harmless second pass.
        return "_".join(v.split(" ")).lower()
class PostBase(BaseModel):
    """
    Pydantic input model for post creation; raises validation
    errors on bad input.

    Args:
        BaseModel ([type]): pydantic base model
    """

    title: constr(max_length=20)
    author: constr(max_length=50)
    content: Text
    # NOTE(review): this default is evaluated once at import time, so every
    # instance created later shares the same stale timestamp — presumably a
    # per-instance default_factory was intended; confirm.
    published_at: datetime = pytz.utc.localize(datetime.utcnow().replace(
        second=0, microsecond=0))
    published: bool = False

    class Config:
        orm_mode = True
class User(BaseModel):
    """User account payload with basic identity and credential fields."""

    username: Optional[str] = ""
    email: constr(strip_whitespace=True, min_length=1, max_length=100)
    password: constr(min_length=8, max_length=255)
    first_name: Optional[constr(max_length=50)] = None
    last_name: Optional[constr(max_length=50)] = None
    # NOTE(review): these defaults are evaluated once at import time, so all
    # instances share the same timestamp — a default_factory was presumably
    # intended; confirm. A "now" default for `dob` also looks suspicious.
    created_at: Optional[datetime] = datetime.now()
    dob: Optional[datetime] = datetime.now()
    is_active: Optional[bool] = True
    # Unannotated field: pydantic infers the type from the Profile() default.
    profile = Profile()

    def __str__(self):
        # Represent the user by their e-mail address.
        return str(self.email)

    def get_schema(self):
        """Print this model's JSON schema to stdout and return True."""
        print(self.schema_json(indent=2))
        return True
class RedshiftDataSource(ToucanDataSource):
    """Redshift data source: either a raw SQL query or a table name."""

    database: str = Field(
        ..., description='The name of the database you want to query')
    query: constr(min_length=1) = Field(
        None,
        description='You can write a custom query against your '
        'database here. It will take precedence over '
        'the table parameter',
        widget='sql',
    )
    query_object: Dict = Field(
        None,
        description=
        'An object describing a simple select query, this field is used internally',
        **{'ui.hidden': True},
    )
    table: constr(min_length=1) = Field(
        None,
        description='The name of the data table that you want to '
        'get (equivalent to "SELECT * FROM '
        'your_table")',
    )
    language: str = Field('sql', **{'ui.hidden': True})

    def __init__(self, **data):
        """Derive `query` when absent: fall back to TABLE_QUERY, or build a
        SELECT * over `table` when only a table name is supplied."""
        super().__init__(**data)
        query = data.get('query')
        table = data.get('table')
        if query is None and table is None:
            self.query = TABLE_QUERY
        elif query is None and table is not None:
            self.query = f'select * from {table};'

    @classmethod
    def get_form(cls, connector: 'RedshiftConnector', current_config):
        """Build a dynamic form schema, narrowing `table` to the tables
        available in the currently selected database."""
        constraints = {}
        # Best effort: any failure while probing the database simply leaves
        # the form without a table enum.
        with suppress(Exception):
            if 'database' in current_config:
                ds = RedshiftDataSource(domain='Redshift',
                                        name='redshift',
                                        database=current_config['database'])
                available_tables = connector._retrieve_tables(
                    database=ds.database)
                constraints['table'] = strlist_to_enum('table',
                                                       available_tables, None)
        return create_model('FormSchema', **constraints, __base__=cls).schema()
class RuleRequest(CommonModel):
    """Inbound payload for creating/updating a rule."""

    # Human-readable rule name, 2-100 chars after whitespace stripping.
    name: constr(strip_whitespace=True, min_length=2, max_length=100)
    target_device_id: Optional[int]
    message_field: str
    action_type: ActionType
    action_arg: str
    operator: RuleOperator
    # Operator operands; which are required presumably depends on `operator`.
    operator_arg_1: Optional[float]
    operator_arg_2: Optional[float]
class RuleResponse(CommonModel):
    """Outbound representation of a stored rule; mirrors RuleRequest plus
    server-assigned id, resolved device, and creator."""

    id: int
    name: constr(strip_whitespace=True, min_length=2, max_length=100)
    # Resolved device object rather than the raw id used on input.
    target_device: Optional[DeviceResponse]
    creator: User
    message_field: str
    action_type: ActionType
    action_arg: str
    operator: RuleOperator
    operator_arg_1: Optional[float]
    operator_arg_2: Optional[float]
class Blog(BaseModel):
    """Blog post payload with SEO metadata and publication state."""

    title: constr(strip_whitespace=True, min_length=1, max_length=100)
    # URL slug used as the canonical identifier (see __str__).
    slug: constr(strip_whitespace=True, min_length=1, max_length=150)
    meta_keywords: Optional[List] = []
    meta_description: constr(min_length=8, max_length=255)
    description: Optional[str] = None
    status: Optional[bool] = True
    tags: Optional[List] = []
    author: Optional[str] = ''
    # NOTE(review): evaluated once at import time — all instances share the
    # same timestamp; a default_factory was presumably intended; confirm.
    created_at: Optional[datetime] = datetime.now()
    updated_at: Optional[datetime] = datetime.now()

    def __str__(self):
        # Represent the post by its slug.
        return str(self.slug)

    def get_schema(self):
        """Print this model's JSON schema to stdout and return True."""
        print(self.schema_json(indent=2))
        return True
def download(id: constr(regex=r"^[0-9A-Fa-f]{64}$"),
             args: argparse.Namespace = Depends(parse_args)) -> Response:
    """ Download document as original content

    Looks the SHA-256 id up in Elasticsearch and streams the stored file
    back with its original content type. Returns 404 if the file is gone.
    """

    res = document_lookup(id, args.elasticsearch_client)

    if not os.path.isfile(res.filename):
        # Bug fix: previously replied 200 with the non-standard MIME type
        # "application/text"; a missing file is a 404 and plain text.
        return Response(content="File not found",
                        status_code=404,
                        media_type="text/plain")

    return FileResponse(res.filename,
                        filename=os.path.basename(res.filename),
                        media_type=res.content_type)
class Card(BaseModel):
    """A payment card with brand detection and expiry checking."""

    # Cardholder name; non-empty after whitespace stripping.
    name: constr(strip_whitespace=True, min_length=1)
    number: PaymentCardNumber
    # Expiry instant; compared against datetime.today() (naive) in `expired`.
    exp: datetime

    @property
    def brand(self) -> PaymentCardBrand:
        """Card network, derived from the card number."""
        return self.number.brand

    @property
    def expired(self) -> bool:
        """True once the expiry instant is in the past."""
        return self.exp < datetime.today()
class Note(SketchBase):
    """ A note is a piece of text anchored to a specific page. """

    # Key identifying what the note refers to, e.g. "1/0".
    keycode: constr(min_length=1) = ...
    # Note body, capped at 255 characters.
    text: constr(min_length=1, max_length=255) = ...
    page_number: conint(ge=1) = 1
    # Anchor position on the page.
    note_position: PageCoordinate = ...

    class Config:
        schema_extra = {
            "examples": [
                {
                    "keyCode": "1/0",
                    "text": "Main Condo Strip",
                    "notePosition": "49,44",
                    "pageNumber": 1,
                },
            ]
        }
class SavePredefinedTimerValidator(BaseModel):
    """Validates payloads for saving a predefined timer."""

    name: constr(min_length=3)
    # Timer length expression; parsed by parse_timer_lengths (see validator).
    length: str
    id: int = None
    sound_file: str = None
    group_name: str = None

    @validator('length')
    def validate_length(cls, value: str):
        """Ensure `length` parses as a valid timer length.

        Raises:
            InvalidTimedeltaError: if the expression cannot be parsed.
        """
        try:
            parse_timer_lengths(value)
        except TimeLengthParseError as e:
            # Fix: chain the original parse error so the traceback keeps
            # the underlying cause instead of an "another exception occurred"
            # context message.
            raise InvalidTimedeltaError(e) from e  # TODO: test this
        return value
def test_schema_dict_constr():
    """A Dict keyed by a regex-constrained str must expose patternProperties."""
    identifier_pattern = r'^([a-zA-Z_][a-zA-Z0-9_]*)$'
    KeyType = constr(regex=identifier_pattern)

    class Foo(BaseModel):
        a: Dict[KeyType, str] = {}

    expected = {
        'title': 'Foo',
        'type': 'object',
        'properties': {
            'a': {
                'type': 'object',
                'title': 'A',
                'default': {},
                'patternProperties': {identifier_pattern: {'type': 'string'}},
            }
        },
    }
    assert Foo.schema() == expected
class SchemaModel(BaseModel):
    """A named schema: metadata plus an ordered list of typed fields."""

    uuid: UUID = Field(
        default_factory=uuid4,
        description="Automatically generated unique identity for the schema.")
    name: constr(strip_whitespace=True, to_lower=True) = Field(
        ...,
        description=
        "Machine-readable term to uniquely address this schema. Cannot have spaces. CamelCase or snake_case.",
    )
    title: Optional[str] = Field(
        None, description="A human-readable version of the schema name.")
    description: Optional[str] = Field(
        None,
        description=
        "A complete description of the schema. Depending on how complex your work becomes, try and be as helpful as possible to 'future-you'. You'll thank yourself later.",
    )
    fields: List[FieldModel] = Field(
        default=[],
        description=
        "A list of fields which define the schema. Fields, similarly, contain `name`, `title` and `description`, as well as `type` as compulsory.",
    )
    version: List[VersionModel] = Field(
        default=[], description="Version and update history for the schema.")

    class Config:
        anystr_strip_whitespace = True
        validate_assignment = True

    @validator("name")
    def name_space(cls, v):
        """Reject names containing spaces; normalise to lower snake-ish form.

        Raises:
            ValueError: if the name contains spaces.
        """
        if v.lower() != "_".join(v.split(" ")).lower():
            raise ValueError(
                f"Schema name ({v}) cannot have spaces and must be CamelCase or snake_case."
            )
        return "_".join(v.split(" ")).lower()

    @validator("fields")
    def are_fields_unique(cls, v):
        """Require every field name to be unique within the schema.

        Raises:
            ValueError: if any field name occurs more than once.
        """
        # Idiom: comprehension instead of a manual append loop.
        field_names = [f.name for f in v]
        if len(field_names) != len(set(field_names)):
            raise ValueError(
                f"Field names must be unique. There are {len(field_names) - len(set(field_names))} duplications."
            )
        return v
def download_json(id: constr(regex=r"^[0-9A-Fa-f]{64}$"),
                  args: argparse.Namespace = Depends(parse_args)) -> Union[Response, Dict]:
    """ Download document base64 decoded in json struct

    Returns a dict with `error`, `bytes`, and (on success) the base64
    encoded `content`; `error` is set when the stored file is missing.
    """

    res = document_lookup(id, args.elasticsearch_client)

    if not os.path.isfile(res.filename):
        return {
            "error": "File not found",
            "bytes": 0,
        }

    # Bug fix: the original leaked the file handle (open().read() with no
    # close); a context manager guarantees it is closed.
    with open(res.filename, "rb") as f:
        content = f.read()

    return {
        "error": None,
        "bytes": len(content),
        # NOTE(review): b64encode returns bytes; the JSON layer presumably
        # decodes it — confirm downstream handling.
        "content": base64.b64encode(content),
        "encoding": "base64"
    }
class CreateArticle(BaseModel):
    """Inbound payload for creating an article."""

    title: constr(strip_whitespace=True, min_length=1, max_length=100)
    meta_keywords: Optional[List] = []
    # SEO description, 8-255 characters.
    meta_description: constr(min_length=8, max_length=255)
    description: Optional[str] = None
    tags: Optional[List] = []
# Cleanup database connections when FastAPI shutsdown @app.on_event("shutdown") def on_shutdown(): postgres_pool.cleanup() # Allow requests from all origins app.add_middleware( CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"], ) CRN = constr(regex="^[0-9]{5}$") """A constrained string that must be a 5 digit number. All CRNs conform to this (I think).""" @app.get("/semesters", tags=["semesters"], response_model=List[Semester], summary="Fetch supported semesters", response_description="Semesters which have their schedules loaded into the API.") async def get_semesters(conn: RealDictConnection = Depends(postgres_pool.get_conn)): return fetch_semesters(conn) @app.get("/{semester_id}/sections", tags=["sections"], response_model=List[CourseSection], summary="Get sections from CRNs", response_description="List of found course sections. Excludes CRNs not found.") async def get_sections( semester_id: str = Path( None, example="202101", description="The id of the semester, determined by the Registrar.", ),
class SnowflakeDataSource(ToucanDataSource):
    """Snowflake data source configuration: target database/warehouse and SQL."""

    # NOTE(review): annotated as plain `str` with a None default — pydantic v1
    # silently widens these to Optional[str]; consider making that explicit.
    database: str = None
    warehouse: str = None
    # Custom SQL query; must be non-empty.
    query: constr(min_length=1)
class TokenData(BaseModel):
    """Claims decoded from an auth token."""

    # Normalised username: lower-cased and whitespace-stripped.
    username: Optional[constr(to_lower=True, strip_whitespace=True)] = None
def test_str_basic_types(field_type, expected_schema):
    # Parametrised: the top-level schema keys vary per string type.
    class Model(BaseModel):
        a: field_type

    base_schema = {'title': 'Model', 'type': 'object'}
    base_schema.update(expected_schema)

    assert Model.schema() == base_schema


@pytest.mark.parametrize(
    'field_type,expected_schema',
    [
        (StrictStr, {'title': 'A', 'type': 'string'}),
        (ConstrainedStr, {'title': 'A', 'type': 'string'}),
        (
            constr(min_length=3, max_length=5, regex='^text$'),
            {'title': 'A', 'type': 'string', 'minLength': 3, 'maxLength': 5, 'pattern': '^text$'},
        ),
    ],
)
def test_str_constrained_types(field_type, expected_schema):
    # Each constrained str type must emit its length/pattern constraints
    # into the property schema for field `a`.
    class Model(BaseModel):
        a: field_type

    base_schema = {'title': 'Model', 'type': 'object', 'properties': {'a': {}}, 'required': ['a']}
    base_schema['properties']['a'] = expected_schema

    assert Model.schema() == base_schema


@pytest.mark.parametrize(
class EndLoanRequest(BaseModel):
    """ Validates the data of an end loan request. """

    # NOTE(review): PositiveFloat is an odd type for an identifier —
    # presumably loan ids are integral; confirm.
    loan_id: PositiveFloat
    # Currency code constrained by the module-level CURRENCY_REGEX.
    target_currency: constr(regex=CURRENCY_REGEX)
class Config:
    extra = Extra.forbid
    schema_extra = {
        "examples": [
            {
                "url": "s3://some_file_url",
            },
            {
                "url": "s3://some_file_url",
                "file_mapping": "some_file_name.txt"
            },
        ]
    }


# A port key must match the project-wide property-key pattern.
PortKey = constr(regex=PROPERTY_KEY_RE)
# Values a port can carry: strict scalars, a file URL, or None.
PortValue = Union[StrictBool, StrictInt, StrictFloat, StrictStr, FileUrl, None]


class TaskInputData(DictModel[PortKey, PortValue]):
    """Mapping of input port keys to their values for a task."""

    class Config(DictModel.Config):
        schema_extra = {
            "examples": [
                {
                    "boolean_input": False,
                    "int_input": -45,
                    "float_input": 4564.45,
                    "string_input": "nobody thinks like a string",
                    "file_input": {
                        "url": "s3://some_file_url"
                    },
from dispatch.location import service as location_service
from dispatch.database import (
    Base,
    get_class_by_tablename,
    get_model_name_by_tablename,
    get_db,
)
from dispatch.job.models import Job
from dispatch.team.models import Team
from dispatch.worker.models import Worker

# Fix: getLogger(__file__) keys the logger on a filesystem path, which breaks
# hierarchical logger configuration; __name__ is the logging convention.
log = logging.getLogger(__name__)

# allows only printable characters
QueryStr = constr(regex=r"^[ -~]+$", min_length=1)

# def restricted_incident_filter(query: orm.Query, current_user: DispatchUser, role: UserRoles):
#     """Adds additional incident filters to query (usually for permissions)."""
#     if role == UserRoles.member:
#         # We filter out resticted incidents for users with a member role if the user is not an incident participant
#         query = (
#             query.join(Participant, Incident.id == Participant.incident_id)
#             .join(IndividualContact)
#             .filter(
#                 or_(
#                     Incident.visibility == Visibility.open,
#                     IndividualContact.email == current_user.email,
#                 )
#             )
#         )
    assert Model.schema() == base_schema


@pytest.mark.parametrize(
    'field_type,expected_schema',
    [
        (StrictStr, {
            'title': 'A',
            'type': 'string'
        }),
        (ConstrainedStr, {
            'title': 'A',
            'type': 'string'
        }),
        (
            constr(min_length=3, max_length=5, regex='^text$'),
            {
                'title': 'A',
                'type': 'string',
                'minLength': 3,
                'maxLength': 5,
                'pattern': '^text$'
            },
        ),
    ],
)
def test_str_constrained_types(field_type, expected_schema):
    # Each constrained str type must emit its length/pattern constraints
    # into the schema for field `a`.
    class Model(BaseModel):
        a: field_type

    base_schema = {
class LaplaceNoiseParameters(CamelBaseModel):
    """Parameters for the Laplace-noise mechanism."""

    # Discriminator: fixed to the literal string "laplaceNoise".
    mechanism: constr(regex="^laplaceNoise$") = "laplaceNoise"
    config: LaplaceNoise
from enum import Enum

from pydantic.types import constr


class BankCode(Enum):
    """
    Bank codes per the Bank of Thailand standard-code registry:
    https://www.bot.or.th/Thai/Statistics/DataManagementSystem/Standard/StandardCode/Pages/default.aspx
    """
    BBL = "002"
    KBANK = "004"


# Any bank code is exactly three characters long.
AnyBankCode = constr(min_length=3, max_length=3)