def _process_values(self, input_data: typing.Any) -> "DictStrAny":
    pk_only = input_data.pop("__pk_only__", False)
    if pk_only:
        v, _ = pydantic.validate_model(self, input_data, raise_exc=False)
    else:
        v = pydantic.validate_model(self, input_data)
    return v

def test_return_errors_ok():
    class Model(BaseModel):
        foo: int
        bar: List[int]

    assert validate_model(Model, {'foo': '123', 'bar': (1, 2, 3)}) == {'foo': 123, 'bar': [1, 2, 3]}
    d, e = validate_model(Model, {'foo': '123', 'bar': (1, 2, 3)}, False)
    assert d == {'foo': 123, 'bar': [1, 2, 3]}
    assert e is None

def dump(self, data: ModelT, file: Union[BufferedWriter, Path, str], *args, **kwargs):
    validate_model(data.__class__, data.__dict__)
    if isinstance(file, str):
        file = Path(file)
    if isinstance(file, Path):
        file = file.open(mode='wb')
    return self._dump(data, file, *args, **kwargs)

def test_return_errors_error():
    class Model(BaseModel):
        foo: int
        bar: List[int]

    d, e = validate_model(Model, {'foo': '123', 'bar': (1, 2, 'x')}, False)
    assert d == {'foo': 123}
    assert e.errors() == [{'loc': ('bar', 2), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'}]

    d, e = validate_model(Model, {'bar': (1, 2, 3)}, False)
    assert d == {'bar': [1, 2, 3]}
    assert e.errors() == [{'loc': ('foo',), 'msg': 'field required', 'type': 'value_error.missing'}]

def validate(self, include=None):
    """
    Validate the current object.

    :param include: Only validate the given fields
    :return:
    """
    cls, obj = self._decompose_class()

    # Disable validation for unneeded fields by deleting fields in a dummy class
    if include is not None:
        class ReducedClass(cls, BaseModel):
            pass

        cls = ReducedClass
        cls.__fields__ = {k: v for k, v in cls.__fields__.items() if k in include}

    # Todo stop supporting pydantic < 1.7
    if VERSION >= '1.7':
        defaults = {k: v.get_default() for k, v in cls.__fields__.items()}
    else:
        defaults = cls.__field_defaults__

    return validate_model(
        cls,
        {
            **deepcopy(defaults),
            **{k: obj[k] for k in obj.keys() if include is None or k in include},
        })

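# A minimal sketch (not part of the snippet above) of the same partial-validation
# idea: restrict __fields__ on a throwaway subclass, then run validate_model()
# over just the included fields. The `User` model and the {"age"} include set are
# assumptions made for illustration; pydantic v1's validate_model() returns a
# (values, fields_set, errors) tuple.
from pydantic import BaseModel, validate_model


class User(BaseModel):
    name: str = "anon"
    age: int = 0


class ReducedUser(User):
    pass


# Keep only the fields we actually want validated.
ReducedUser.__fields__ = {k: v for k, v in User.__fields__.items() if k in {"age"}}

values, fields_set, errors = validate_model(ReducedUser, {"age": "42"})
assert errors is None and values == {"age": 42}
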
def __init__(self, **kwargs) -> None:

    cls = self.__class__
    if cls._model is None:
        cls._model = self._create_model()
        # might be needed in the future due to postponed annotations
        # cls._model.update_forward_refs()

    if hasattr(cls, '__fields__'):
        AmpelBaseModel.__init__(self)  # type: ignore[arg-type]

    vres = validate_model(cls._model, kwargs)  # type: ignore[arg-type]
    self._exclude_unset = self._defaults.keys() - kwargs.keys()

    # pydantic ValidationError
    if e := vres[2]:
        # https://github.com/samuelcolvin/pydantic/issues/784
        print("")
        if kwargs:
            print("#" * 60)
            print("Offending values:")
            for k, v in kwargs.items():
                print(f"{k}: {v}")
            print("#" * 60)
        raise TypeError(e) from None

def validate_object(obj: BaseModel, is_request: bool = True):
    *_, validation_error = validate_model(obj.__class__, obj.__dict__)
    if validation_error:
        if is_request:
            raise RequestValidationError(validation_error.raw_errors)

        raise validation_error

async def parse_request_ignore_missing(request, model: Type[T], *, headers_=None) -> T:
    try:
        raw_data = await request.json()
    except ValueError:
        raise JsonErrors.HTTPBadRequest(message='Error decoding JSON', headers_=headers_)

    if not isinstance(raw_data, dict):
        raise JsonErrors.HTTPBadRequest(message='data not a dictionary', headers_=headers_)

    data, fields_set, e = validate_model(model, raw_data)
    if e:
        # ignore plain top-level "field required" errors, keep everything else
        errors = [
            err for err in e.errors()
            if not (err['type'] == 'value_error.missing' and len(err['loc']) == 1)
        ]
        if errors:
            raise JsonErrors.HTTPBadRequest(message='Invalid Data', details=errors, headers_=headers_)
    return model.construct(_fields_set=fields_set, **data)

def update_todo(
    *,
    user_id: int,
    todo_id: int,
    todo_service: todo.TodoService,
    updates: typing.Dict[str, typing.Any],
) -> response.TodoResponse:
    original_todo = todo_service.get_by_id(user_id=user_id, todo_id=todo_id)
    if original_todo:
        data = original_todo.dict()
        data.update(updates)
        _, _, errors = pydantic.validate_model(todo.Todo, data)
        if errors:
            raise fastapi.HTTPException(
                status_code=fastapi.status.HTTP_400_BAD_REQUEST,
                detail=errors.json(),
            )
        updated_todo = original_todo.copy(update=updates)
        updated_todo_from_db = todo_service.update_todo(user_id=user_id, todo=updated_todo)
        return response.TodoResponse.from_domain(updated_todo_from_db)
    else:
        raise fastapi.HTTPException(
            status_code=fastapi.status.HTTP_404_NOT_FOUND,
            detail="Todo does not exist.")

async def get_model_from_json_data(
        cls, request: Request) -> Tuple[STATUS_CODE, REASON, MODEL_DATA]:
    json_data = await request.json()
    _, _, data = await CourierIdRequest.get_model_from_json_data(request=request)
    # a 400 can't happen here, since that call raises on its own
    values, _, error = validate_model(cls, json_data)
    if error is not None:
        return (
            web.HTTPBadRequest.status_code,
            web.HTTPBadRequest().reason,
            cls.error_handler(validation_error=error),
        )
    return (
        web.HTTPOk.status_code,
        web.HTTPOk().reason,
        cls.success_handler({
            "new_data": values,
            "courier_id": data
        }),
    )

def validate_taskdata_entry(value: dict):
    """ Validate taskdata entry """
    if not isinstance(value, dict):
        raise ValidationError("taskdata entry should be dict", TaskDataEntry())

    *_, validation_error = validate_model(TaskDataEntry, value)
    if validation_error:
        raise validation_error

def validate_again(self) -> None:
    """
    Use to manually validate for debugging when fields change
    """
    _, _, validation_error = pydantic.validate_model(type(self), self.dict())
    if validation_error:
        raise validation_error

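# A hedged, standalone sketch of the same "re-validate after mutation" idea with
# plain pydantic v1 (the `Point` model is an assumption, not part of the code
# above): attribute assignment is not validated by default, so re-running
# validate_model() over the current data surfaces the now-invalid value.
from pydantic import BaseModel, ValidationError, validate_model


class Point(BaseModel):
    x: int
    y: int


p = Point(x=1, y=2)
p.y = "oops"  # not validated (validate_assignment is False by default)

_, _, error = validate_model(type(p), p.dict())
assert isinstance(error, ValidationError)
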
def check(self, return_new=False):
    self.__class__.validate(self)
    out_dict, _, validation_error = validate_model(self.__class__, self.__dict__)
    if validation_error:
        raise validation_error

    # For compatibility with tests
    if return_new:
        return self.__class__(**out_dict)

def from_pandas_df(cls, df: pd.DataFrame) -> 'PandasModel':
    obj = df.to_dict(orient="list")
    m = cls.__new__(cls)
    values, fields_set, validation_error = validate_model(cls, obj)
    if validation_error:
        raise validation_error

    object.__setattr__(m, '__dict__', values)
    object.__setattr__(m, '__fields_set__', fields_set)
    return m

def _get_object_model(cls, obj_data: dict) -> "ModelSchema":
    values, fields_set, validation_error = validate_model(cls, obj_data)
    if validation_error:  # pragma: nocover
        raise validation_error

    model_schema = cls.__new__(cls)
    object.__setattr__(model_schema, "__dict__", values)
    object.__setattr__(model_schema, "__fields_set__", fields_set)
    return model_schema

def validate_all(cls, value: dict) -> Any:
    """ Validate kwargs values against all fields of cls """
    if cls._model is None:
        model = cls._model = cls._create_model()
    else:
        model = cls._model

    values, fields, errors = validate_model(model, value)
    if errors:
        raise TypeError(errors) from None
    return values

def check(self):
    """Manually trigger the validation of the data in this instance."""
    _, _, validation_error = validate_model(self.__class__, self.__dict__)
    if validation_error:
        raise validation_error

    for field in self.__dict__.values():
        if isinstance(field, BaseConfig):
            field.check()

async def get_model_from_json_data(
        cls, json_data: dict) -> Tuple[STATUS_CODE, REASON, dict]:
    values, fields_set, error = validate_model(cls, json_data)
    if error is not None:
        raise web.HTTPBadRequest
    return (
        web.HTTPOk.status_code,
        web.HTTPOk().reason,
        cls.success_handler(values),
    )

async def get_model_from_json_data(
        cls, request: Request
) -> Union[Tuple[STATUS_CODE, REASON, MODEL_DATA], NoReturn]:
    values, _, error = validate_model(
        cls, {"id": request.match_info.get("courier_id")})
    if error is not None:
        raise web.HTTPBadRequest
    return web.HTTPOk.status_code, web.HTTPOk().reason, cls.success_handler(values)

def check(self):
    for field_name, field_props in self.__fields__.items():
        field = getattr(self, field_name)
        if isinstance(field, BaseNetModel):
            field.check()
        elif isinstance(field, list):
            for item in field:
                if isinstance(item, BaseNetModel):
                    item.check()
        elif isinstance(field, dict):
            for value in field.values():
                if isinstance(value, BaseNetModel):
                    value.check()

    *_, validation_error = validate_model(self.__class__, self.__dict__)
    if validation_error:
        raise validation_error

def __init__(self, **data):
    if "pk" in data:
        data[self.Mapping.pk_name] = data.pop("pk")

    if typing.TYPE_CHECKING:
        self.__values__: Dict[str, Any] = {}
        self.__fields_set__: "SetStr" = set()

    pk_only = data.pop("__pk_only__", False)
    values, fields_set, _ = pydantic.validate_model(self, data, not pk_only)
    object.__setattr__(self, "__dict__", values)
    object.__setattr__(self, "__fields_set__", fields_set)

def __init__(self, **data):
    if "pk" in data:
        data[self.Mapping.pk_name] = data.pop("pk")

    if typing.TYPE_CHECKING:
        self.__dict__: typing.Dict[str, typing.Any] = {}
        self.__fields_set__: typing.Set[str] = set()

    pk_only = data.pop("__pk_only__", False)
    values, fields_set, error = pydantic.validate_model(self, data)
    if not pk_only and error:
        raise error

    object.__setattr__(self, "__dict__", values)
    object.__setattr__(self, "__fields_set__", fields_set)

def test_initial_sequential_build(self):
    config_map = ClientConfigAdapter(AvellanedaMarketMakingConfigMap.construct())
    config_settings = self.get_default_map()

    def build_config_map(cm: ClientConfigAdapter, cs: Dict):
        """This routine can be used in the create command, with slight modifications."""
        for key in cm.keys():
            client_data = cm.get_client_data(key)
            if client_data is not None and client_data.prompt_on_new:
                self.assertIsInstance(client_data.prompt(cm), str)
                if key == "execution_timeframe_model":
                    setattr(cm, key, "daily_between_times")  # simulate user input
                else:
                    setattr(cm, key, cs[key])
                new_value = getattr(cm, key)
                if isinstance(new_value, ClientConfigAdapter):
                    build_config_map(new_value, cs[key])

    build_config_map(config_map, config_settings)
    hb_config = config_map.hb_config
    validate_model(hb_config.__class__, hb_config.__dict__)
    self.assertEqual(0, len(config_map.validate_model()))

def __init__(self, *args: Any, **kwargs: Any) -> None:  # type: ignore
    object.__setattr__(self, "_orm_id", uuid.uuid4().hex)
    object.__setattr__(self, "_orm_saved", False)
    object.__setattr__(
        self,
        "_orm",
        RelationsManager(
            related_fields=[
                field
                for name, field in self.Meta.model_fields.items()
                if issubclass(field, ForeignKeyField)
            ],
            owner=self,
        ),
    )

    pk_only = kwargs.pop("__pk_only__", False)
    if "pk" in kwargs:
        kwargs[self.Meta.pkname] = kwargs.pop("pk")

    # build the models to set them and validate but don't register
    new_kwargs = {
        k: self._convert_json(
            k,
            self.Meta.model_fields[k].expand_relationship(
                v, self, to_register=False
            ),
            "dumps",
        )
        for k, v in kwargs.items()
    }

    values, fields_set, validation_error = pydantic.validate_model(
        self, new_kwargs  # type: ignore
    )
    if validation_error and not pk_only:
        raise validation_error

    object.__setattr__(self, "__dict__", values)
    object.__setattr__(self, "__fields_set__", fields_set)

    # register the columns models after initialization
    for related in self.extract_related_names():
        self.Meta.model_fields[related].expand_relationship(
            new_kwargs.get(related), self, to_register=True
        )

def __init__(__pydantic_self__, **data):
    if typing.TYPE_CHECKING:
        __pydantic_self__.__dict__: typing.Dict[str, typing.Any] = {}
        __pydantic_self__.__fields_set__: "SetStr" = set()

    values, fields_set, validation_error = validate_model(
        __pydantic_self__.__class__, data)
    if validation_error:
        _fields = []
        errors = validation_error.errors()
        for each_error in errors:
            _fields.append(
                Field(
                    name=each_error.get("loc")[0],
                    message=each_error.get("msg"),
                    error_type=each_error.get("type"),
                ))
        raise ValidationError(_fields)

    object.__setattr__(__pydantic_self__, "__dict__", values)
    object.__setattr__(__pydantic_self__, "__fields_set__", fields_set)

async def update_orm(model: Type[BaseModel], orm_obj: models.Model, input: BaseModel,
                     *, access: Optional[Access] = None) -> BaseModel:
    """
    Apply (partial) changes given in `input` to an orm_obj and return an instance
    of `model` with the full data of the orm including the updated fields.
    """
    warnings.warn(
        "Use transfer_to_orm with exclude_unset=True instead of this function",
        category=DeprecationWarning)

    if access:
        check_field_access(input, access)

    data = await model.from_orm(orm_obj)
    input_dict: dict = input.dict(exclude_unset=True)

    def update(model: BaseModel, input: dict):
        for key, value in input.items():
            if isinstance(value, dict):
                attr = getattr(model, key)
                if attr is None:
                    setattr(model, key, model.__fields__[key].type_.parse_obj(value))
                else:
                    update(attr, value)
            else:
                setattr(model, key, value)

    update(data, input_dict)
    values, fields_set, validation_error = validate_model(model, data.dict())
    if validation_error:
        raise RequestValidationError(validation_error.raw_errors)

    transfer_to_orm(data, orm_obj)
    return data

def from_django(
    cls: Type["PydanticDjangoModel"],
    instance: django.db.models.Model,
    cache: bool = True,
    save: bool = False,
) -> Type["PydanticDjangoModel"]:
    obj_data = {}
    for field in instance._meta.get_fields():
        if not field.concrete and field.auto_created:
            accessor_name = field.get_accessor_name()
            if field.one_to_many:
                obj_data[accessor_name] = list(
                    getattr(instance, accessor_name).all().values("pk")
                )
            elif field.one_to_one:
                _obj = getattr(instance, accessor_name, None)
                if _obj:
                    obj_data[accessor_name] = _obj.pk
        else:
            obj_data[field.name] = field.value_from_object(instance)

    values, fields_set, validation_error = validate_model(cls, obj_data)
    if validation_error:
        raise validation_error

    p_model = cls.__new__(cls)
    object.__setattr__(p_model, "__dict__", values)
    object.__setattr__(p_model, "__fields_set__", fields_set)

    if save:
        instance.save()

    if cache:
        cls.instance = instance

    return p_model

def __init__(self, *args: Any, **kwargs: Any) -> None:  # type: ignore
    """
    Initializer that creates a new ormar Model that is also a pydantic Model at
    the same time.

    Passed keyword arguments can only be field names and their corresponding
    values, as those will be passed to pydantic validation, which will complain
    if extra params are passed.

    If relations are defined, each relation is expanded and children models are
    also initialized and validated. The relation is registered from both sides,
    so you can access related models from both sides.

    Json fields are automatically loaded/dumped if needed.

    Models marked as abstract=True in the internal Meta class cannot be
    initialized.

    Also accepts a special __pk_only__ flag indicating that the Model is
    constructed only with a primary key value (no other fields; it's a child
    model of another Model), which causes the validation to be skipped; that's
    the only case when validation can be skipped.

    Also accepts a special __excluded__ parameter containing a set of fields
    that should be explicitly set to None, as otherwise pydantic will try to
    populate them with their default values if a default is set.

    :raises ModelError: if an abstract model is initialized, the model has
        ForwardRefs that have not been updated, or an unknown field is passed
    :param args: ignored args
    :type args: Any
    :param kwargs: keyword arguments - all fields values and some special params
    :type kwargs: Any
    """
    self._verify_model_can_be_initialized()
    object.__setattr__(self, "_orm_id", uuid.uuid4().hex)
    object.__setattr__(self, "_orm_saved", False)
    object.__setattr__(self, "_pk_column", None)
    object.__setattr__(
        self,
        "_orm",
        RelationsManager(
            related_fields=self.extract_related_fields(),
            owner=self,
        ),
    )

    pk_only = kwargs.pop("__pk_only__", False)
    excluded: Set[str] = kwargs.pop("__excluded__", set())
    if "pk" in kwargs:
        kwargs[self.Meta.pkname] = kwargs.pop("pk")

    # build the models to set them and validate but don't register
    # also remove property fields values from validation
    try:
        new_kwargs: Dict[str, Any] = {
            k: self._convert_json(
                k,
                self.Meta.model_fields[k].expand_relationship(
                    v, self, to_register=False,
                ),
                "dumps",
            )
            for k, v in kwargs.items()
            if k not in object.__getattribute__(self, "Meta").property_fields
        }
    except KeyError as e:
        raise ModelError(
            f"Unknown field '{e.args[0]}' for model {self.get_name(lower=False)}"
        )

    # explicitly set None to excluded fields
    # as pydantic populates them with default if set
    for field_to_nullify in excluded:
        new_kwargs[field_to_nullify] = None

    values, fields_set, validation_error = pydantic.validate_model(
        self, new_kwargs  # type: ignore
    )
    if validation_error and not pk_only:
        raise validation_error

    object.__setattr__(self, "__dict__", values)
    object.__setattr__(self, "__fields_set__", fields_set)

    # register the columns models after initialization
    for related in self.extract_related_names():
        self.Meta.model_fields[related].expand_relationship(
            new_kwargs.get(related), self, to_register=True,
        )

def __init__(self, *args: Any, **kwargs: Any) -> None:  # type: ignore
    """
    Initializer that creates a new ormar Model that is also a pydantic Model at
    the same time.

    Passed keyword arguments can only be field names and their corresponding
    values, as those will be passed to pydantic validation, which will complain
    if extra params are passed.

    If relations are defined, each relation is expanded and children models are
    also initialized and validated. The relation is registered from both sides,
    so you can access related models from both sides.

    Json fields are automatically loaded/dumped if needed.

    Models marked as abstract=True in the internal Meta class cannot be
    initialized.

    Also accepts a special __pk_only__ flag indicating that the Model is
    constructed only with a primary key value (no other fields; it's a child
    model of another Model), which causes the validation to be skipped; that's
    the only case when validation can be skipped.

    Also accepts a special __excluded__ parameter containing a set of fields
    that should be explicitly set to None, as otherwise pydantic will try to
    populate them with their default values if a default is set.

    :raises ModelError: if an abstract model is initialized, the model has
        ForwardRefs that have not been updated, or an unknown field is passed
    :param args: ignored args
    :type args: Any
    :param kwargs: keyword arguments - all fields values and some special params
    :type kwargs: Any
    """
    self._verify_model_can_be_initialized()
    self._initialize_internal_attributes()

    pk_only = kwargs.pop("__pk_only__", False)
    object.__setattr__(self, "__pk_only__", pk_only)

    new_kwargs, through_tmp_dict = self._process_kwargs(kwargs)

    values, fields_set, validation_error = pydantic.validate_model(
        self, new_kwargs  # type: ignore
    )
    if validation_error and not pk_only:
        raise validation_error

    object.__setattr__(self, "__dict__", values)
    object.__setattr__(self, "__fields_set__", fields_set)

    # add back through fields
    new_kwargs.update(through_tmp_dict)
    model_fields = object.__getattribute__(self, "Meta").model_fields

    # register the columns models after initialization
    for related in self.extract_related_names().union(self.extract_through_names()):
        model_fields[related].expand_relationship(
            new_kwargs.get(related), self, to_register=True,
        )

    if hasattr(self, "_init_private_attributes"):
        # introduced in pydantic 1.7
        self._init_private_attributes()

def validate(cls, value: dict) -> Any:
    """ Validate kwargs values against fields of cls (except traceless) """
    values, fields, errors = validate_model(cls._create_model(True), value)
    if errors:
        raise TypeError(errors) from None
    return values