def __init__(self, text: Text, type_: ResponseType = None, result: Dict = None, **data: Any) -> None:
    """Accept `text` and `type_` positionally and `result` as a plain
    dictionary, for backward compatibility.

    :param text: text to pronounce
    :param type_: response type
    :param result: raw result mapping, wrapped into a Result model
    :param data: remaining keyword fields forwarded to the base model
    """
    # Reject the ambiguous case where the type arrives both ways.
    if type_ is not None and "type" in data:
        raise ValidationError(
            f"Ambiguous response type: 'type_'={type_} and 'type='{data['type']}.",
            type(self),
        )
    params: Dict[str, Any] = {"text": text}
    if type_ is not None:
        params["type"] = type_
    if result and isinstance(result, Dict):
        params["result"] = Result(result)
    # Explicit params win over anything duplicated in **data.
    super().__init__(**{**data, **params})
def _validate_feature_server_config(cls, values): # Having no feature server is the default. if "feature_server" not in values: return values # Skip if we aren't creating the configuration from a dict if not isinstance(values["feature_server"], Dict): return values # Make sure that the provider configuration is set. We need it to set the defaults if "provider" not in values: raise FeastProviderNotSetError() feature_server_type = FEATURE_SERVER_TYPE_FOR_PROVIDER.get( values["provider"]) defined_type = values["feature_server"].get("type") # Make sure that the type is either not set, or set correctly, since it's defined by the provider if defined_type not in (None, feature_server_type): raise FeastFeatureServerTypeSetError(defined_type) values["feature_server"]["type"] = feature_server_type # Validate the dict to ensure one of the union types match try: feature_server_config_class = get_feature_server_config_from_type( feature_server_type) feature_server_config_class(**values["feature_server"]) except ValidationError as e: raise ValidationError( [ErrorWrapper(e, loc="feature_server")], model=RepoConfig, ) return values
def validate_protocol_version(cls, protocol_version: int) -> int:
    """Accept only protocol version 1 (falsy placeholders pass through)."""
    # 0/None are treated as "not set" and returned unchanged.
    if not protocol_version:
        return protocol_version
    if protocol_version == 1:
        return protocol_version
    raise ValidationError("Protocol version 1 only supported.")
def _validate_feature_server_config(cls, values): # Having no feature server is the default. if "feature_server" not in values: return values # Skip if we aren't creating the configuration from a dict if not isinstance(values["feature_server"], Dict): return values # Make sure that the provider configuration is set. We need it to set the defaults if "provider" not in values: raise FeastProviderNotSetError() # Make sure that the type is not set, since we will set it based on the provider. if "type" in values["feature_server"]: raise FeastFeatureServerTypeSetError(values["feature_server"]["type"]) # Set the default type. We only support AWS Lambda for now. if values["provider"] == "aws": values["feature_server"]["type"] = "aws_lambda" feature_server_type = values["feature_server"]["type"] # Validate the dict to ensure one of the union types match try: feature_server_config_class = get_feature_server_config_from_type( feature_server_type ) feature_server_config_class(**values["feature_server"]) except ValidationError as e: raise ValidationError( [ErrorWrapper(e, loc="feature_server")], model=RepoConfig, ) return values
def _validate_offline_store_config(cls, values): # Set empty offline_store config if it isn't set explicitly if "offline_store" not in values: values["offline_store"] = dict() # Skip if we aren't creating the configuration from a dict if not isinstance(values["offline_store"], Dict): return values # Make sure that the provider configuration is set. We need it to set the defaults assert "provider" in values # Set the default type if "type" not in values["offline_store"]: if values["provider"] == "local": values["offline_store"]["type"] = "file" elif values["provider"] == "gcp": values["offline_store"]["type"] = "bigquery" elif values["provider"] == "aws": values["offline_store"]["type"] = "redshift" offline_store_type = values["offline_store"]["type"] # Validate the dict to ensure one of the union types match try: offline_config_class = get_offline_config_from_type( offline_store_type) offline_config_class(**values["offline_store"]) except ValidationError as e: raise ValidationError( [ErrorWrapper(e, loc="offline_store")], model=RepoConfig, ) return values
def nest_predicate_fields(cls, values):
    """Move fields passed to the Fact model that really belong to the Predicate model."""
    # A "type" field, if present, must name this model (case-insensitive).
    type_str = values.pop("type", "")
    if type_str and type_str.lower() != "fact":
        raise ValidationError(f"type {type_str} was passed to Fact model")
    # A bare string predicate becomes a Predicate with that string as content.
    if isinstance(values.get("predicate"), str):
        values["predicate"] = Predicate(content=values["predicate"])
        if "truth" in values:
            # Attribute assignment: predicate is a Predicate object here.
            values["predicate"].truth = values.pop("truth")
    # Relocate predicate-level fields from the top level into the
    # "predicate" entry, creating an empty dict for it if needed.
    for field_name in ["content", "truth", "sign", "expression"]:
        if field_name in values:
            values["predicate"] = values.get("predicate", {})
            # NOTE(review): if predicate is already a Predicate object (from
            # the str branch above), item assignment here would fail —
            # presumably these fields never co-occur with a str predicate.
            values["predicate"][field_name] = values.pop(field_name)
    # When predicate is a dict with content, split the content at the first
    # comparison sign found into content / expression / sign parts.
    if isinstance(values.get("predicate"), dict) and values["predicate"].get("content"):
        for sign in {
            **QuantityRange.opposite_comparisons,
            **QuantityRange.normalized_comparisons,
        }:
            if sign in values["predicate"]["content"]:
                # NOTE(review): split() unpacking fails if the sign occurs
                # more than once in the content — confirm inputs guarantee
                # at most one occurrence.
                content, quantity_text = values["predicate"][
                    "content"].split(sign)
                values["predicate"]["content"] = content.strip()
                values["predicate"]["expression"] = quantity_text.strip()
                values["predicate"]["sign"] = sign
                break
    return values
def check_type_field(cls, values):
    """Fail validation if the input's "type" field names a class other than Evidence."""
    declared = values.pop("type", "")
    if declared and declared.lower() != "evidence":
        raise ValidationError(
            f"type {declared} was passed to Evidence model")
    return values
def error_handler(
    cls,
    json_data: dict,
    validation_error: "ValidationError",
    id_key: str,
    items_key: str,
) -> dict:
    """Build a structured error response from a pydantic ValidationError.

    :param json_data: the submitted payload; its "data" list is indexed by
        the second element of each error location to find the bad item
    :param validation_error: the error whose .errors() entries are reported
    :param id_key: key under which each data item carries its identifier
    :param items_key: key under which the offending ids are reported
    :return: dict describing the offending items and the error details
    """
    bad_data_ids = []
    errors_data = []
    # Clients may send an empty list; report an empty error payload then.
    if json_data["data"]:
        for error in validation_error.errors():
            errors_data.append({
                "location": error["loc"],
                "msg": error["msg"],
                "type": error["type"],
            })
            # loc looks like ("data", <index>, ...): pick the item index.
            element_number = error["loc"][1]
            element_id = json_data["data"][element_number][id_key]
            if element_id not in bad_data_ids:
                bad_data_ids.append(element_id)
    # BUGFIX: previously this dict was only built inside the branch above,
    # raising UnboundLocalError for an empty "data" list; build it always.
    return {
        "validation_error": {
            items_key: [{"id": element_id} for element_id in bad_data_ids],
            "errors_data": errors_data,
        }
    }
def test_pydantic_validation_error(self):
    """Pydantic validation errors are rendered as a 400 with per-field details."""
    wrappers = [
        ErrorWrapper(NoneIsNotAllowedError(), "foo"),
        ErrorWrapper(BoolError(), "bar"),
    ]
    self.add_route_raises_exception(
        ExceptionHandlingSpec(
            Exception,
            broad_exception_handler,
            ValidationError(wrappers, BaseModel),
        )
    )
    resp = self.request()
    self.assertEqual(400, resp.status_code)
    expected = {
        "error": [
            {
                "loc": ["foo"],
                "msg": "none is not an allowed value",
                "type": "type_error.none.not_allowed",
            },
            {
                "loc": ["bar"],
                "msg": "value could not be parsed to a boolean",
                "type": "type_error.bool",
            },
        ]
    }
    self.assertDictEqual(expected, resp.json)
def _validate_online_store_config(cls, values): # This method will validate whether the online store configurations are set correctly. This explicit validation # is necessary because Pydantic Unions throw very verbose and cryptic exceptions. We also use this method to # impute the default online store type based on the selected provider. For the time being this method should be # considered tech debt until we can implement https://github.com/samuelcolvin/pydantic/issues/619 or a more # granular configuration system # Skip if online store isn't set explicitly if "online_store" not in values: values["online_store"] = dict() # Skip if we arent creating the configuration from a dict if not isinstance(values["online_store"], Dict): return values # Make sure that the provider configuration is set. We need it to set the defaults assert "provider" in values if "online_store" in values: # Set the default type if "type" not in values["online_store"]: if values["provider"] == "local": values["online_store"]["type"] = "sqlite" elif values["provider"] == "gcp": values["online_store"]["type"] = "datastore" online_store_type = values["online_store"]["type"] # Make sure the user hasn't provided the wrong type assert online_store_type in ["datastore", "sqlite"] # Validate the dict to ensure one of the union types match try: if online_store_type == "sqlite": SqliteOnlineStoreConfig(**values["online_store"]) elif values["online_store"]["type"] == "datastore": DatastoreOnlineStoreConfig(**values["online_store"]) else: raise ValidationError( f"Invalid online store type {online_store_type}" ) except ValidationError as e: raise ValidationError( [ErrorWrapper(e, loc="online_store")], model=SqliteOnlineStoreConfig, ) return values
def validate_name(cls, name: str) -> str:
    """Normalize the project name.

    :param name: candidate project name
    :return: the normalized package name
    :raises ValidationError: if the name contains characters other than
        letters, digits, '.', '_' and '-', or starts/ends with one of the
        punctuation characters
    """
    # Must start and end with an alphanumeric; dots, underscores and
    # hyphens are allowed in between (case-insensitive).
    name_re = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
                         re.IGNORECASE)
    if not name_re.match(name):
        # BUGFIX: the old message was garbled by a backslash line
        # continuation that embedded a long run of spaces, and read
        # "should contains only letter".
        raise ValidationError(
            "Name should contain only letters, numerics, '.', '_', '-'")
    return normalize_package_name(name)
def test_get_metrics_reporting_registry_settings_error():
    """get_metrics_reporting_registry() handles invalid settings."""
    registry_in = CollectorRegistry()
    with patch("ctms.metrics.config.Settings") as settings:
        settings.side_effect = ValidationError(errors=[], model=Mock)
        registry_out = get_metrics_reporting_registry(registry_in)
    # The passed-in registry is returned untouched on settings failure.
    assert registry_out is registry_in
    assert not registry_out._collector_to_names  # pylint: disable=protected-access
def handle_validation_error(error: ValidationError) -> Response:
    """Convert a ValidationError from parsing JSON input to an error response."""
    payload = {
        "reason": "validation_failed",
        "errors": error.errors(),
    }
    response = jsonify(payload)
    response.status_code = 400
    return cast(Response, response)
def check_date(cls, v, values):
    """Validate that day `v` and the already-validated month form a real date.

    :param v: day of month
    :param values: previously validated fields; must contain 'month'
    :return: v unchanged when the date is valid
    :raises ValidationError: when month/day is not a valid calendar date
    """
    try:
        # 2016 is a leap year, so Feb 29 is accepted.
        datetime.date(2016, values['month'], v)
    except ValueError:
        # BUGFIX: datetime.date raises ValueError for an invalid date; the
        # old `except ValidationError` could never catch it, so the raw
        # ValueError leaked to callers.
        raise ValidationError('{}/{} is not a valid date.'.format(
            values['month'], v))
    else:
        return v
def validate_taskdata_entry(value: dict):
    """Validate *value* as a TaskDataEntry, raising the model error on failure."""
    if isinstance(value, dict):
        # Run the pydantic model and re-raise its collected error, if any.
        *_, validation_error = validate_model(TaskDataEntry, value)
        if validation_error:
            raise validation_error
    else:
        raise ValidationError("taskdata entry should be dict", TaskDataEntry())
def end_date_datetime(cls, end_date: Union[str, datetime.datetime], field: ModelField) -> datetime.datetime:
    """Coerce *end_date* to a datetime, converting strings via convert()."""
    # Already a datetime: pass straight through.
    if isinstance(end_date, datetime.datetime):
        return end_date
    if isinstance(end_date, str):
        converted = convert(value=end_date, validator_name=field.name.upper())
        if converted is not None:
            return converted
    # Neither a datetime nor a convertible string.
    raise ValidationError(model=Features)
def date_datetime(cls, date: Union[str, datetime.datetime], field: ModelField) -> datetime.datetime:
    """Coerce *date* to a datetime, converting strings via convert()."""
    # Already a datetime: pass straight through.
    if isinstance(date, datetime.datetime):
        return date
    if isinstance(date, str):
        converted = convert(value=date, validator_name=field.name.upper())
        if converted is not None:
            return converted
    # Neither a datetime nor a convertible string.
    raise ValidationError(model=PaymentData)
def check_texture_is_plausible(cls, values):
    """Reject texture fractions whose provided components sum above 1.0."""
    provided = [
        values.get(key)
        for key in ("sand", "silt", "clay")
        if values.get(key) is not None
    ]
    # NOTE(review): the check uses 1.0 but the message says 100 — one of
    # the two presumably assumes percentages; confirm the intended units.
    if provided and sum(provided) > 1.0:
        raise ValidationError("sum(sand, silt, clay) > 100")
    return values
def validate_requires_python(cls, requires_python: str) -> str:
    """Check if requires_python is a valid pep440 version specifier."""
    # Empty/falsy input normalizes to the empty string.
    if not requires_python:
        return ""
    if is_valid_pep440_specifier(requires_python):
        return requires_python
    raise ValidationError(
        "requires_python is not a valid pep 440 specifier")
def validator_taskdata_uri(cls, value, values, **kwargs):
    """Forbid supplying both a non-empty taskdata and a taskdata_uri."""
    taskdata = values.get('taskdata')
    taskdata_uri = values.get('taskdata_uri')
    both_given = (
        taskdata is not None
        and len(taskdata) > 0
        and taskdata_uri is not None
    )
    if both_given:
        raise ValidationError(
            u'Specify only one of taskdata {} or taskdata_uri {}'.format(
                taskdata, taskdata_uri))
    return value
def email_or_phone_required(cls, values):
    """Require at least one of email or phone to be truthy."""
    has_email = bool(values.get("email"))
    has_phone = bool(values.get("phone"))
    if not (has_email or has_phone):
        raise ValidationError("Either email or phone must be defined.")
    return values
def validate_blake2_256_digestt(cls, blake2_256_digest: str) -> str:
    """Check if blake2_256_digest is a valid hex-encoded string."""
    # NOTE(review): "digestt" in the name looks like a typo; kept for
    # caller compatibility.
    # Empty/falsy input normalizes to the empty string.
    if not blake2_256_digest:
        return ""
    # 64 hex characters == 256 bits, case-insensitive.
    pattern = re.compile(r"^[A-F0-9]{64}$", re.IGNORECASE)
    if pattern.match(blake2_256_digest):
        return blake2_256_digest
    raise ValidationError(
        "blake2_256_digest is not a valid hex-encoded string")
def validate_sha256_digest(cls, sha256_digest: str) -> str:
    """Check if sha256_digest is a valid hex-encoded string."""
    # Empty/falsy input normalizes to the empty string.
    if not sha256_digest:
        return ""
    # 64 hex characters == 256 bits, case-insensitive.
    pattern = re.compile(r"^[A-F0-9]{64}$", re.IGNORECASE)
    if pattern.match(sha256_digest):
        return sha256_digest
    raise ValidationError(
        "sha256_digest is not a valid hex-encoded string.")
def parse_from_docs(cls, code: str, prev: dict, curr: dict) -> 'DynamicRate':
    """Build a DynamicRate for *code* from two consecutive rate documents."""
    try:
        previous_value = prev[code]['value']
        current_value = curr[code]['value']
        return cls(
            date=curr['created_at'],
            value=current_value,
            change=get_percent_change(previous_value, current_value),
        )
    except KeyError:
        # A missing code or field in either document makes the docs unusable.
        raise ValidationError()
def validate_description_content_type(
        cls, description_content_type: str) -> str:
    """Check if description_content_type is a known format."""
    # Empty/falsy input normalizes to the empty string.
    if not description_content_type:
        return ""
    if description_content_type in {"text/plain", "text/x-rst", "text/markdown"}:
        return description_content_type
    raise ValidationError("Invalid description_content_type")
def registration_date_datetime(cls, registration_date: Union[str, datetime.datetime], field: ModelField) -> datetime.datetime:
    """Coerce *registration_date* to a datetime, converting strings via convert()."""
    # Already a datetime: pass straight through.
    if isinstance(registration_date, datetime.datetime):
        return registration_date
    if isinstance(registration_date, str):
        converted = convert(value=registration_date,
                            validator_name=field.name.upper())
        if converted is not None:
            return converted
    # Neither a datetime nor a convertible string.
    raise ValidationError(model=AuthInfo)
def creation_date_datetime(cls, creation_date: Union[str, datetime.datetime], field: ModelField) -> datetime.datetime:
    """Coerce *creation_date* to a datetime, converting strings via convert()."""
    # Already a datetime: pass straight through.
    if isinstance(creation_date, datetime.datetime):
        return creation_date
    if isinstance(creation_date, str):
        converted = convert(value=creation_date,
                            validator_name=field.name.upper())
        if converted is not None:
            return converted
    # Neither a datetime nor a convertible string.
    raise ValidationError(model=ContractInfo)
def handle_error(error_: ValidationError) -> Tuple[Response, int]:
    """Render a ValidationError as a JSON error payload with HTTP 400."""
    body = {
        "error": Failure.PARAMETERS_ERROR,
        "error_message": "validation_error",
        "error_hint": json.loads(error_.json()),
    }
    return jsonify(body), 400
def validate_sitekey(cls, value):
    """Validate that every sitekey in the nested restrictions parses as a UUID.

    :param value: an iterable of restrictions, each an iterable of sitekeys,
        or None (passed through unchanged)
    :return: the value, unchanged, when all sitekeys are valid UUIDs
    :raises ValidationError: if any sitekey is not a valid UUID
    """
    if value is not None:
        for restriction in value:
            for sitekey in restriction:
                try:
                    UUID(sitekey)
                except (ValueError, TypeError, AttributeError) as exc:
                    # BUGFIX: a bare `except:` here also swallowed
                    # SystemExit/KeyboardInterrupt; catch only what UUID()
                    # raises for malformed input, and chain the cause.
                    raise ValidationError("invalid sitekey") from exc
    return value
def convert_error(err: ValidationError) -> Error:
    """Flatten a pydantic ValidationError into an INVALID_REQUEST Error."""
    messages = []
    for item in err.errors():
        loc = item["loc"]
        # Tuple locations become dotted paths; anything else is stringified.
        if isinstance(loc, tuple):
            name = ".".join(str(part) for part in loc)
        else:
            name = str(loc)
        messages.append("%s: %s" % (name, item["msg"]))
    return Error(code=ErrorCode.INVALID_REQUEST, errors=messages)