def _int(codec_type, python_type):
    """Return the int codec for the requested codec type, or None if not applicable."""
    stripped, _ = split_annotated(python_type)
    # bool is a subclass of int, but has its own codecs
    if not is_subclass(stripped, int) or is_subclass(stripped, bool):
        return None
    return {
        Binary: _int_binarycodec,
        String: _int_stringcodec,
        JSON: _int_jsoncodec,
    }.get(codec_type)
def _date(codec_type, python_type):
    """Return the date codec for the requested codec type, or None if not applicable."""
    stripped, _ = split_annotated(python_type)
    # datetime is a subclass of date, but has its own codecs
    if not is_subclass(stripped, date) or is_subclass(stripped, datetime):
        return None
    return {
        Binary: _date_binarycodec,
        String: _date_stringcodec,
        JSON: _date_jsoncodec,
    }.get(codec_type)
def _int_schema(*, python_type, annotated, **_):
    """Build an OpenAPI integer schema for int types (bool excluded), or None."""
    if not is_subclass(python_type, int) or is_subclass(python_type, bool):
        return None
    extra = {}
    # honor min/max validation annotations
    for item in annotated:
        if is_instance(item, fondat.validation.MinValue):
            extra["minimum"] = item.value
        elif is_instance(item, fondat.validation.MaxValue):
            extra["maximum"] = item.value
    return Schema(
        type="integer",
        format="int64",
        **_kwargs(python_type, annotated),
        **extra,
    )
def _iterable_schema(*, python_type, annotated, origin, args, processor, **_):
    """Build an OpenAPI array schema for non-mapping Iterable[T] types, or None."""
    if not is_subclass(origin, Iterable) or is_subclass(origin, Mapping) or len(args) != 1:
        return None
    extra = {}
    if is_subclass(origin, set):
        extra["uniqueItems"] = True  # sets cannot contain duplicates
    # honor min/max length validation annotations
    for item in annotated:
        if is_instance(item, fondat.validation.MinLen):
            extra["minItems"] = item.value
        elif is_instance(item, fondat.validation.MaxLen):
            extra["maxItems"] = item.value
    return Schema(
        type="array",
        items=processor.schema(args[0]),
        **_kwargs(python_type, annotated),
        **extra,
    )
def _iterable_codec_provider(python_type):
    """
    Provide a codec that encodes/decodes a single-parameter Iterable type
    to/from a PostgreSQL array column, or None for unsupported types.
    """
    origin = typing.get_origin(python_type)
    if not origin or not is_subclass(origin, Iterable):
        return None
    type_args = typing.get_args(python_type)
    if not type_args or len(type_args) > 1:
        return None
    item_codec = get_codec(type_args[0])

    # NOTE(review): sibling providers decorate their codec classes with
    # @affix_type_hints(localns=locals()); confirm it is not needed here.
    class IterableCodec(PostgreSQLCodec[python_type]):
        sql_type = f"{item_codec.sql_type}[]"

        @validate_arguments
        def encode(self, value: python_type) -> Any:
            return [item_codec.encode(item) for item in value]

        @validate_arguments
        def decode(self, value: Any) -> python_type:
            return python_type(item_codec.decode(item) for item in value)

    return IterableCodec()
def _float(codec_type, python_type):
    """Return the float codec for the requested codec type, or None if not applicable."""
    stripped, _ = split_annotated(python_type)
    if not is_subclass(stripped, float):
        return None
    return {
        Binary: _float_binarycodec,
        String: _float_stringcodec,
        JSON: _float_jsoncodec,
    }.get(codec_type)
def _bool(codec_type, python_type):
    """Return the bool codec for the requested codec type, or None if not applicable."""
    stripped, _ = split_annotated(python_type)
    if not is_subclass(stripped, bool):
        return None
    return {
        Binary: _bool_binarycodec,
        String: _bool_stringcodec,
        JSON: _bool_jsoncodec,
    }.get(codec_type)
def _Decimal(codec_type, python_type):
    """Return the Decimal codec for the requested codec type, or None if not applicable."""
    stripped, _ = split_annotated(python_type)
    if not is_subclass(stripped, Decimal):
        return None
    return {
        Binary: _decimal_binary,
        String: _decimal_string,
        JSON: _decimal_json,
    }.get(codec_type)
def _uuid(codec_type, python_type):
    """Return the UUID codec for the requested codec type, or None if not applicable."""
    stripped, _ = split_annotated(python_type)
    if not is_subclass(stripped, UUID):
        return None
    return {
        Binary: _uuid_binarycodec,
        String: _uuid_stringcodec,
        JSON: _uuid_jsoncodec,
    }.get(codec_type)
def _bytes(codec_type, python_type):
    """Return the bytes codec for the requested codec type, or None if not applicable."""
    stripped, _ = split_annotated(python_type)
    if not is_subclass(stripped, (bytes, bytearray)):
        return None
    return {
        Binary: _bytes_binarycodec,
        String: _bytes_stringcodec,
        JSON: _bytes_jsoncodec,
    }.get(codec_type)
def _mapping_schema(*, python_type, annotated, origin, args, processor, **_):
    """
    Build an OpenAPI object schema for Mapping[str, V] types, or None.

    Raises TypeError if the mapping key type is not str.
    """
    if not is_subclass(origin, Mapping) or len(args) != 2:
        return None
    if args[0] is not str:
        raise TypeError("Mapping[k, v] only supports str keys")
    return Schema(
        type="object",
        properties={},
        additionalProperties=processor.schema(args[1]),
        **_kwargs(python_type, annotated),
    )
def _float_schema(*, python_type, annotated, **_):
    """
    Build an OpenAPI number schema for float types, honoring MinValue/MaxValue
    validation annotations. Returns None for non-float types.
    """
    if is_subclass(python_type, float):
        kwargs = {}
        for annotation in annotated:
            if is_instance(annotation, fondat.validation.MinValue):
                kwargs["minimum"] = annotation.value
            elif is_instance(annotation, fondat.validation.MaxValue):
                # BUG FIX: was `annotations.value` — an undefined name, raising
                # NameError whenever a MaxValue annotation was present.
                kwargs["maximum"] = annotation.value
        return Schema(
            type="number", format="double", **_kwargs(python_type, annotated), **kwargs
        )
def get_param_in(method, param_name, type_hint):
    """
    Return an annotation expressing where a parameter is to be provided.

    If an annotation is a class, it is instantiated. If no annotation exists,
    a default is supplied: InBody for mutation operations, InQuery otherwise.
    """
    stripped = fondat.types.strip_optional(type_hint)
    if typing.get_origin(stripped) is Annotated:
        # skip the underlying type; inspect only the annotation metadata
        for item in typing.get_args(stripped)[1:]:
            if is_subclass(item, (InBody, InQuery)):
                return item(param_name)
            if is_subclass(item, AsBody):
                return item()
            if isinstance(item, (AsBody, InBody, InQuery)):
                return item
    default = InBody if method._fondat_operation.type == "mutation" else InQuery
    return default(param_name)
def _str_schema(*, python_type, annotated, **_):
    """Build an OpenAPI string schema, honoring format/length/pattern annotations."""
    if not is_subclass(python_type, str):
        return None
    extra = {}
    for item in annotated:
        if is_instance(item, fondat.annotation.Format):
            extra["format"] = item.value
        elif is_instance(item, fondat.validation.MinLen):
            extra["minLength"] = item.value
        elif is_instance(item, fondat.validation.MaxLen):
            extra["maxLength"] = item.value
        elif is_instance(item, fondat.validation.Pattern):
            extra["pattern"] = item.pattern.pattern
    return Schema(type="string", **_kwargs(python_type, annotated), **extra)
def _bytes_schema(*, python_type, annotated, **_):
    """Build an OpenAPI string schema for bytes/bytearray types, or None."""
    if not is_subclass(python_type, (bytes, bytearray)):
        return None
    extra = {}
    for item in annotated:
        if is_instance(item, fondat.validation.MinLen):
            extra["minLength"] = item.value
        elif is_instance(item, fondat.validation.MaxLen):
            extra["maxLength"] = item.value
    # raw binary in HTTP bodies; base64 ("byte") everywhere else
    fmt = "binary" if fondat.http.InBody in annotated else "byte"
    return Schema(
        type="string",
        format=fmt,
        **_kwargs(python_type, annotated),
        **extra,
    )
async def _decode_body(operation, request):
    """
    Decode the HTTP request body according to the operation's body type.

    Returns None when the operation takes no body or the body is empty;
    returns the raw stream for Stream body types. Raises BadRequestError on
    decode failure, InternalServerError on any other error.
    """
    body_type = get_body_type(operation)
    if not body_type:
        return None
    stripped, _ = fondat.types.split_annotated(body_type)
    if is_subclass(stripped, Stream):
        return request.body  # pass streams through undecoded
    raw = await stream_bytes(request.body)
    if not raw:
        return None  # empty body is no body
    try:
        with DecodeError.path_on_error("«body»"):
            return get_codec(Binary, body_type).decode(raw)
    except DecodeError as de:
        raise BadRequestError from de
    except Exception as e:
        raise InternalServerError from e
def validate(value: Any, type_hint: Any) -> NoneType:
    """
    Validate a value against a type hint.

    Applies Validator annotations attached to the hint, then performs type
    validation. Raises ValidationError if the value does not conform.
    """
    python_type, annotations = split_annotated(type_hint)
    origin = typing.get_origin(python_type)
    args = typing.get_args(python_type)
    # validate using specified validator annotations
    for annotation in annotations:
        if isinstance(annotation, Validator):
            annotation.validate(value)
    # aggregate type validation — delegated entirely; returns early so the
    # basic/structured checks below never run for Any/Union/Literal
    if python_type is Any:
        return
    elif origin is Union:
        return _validate_union(value, args)
    elif origin is Literal:
        return _validate_literal(value, args)
    # TypedDict: treat as a plain dict for the basic isinstance check below
    if is_subclass(python_type, dict) and hasattr(python_type, "__annotations__"):
        origin = dict
    # basic type validation
    if origin and not is_instance(value, origin):
        raise ValidationError(f"expecting {origin.__name__}; received {type(value)}")
    elif not origin and not is_instance(value, python_type):
        raise ValidationError(f"expecting {python_type}; received {type(value)}")
    elif python_type is int and is_instance(value, bool):
        # bool is a subclass of int; reject it explicitly
        raise ValidationError("expecting int; received bool")
    elif is_subclass(origin, Iterable) and is_instance(value, (str, bytes, bytearray)):
        # str/bytes are iterable, but must not satisfy an Iterable hint
        raise ValidationError(f"expecting Iterable; received {type(value)}")
    # structured type validation — order matters: TypedDict before Mapping,
    # Mapping/tuple before the generic Iterable case
    if is_subclass(python_type, dict) and hasattr(python_type, "__annotations__"):
        return _validate_typeddict(value, python_type)
    elif is_subclass(origin, Mapping):
        return _validate_mapping(value, python_type, args)
    elif is_subclass(origin, tuple):
        return _validate_tuple(value, python_type, args)
    elif is_subclass(origin, Iterable):
        return _validate_iterable(value, python_type, args)
    elif dataclasses.is_dataclass(python_type):
        return _validate_dataclass(value, python_type)
def redact_passwords(hint: Any, value: Any, redaction: str = "__REDACTED__"):
    """
    Redact password fields in a dataclass or TypedDict value, in place.

    Recurses into nested dataclass/mapping fields; replaces any non-None str
    field annotated with Password by the redaction string. Raises TypeError
    for unsupported value types.
    """
    if is_dataclass(value):
        getter = functools.partial(getattr, value)
        setter = functools.partial(setattr, value)
    elif isinstance(value, Mapping):
        getter = value.get
        setter = value.__setitem__
    else:
        raise TypeError("type must be dataclass or TypedDict")
    container_type, _ = split_annotated(strip_optional(hint))
    for name, nested_hint in container_type.__annotations__.items():
        nested_type, nested_annotations = split_annotated(strip_optional(nested_hint))
        nested_value = getter(name)
        if hasattr(nested_type, "__annotations__") and (
            is_dataclass(nested_value) or isinstance(nested_value, Mapping)
        ):
            # structured field: recurse
            redact_passwords(nested_hint, nested_value)
        elif (
            nested_value is not None
            and is_subclass(nested_type, str)
            and Password in nested_annotations
        ):
            setter(name, redaction)
def _blob_codec_provider(python_type):
    """
    Return a codec that encodes/decodes bytes or bytearray values to/from a
    SQLite BLOB column, or None for unsupported types.
    """
    if is_subclass(python_type, (bytes, bytearray)):

        @affix_type_hints(localns=locals())
        class BlobCodec(SQLiteCodec[python_type]):
            sql_type = "BLOB"

            @validate_arguments
            def encode(self, value: python_type) -> bytes:
                return bytes(value)

            @validate_arguments
            def decode(self, value: bytes) -> python_type:
                # reconstruct the declared type (bytes or bytearray)
                return python_type(value)

        return BlobCodec()
def _integer_codec_provider(python_type):
    """
    Return a codec that encodes/decodes int (including bool) values to/from a
    SQLite INTEGER column, or None for unsupported types.
    """
    if is_subclass(python_type, int):  # includes bool

        @affix_type_hints(localns=locals())
        class IntegerCodec(SQLiteCodec[python_type]):
            sql_type = "INTEGER"

            @validate_arguments
            def encode(self, value: python_type) -> int:
                return int(value)

            @validate_arguments
            def decode(self, value: int) -> python_type:
                # reconstruct the declared type (int or bool)
                return python_type(value)

        return IntegerCodec()
def _real_codec_provider(python_type):
    """
    Return a codec that encodes/decodes float values to/from a SQLite REAL
    column, or None for unsupported types.
    """
    if is_subclass(python_type, float):

        @affix_type_hints(localns=locals())
        class RealCodec(SQLiteCodec[python_type]):
            sql_type = "REAL"

            @validate_arguments
            def encode(self, value: python_type) -> float:
                return float(value)

            @validate_arguments
            def decode(self, value: float) -> python_type:
                return python_type(value)

        return RealCodec()
def _typeddict_schema(*, python_type, annotated, origin, args, processor, **_):
    """
    Build an OpenAPI object schema for a TypedDict type, or None.

    If a component schema annotation is present, the schema is registered in
    the OpenAPI document's components and a $ref to it is returned; otherwise
    the inline schema is returned.
    """
    if is_subclass(python_type, dict) and hasattr(python_type, "__annotations__"):
        # already processed: return the cached $ref (also breaks recursion
        # for self-referential TypedDicts)
        if ref := processor.references.get(python_type):
            return ref
        component_schema = _get_component_schema(annotated)
        if component_schema:
            name = component_schema.name or processor.component_schema_name(
                python_type.__name__
            )
            ref = {"$ref": f"#/components/schemas/{name}"}
            # register the ref BEFORE building the schema so nested/recursive
            # references to this type resolve to the component
            processor.references[python_type] = ref
        hints = typing.get_type_hints(python_type, include_extras=True)
        # empty required list must be omitted (None), not emitted as []
        required = list(python_type.__required_keys__) or None
        schema = Schema(
            type="object",
            properties={key: processor.schema(pytype) for key, pytype in hints.items()},
            required=required,
            additionalProperties=False,
            **_kwargs(python_type, annotated),
        )
        if component_schema:
            processor.openapi.components.schemas[name] = schema
            return ref
        return schema
def pass_provider(python_type):
    """Return the first pass-through codec matching python_type, or None."""
    return next(
        (codec for codec in _pass_codecs if is_subclass(python_type, codec.python_type)),
        None,
    )
def _iterable(codec_type, python_type):
    """
    Build a codec for an iterable (non-mapping) type, for the requested codec
    class (JSON, String or Binary). Returns None if python_type is not a
    supported iterable type. Raises TypeError for malformed parameterization.
    """
    # bare Iterable origin decodes into a list; any concrete declared type
    # (list, set, ...) is reconstructed as-is
    decode_type = list if get_origin(python_type) is Iterable else python_type
    python_type, _ = split_annotated(python_type)
    if is_subclass(python_type, Iterable) and not is_subclass(python_type, (str, bytes, bytearray)):
        # unparameterized iterable class: items are untyped
        origin = python_type
        args = (Any, )
    else:
        origin = get_origin(python_type)
        if not is_subclass(origin, Iterable) or is_subclass(origin, Mapping):
            return  # not an iterable type this factory handles
        args = get_args(python_type)
        if len(args) != 1:
            raise TypeError("expecting Iterable[T]")
    item_type = args[0]
    is_set = is_subclass(origin, set)  # sets are encoded in sorted order
    if codec_type is JSON:
        item_codec = get_codec(JSON, item_type)
        _json_type = list[item_codec.json_type]

        @affix_type_hints(localns=locals())
        class _Iterable_JSON(JSON[python_type]):
            json_type = _json_type

            def encode(self, value: python_type) -> _json_type:
                # reject str: it is iterable but must not encode as an array
                if not isinstance(value, Iterable) or isinstance(value, str):
                    raise EncodeError
                if is_set:
                    value = sorted(value)
                # TODO: path
                return [item_codec.encode(item) for item in value]

            def decode(self, value: _json_type) -> python_type:
                if not isinstance(value, list):
                    raise DecodeError
                # TODO: path
                return decode_type((item_codec.decode(item) for item in value))

        return _Iterable_JSON()
    if codec_type is String:
        item_codec = get_codec(String, item_type)

        @affix_type_hints(localns=locals())
        class _Iterable_String(String[python_type]):
            def encode(self, value: python_type) -> str:
                if not isinstance(value, Iterable) or isinstance(value, str):
                    raise EncodeError
                if is_set:
                    value = sorted(value)
                # items are joined as a CSV record
                return _csv_encode((item_codec.encode(item) for item in value))

            def decode(self, value: str) -> python_type:
                # TODO: path
                return decode_type(
                    (item_codec.decode(item) for item in _csv_decode(value))
                )

        return _Iterable_String()
    if codec_type is Binary:
        # binary form is the UTF-8 encoding of the JSON representation
        json_codec = get_codec(JSON, python_type)

        @affix_type_hints(localns=locals())
        class _Iterable_Binary(Binary[python_type]):
            content_type = "application/json"

            def encode(self, value: python_type) -> bytes:
                if not isinstance(value, Iterable) or isinstance(value, str):
                    raise EncodeError
                return json.dumps(json_codec.encode(value)).encode()

            def decode(self, value: Union[bytes, bytearray]) -> python_type:
                return json_codec.decode(_s2j(_b2s(value)))

        return _Iterable_Binary()
def _mapping(codec_type, python_type):
    """
    Build a codec for a Mapping[KT, VT] type, for the requested codec class
    (JSON, String or Binary). Returns None if python_type is not a supported
    mapping type (TypedDicts are excluded — they have their own codecs).
    Raises TypeError for malformed parameterization.
    """
    python_type, _ = split_annotated(python_type)
    if is_subclass(python_type, Mapping):
        # unparameterized mapping class: keys and values are untyped
        origin = Mapping
        args = [Any, Any]
    else:
        origin = get_origin(python_type)
        # __annotations__ present means this is a TypedDict, not a Mapping
        if not is_subclass(origin, Mapping) or getattr(python_type, "__annotations__", None):
            return  # not a Mapping
        args = get_args(python_type)
        if len(args) != 2:
            raise TypeError("expecting Mapping[KT, VT]")
    if codec_type is JSON:
        # JSON object keys must be strings, so keys go through the String codec
        key_codec = get_codec(String, args[0])
        value_codec = get_codec(JSON, args[1])
        _json_type = dict[str, value_codec.json_type]

        @affix_type_hints(localns=locals())
        class _Mapping_JSON(JSON[python_type]):
            json_type = _json_type

            def encode(self, value: python_type) -> _json_type:
                if not isinstance(value, Mapping):
                    raise EncodeError
                result = {}
                for k, v in value.items():
                    key = key_codec.encode(k)
                    # attach the key to any codec error path for diagnostics
                    with CodecError.path_on_error(key):
                        result[key] = value_codec.encode(v)
                return result

            def decode(self, value: _json_type) -> python_type:
                if not isinstance(value, Mapping):
                    raise DecodeError
                result = {}
                for k, v in value.items():
                    key = key_codec.decode(k)
                    with CodecError.path_on_error(key):
                        result[key] = value_codec.decode(v)
                return result

        return _Mapping_JSON()
    if codec_type is String:
        # string form is the JSON representation, serialized
        json_codec = get_codec(JSON, python_type)

        @affix_type_hints(localns=locals())
        class _Mapping_String(String[python_type]):
            def encode(self, value: python_type) -> str:
                if not isinstance(value, Mapping):
                    raise EncodeError
                return json.dumps(json_codec.encode(value))

            def decode(self, value: str) -> python_type:
                return json_codec.decode(_s2j(value))

        return _Mapping_String()
    if codec_type is Binary:
        # binary form is the UTF-8 encoding of the string (JSON) form
        string_codec = get_codec(String, python_type)

        @affix_type_hints(localns=locals())
        class _Mapping_Binary(Binary[python_type]):
            content_type = "application/json"

            def encode(self, value: python_type) -> bytes:
                if not isinstance(value, Mapping):
                    raise EncodeError
                return string_codec.encode(value).encode()

            def decode(self, value: Union[bytes, bytearray]) -> python_type:
                return string_codec.decode(_b2s(value))

        return _Mapping_Binary()
async def _handle(self, request: Request) -> Response:
    """
    Handle an HTTP request: resolve the target resource and operation from
    the request path, decode parameters and body, invoke the operation, and
    encode the result into a Response.

    Raises NotFoundError, MethodNotAllowedError or BadRequestError for
    client errors; InternalServerError for encoding failures.
    """
    if not request.path.startswith(self.path):
        raise NotFoundError
    path = request.path[len(self.path):]
    response = Response()
    method = request.method.lower()
    segments = path.split("/") if path else ()
    resource = self.root
    operation = None
    # walk path segments: each resolves to a subordinate resource, except
    # possibly the last, which may name an operation
    for segment in segments:
        if operation:  # cannot have segments after operation name
            raise NotFoundError
        try:
            resource = await _subordinate(resource, segment)
        except NotFoundError:
            # not a subordinate resource; try it as an operation attribute
            try:
                operation = getattr(resource, segment)
                if not fondat.resource.is_operation(operation):
                    raise NotFoundError
            except AttributeError:
                raise NotFoundError
    if operation:  # operation name as segment (@query or @mutation)
        fondat_op = getattr(operation, "_fondat_operation", None)
        if not fondat_op or not fondat_op.method == method:
            raise MethodNotAllowedError
    else:  # no remaining segments; operation name as HTTP method
        operation = getattr(resource, method, None)
        if not fondat.resource.is_operation(operation):
            raise MethodNotAllowedError
    body = await _decode_body(operation, request)
    params = {}
    signature = inspect.signature(operation)
    hints = typing.get_type_hints(operation, include_extras=True)
    return_hint = hints.get("return", type(None))
    # bind each operation parameter from body or query per its "in" annotation
    for name, hint in hints.items():
        if name == "return":
            continue
        # a parameter without a default is required
        required = signature.parameters[name].default is inspect.Parameter.empty
        param_in = get_param_in(operation, name, hint)
        if isinstance(param_in, AsBody) and body is not None:
            params[name] = body
        elif isinstance(param_in, InBody) and body is not None:
            if param_in.name in body:
                params[name] = body[param_in.name]
        elif isinstance(param_in, InQuery):
            if param_in.name in request.query:
                codec = get_codec(String, hint)
                try:
                    with DecodeError.path_on_error(param_in.name):
                        params[name] = codec.decode(request.query[param_in.name])
                except DecodeError as de:
                    raise BadRequestError from de
        if name not in params and required:
            # a missing required parameter is acceptable only if its type is
            # Optional, in which case it is passed as None
            if not is_optional(hint):
                raise BadRequestError from DecodeError(
                    "required parameter", ["«params»", name]
                )
            params[name] = None
    result = await operation(**params)
    # non-stream results are encoded into an in-memory byte stream
    if not is_subclass(return_hint, Stream):
        return_codec = get_codec(Binary, return_hint)
        try:
            result = BytesStream(return_codec.encode(result), return_codec.content_type)
        except Exception as e:
            raise InternalServerError from e
    response.body = result
    response.headers["Content-Type"] = response.body.content_type
    if response.body.content_length is not None:
        if response.body.content_length == 0:
            # zero-length body: respond 204 No Content
            response.status = http.HTTPStatus.NO_CONTENT.value
        else:
            response.headers["Content-Length"] = str(response.body.content_length)
    return response
def typeddict_codec(
    typeddict: Any,
    columns: Optional[Sequence[str]] = None,
    keys: Optional[Mapping[str, str]] = None,
    codecs: Optional[Mapping[str, Any]] = None,
):
    """
    Return a codec that encodes/decodes a typed dictionary to/from a CSV row.
    A CSV row is represented as a list of strings.

    Parameters:
    • typeddict: TypedDict type to encode/decode
    • columns: sequence of column names
    • keys: mapping between columns and dictionary keys
    • codecs: mapping between columns and codecs

    The columns parameter specifies the names of CSV columns, and the order
    they are encoded in a row. If the columns parameter is omitted, then
    columns will be all dictionary keys, in the order they are defined in the
    TypedDict.

    The keys mapping specifies the mapping between columns and dictionary
    keys. If no mapping for a given column is specified, then the column will
    map to the dictionary key of the same name.

    The codecs mapping specifies which codecs are used to encode columns. If
    no mapping for a given column is provided, then the default codec for its
    associated field is used.

    Raises TypeError if typeddict is not a TypedDict type.
    """
    if not is_subclass(typeddict, dict) or getattr(
        typeddict, "__annotations__", None) is None:
        raise TypeError("typeddict parameter must be a TypedDict")
    hints = get_type_hints(typeddict, include_extras=True)
    if columns is None:
        columns = tuple(key for key in hints.keys())
    if keys is None:
        keys = {key: key for key in hints}
    # restrict the column→key mapping to the selected columns
    keys = {column: key for column, key in keys.items() if column in columns}
    if codecs is None:
        codecs = {}
    # fill in a default String codec for any column without an explicit codec
    codecs = {
        column: codecs.get(column, get_codec(String, hints[keys[column]]))
        for column in columns
        if column in keys
    }
    # NOTE(review): this iterates `keys` by column name but indexes `hints`
    # (keyed by dictionary key) — assumes column names equal dictionary keys;
    # TODO confirm behavior when a custom `keys` mapping renames columns.
    optional_fields = {key for key in keys if is_optional(hints[key])}

    class TypedDictRowCodec(Codec[typeddict, list[str]]):
        """Encodes/decodes a TypedDict value to/from a CSV row."""

        def __init__(self, columns: Sequence[str]):
            self.columns = columns

        def encode(self, value: typeddict) -> list[str]:
            """
            Encode from TypedDict value to CSV row.

            If a field value is None, it will be represented in a column as an
            empty string.
            """
            return [
                codecs[column].encode(value.get(keys[column]))
                for column in self.columns
            ]

        def decode(self, values: list[str]) -> typeddict:
            """
            Decode from CSV row to TypedDict value.

            If a column to decode contains an empty string value, it will be
            represented as None if the associated field is optional.
            """
            items = {}
            for column, value in zip(self.columns, values):
                key = keys.get(column)
                if not key:  # ignore unmapped column
                    continue
                if value == "" and key in optional_fields:
                    items[key] = None
                else:
                    with DecodeError.path_on_error(column):
                        items[key] = codecs[column].decode(value)
            return typeddict(items)

    return TypedDictRowCodec(columns=columns)