def test_subset_dataclass_optional_subset():
    """Only the fields named in `optional` may carry an Optional type in the derived dataclass."""
    Source = make_datacls("Foo", (("a", int), ("b", str), ("c", float)))
    optional_names = {"a", "b"}
    Derived = derive_datacls("Bar", Source, optional=optional_names)
    for member in fields(Derived):
        # an Optional field type implies membership in the optional set
        if is_optional(member.type):
            assert member.name in optional_names
def test_is_optional():
    """is_optional must see through Annotated and Union nesting to find Optional."""
    should_match = (
        Optional[str],
        Annotated[Optional[str], ""],
        Annotated[Union[str, Annotated[Optional[int], ""]], ""],
    )
    should_not_match = (
        str,
        Annotated[str, ""],
        Annotated[Union[str, Annotated[int, ""]], ""],
    )
    for hint in should_match:
        assert is_optional(hint)
    for hint in should_not_match:
        assert not is_optional(hint)
def decode(self, value: Any) -> python_type:
    """Decode a JSON object (dict) into an instance of the dataclass."""
    if not isinstance(value, dict):
        raise DecodeError
    kwargs = {}
    for f in fields:
        item_codec = get_codec(JSON, f.type)
        try:
            # keys may be remapped (e.g. reserved-word fields) via _dc_kw
            with CodecError.path_on_error(f.name):
                kwargs[f.name] = item_codec.decode(
                    value[_dc_kw.get(f.name, f.name)]
                )
        except KeyError:
            # key absent: substitute None only for Optional fields that
            # have neither a default value nor a default factory
            has_no_default = (
                f.default is dataclasses.MISSING
                and f.default_factory is dataclasses.MISSING
            )
            if is_optional(f.type) and has_no_default:
                kwargs[f.name] = None
    try:
        return python_type(**kwargs)
    except Exception as e:
        raise DecodeError from e
def __init__(self, **kwargs):
    """Initialize attributes from keyword arguments, applying defaults and None for Optional fields."""
    hints = get_type_hints(self, include_extras=True)
    # reject keywords that are not declared fields
    for name in kwargs:
        if name not in fields:
            raise TypeError(f"unexpected keyword argument: '{name}'")
    for field in fields.values():
        value = kwargs.get(field.name, MISSING)
        if value is MISSING:
            # resolution order: default_factory, then default, then None
            # for Optional fields; otherwise the argument was required
            if field.default_factory is not dataclasses.MISSING:
                value = field.default_factory()
            elif field.default is not dataclasses.MISSING:
                value = field.default
            elif is_optional(hints[field.name]):
                value = None
            else:
                raise TypeError(
                    f"missing required keyword argument: '{field.name}'")
        setattr(self, field.name, value)
def _dataclass_schema(*, python_type, annotated, origin, args, processor, **_):
    """Generate an OpenAPI schema for a dataclass type; returns None for non-dataclasses."""
    if dataclasses.is_dataclass(python_type):
        # type already registered during this pass (self-referential or shared
        # dataclass): return its existing $ref instead of re-deriving
        if ref := processor.references.get(python_type):
            return ref
        component_schema = _get_component_schema(annotated)
        if component_schema:
            name = component_schema.name or processor.component_schema_name(
                python_type.__name__
            )
            ref = {"$ref": f"#/components/schemas/{name}"}
            # register the $ref BEFORE recursing into field schemas so that
            # self-referential dataclasses resolve to the $ref and don't loop
            processor.references[python_type] = ref
        hints = typing.get_type_hints(python_type, include_extras=True)
        # required = fields with no default, no default factory, and a
        # non-Optional type hint
        required = {
            f.name
            for f in dataclasses.fields(python_type)
            if f.default is dataclasses.MISSING
            and f.default_factory is dataclasses.MISSING
            and not is_optional(hints[f.name])
        }
        # property names may be remapped (e.g. reserved words) via _dc_kw
        properties = {
            _dc_kw.get(key, key): processor.schema(pytype)
            for key, pytype in hints.items()
        }
        for key, schema in properties.items():
            if key not in required and not fondat.validation.is_valid(schema, Reference):
                # NOTE(review): assigns None (clearing the attribute) rather than
                # True — presumably intended to drop `nullable` from non-required,
                # non-$ref property schemas; confirm against the Schema model
                schema.nullable = None
        schema = Schema(
            type="object",
            properties=properties,
            required=required or None,
            additionalProperties=False,
            **_kwargs(python_type, annotated),
        )
        if component_schema:
            # store the full schema under components; callers get the $ref
            processor.openapi.components.schemas[name] = schema
            return ref
        return schema
async def _handle(self, request: Request) -> Response:
    """
    Route an HTTP request to a resource operation, decode its parameters,
    invoke it, and encode the result into an HTTP response.

    Raises NotFoundError, MethodNotAllowedError, BadRequestError, or
    InternalServerError as appropriate.
    """
    if not request.path.startswith(self.path):
        raise NotFoundError
    path = request.path[len(self.path):]
    response = Response()
    method = request.method.lower()
    segments = path.split("/") if path else ()
    resource = self.root
    operation = None
    # walk path segments: each resolves to a subordinate resource, except
    # possibly the last, which may name an operation on the resource
    for segment in segments:
        if operation:  # cannot have segments after operation name
            raise NotFoundError
        try:
            resource = await _subordinate(resource, segment)
        except NotFoundError:
            # not a subordinate resource; try it as an operation name
            try:
                operation = getattr(resource, segment)
                if not fondat.resource.is_operation(operation):
                    raise NotFoundError
            except AttributeError:
                raise NotFoundError
    if operation:  # operation name as segment (@query or @mutation)
        fondat_op = getattr(operation, "_fondat_operation", None)
        if not fondat_op or not fondat_op.method == method:
            raise MethodNotAllowedError
    else:  # no remaining segments; operation name as HTTP method
        operation = getattr(resource, method, None)
        if not fondat.resource.is_operation(operation):
            raise MethodNotAllowedError
    body = await _decode_body(operation, request)
    params = {}
    signature = inspect.signature(operation)
    hints = typing.get_type_hints(operation, include_extras=True)
    return_hint = hints.get("return", type(None))
    # bind each operation parameter from body or query string per its
    # declared parameter location (AsBody / InBody / InQuery)
    for name, hint in hints.items():
        if name == "return":
            continue
        # required = parameter declares no default value
        required = signature.parameters[
            name].default is inspect.Parameter.empty
        param_in = get_param_in(operation, name, hint)
        if isinstance(param_in, AsBody) and body is not None:
            params[name] = body
        elif isinstance(param_in, InBody) and body is not None:
            if param_in.name in body:
                params[name] = body[param_in.name]
        elif isinstance(param_in, InQuery):
            if param_in.name in request.query:
                codec = get_codec(String, hint)
                try:
                    with DecodeError.path_on_error(param_in.name):
                        params[name] = codec.decode(
                            request.query[param_in.name])
                except DecodeError as de:
                    raise BadRequestError from de
        if name not in params and required:
            # a required parameter may still be satisfied with None if its
            # type hint is Optional; otherwise the request is malformed
            if not is_optional(hint):
                raise BadRequestError from DecodeError(
                    "required parameter", ["«params»",
                                           name])
            params[name] = None
    result = await operation(**params)
    # non-Stream results are encoded to binary and wrapped in a BytesStream
    if not is_subclass(return_hint, Stream):
        return_codec = get_codec(Binary, return_hint)
        try:
            result = BytesStream(return_codec.encode(result),
                                 return_codec.content_type)
        except Exception as e:
            raise InternalServerError from e
    response.body = result
    response.headers["Content-Type"] = response.body.content_type
    if response.body.content_length is not None:
        if response.body.content_length == 0:
            # empty body: report 204 No Content instead of a zero length
            response.status = http.HTTPStatus.NO_CONTENT.value
        else:
            response.headers["Content-Length"] = str(
                response.body.content_length)
    return response
def test_derive_datacls_optional_true():
    """optional=True makes every field of the derived dataclass Optional."""
    Source = make_datacls("Foo", (("a", int), ("b", str), ("c", float)))
    Derived = derive_datacls("Bar", Source, optional=True)
    assert all(is_optional(member.type) for member in fields(Derived))
def typeddict_codec(
    typeddict: Any,
    columns: Optional[Sequence[str]] = None,
    keys: Optional[Mapping[str, str]] = None,
    codecs: Optional[Mapping[str, Any]] = None,
):
    """
    Return a codec that encodes/decodes a typed dictionary to/from a CSV row.

    A CSV row is represented as a list of strings.

    Parameters:
    • typeddict: TypedDict type to encode/decode
    • columns: sequence of column names
    • keys: mapping between columns and dictionary keys
    • codecs: mapping between columns and codecs

    The columns parameter specifies the names of CSV columns, and the order they
    are encoded in a row. If the columns parameter is omitted, then columns will
    be all dictionary keys, in the order they are defined in the TypedDict.

    The keys mapping specifies the mapping between columns and dictionary keys.
    If no mapping for a given column is specified, then the column will map to
    the dictionary key of the same name.

    The codecs mapping specifies which codecs are used to encode columns. If no
    mapping for a given column is provided, then the default codec for its
    associated field is used.

    Raises TypeError if typeddict is not a TypedDict type.
    """
    if not is_subclass(typeddict, dict) or getattr(
            typeddict, "__annotations__", None) is None:
        raise TypeError("typeddict parameter must be a TypedDict")
    hints = get_type_hints(typeddict, include_extras=True)
    if columns is None:
        columns = tuple(hints)
    if keys is None:
        keys = {key: key for key in hints}
    # restrict the column→key mapping to the columns actually encoded
    keys = {column: key for column, key in keys.items() if column in columns}
    if codecs is None:
        codecs = {}
    # one codec per mapped column; default to the String codec for the
    # column's associated field type
    codecs = {
        column: codecs.get(column, get_codec(String, hints[keys[column]]))
        for column in columns
        if column in keys
    }
    # BUG FIX: previously computed as {key for key in keys ...}, which iterates
    # COLUMN names — with a non-identity keys mapping, hints[column] raises
    # KeyError, and the resulting set could never match the dictionary keys
    # tested in decode. Optionality is a property of dictionary keys, so derive
    # the set from the mapped keys.
    optional_fields = {key for key in keys.values() if is_optional(hints[key])}

    class TypedDictRowCodec(Codec[typeddict, list[str]]):
        """Encodes/decodes a TypedDict value to/from a CSV row."""

        def __init__(self, columns: Sequence[str]):
            self.columns = columns

        def encode(self, value: typeddict) -> list[str]:
            """
            Encode from TypedDict value to CSV row.

            If a field value is None, it will be represented in a column as an
            empty string. A column with no key mapping is encoded as an empty
            string (decode likewise ignores unmapped columns).
            """
            row = []
            for column in self.columns:
                key = keys.get(column)
                # unmapped columns previously raised KeyError here; emit an
                # empty string to mirror decode's skip-unmapped behavior
                row.append(codecs[column].encode(value.get(key)) if key else "")
            return row

        def decode(self, values: list[str]) -> typeddict:
            """
            Decode from CSV row to TypedDict value.

            If a column to decode contains an empty string value, it will be
            represented as None if the associated field is optional.
            """
            items = {}
            for column, value in zip(self.columns, values):
                key = keys.get(column)
                if not key:  # ignore unmapped column
                    continue
                if value == "" and key in optional_fields:
                    items[key] = None
                else:
                    with DecodeError.path_on_error(column):
                        items[key] = codecs[column].decode(value)
            return typeddict(items)

    return TypedDictRowCodec(columns=columns)