Example #1
def test_get_codec_annotated_codec():
    class CustomCodec(JSON[int]):
        def encode(self, value: int) -> Any:
            return 123

        def decode(self, value: Any) -> int:
            return 123

    json_codec = CustomCodec()
    CustomType = Annotated[int, json_codec]
    assert get_codec(JSON, CustomType) is json_codec
    assert get_codec(String, CustomType) is not json_codec
Example #2
def test_dataclass_json_encode_decode_keyword():
    DC = make_dataclass("DC", [("in_", str), ("inn", str)])
    codec = get_codec(JSON, DC)
    dc = DC(in_="a", inn="b")
    encoded = codec.encode(dc)
    assert encoded == {"in": "a", "inn": "b"}
    assert codec.decode(encoded) == dc
Example #3
def test_set_str_decode_crazy_csv_scenario():
    assert get_codec(String, set[str]).decode('a,"b,c",d,"""e"""') == {
        "a",
        "b,c",
        "d",
        '"e"',
    }
Example #4
def test_typeddict_json_encode_optional_success():
    class TD(TypedDict, total=False):  # https://bugs.python.org/issue42059
        ejc: float
        ejd: bool

    value = dict(ejc=123.45)
    assert get_codec(JSON, TD).encode(value) == value
Example #5
def test_tuple_bytes_decode_success():
    assert get_codec(Binary, tuple[str, str, str]).decode(b'["a", "b", "c"]') == (
        "a",
        "b",
        "c",
    )
Example #6
def test_tuple_ellipsis_str_decode_crazy_csv_scenario():
    assert get_codec(String, tuple[str, ...]).decode('a,"b,c",d,"""e"""') == (
        "a",
        "b,c",
        "d",
        '"e"',
    )
Example #7
def test_tuple_json_decode_success():
    assert get_codec(JSON, tuple[float, float, float]).decode([1.2, 3.4, 5.6]) == (
        1.2,
        3.4,
        5.6,
    )
Example #8
def test_typeddict_json_decode_optional_success():
    class TD(TypedDict, total=False):  # https://bugs.python.org/issue42059
        djc: int
        djd: str

    value = dict(djc=12345)
    assert get_codec(JSON, TD).decode(value) == value
Example #9
def currency_codec(python_type: Any,
                   prefix: str = "",
                   suffix: str = "",
                   precision: Optional[int] = None):
    """
    Return a codec that encodes/decodes a number as a currency value; optionally encodes with
    fixed-point precision.

    Parameters:
    • python_type: type of the value to be encoded/decoded
    • prefix: currency prefix (e.g. "$")
    • suffix: currency suffix
    • precision: round encoded value to number of digits  [floating point]
    """

    codec = get_codec(String, python_type)

    class CurrencyCodec(String[python_type]):
        def encode(self, value: python_type) -> str:
            return f"{prefix}{_round(value, precision)}{suffix}" if value is not None else ""

        def decode(self, value: str) -> python_type:
            result = codec.decode(value.lstrip(prefix).rstrip(suffix))
            if precision is not None:
                result = round(result, precision)
            return result

    return CurrencyCodec()
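
A brief usage sketch for currency_codec (the values below are illustrative; the exact
encoded string depends on the library's _round helper):

# Hedged usage sketch: build a currency codec for float values with a "$" prefix
# and two digits of precision, then round-trip a value through it.
usd = currency_codec(float, prefix="$", precision=2)
encoded = usd.encode(1234.5)    # e.g. "$1234.50" (exact form depends on _round)
decoded = usd.decode(encoded)   # prefix/suffix stripped, rounded to 2 digits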
Example #10
def json_merge_diff(*, old: Any, new: Any, type: type = Any) -> Any:
    """
    Return a JSON Merge Patch document per RFC 7386, the result of comparing the JSON
    representations of the specified old and new values.
    """
    codec = get_codec(JSON, type)
    return _json_merge_diff(codec.encode(old), codec.encode(new))
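
As an illustration (the dict values are chosen here, not taken from the library), diffing two
plain dicts yields the RFC 7386 merge patch that transforms the old value into the new one:

old = {"a": 1, "b": 2}
new = {"a": 1, "b": 3, "c": 4}
patch = json_merge_diff(old=old, new=new)  # per RFC 7386: {"b": 3, "c": 4}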
Example #11
def json_merge_patch(*, value: Any, type: type = Any, patch: Any) -> Any:
    """
    Return the result of applying a JSON Merge Patch document to the JSON representation of
    a specified value, per RFC 7386.

    Parameters:
    • value: value to be patched
    • type: type of value to be patched
    • patch: JSON Merge Patch document to apply to value
    """
    codec = get_codec(JSON, type)
    result = codec.decode(_json_merge_patch(codec.encode(value), patch))
    return result
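
Continuing the illustration above, applying such a merge patch document reproduces the new
value (again, the values are hypothetical):

value = {"a": 1, "b": 2}
patch = {"b": 3, "c": 4}
patched = json_merge_patch(value=value, patch=patch)  # per RFC 7386: {"a": 1, "b": 3, "c": 4}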
Example #12
async def test_nested_containers():
    @resource
    class R1:
        @operation
        async def get(self) -> str:
            return "str"

    c1 = container_resource({"r1": R1()})
    c2 = container_resource({"c1": c1})
    info = fondat.openapi.Info(title="title", version="version")
    doc = generate_openapi(resource=c2, info=info)
    validate(doc, fondat.openapi.OpenAPI)
    c1_r1 = doc.paths.get("/c1/r1")
    assert c1_r1 is not None
    assert c1_r1.get is not None
    js = get_codec(JSON, fondat.openapi.OpenAPI).encode(doc)
Example #13
async def _decode_body(operation, request):
    body_type = get_body_type(operation)
    if not body_type:
        return None
    python_type, _ = fondat.types.split_annotated(body_type)
    if is_subclass(python_type, Stream):
        return request.body
    content = await stream_bytes(request.body)
    if len(content) == 0:
        return None  # empty body is no body
    try:
        with DecodeError.path_on_error("«body»"):
            result = get_codec(Binary, body_type).decode(content)
    except DecodeError as de:
        raise BadRequestError from de
    except Exception as e:
        raise InternalServerError from e
    return result
Example #14
async def test_request_body_dataclass():
    @dataclass
    class Model:
        a: int
        b: str

    @resource
    class Resource:
        @operation
        async def post(self, val: Annotated[Model, AsBody]) -> Model:
            return val

    application = Application(Resource())
    m = Model(a=1, b="s")
    codec = get_codec(Binary, Model)
    request = Request(method="POST", path="/", body=BytesStream(codec.encode(m)))
    response = await application(request)
    assert response.status == http.HTTPStatus.OK.value
    assert codec.decode(await body(response)) == m
Example #15
def fixed_codec(python_type: Any, precision: int):
    """
    Return a codec that encodes/decodes a number with fixed-point precision.

    Parameters:
    • python_type: type of the value to be encoded/decoded
    • precision: round encoded value to number of digits
    """

    codec = get_codec(String, python_type)

    class FixedCodec(String[python_type]):
        def encode(self, value: Any) -> str:
            return _round(value, precision) if value is not None else ""

        def decode(self, value: str) -> python_type:
            result = codec.decode(value)
            if precision is not None:
                result = round(result, precision)
            return result

    return FixedCodec()
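
A brief usage sketch (the value is illustrative; the encoded string depends on the library's
_round helper):

ratio = fixed_codec(float, precision=3)
encoded = ratio.encode(0.12345)   # e.g. "0.123" (exact form depends on _round)
decoded = ratio.decode(encoded)   # decoded value is rounded to 3 digits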
Example #16
def percent_codec(python_type: Any, precision: int):
    """
    Return a codec that encodes/decodes a fractional value as a percentage string with
    fixed-point precision.

    Parameters:
    • python_type: type of the value to be encoded/decoded
    • precision: round encoded value to number of digits
    """

    codec = get_codec(String, python_type)

    class PercentCodec(String[python_type]):
        def encode(self, value: python_type) -> str:
            return f"{_round(value * 100, precision)}%"

        def decode(self, value: str) -> python_type:
            result = codec.decode(value.rstrip("%")) / 100
            if precision is not None:
                result = round(result, precision + 2)
            return result

    return PercentCodec()
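
A brief usage sketch (the value is illustrative; the encoded string depends on the library's
_round helper):

rate = percent_codec(float, precision=1)
encoded = rate.encode(0.1234)   # e.g. "12.3%" (exact form depends on _round)
decoded = rate.decode(encoded)  # back to a fraction, rounded to precision + 2 digits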
Example #17
def test_datacls_json_decode_missing_field():
    DC = make_datacls("DC", [("x", str)])
    with pytest.raises(DecodeError):
        get_codec(JSON, DC).decode({})
Example #18
def directory_resource(
    path: Union[Path, str],
    key_type: type = str,
    value_type: type = Stream,
    extension: Optional[str] = None,
    compress: Any = None,
    writeable: bool = False,
    index: bool = True,
    publish: bool = True,
    policies: Optional[Iterable[Policy]] = None,
) -> type:
    """
    Return a new resource that manages files in a directory.

    Parameters:
    • path: path to directory where files are stored
    • key_type: type of key to identify file
    • value_type: type of value stored in each file
    • extension: filename extension to append (including dot)
    • compress: algorithm to compress and decompress file content
    • writeable: can files be written or deleted
    • index: provide index of files with get method
    • publish: publish the operation in documentation
    • policies: security requirements to apply to all operations

    Compression algorithm is any object or module that exposes callable "compress" and
    "decompress" attributes. Examples: bz2, gzip, lzma, zlib. Compression is not supported for
    value_type of Stream.

    If index is true, the resource provides a get operation that returns a paginated list of
    file keys in the directory.
    """

    _path = (Path(path) if isinstance(path, str) else path).expanduser()

    if not _path.is_dir():
        raise FileNotFoundError(f"directory not found: {_path}")

    codec = get_codec(String, key_type)

    Page = make_page_dataclass("Page", key_type)
    FileResource = _file_resource_class(value_type, compress, writeable, publish, policies)

    @resource
    class DirectoryResource:
        def __getitem__(self, key: key_type) -> FileResource:
            return FileResource(
                _path.joinpath(
                    f"{quote(codec.encode(key), safe='')}{extension if extension else ''}"
                )
            )

        if index:

            @operation(publish=publish, policies=policies)
            async def get(
                self, limit: Optional[int] = None, cursor: Optional[bytes] = None
            ) -> Page:
                """Return paginated list of file keys."""
                limit = _limit(limit)
                if cursor is not None:
                    cursor = cursor.decode()
                try:
                    if not extension:
                        names = sorted(
                            entry.name for entry in _path.iterdir() if entry.is_file()
                        )
                    else:
                        names = sorted(
                            entry.name[: -len(extension)]
                            for entry in _path.iterdir()
                            if entry.is_file() and entry.name.endswith(extension)
                        )
                except FileNotFoundError as fnfe:
                    raise InternalServerError from fnfe
                page = Page(items=[], cursor=None, remaining=0)
                for (counter, name) in enumerate(names, 1):
                    if cursor is not None:
                        if name <= cursor:
                            continue
                        cursor = None
                    try:
                        page.items.append(codec.decode(unquote(name)))
                    except ValueError:
                        continue  # ignore name that cannot be decoded
                    if len(page.items) == limit and counter < len(names):
                        page.cursor = name.encode()
                        page.remaining = len(names) - counter
                        break
                return page

    affix_type_hints(DirectoryResource, localns=locals())
    DirectoryResource.__qualname__ = "DirectoryResource"

    return DirectoryResource()
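
A hedged construction sketch (the temporary directory, key and value types are chosen here for
illustration, assuming a Binary codec exists for str values):

import tempfile

with tempfile.TemporaryDirectory() as tmp:
    files = directory_resource(
        path=tmp, key_type=str, value_type=str, extension=".txt", writeable=True
    )
    alice = files["alice"]  # FileResource backed by f"{tmp}/alice.txt"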
Example #19
def _file_resource_class(
    value_type: type,
    compress: Any,
    writeable: bool,
    publish: bool,
    policies: Iterable[Policy],
):

    if value_type is Stream:
        if compress is not None:
            raise TypeError("file resources do not support compression of streams")
        return _stream_resource_class(writeable, publish, policies)

    codec = get_codec(Binary, value_type)

    @resource
    class FileResource:
        def __init__(self, path: Path):
            self.path = path

        @operation(publish=publish, policies=policies)
        async def get(self) -> value_type:
            """Read resource."""
            try:
                with self.path.open("rb") as file:
                    content = file.read()
                if compress:
                    content = compress.decompress(content)
                return codec.decode(content)
            except FileNotFoundError:
                raise NotFoundError
            except Exception as e:
                raise InternalServerError from e

        if writeable:

            @operation(publish=publish, policies=policies)
            async def put(self, value: Annotated[value_type, AsBody]):
                """Write resource."""
                content = codec.encode(value)
                if compress:
                    content = compress.compress(content)
                tmp = _tmp_path(self.path)
                try:
                    with tmp.open("xb") as file:
                        file.write(content)
                    tmp.replace(self.path)
                except Exception as e:
                    raise InternalServerError from e

            @operation(publish=publish, policies=policies)
            async def delete(self):
                """Delete resource."""
                try:
                    self.path.unlink()
                except FileNotFoundError:
                    raise NotFoundError

    affix_type_hints(FileResource, localns=locals())
    FileResource.__qualname__ = "FileResource"
    return FileResource
Example #20
import pytest

import asyncio
import fondat.codec
import fondat.monitoring

from datetime import datetime, timedelta, timezone
from fondat.codec import String, get_codec
from fondat.monitoring import Measurement

_tags = {"name": "test"}

_dt = lambda s: get_codec(String, datetime).decode(s)

_now = lambda: datetime.now(tz=timezone.utc)


@pytest.mark.asyncio
async def test_simple_counter_type():
    simple = fondat.monitoring.SimpleMonitor()
    _type = "counter"
    simple.track("test", _type, _tags, 60, 60)
    await simple.record(
        Measurement(_tags, _dt("2018-12-01T00:00:00Z"), _type, 1))
    await simple.record(
        Measurement(_tags, _dt("2018-12-01T00:00:10.100000Z"), _type, 2))
    await simple.record(
        Measurement(_tags, _dt("2018-12-01T00:00:20.200000Z"), _type, 3))
    await simple.record(
        Measurement(_tags, _dt("2018-12-01T00:00:30.300000Z"), _type, 4))
    await simple.record(
Example #21
def test_dataclass_json_decode_error():
    DC = make_dataclass("DC", [("djx", str)])
    with pytest.raises(DecodeError):
        get_codec(JSON, DC).decode({"djx": False})
Example #22
def test_iterable_string_decode():
    assert get_codec(String, Iterable[int]).decode("1,2,3") == [1, 2, 3]
Example #23
def test_iterable_json_decode():
    assert get_codec(JSON, Iterable[int]).decode([1, 2, 3]) == [1, 2, 3]
Example #24
def test_dataclass_json_decode_invalid_type():
    DC = make_dataclass("DC", [("djx", str)])
    with pytest.raises(DecodeError):
        get_codec(JSON, DC).decode("not_a_dict")
Example #25
def table_resource_class(table: Table,
                         row_resource_type: Optional[type] = None) -> type:
    """
    Return a base class for a table resource.

    Parameters:
    • table: table on which the table resource is based
    • row_resource_type: type to instantiate for table row resource  [implicit]
    """

    if row_resource_type is None:
        row_resource_type = row_resource_class(table)

    @datacls
    class Page:
        items: list[table.schema]
        cursor: Optional[bytes] = None

    fondat.types.affix_type_hints(Page, localns=locals())

    dc_codec = get_codec(JSON, table.schema)
    pk_type = table.columns[table.pk]
    pk_codec = get_codec(JSON, pk_type)
    cursor_codec = get_codec(Binary, pk_type)

    class TableResource:
        """Table resource."""
        def __getitem__(self, pk: pk_type) -> row_resource_type:
            return row_resource_type(pk)

        def __init__(self):
            self.table = table

        @operation
        async def get(
            self,
            limit: Annotated[int, MinValue(1)] = 1000,
            cursor: Optional[bytes] = None,
        ) -> Page:
            """Get paginated list of rows, ordered by primary key."""
            if cursor is not None:
                where = Expression(f"{table.pk} > ",
                                   Param(cursor_codec.decode(cursor), pk_type))
            else:
                where = None
            async with table.database.transaction():
                items = [
                    table.schema(**result) async for result in table.select(
                        order_by=table.pk, limit=limit, where=where)
                ]
                cursor = (cursor_codec.encode(getattr(items[-1], table.pk))
                          if len(items) == limit else None)
            return Page(items=items, cursor=cursor)

        @operation
        async def patch(self, body: Iterable[dict[str, Any]]):
            """
            Insert and/or modify multiple rows in a single transaction.

            Patch body is an iterable of JSON Merge Patch documents; each document must
            contain the primary key of the row to patch.
            """
            async with table.database.transaction():
                for doc in body:
                    pk = doc.get(table.pk)
                    if pk is None:
                        raise ValidationError("missing primary key")
                    row = row_resource_type(pk_codec.decode(pk))
                    try:
                        old = await row._read()
                        try:
                            new = json_merge_patch(value=old,
                                                   type=table.schema,
                                                   patch=doc)
                        except DecodeError as de:
                            raise BadRequestError from de
                        await row._validate(new)
                        await row._update(old, new)
                    except NotFoundError:
                        new = dc_codec.decode(doc)
                        await row._validate(new)
                        await row._insert(new)

        @query
        async def find_pks(
                self, pks: set[table.columns[table.pk]]) -> list[table.schema]:
            """Return rows corresponding to the specified set of primary keys."""
            if not pks:
                return []
            async with table.database.transaction():
                return [
                    table.schema(**row)
                    async for row in table.select(where=Expression(
                        f"{table.pk} IN (",
                        Expression.join((Param(pk, table.columns[table.pk])
                                         for pk in pks), ", "),
                        ")",
                    ))
                ]

    fondat.types.affix_type_hints(TableResource, localns=locals())
    return TableResource
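
For illustration, the body accepted by the patch operation above is an iterable of RFC 7386
merge documents, each carrying the row's primary key (the column names below are hypothetical):

patch_body = [
    {"id": 1, "name": "alice"},  # update the row with primary key 1, or insert it if absent
    {"id": 2, "name": "bob"},
]
# await table_resource.patch(patch_body)  # hypothetical TableResource instance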
Example #26
def test_typeddict_json_decode_success():
    TD = TypedDict("TD", dict(dja=float, djb=bool))
    value = dict(dja=802.11, djb=True)
    assert get_codec(JSON, TD).decode(value) == value
Example #27
            raise NotFoundError
        hints = typing.get_type_hints(attr)
        if not fondat.resource.is_resource(hints.get("return")):
            raise NotFoundError
        if asyncio.iscoroutinefunction(attr):
            return await attr()
        else:
            return attr()

    # resource[item]
    try:
        hints = typing.get_type_hints(resource.__getitem__)
        name = next(iter(hints))
        if name == "return":
            raise NotFoundError
        item = resource[get_codec(String, hints[name]).decode(segment)]
        if not fondat.resource.is_resource(item):
            raise NotFoundError
        return item
    except:
        raise NotFoundError


class InQuery:
    """
    Annotation to indicate an operation parameter is expected in a request query string
    parameter.  This is the default annotation for query operation parameters.

    If the InQuery class is used as the annotation instead of an InQuery(name=...) instance,
    then the name of the query string parameter will be the name of the operation parameter.
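
A hedged sketch of how such an annotation might be applied to a query operation (the resource,
method, and parameter names are made up for illustration, assuming the resource and query
decorators are importable from fondat.resource):

from typing import Annotated
from fondat.resource import resource, query

@resource
class Search:
    @query
    async def find(self, q: Annotated[str, InQuery(name="q")]) -> list[str]:
        """Hypothetical query operation; q is read from the request query string."""
        return [q]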
Example #28
def test_any_dataclass_json_codec_success():
    DC = make_dataclass("DC", [("i", int), ("s", str)])
    dc = DC(1, "a")
    encoded = get_codec(JSON, Any).encode(dc)
    decoded = get_codec(JSON, Any).decode(encoded)
    assert DC(**decoded) == dc
Example #29
 async def _handle(self, request: Request) -> Response:
     if not request.path.startswith(self.path):
         raise NotFoundError
     path = request.path[len(self.path):]
     response = Response()
     method = request.method.lower()
     segments = path.split("/") if path else ()
     resource = self.root
     operation = None
     for segment in segments:
         if operation:  # cannot have segments after operation name
             raise NotFoundError
         try:
             resource = await _subordinate(resource, segment)
         except NotFoundError:
             try:
                 operation = getattr(resource, segment)
                 if not fondat.resource.is_operation(operation):
                     raise NotFoundError
             except AttributeError:
                 raise NotFoundError
     if operation:  # operation name as segment (@query or @mutation)
         fondat_op = getattr(operation, "_fondat_operation", None)
         if not fondat_op or not fondat_op.method == method:
             raise MethodNotAllowedError
     else:  # no remaining segments; operation name as HTTP method
         operation = getattr(resource, method, None)
         if not fondat.resource.is_operation(operation):
             raise MethodNotAllowedError
     body = await _decode_body(operation, request)
     params = {}
     signature = inspect.signature(operation)
     hints = typing.get_type_hints(operation, include_extras=True)
     return_hint = hints.get("return", type(None))
     for name, hint in hints.items():
         if name == "return":
             continue
         required = signature.parameters[
             name].default is inspect.Parameter.empty
         param_in = get_param_in(operation, name, hint)
         if isinstance(param_in, AsBody) and body is not None:
             params[name] = body
         elif isinstance(param_in, InBody) and body is not None:
             if param_in.name in body:
                 params[name] = body[param_in.name]
         elif isinstance(param_in, InQuery):
             if param_in.name in request.query:
                 codec = get_codec(String, hint)
                 try:
                     with DecodeError.path_on_error(param_in.name):
                         params[name] = codec.decode(
                             request.query[param_in.name])
                 except DecodeError as de:
                     raise BadRequestError from de
         if name not in params and required:
             if not is_optional(hint):
                 raise BadRequestError from DecodeError(
                     "required parameter", ["«params»", name])
             params[name] = None
     result = await operation(**params)
     if not is_subclass(return_hint, Stream):
         return_codec = get_codec(Binary, return_hint)
         try:
             result = BytesStream(return_codec.encode(result),
                                  return_codec.content_type)
         except Exception as e:
             raise InternalServerError from e
     response.body = result
     response.headers["Content-Type"] = response.body.content_type
     if response.body.content_length is not None:
         if response.body.content_length == 0:
             response.status = http.HTTPStatus.NO_CONTENT.value
         else:
             response.headers["Content-Length"] = str(
                 response.body.content_length)
     return response
Example #30
def test_any_dataclass_binary_codec_success():
    DC = make_dataclass("DC", [("i", int), ("s", str)])
    dc = DC(1, "a")
    encoded = get_codec(Binary, Any).encode(dc)
    decoded = get_codec(JSON, Any).decode(json.loads(encoded.decode()))
    assert DC(**decoded) == dc