Example #1
import typing

from mitmproxy.utils import typecheck  # import path assumed; typecheck is the project's own helper module


# Recursively convert a value to or from its plain, serializable state:
# make=True rebuilds objects from state, make=False extracts state from objects.
def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.Any:
    if val is None:
        return None
    elif make and hasattr(typeinfo, "from_state"):
        return typeinfo.from_state(val)
    elif not make and hasattr(val, "get_state"):
        return val.get_state()

    typename = str(typeinfo)

    if typename.startswith("typing.List"):
        T = typecheck.sequence_type(typeinfo)
        return [_process(T, x, make) for x in val]
    elif typename.startswith("typing.Tuple"):
        Ts = typecheck.tuple_types(typeinfo)
        if len(Ts) != len(val):
            raise ValueError("Invalid data. Expected {}, got {}.".format(Ts, val))
        return tuple(
            _process(T, x, make) for T, x in zip(Ts, val)
        )
    elif typename.startswith("typing.Dict"):
        k_cls, v_cls = typecheck.mapping_types(typeinfo)
        return {
            _process(k_cls, k, make): _process(v_cls, v, make)
            for k, v in val.items()
        }
    elif typename.startswith("typing.Any"):
        # FIXME: Remove this when we remove flow.metadata
        assert isinstance(val, (int, str, bool, bytes))
        return val
    else:
        return typeinfo(val)
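
For context, here is a minimal usage sketch of the object/state round-trip that the from_state/get_state branches implement. It assumes _process and the typecheck helpers above are importable; the Point class below is a hypothetical illustration, not part of the original code.

import typing


class Point:
    # Hypothetical class exposing the get_state/from_state protocol that the
    # hasattr() checks above probe for.
    def __init__(self, x: int, y: int) -> None:
        self.x, self.y = x, y

    def get_state(self) -> dict:
        return {"x": self.x, "y": self.y}

    @classmethod
    def from_state(cls, state: dict) -> "Point":
        return cls(**state)


# make=False: flatten live objects into plain, serializable state.
state = _process(typing.List[Point], [Point(1, 2), Point(3, 4)], make=False)
# -> [{"x": 1, "y": 2}, {"x": 3, "y": 4}]

# make=True: rebuild live objects from that state.
points = _process(typing.List[Point], state, make=True)
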
Example #3
import json
import typing

from mitmproxy.utils import typecheck  # import path assumed; typecheck is the project's own helper module


# Same conversion helper as above; this variant validates typing.Any values via
# JSON serialization instead of an isinstance() assert.
def _process(typeinfo: typecheck.Type, val: typing.Any,
             make: bool) -> typing.Any:
    if val is None:
        return None
    elif make and hasattr(typeinfo, "from_state"):
        return typeinfo.from_state(val)
    elif not make and hasattr(val, "get_state"):
        return val.get_state()

    typename = str(typeinfo)

    if typename.startswith("typing.List"):
        T = typecheck.sequence_type(typeinfo)
        return [_process(T, x, make) for x in val]
    elif typename.startswith("typing.Tuple"):
        Ts = typecheck.tuple_types(typeinfo)
        if len(Ts) != len(val):
            raise ValueError("Invalid data. Expected {}, got {}.".format(
                Ts, val))
        return tuple(_process(T, x, make) for T, x in zip(Ts, val))
    elif typename.startswith("typing.Dict"):
        k_cls, v_cls = typecheck.mapping_types(typeinfo)
        return {
            _process(k_cls, k, make): _process(v_cls, v, make)
            for k, v in val.items()
        }
    elif typename.startswith("typing.Any"):
        # This requires a bit of explanation. We can't import our IO layer here,
        # because it causes a circular import. Rather than restructuring the
        # code for this, we use JSON serialization, which has similar primitive
        # type restrictions as tnetstring, to check for conformance.
        try:
            json.dumps(val)
        except TypeError:
            raise ValueError(f"Data not serializable: {val}")
        return val
    else:
        return typeinfo(val)
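
A quick sketch of how this variant's typing.Any branch behaves (assuming _process is importable as above): JSON-representable values pass through unchanged, while anything json.dumps rejects is reported as a ValueError.

import typing

# A dict of JSON primitives passes the json.dumps conformance check and is returned as-is.
meta = _process(typing.Any, {"token": "abc", "retries": 3}, make=False)
assert meta == {"token": "abc", "retries": 3}

# A value json.dumps cannot handle fails the check.
try:
    _process(typing.Any, object(), make=False)
except ValueError as err:
    print(err)  # Data not serializable: <object object at 0x...>
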