Example #1
0
    def test_generic_bases(self):
        """get_generic_bases yields the parameterized bases of a generic
        subclass, and an empty tuple for a plain (non-generic) type."""
        class MyClass(List[int], Mapping[str, List[int]]):
            pass

        expected_bases = (List[int], Mapping[str, List[int]])
        self.assertEqual(get_generic_bases(MyClass), expected_bases)
        self.assertEqual(get_generic_bases(int), ())
Example #2
0
def _get_generic_type_var_dict(controller: Type[ControllerBase]) -> Dict:
    """Map a controller's generic type variables to their bound types.

    Collects the concrete type arguments supplied by ``controller``'s own
    generic bases, and the declaring ``TypeVar``s from its immediate base
    class, then zips them together positionally.

    Args:
        controller: The controller subclass whose generic parameterisation
            should be inspected.

    Returns:
        A dict mapping each ``TypeVar`` to the concrete type bound to it.
        Empty when the controller is not generically parameterised.
    """
    # Concrete arguments supplied by this subclass, e.g. [int, str].
    generic_values = []
    for generic_base in typing_inspect.get_generic_bases(controller):
        generic_values.extend(typing_inspect.get_args(generic_base))

    # TypeVars declared on the parent's generic bases, e.g. [T, U].
    generic_type_vars = []
    base_generic_bases = typing_inspect.get_generic_bases(controller.__base__)
    for type_var_generic_base in filter(typing_inspect.is_generic_type,
                                        base_generic_bases):
        generic_type_vars.extend(
            typing_inspect.get_args(type_var_generic_base))

    # Positional pairing: TypeVar order must match argument order.
    return dict(zip(generic_type_vars, generic_values))
def get_argument_to_typevar(cls: Type, generic_base_class: Type,
                            typevar: TypeVar):
    """
    Gets the argument given to a type variable parameterising
    a generic base class in a particular sub-class of that base.

    :param cls:                 The sub-class specifying the argument.
    :param generic_base_class:  The generic base-class specifying the type variable.
    :param typevar:             The type variable to get the argument for.
    :return:                    The argument to the type variable, or None
                                if it was never bound to a concrete type.
    """
    # Check the arguments; check_args also yields which parameter slot of
    # generic_base_class the typevar occupies
    typevar_index: int = check_args(cls, generic_base_class, typevar)

    # Get the descendancy path from derived to base class
    bases = [cls]
    while bases[-1] is not generic_base_class:
        # Keep track of if we found a base
        base_found = False

        # Prefer a parameterised (generic) base, since it carries the
        # type arguments we want to resolve
        for base in typing_inspect.get_generic_bases(bases[-1]):
            if issubclass(base, generic_base_class):
                bases.append(base)
                base_found = True
                break

        # If we didn't find a generic base, find a non-generic base
        # NOTE(review): if no base at this level relates to
        # generic_base_class this loop would never terminate — presumably
        # check_args already guarantees the subclass relationship; confirm
        if not base_found:
            for base in bases[-1].__bases__:
                if issubclass(base, generic_base_class):
                    bases.append(base)
                    break

    # Search the dependency path for the type variable's final argument,
    # chasing the typevar through each level's type arguments
    arg = None
    while len(bases) > 1:
        # Get the arguments to the generic base class
        args = typing_inspect.get_args(bases[-2])

        # If no arguments are given, the signature stays the same,
        # so drop this level from the path and keep looking
        if len(args) == 0:
            bases = bases[:-2] + bases[-1:]
            continue

        # Get the argument to this typevar
        arg = args[typevar_index]

        # If it's another type variable, keep looking for the argument to this
        # type variable one level further toward the derived class
        if typing_inspect.is_typevar(arg):
            typevar_index = typing_inspect.get_parameters(bases[-2]).index(arg)
            bases = bases[:-1]
            continue

        # Otherwise return the argument to the type variable
        return arg

    return arg
Example #4
0
def _maybe_node_for_dict(
    typ: Type[iface.IType],
    overrides: OverridesT,
    memo: MemoType,
    forward_refs: ForwardRefs,
    supported_type=frozenset({
        dict,
        collections.abc.Mapping,
        pyt.PMap,
    }),
    supported_origin=frozenset({
        Dict,
        dict,
        collections.abc.Mapping,
        pyt.PMap,
    })
) -> Tuple[Optional[schema.nodes.SchemaNode], MemoType, ForwardRefs]:
    """ This is mainly for cases when a user has manually
    specified that a field should be a dictionary, rather than a
    strict structure, possibly due to dynamic nature of keys
    (for instance, python logging settings that have an infinite
    set of possible attributes).

    Returns a ``(node, memo, forward_refs)`` triple; ``node`` is
    None when ``typ`` is not one of the supported mapping types.
    """
    rv = None
    # This is a hack for Python 3.9, where parameterized builtins
    # (e.g. dict[str, int]) report their origins differently
    if insp.is_generic_type(typ):
        generic_bases = [get_origin_39(x) for x in insp.get_generic_bases(typ)]
    else:
        generic_bases = []

    # Normalise the deprecated typing.Dict alias to plain dict on 3.9+
    typ = dict if is_39_deprecated_dict(typ) else typ

    if typ in supported_type or get_origin_39(
            typ) in supported_origin or are_generic_bases_match(
                generic_bases, supported_origin):
        # Immutable pyrsistent maps get their own schema node type
        schema_node_type = schema.nodes.PMapSchema if is_pmap(
            typ) else schema.nodes.SchemaNode

        if generic_bases:
            # python 3.9 args
            key_type, value_type = typ.__args__
        else:
            try:
                key_type, value_type = insp.get_args(typ)
            except ValueError:
                # Mapping doesn't provide key/value types
                key_type, value_type = Any, Any

        # Recursively build schema nodes for the key and value types
        key_node, memo, forward_refs = decide_node_type(
            key_type, overrides, memo, forward_refs)
        value_node, memo, forward_refs = decide_node_type(
            value_type, overrides, memo, forward_refs)
        mapping_type = schema.types.TypedMapping(key_node=key_node,
                                                 value_node=value_node)
        rv = schema_node_type(mapping_type)
    return rv, memo, forward_refs
Example #5
0
def test_type_inspect():
    """Smoke-test the typing_inspect helpers by printing their output
    for a spread of concrete and generic typing constructs."""
    ty = typing
    basic_type = int
    list_type = ty.List[int]
    dict_type = ty.Dict[int, str]
    tuple_type = ty.Tuple[ty.Dict[int, str], str, ty.List[str]]
    union_type = ty.Union[list_type, dict_type, None]

    type_a = ty.TypeVar('TA')
    type_b = ty.TypeVar('TB')
    gen_list_type = ty.List[type_a]
    gen_dict_type = ty.Dict[type_a, type_b]
    gen_tuple_type = ty.Tuple[type_a, type_b]
    test_types = [
        basic_type, list_type, dict_type, tuple_type, union_type,
        gen_list_type, gen_dict_type, gen_tuple_type
    ]

    print("ti.get_origin:\n")
    for t in test_types:
        print("    ", ti.get_origin(t))

    print("ti.get_parameters:\n")
    for t in test_types:
        print("    ", ti.get_parameters(t))

    print("ti.get_args:\n")
    for t in test_types:
        print("    ", ti.get_args(t))

    print("ti.get_generic_type:\n")
    for t in test_types:
        print("    ", ti.get_generic_type(t))

    print("ti.get_generic_bases:\n")
    for t in test_types:
        print("    ", ti.get_generic_bases(t))

    print("ti.typed_dict_keys:\n")
    for t in test_types:
        # BUG FIX: this section previously called ti.get_generic_bases
        # (copy-paste from the block above) despite announcing
        # typed_dict_keys in its header.
        print("    ", ti.typed_dict_keys(t))
Example #6
0
    def _get_generic_type(self) -> Type[GenericIdModel]:
        """
        Get generic type of inherited BaseRepository:

        >>> class TransactionRepo(AiopgRepository[Transaction]):
        ...     table = transactions_table
        ... # doctest: +SKIP
        >>> assert TransactionRepo().__get_generic_type() is Transaction # doctest: +SKIP
        """
        # The last generic base carries the repository parameterisation;
        # its first argument is the model type.
        last_generic_base = typing_inspect.get_generic_bases(self)[-1]
        model_type = typing_inspect.get_args(last_generic_base)[0]
        return cast(Type[GenericIdModel], model_type)
Example #7
0
    def with_options(cls, **options) -> typing.Callable[[], "OptionsBagMixin"]:
        """Build an options bag and return an instance partial.

        Utilizes :mod:`typing_inspect` to determine the value of the
        :obj:`T_OptionsBag` generic, and instantialize it with the given
        keyword arguments.

        Returns:
            A :func:`functools.partial` object with **options** passed.

        """
        for base in typing_inspect.get_generic_bases(cls):
            # Only parameterised bases (typing._GenericAlias) carry the
            # options-bag type argument.
            if base.__class__.__name__ != "_GenericAlias":
                continue
            args = typing_inspect.get_args(base)
            if not args:
                raise RuntimeError(
                    "Generic backend base not passed options bag")
            options_bag = args[0](**options)
            return functools.partial(cls, options=options_bag)
        raise RuntimeError("Invalid backend bases")
Example #8
0
def issubclass(cls, classinfo):
    """Extended issubclass that understands dataclasses and typing generics.

    Falls back to the builtin check (``original_issubclass``) for plain
    classes.
    """
    # Treat the `dataclass` decorator itself as a pseudo-class:
    # "is cls a dataclass?"
    if classinfo is dataclass:
        return original_isinstance(cls, type) and is_dataclass(cls)

    # A bare `dataclass` on the left behaves like `object`
    if cls is dataclass:
        return issubclass(object, classinfo)

    # Parameterised generics (e.g. List[int]): membership is checked
    # against the origin's generic bases rather than the alias itself
    if original_isinstance(cls, GenericMeta):
        origin = get_origin(cls)
        bases = get_generic_bases(origin) or (origin, )
        return classinfo in bases

    # Dispatch to a registered handler when classinfo is (or originates
    # from) a known generic type
    classinfo_origin = get_origin(classinfo)
    if classinfo_origin is None and original_isinstance(
            classinfo, GenericMeta):
        classinfo_origin = classinfo
    if classinfo_origin in issubclass_generic_funcs:
        return issubclass_generic_funcs[classinfo_origin](cls, classinfo)

    # Non-classes can never be subclasses
    if not original_isinstance(cls, type):
        return False

    return original_issubclass(cls, classinfo)
Example #9
0
def _my_typevars(obj, cls):
    """Compute the type arguments of `obj` (a class) relative to `cls` (a
    superclass).

    Example:

        from typing import Generic, TypeVar, List
        A, B, C = TypeVar('A'), TypeVar('B'), TypeVar('C')
        class Super(Generic[A, B, C]):
            pass
        class Sub(Super[int, str, float], List[int]):
            pass

        _my_typevars(Sub, Super) => (int, str, float)
        _my_typevars(Sub, list) => (int,)
    """

    # Climb the type chain until we are looking at an actual class.
    while not isclass(obj):
        obj = type(obj)

    # First generic base whose origin matches cls wins.
    args = next(
        (get_args(base, evaluate=True)
         for base in get_generic_bases(obj)
         if get_origin(base) == cls),
        None,
    )
    if args is None:
        raise TypeError(
            f"Couldn't find type variables for {obj} relative to {cls}")
    return args
Example #10
0
def _get_generic_types(
    instance: _tp.Any,
    count: _tp.Union[int, _tp.Set[int]],
    *,
    module_from: _tp.Any,
    hint: str = ""
) -> _tp.Tuple[_tp.Type[_tp.Any], ...]:
    """Return the concrete type arguments ``instance``'s generic class was
    parameterized with, validating how many were supplied.

    :param instance: An instance of a parameterized generic class.
    :param count: Expected number of type arguments (or a set of
        acceptable counts); ``-1`` / ``{-1}`` disables the count check.
    :param module_from: Object whose module globals are used to resolve
        forward references in the arguments.
    :param hint: Extra text appended to the error message.
    :raises TypeError: If the generic was not properly parameterized
        (wrong count, or unbound TypeVars remain).
    """
    types = _typing_inspect.get_args(_typing_inspect.get_generic_type(instance))

    # Fall back to the first generic base when the instance itself carries
    # no parameterization (e.g. an instance of `class Sub(Base[int])`)
    if not types:
        types = _typing_inspect.get_args(_typing_inspect.get_generic_bases(instance)[0])

    globalns = _sys.modules[module_from.__module__].__dict__

    # NOTE(review): relies on the private typing._eval_type to resolve
    # forward references — may break across Python versions
    _eval_type = _tp._eval_type  # type: ignore
    types = tuple(_eval_type(i, globalns, None) for i in types)

    # Normalise the accepted counts to a set
    if isinstance(count, int):
        count = {count}

    # Any remaining TypeVar means the caller forgot to parameterize
    if count != {-1} and len(types) not in count or any(_typing_inspect.is_typevar(i) for i in types):
        raise TypeError(f"{instance.__class__.__name__} generic was not properly parameterized{hint}: {types}")

    return types
Example #11
0
def decode(obj: Any, hint: Type, hint_args: Any = ()) -> Any:
    """Recursively decode a JSON-like value ``obj`` into the type ``hint``.

    :param obj: The raw (typically JSON-deserialized) value.
    :param hint: Target type: a primitive, Optional, datatype/dataclass,
        or a generic container (dict/tuple/set/list).
    :param hint_args: Type arguments from an outer generic, used to fill
        in unbound TypeVars appearing in ``hint``.
    :raises NotImplementedError: For union types and unsupported hints.
    :raises TypeError: When ``obj``'s shape doesn't match ``hint``.
    """
    ftype = hint
    origin = get_origin(ftype)
    bases = get_generic_bases(ftype)
    targs = get_args(ftype)

    # Substitute outer type arguments for any unbound TypeVars
    if hint_args and any(is_typevar(t) for t in targs):
        targs = hint_args

    if ftype in PRIMITIVES:
        return obj

    if is_optional_type(ftype):
        if obj is None:
            return None

        # Unwrap Optional[X] to X; wider unions are unsupported
        real_args = [t for t in targs if t is not type(None)]
        if len(real_args) > 1:
            raise NotImplementedError(f"can't decode union types ({real_args})")
        ftype = real_args[0]
        origin = get_origin(ftype)
    elif is_union_type(ftype):
        raise NotImplementedError(f"can't decode union types ({targs})")

    if is_primitive(ftype):
        return ftype(obj)

    # Datatype / dataclass: decode each annotated field from camelCase keys
    if is_datatype(ftype) or (origin and is_dataclass(origin)):
        if not isinstance(obj, dict):
            raise TypeError(f"invalid data {obj!r} for {ftype}")

        kwargs: Dict[str, Any] = {}

        # Resolve field annotations in the defining module's namespace
        namespace = sys.modules[ftype.__module__].__dict__
        if origin and is_dataclass(origin):
            type_hints = get_type_hints(origin, namespace)
        else:
            type_hints = get_type_hints(ftype, namespace)
        for fname, ft in type_hints.items():
            key = camelcase(fname.strip("_"))

            if key in obj:
                kwargs[fname] = decode(obj[key], ft, targs)

        return ftype(**kwargs)

    if is_generic_type(ftype):
        # Walk down the generic bases until a concrete origin is found
        while origin is None and bases:
            if len(bases) > 1:  # pragma: nocover
                raise NotImplementedError(f"can't decode multiple bases {ftype}")
            ftype = bases[0]
            origin = get_origin(ftype)
            bases = get_generic_bases(ftype)
            targs = get_args(ftype)

        if origin in (dict, Dict, Mapping):
            # Only primitive keys are supported; values decode recursively
            if not is_primitive(targs[0]):
                raise NotImplementedError(f"can't decode object keys {ftype}")
            ftype = targs[1]

            if ftype in PRIMITIVES:
                return dict(obj)
            else:
                return {k: decode(v, ftype) for k, v in obj.items()}

        if origin in (tuple, Tuple):
            # Heterogeneous tuple: pair each element with its own type
            return tuple(decode(v, ftype) for v, ftype in zip(obj, targs))

        if origin in (set, Set):
            ftype = targs[0]

            if ftype in PRIMITIVES:
                return set(obj)
            else:
                return set(decode(v, ftype) for v in obj)

        if origin in (list, List):
            ftype = targs[0]

            if ftype in PRIMITIVES:
                return list(obj)
            else:
                return [decode(v, ftype) for v in obj]

    raise NotImplementedError(f"failed to decode {obj} as type {hint}")
Example #12
0
def encode(obj: Any, hint: Optional[Type] = None) -> Any:
    """Recursively encode ``obj`` into a JSON-compatible value.

    :param obj: The value to encode.
    :param hint: Optional type describing ``obj``; defaults to
        ``type(obj)``.
    :raises NotImplementedError: For union types and unsupported hints.
    """
    ftype = hint or type(obj)
    if obj is None or is_primitive(ftype):
        return obj

    if is_optional_type(ftype):
        # Unwrap Optional[X] to X; wider unions are unsupported
        targs = [t for t in get_args(ftype) if t is not type(None)]
        if len(targs) > 1:
            raise NotImplementedError(f"can't encode union types ({targs})")
        ftype = targs[0]
    elif is_union_type(ftype):
        raise NotImplementedError(f"can't encode union types ({get_args(ftype)})")

    if is_generic_type(ftype):
        origin = get_origin(ftype)
        bases = get_generic_bases(ftype)
        targs = get_args(ftype)

        # Walk down the generic bases until a concrete origin is found
        while origin is None and bases:
            if len(bases) > 1:  # pragma: nocover
                raise NotImplementedError(f"can't encode multiple bases {ftype}")
            ftype = bases[0]
            origin = get_origin(ftype)
            bases = get_generic_bases(ftype)
            targs = get_args(ftype)

        if origin in (dict, Dict, Mapping):
            # Only primitive keys are supported; keys become camelCase
            if not is_primitive(targs[0]):
                raise NotImplementedError(f"can't encode object keys {ftype}")
            ftype = targs[1]

            if is_primitive(ftype):
                return obj
            else:
                return {
                    camelcase(k.strip("_")): encode(v, ftype) for k, v in obj.items()
                }

        if origin in (tuple, Tuple):
            # Heterogeneous tuple: pair each element with its own type
            return [encode(v, ftype) for v, ftype in zip(obj, targs)]

        if origin in (set, list, Sequence, Set):
            ftype = targs[0]

            if is_primitive(ftype):
                return list(obj)
            else:
                return [encode(v, ftype) for v in obj]

    if is_primitive(ftype):
        return obj

    # Datatype instances encode field-by-field using their annotations
    if isinstance(obj, Datatype):
        result: Dict[str, Any] = {}

        namespace = sys.modules[ftype.__module__].__dict__
        for fname, ftype in get_type_hints(obj, namespace).items():
            key = camelcase(fname.strip("_"))
            value = getattr(obj, fname)

            # _sparse datatypes omit falsy fields from the output
            if value or not obj._sparse:
                result[key] = encode(value, ftype)

        return result

    raise NotImplementedError(f"failed to encode {hint}({type(obj)!r})")
Example #13
0
def _maybe_node_for_user_type(
    typ: Type[iface.IType],
    overrides: OverridesT,
    memo: MemoType,
    forward_refs: ForwardRefs,
) -> Tuple[Optional[schema.nodes.SchemaNode], MemoType, ForwardRefs]:
    """ Generates a Colander schema for the given user-defined `typ` that is capable
    of both constructing (deserializing) and serializing the `typ`.
    This includes named tuples and dataclasses.

    Three shapes are handled: parameterized generics, NamedTuples, and
    plain init-based classes. Each branch computes the same trio of
    ``attribute_hints`` / ``deserialize_overrides`` / ``defaults_source``.
    """
    global_name_overrider = get_global_name_overrider(overrides)
    is_generic = insp.is_generic_type(typ)

    if is_generic:
        # get the base class that was turned into Generic[T, ...]
        hints_source = get_origin_39(typ)
        # now we need to map generic type variables to the bound class types,
        # e.g. we map Generic[T,U,V, ...] to actual types of MyClass[int, float, str, ...]
        generic_repr = insp.get_generic_bases(hints_source)
        generic_vars_ordered = (insp.get_args(x)[0] for x in generic_repr)
        bound_type_args = insp.get_args(typ)
        type_var_to_type = pmap(zip(generic_vars_ordered, bound_type_args))
        # resolve type hints: substitute each raw TypeVar with its bound type
        attribute_hints = [
            (field_name, type_var_to_type[type_var])
            for field_name, type_var in ((x, raw_type)
                                         for x, _resolved_type, raw_type in
                                         _type_hints_getter(hints_source))
        ]
        # Generic types should not have default values
        defaults_source = lambda: ()
        # Overrides should be the same as class-based ones, as Generics are not NamedTuple classes,
        # TODO: consider reducing duplication between this and the logic from init-based types (see below)
        deserialize_overrides = pmap({
            # try to get a specific override for a field, if it doesn't exist, use the global modifier
            overrides.get((typ, python_field_name),
                          global_name_overrider(python_field_name)):
            python_field_name
            for python_field_name, _ in attribute_hints
        })
        # apply a local optimisation that discards `deserialize_overrides`
        # if there is no difference with the original field_names;
        # it is done to occupy less memory with unnecessary mappings
        if deserialize_overrides == pmap({
                x: x
                for x, _ in attribute_hints
        }) and global_name_overrider is flags.Identity:
            deserialize_overrides = pmap({})

    elif is_named_tuple(typ):
        hints_source = typ
        attribute_hints = [
            (x, raw_type)
            for x, y, raw_type in _type_hints_getter(hints_source)
        ]
        # NamedTuple field overrides are keyed by the field attribute itself
        get_override_identifier = lambda x: getattr(typ, x)
        # NamedTuple defaults live on __new__'s signature
        defaults_source = typ.__new__

        deserialize_overrides = pmap({
            # try to get a specific override for a field, if it doesn't exist, use the global modifier
            overrides.get(getattr(typ, python_field_name),
                          global_name_overrider(python_field_name)):
            python_field_name
            for python_field_name in typ._fields
        })

        # apply a local optimisation that discards `deserialize_overrides`
        # if there is no difference with the original field_names;
        # it is done to occupy less memory with unnecessary mappings
        if deserialize_overrides == pmap({
                x: x
                for x in typ._fields
        }) and global_name_overrider is flags.Identity:
            deserialize_overrides = pmap({})
    else:
        # use init-based types: hints and defaults come from __init__
        hints_source = typ.__init__
        attribute_hints = [
            (x, raw_type)
            for x, y, raw_type in _type_hints_getter(hints_source)
        ]
        # Class-based field overrides are keyed by (type, field_name)
        get_override_identifier = lambda x: (typ, x)
        defaults_source = typ.__init__

        deserialize_overrides = pmap({
            # try to get a specific override for a field, if it doesn't exist, use the global modifier
            overrides.get((typ, python_field_name),
                          global_name_overrider(python_field_name)):
            python_field_name
            for python_field_name, _ in attribute_hints
        })
        # apply a local optimisation that discards `deserialize_overrides`
        # if there is no difference with the original field_names;
        # it is done to occupy less memory with unnecessary mappings
        if deserialize_overrides == pmap({
                x: x
                for x, _ in attribute_hints
        }) and global_name_overrider is flags.Identity:
            deserialize_overrides = pmap({})

    # Collect per-field defaults from the chosen callable's signature
    defaults = {
        k: v.default
        for k, v in inspect.signature(defaults_source).parameters.items()
        if k != 'self' and v.default != inspect.Parameter.empty
    }

    if is_generic and hints_source in overrides:
        # Generic types may have their own custom Schemas defined
        # as a TypeExtension through overrides
        overridden: TypeExtension = overrides[hints_source]
        schema_type_type, _node_children_ = overridden.schema
    else:
        schema_type_type = schema.types.Structure

    schema_type = schema_type_type(
        typ=typ,
        attrs=pvector([x[0] for x in attribute_hints]),
        deserialize_overrides=deserialize_overrides,
    )

    type_schema = schema.nodes.SchemaNode(schema_type)

    # Build one child node per attribute, applying name overrides and defaults
    for field_name, field_type in attribute_hints:
        globally_modified_field_name = global_name_overrider(field_name)
        # apply field override, if available
        # NOTE(review): get_override_identifier is unbound in the generic
        # branch — presumably deserialize_overrides is always empty there;
        # confirm against callers
        if deserialize_overrides:
            field = get_override_identifier(field_name)
            serialized_field_name = overrides.get(
                field, globally_modified_field_name)
        else:
            serialized_field_name = globally_modified_field_name

        node, memo, forward_refs = decide_node_type(field_type, overrides,
                                                    memo, forward_refs)
        if node is None:
            raise TypeError(
                f'Cannot recognise type "{field_type}" of the field '
                f'"{typ.__name__}.{field_name}" (from {typ.__module__})')
        # clonning because we mutate it next, and the node
        # might be from the cache already
        node = clone_schema_node(node)
        node.name = serialized_field_name
        node.missing = defaults.get(field_name, node.missing)
        type_schema.add(node)
    return type_schema, memo, forward_refs
Example #14
0
 def get_runtime_bases(self, typ: type) -> Sequence[Value]:
     """Return typ's bases, preferring the parameterized generic bases
     when typ is a generic type."""
     if not typing_inspect.is_generic_type(typ):
         return typ.__bases__
     return typing_inspect.get_generic_bases(typ)