Example #1
def _update_generic_parameters_signature(generic_dict: Dict, method: Callable):
    """

    Args:
        generic_dict:
        method:

    Returns:

    """
    sig = inspect.signature(method)
    params = sig.parameters

    new_params = []
    for k, v in params.items():
        annotation = v.annotation
        if typing_inspect.is_typevar(annotation):
            new_params.append(
                inspect.Parameter(name=k,
                                  kind=v.kind,
                                  annotation=generic_dict[annotation],
                                  default=v.default))
        else:
            new_params.append(v)

    return_val = generic_dict[
        sig.return_annotation] if typing_inspect.is_typevar(
            sig.return_annotation) else sig.return_annotation

    setattr(method, "__signature__",
            sig.replace(parameters=new_params, return_annotation=return_val))
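A minimal usage sketch for the helper above, assuming the imports it relies on (inspect, typing, typing_inspect); the fetch function and the TypeVar T are hypothetical:
import inspect
from typing import TypeVar

T = TypeVar("T")

def fetch(key: str, default: T) -> T:
    ...

# Bind T to int and rewrite the advertised signature in place.
_update_generic_parameters_signature({T: int}, fetch)
print(inspect.signature(fetch))  # (key: str, default: int) -> int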
Example #2
def _generate_field_schema(field: Field, context: str,
                           schemas: Dict[str, Schema]) -> Tuple[bool, Schema]:
    is_optional, annotation = extract_optional_annotation(field.annotation)
    if is_schema(annotation):
        field_schema_name = _generate_schema(context, annotation, schemas)
        field_schema = Schema(all_of=[_make_schema_ref(field_schema_name)])

    elif is_generic_type(annotation):
        origin = get_origin(annotation)
        if origin in (list, List):
            arguments = get_args(annotation)
            if not arguments or arguments == [Any] or is_typevar(arguments[0]):
                field_schema = _generate_primitive_schema(list)

            else:
                item_schema_name = _generate_schema(context, arguments[0],
                                                    schemas)
                field_schema = Schema("array",
                                      items=_make_schema_ref(item_schema_name))

        elif origin in (dict, Dict):
            # TODO: Improve support for dicts.
            field_schema = _generate_primitive_schema(dict)

        else:
            # Guard against an UnboundLocalError for generic origins that aren't handled above.
            field_schema = None

    else:
        field_schema = _generate_primitive_schema(annotation)

    if field_schema is not None:
        field_schema.description = field.description
        for option, value in field.validator_options.items():
            if option in Schema._FIELDS:
                setattr(field_schema, option, value)

    return is_optional, field_schema
Example #3
def robust_isinstance(inst, typ) -> bool:
    """
    Similar to isinstance, but if 'typ' is a parametrized generic Type, it is first transformed into its base generic
    class so that the instance check works. It is also robust to Union and Any.

    :param inst:
    :param typ:
    :return:
    """
    if typ is Any:
        return True
    if is_typevar(typ):
        # Note: an unconstrained TypeVar has __constraints__ == (), so check truthiness
        # rather than "is not None" to fall through to the raw-TypeVar branch below.
        if hasattr(typ, '__constraints__') and typ.__constraints__:
            typs = get_args(typ, evaluate=True)
            return any(robust_isinstance(inst, t) for t in typs)
        elif hasattr(typ, '__bound__') and typ.__bound__ is not None:
            return robust_isinstance(inst, typ.__bound__)
        else:
            # a raw TypeVar means 'anything'
            return True
    else:
        if is_union_type(typ):
            typs = get_args(typ, evaluate=True)
            return any(robust_isinstance(inst, t) for t in typs)
        else:
            return isinstance(inst, get_base_generic_type(typ))
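The TypeVar branches above come down to checking constraints and bounds; a small standalone illustration using typing_inspect directly (the TypeVar names are made up):
from typing import TypeVar
import typing_inspect

TNum = TypeVar("TNum", int, float)   # constrained TypeVar
TStr = TypeVar("TStr", bound=str)    # bounded TypeVar
TAny = TypeVar("TAny")               # raw TypeVar

print(typing_inspect.get_constraints(TNum))  # (<class 'int'>, <class 'float'>)
print(typing_inspect.get_bound(TStr))        # <class 'str'>
print(typing_inspect.get_bound(TAny))        # None

# A constrained TypeVar matches a value if any of its constraints does.
print(any(isinstance(3, c) for c in typing_inspect.get_constraints(TNum)))  # True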
Example #4
def is_valid_pep484_type_hint(typ_hint, allow_forward_refs: bool = False):
    """
    Returns True if the provided type is a valid PEP484 type hint, False otherwise.

    Note: string type hints (forward references) are not supported by default, since callers of this function in
    parsyfiles lib actually require them to be resolved already.

    :param typ_hint:
    :param allow_forward_refs:
    :return:
    """
    # most common case first, to be faster
    try:
        if isinstance(typ_hint, type):
            return True
    except:
        pass

    # optionally, check forward reference
    try:
        if allow_forward_refs and is_forward_ref(typ_hint):
            return True
    except:
        pass

    # finally check unions and typevars
    try:
        return is_union_type(typ_hint) or is_typevar(typ_hint)
    except:
        return False
Example #5
def is_collection(object_type, strict: bool = False) -> bool:
    """
    Utility method to check if a type is a subclass of typing.{List,Dict,Set,Tuple}
    or of list, dict, set, tuple.

    If strict is set to True, the method will return True only if the class IS directly one of the base collection
    classes.

    :param object_type:
    :param strict: if set to True, this method will look for a strict match.
    :return:
    """
    if object_type is None or object_type is Any or is_union_type(
            object_type) or is_typevar(object_type):
        return False
    elif strict:
        return object_type == dict \
               or object_type == list \
               or object_type == tuple \
               or object_type == set \
               or get_base_generic_type(object_type) == Dict \
               or get_base_generic_type(object_type) == List \
               or get_base_generic_type(object_type) == Set \
               or get_base_generic_type(object_type) == Tuple
    else:
        return issubclass(object_type, Dict) \
               or issubclass(object_type, List) \
               or issubclass(object_type, Set) \
               or issubclass(object_type, Tuple) \
               or issubclass(object_type, dict) \
               or issubclass(object_type, list) \
               or issubclass(object_type, tuple) \
               or issubclass(object_type, set)
Example #6
def get_argument_to_typevar(cls: Type, generic_base_class: Type,
                            typevar: TypeVar):
    """
    Gets the argument given to a type variable parameterising
    a generic base class in a particular sub-class of that base.

    :param cls:                 The sub-class specifying the argument.
    :param generic_base_class:  The generic base-class specifying the type variable.
    :param typevar:             The type variable to get the argument for.
    :return:                    The argument to the type variable.
    """
    # Check the arguments
    typevar_index: int = check_args(cls, generic_base_class, typevar)

    # Get the descent path from the derived class down to the generic base class
    bases = [cls]
    while bases[-1] is not generic_base_class:
        # Keep track of whether we found a base
        base_found = False

        # Try and find a generic base
        for base in typing_inspect.get_generic_bases(bases[-1]):
            if issubclass(base, generic_base_class):
                bases.append(base)
                base_found = True
                break

        # If we didn't find a generic base, find a non-generic base
        if not base_found:
            for base in bases[-1].__bases__:
                if issubclass(base, generic_base_class):
                    bases.append(base)
                    break

    # Search the descent path for the type variable's final argument
    arg = None
    while len(bases) > 1:
        # Get the arguments to the generic base class
        args = typing_inspect.get_args(bases[-2])

        # If no arguments are given, the signature stays the same
        if len(args) == 0:
            bases = bases[:-2] + bases[-1:]
            continue

        # Get the argument to this typevar
        arg = args[typevar_index]

        # If it's another type variable, keep looking for the argument to this
        # type variable
        if typing_inspect.is_typevar(arg):
            typevar_index = typing_inspect.get_parameters(bases[-2]).index(arg)
            bases = bases[:-1]
            continue

        # Otherwise return the argument to the type variable
        return arg

    return arg
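The walk above is driven by typing_inspect's get_generic_bases, get_args, and get_parameters; a sketch of what they return on a hypothetical two-level hierarchy:
from typing import Generic, TypeVar
import typing_inspect

T = TypeVar("T")

class Base(Generic[T]):
    pass

class Child(Base[int]):
    pass

base, = typing_inspect.get_generic_bases(Child)  # (Base[int],)
print(typing_inspect.get_args(base))             # (<class 'int'>,)
print(typing_inspect.get_parameters(Base))       # (~T,)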
Example #7
def match_types(hint: typing.Type, t: typing.Type) -> TypeVarMapping:
    """
    Matches a type hint with a type, returning a mapping of any type vars to their values.
    """
    if hint == t:
        return {}

    # If it is an instance of OfType[Type[T]], then we should consider it as T
    if isinstance(t, OfType):
        of_type, = typing_inspect.get_args(get_type(t))
        assert issubclass(of_type, typing.Type)
        t, = typing_inspect.get_args(of_type)
        return match_types(hint, t)

    # If the type is an OfType[T] then we should really just consider it as T
    if issubclass(t, OfType) and not issubclass(hint, OfType):
        t, = typing_inspect.get_args(t)
        return match_types(hint, t)
    if issubclass(hint, OfType) and not issubclass(t, OfType):
        hint, = typing_inspect.get_args(hint)
        return match_types(hint, t)

    # Matching an expanded type is like matching just whatever it represents
    if issubclass(t, ExpandedType):
        t, = typing_inspect.get_args(t)

    if typing_inspect.is_typevar(hint):
        return {hint: t}

    if typing_inspect.is_typevar(t):
        return {}

    if typing_inspect.is_union_type(hint):
        # If this is a union, iterate through and use the first that is a subclass
        for inner_type in typing_inspect.get_args(hint):
            if issubclass(t, inner_type):
                hint = inner_type
                break
        else:
            raise TypeError(f"Cannot match concrete type {t} with hint {hint}")

    if not issubclass(t, hint):
        raise TypeError(f"Cannot match concrete type {t} with hint {hint}")
    return merge_typevars(*(match_types(inner_hint, inner_t)
                            for inner_hint, inner_t in zip(
                                get_inner_types(hint), get_inner_types(t))))
Example #8
def is_dataclass_type(t: Type) -> bool:
    """Returns wether t is a dataclass type or a TypeVar of a dataclass type.

    Args:
        t (Type): Some type.

    Returns:
        bool: Whether it's a dataclass type.
    """
    return dataclasses.is_dataclass(t) or (
        tpi.is_typevar(t) and dataclasses.is_dataclass(tpi.get_bound(t)))
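A quick sketch of the two cases this covers, using a hypothetical dataclass (tpi is typing_inspect, as in the example):
import dataclasses
from typing import TypeVar

@dataclasses.dataclass
class Point:
    x: int
    y: int

P = TypeVar("P", bound=Point)

print(is_dataclass_type(Point))  # True  (a dataclass type)
print(is_dataclass_type(P))      # True  (a TypeVar bound to a dataclass)
print(is_dataclass_type(int))    # False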
Example #9
def is_type(type_):
    return (
        isinstance(type_, type) or
        type_ is Unknown or
        type_ is Any or
        is_parameterized(type_) or
        is_parametrical(type_) or
        is_typevar(type_) or
        is_union_type(type_)

    )
Example #10
def merge_typevars(*typevars: TypeVarMapping) -> TypeVarMapping:
    """
    Merges typevar mappings. If a key appears in more than one mapping, the values must
    either be equal, or one must be `typing.Any`, or one must itself be a typevar. In the
    typevar case, that typevar is also bound to the other value.
    """
    merged: TypeVarMapping = {}
    typevars_: typing.List[TypeVarMapping] = list(typevars)
    while typevars_:
        tvs: TypeVarMapping = typevars_.pop()
        for tv, tp in tvs.items():  # type: ignore
            if tv not in merged:
                merged[tv] = tp  # type: ignore
                continue
            prev_tp = merged[tv]
            if prev_tp == typing.Any:
                merged[tv] = tp  # type: ignore
            elif prev_tp == tp or tp == typing.Any:
                pass
            elif typing_inspect.is_typevar(prev_tp):
                merged[prev_tp] = merged[tv] = tp  # type: ignore
            elif typing_inspect.is_typevar(tp):
                merged[tp] = prev_tp  # type: ignore
            else:
                # Try merging them and replacing the hint with the
                # merged value
                try:
                    merged[tv] = replace_typevars(  # type: ignore
                        match_types(prev_tp, tp), prev_tp)
                    continue
                except TypeError:
                    pass
                try:
                    merged[tv] = replace_typevars(  # type: ignore
                        match_types(tp, prev_tp), tp)
                    continue
                except TypeError:
                    pass
                raise TypeError(
                    f"Cannot merge {prev_tp} and {tp} for type var {tv}")
    return merged
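A usage sketch for the simple branches only (no conflicting concrete types, so the match_types/replace_typevars fallback is never reached); the TypeVars and mappings are made up:
import typing

T = typing.TypeVar("T")
S = typing.TypeVar("S")

print(merge_typevars({T: int}, {S: str}))         # disjoint keys: {~T: int, ~S: str}
print(merge_typevars({T: int}, {T: typing.Any}))  # Any yields to the concrete type: {~T: int}
print(merge_typevars({T: int}, {T: int}))         # equal values merge cleanly: {~T: int}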
Example #11
        def wrapper(*a: object, **kw: Dict[str, object]) -> object:
            #debug('short circuit wrapper ', original)
            space = optional_context_statespace()
            if (not self.engaged) or (not space) or space.running_framework_code:
                return original(*a, **kw)
            # We *heavily* bias towards concrete execution, because it's often the case
            # that a single short-circuit will render the path useless. TODO: consider
            # decaying short-circuit probability over time.
            use_short_circuit = space.fork_with_confirm_or_else(0.95)
            if not use_short_circuit:
                #debug('short circuit: Choosing not to intercept', original)
                return original(*a, **kw)
            try:
                self.engaged = False
                debug('short circuit: Short circuiting over a call to ', original)
                self.intercepted = True
                return_type = sig.return_annotation

                # Deduce type vars if necessary
                if len(typing_inspect.get_parameters(sig.return_annotation)) > 0 or typing_inspect.is_typevar(sig.return_annotation):
                    typevar_bindings: typing.ChainMap[object, type] = collections.ChainMap(
                    )
                    bound = sig.bind(*a, **kw)
                    bound.apply_defaults()
                    for param in sig.parameters.values():
                        argval = bound.arguments[param.name]
                        value_type = python_type(argval)
                        #debug('unify', value_type, param.annotation)
                        if not dynamic_typing.unify(value_type, param.annotation, typevar_bindings):
                            debug(
                                'aborting intercept due to signature unification failure')
                            return original(*a, **kw)
                        #debug('unify bindings', typevar_bindings)
                    return_type = dynamic_typing.realize(
                        sig.return_annotation, typevar_bindings)
                    debug('short circuit: Deduced return type was ', return_type)

                # adjust arguments that may have been mutated
                assert subconditions is not None
                bound = sig.bind(*a, **kw)
                mutable_args = subconditions.mutable_args
                for argname, arg in bound.arguments.items():
                    if mutable_args is None or argname in mutable_args:
                        forget_contents(arg)

                if return_type is type(None):
                    return None
                # note that the enforcement wrapper ensures postconditions for us, so we
                # can just return a free variable here.
                return proxy_for_type(return_type, 'proxyreturn' + space.uniq())
            finally:
                self.engaged = True
Example #12
def _repr(val: t.Any) -> str:

    assert val is not None

    if types.is_none_type(val):
        return 'NoneType'
    elif ti.is_literal_type(val):
        return str(val)
    elif ti.is_new_type(val):
        nested_type = val.__supertype__
        return f'{_qualified_name(val)}[{get_repr(nested_type)}]'
    elif ti.is_typevar(val):
        tv_constraints = ti.get_constraints(val)
        tv_bound = ti.get_bound(val)
        if tv_constraints:
            constraints_repr = (get_repr(tt) for tt in tv_constraints)
            return f'typing.TypeVar(?, {", ".join(constraints_repr)})'
        elif tv_bound:
            return get_repr(tv_bound)
        else:
            return 'typing.Any'
    elif ti.is_optional_type(val):
        optional_args = ti.get_args(val, True)[:-1]
        nested_union = len(optional_args) > 1
        optional_reprs = (get_repr(tt) for tt in optional_args)
        if nested_union:
            return f'typing.Optional[typing.Union[{", ".join(optional_reprs)}]]'
        else:
            return f'typing.Optional[{", ".join(optional_reprs)}]'
    elif ti.is_union_type(val):
        union_reprs = (get_repr(tt) for tt in ti.get_args(val, True))
        return f'typing.Union[{", ".join(union_reprs)}]'
    elif ti.is_generic_type(val):
        attr_name = val._name
        generic_reprs = (get_repr(tt) for tt in ti.get_args(val, evaluate=True))
        return f'typing.{attr_name}[{", ".join(generic_reprs)}]'
    else:
        val_name = _qualified_name(val)
        maybe_td_entries = getattr(val, '__annotations__', {}).copy()
        if maybe_td_entries:
            # we are dealing with typed dict
            # that's quite lovely
            td_keys = sorted(maybe_td_entries.keys())
            internal_members_repr = ', '.join(
                '{key}: {type}'.format(key=k, type=get_repr(maybe_td_entries.get(k)))
                for k in td_keys
            )
            return f'{val_name}{{{internal_members_repr}}}'
        elif 'TypedDict' == getattr(val, '__name__', ''):
            return 'typing_extensions.TypedDict'
        else:
            return val_name
Example #13
def evaluate(tp: t.Union[str, t.TypeVar, t.Type, t.ForwardRef], *, frame=None):
    if isinstance(tp, str):
        tp = t.ForwardRef(tp)

    if ti.is_typevar(tp):
        tp = ti.get_bound(tp)

    # TODO python versions
    return t._eval_type(
        tp,
        frame.f_globals if frame else None,
        frame.f_locals if frame else None,
    )
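A hedged usage sketch (it assumes `t` is the typing module and `ti` is typing_inspect, as the example implies, and it leans on the private typing._eval_type just like the function itself); the TypeVar is made up:
import typing

TNum = typing.TypeVar("TNum", bound=float)

print(evaluate(TNum))   # <class 'float'>  (the TypeVar is replaced by its bound)
print(evaluate("int"))  # <class 'int'>    (the string becomes a ForwardRef, then is evaluated)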
Example #14
    def _init_parametric_user(cls) -> None:
        """Initialize an indirect descendant of ParametricType."""

        # For ParametricType grandchildren we have to deal with possible
        # TypeVar remapping and generally check for type sanity.

        ob = getattr(cls, '__orig_bases__', ())
        for b in ob:
            if (isinstance(b, type) and issubclass(b, ParametricType)
                    and b is not ParametricType):
                raise TypeError(
                    f'{cls.__name__}: missing one or more type arguments for'
                    f' base {b.__name__!r}')

            if not typing_inspect.is_generic_type(b):
                continue

            org = typing_inspect.get_origin(b)
            if not isinstance(org, type):
                continue
            if not issubclass(org, ParametricType):
                continue

            base_params = getattr(org, '__parameters__', ())

            args = typing_inspect.get_args(b)
            expected = len(base_params)
            if len(args) != expected:
                raise TypeError(
                    f'{b.__name__} expects {expected} type arguments'
                    f' got {len(args)}')

            base_map = dict(cls._type_param_map)
            subclass_map = {}

            for i, arg in enumerate(args):
                if not typing_inspect.is_typevar(arg):
                    raise TypeError(f'{b.__name__} expects all arguments to be'
                                    f' TypeVars')

                base_typevar = base_params[i]
                attr = base_map.get(base_typevar)
                if attr is not None:
                    subclass_map[arg] = attr

            if len(subclass_map) != len(base_map):
                raise TypeError(
                    f'{cls.__name__}: missing one or more type arguments for'
                    f' base {org.__name__!r}')

            cls._type_param_map = subclass_map
Example #15
    def validate(self, value: Optional[Any]) -> _T:
        """Validate and possibly transform the given value.

        Raises:
          FieldValidationError: When the value is not valid.
        """
        is_optional, annotation = extract_optional_annotation(self.annotation)
        # Distinguishing between missing values and null values is
        # important.  Optional types can have None as a value whereas
        # types with a default cannot.  Additionally, it's possible to
        # have an optional type without a default value.
        if value is Missing:
            if self.default is not Missing:
                return self.default

            elif self.default_factory:
                return self.default_factory()

            elif is_optional:
                return None

            raise FieldValidationError("this field is required")

        if value is None:
            if not is_optional:
                raise FieldValidationError("this field cannot be null")

            return value

        if annotation not in (Any,) and \
           not is_forward_ref(annotation) and \
           not is_generic_type(annotation) and \
           not is_union_type(annotation) and \
           not is_typevar(annotation) and \
           not is_schema(annotation) and \
           not isinstance(value, annotation):
            if not self.allow_coerce:
                raise FieldValidationError(
                    f"unexpected type {type(value).__name__}")

            try:
                value = annotation(value)
            except Exception:
                raise FieldValidationError(
                    f"value could not be coerced to {annotation.__name__}")

        if self.validator:
            return self.validator.validate(self, value,
                                           **self.validator_options)
        return value
Example #16
def realize(pytype: Type, bindings: Mapping[object, type]) -> object:
    if typing_inspect.is_typevar(pytype):
        return bindings[pytype]
    if not hasattr(pytype, '__args__'):
        return pytype
    newargs: List = []
    for arg in pytype.__args__:  # type:ignore
        newargs.append(realize(arg, bindings))
    #print('realizing pytype', repr(pytype), 'newargs', repr(newargs))
    pytype_origin = origin_of(pytype)
    if not hasattr(pytype_origin, '_name'):
        pytype_origin = getattr(typing, pytype._name)  # type:ignore
    if pytype_origin is Callable:  # Callable args get flattened
        newargs = [newargs[:-1], newargs[-1]]
    return pytype_origin.__getitem__(tuple(newargs))
Example #17
def _generate_primitive_schema(annotation: Any) -> Optional[Schema]:
    try:
        arguments = _PRIMITIVE_ANNOTATION_MAP[annotation]
        return Schema(*arguments)
    except KeyError:
        origin = get_origin(annotation)
        if origin in _LIST_TYPES:
            arguments = get_args(annotation)
            if not arguments or is_typevar(arguments[0]):
                return Schema("array", items=_ANY_VALUE)

            else:
                return Schema("array", items=_generate_primitive_schema(arguments[0]))

        # TODO: Add support for additionalFields.
        return Schema("string")
Example #18
    def _init_parametric_base(cls) -> None:
        """Initialize a direct subclass of ParametricType"""

        # Direct subclasses of ParametricType must declare
        # ClassVar attributes corresponding to the Generic type vars.
        # For example:
        #     class P(ParametricType, Generic[T, V]):
        #         t: ClassVar[Type[T]]
        #         v: ClassVar[Type[V]]

        params = getattr(cls, '__parameters__', None)

        if not params:
            raise TypeError(f'{cls} must be declared as Generic')

        mod = sys.modules[cls.__module__]
        annos = get_type_hints(cls, mod.__dict__)
        param_map = {}

        for attr, t in annos.items():
            if not typing_inspect.is_classvar(t):
                continue

            args = typing_inspect.get_args(t)
            # ClassVar constructor should have the check, but be extra safe.
            assert len(args) == 1

            arg = args[0]
            if typing_inspect.get_origin(arg) != type:
                continue

            arg_args = typing_inspect.get_args(arg)
            # Likewise, rely on Type checking its stuff in the constructor
            assert len(arg_args) == 1

            if not typing_inspect.is_typevar(arg_args[0]):
                continue

            if arg_args[0] in params:
                param_map[arg_args[0]] = attr

        for param in params:
            if param not in param_map:
                raise TypeError(f'{cls.__name__}: missing ClassVar for'
                                f' generic parameter {param}')

        cls._type_param_map = param_map
Example #19
def is_pep484_nonable(typ):
    """
    Checks if a given type is nonable, meaning that it explicitly or implicitly declares a Union with NoneType.
    Nested TypeVars and Unions are supported.

    :param typ:
    :return:
    """
    # TODO rely on typing_inspect if there is an answer to https://github.com/ilevkivskyi/typing_inspect/issues/14
    if typ is type(None):
        return True
    elif is_typevar(typ) or is_union_type(typ):
        return any(
            is_pep484_nonable(tt) for tt in
            get_alternate_types_resolving_forwardref_union_and_typevar(typ))
    else:
        return False
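The Union leg of this check can be illustrated with typing_inspect alone; the parsyfiles helper that resolves forward references and TypeVars is not shown here, and the TypeVar is hypothetical:
from typing import Optional, TypeVar
import typing_inspect

print(typing_inspect.is_union_type(Optional[int]))                          # True
print(type(None) in typing_inspect.get_args(Optional[int], evaluate=True))  # True

# A TypeVar bound to an Optional is implicitly nonable as well.
TMaybe = TypeVar("TMaybe", bound=Optional[str])
print(typing_inspect.get_bound(TMaybe))                                     # typing.Optional[str]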
Example #20
def normalize_pytype(typ: Type) -> Type:
    if typing_inspect.is_typevar(typ):
        # we treat type vars in the most general way possible (the bound, or as 'object')
        bound = typing_inspect.get_bound(typ)
        if bound is not None:
            return normalize_pytype(bound)
        constraints = typing_inspect.get_constraints(typ)
        if constraints:
            raise CrosshairUnsupported
            # TODO: not easy; interpreting as a Union allows the type to be
            # instantiated differently in different places. So, this doesn't work:
            # return Union.__getitem__(tuple(map(normalize_pytype, constraints)))
        return object
    if typ is Any:
        # The distinction between Any and object only matters for type checking; CrossHair treats them the same
        return object
    if typ is Type:
        return type
    return typ
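A usage sketch for the TypeVar and Any paths (the constrained-TypeVar path raises CrosshairUnsupported, so it is not exercised); the TypeVars are hypothetical:
from typing import Any, TypeVar

TNum = TypeVar("TNum", bound=int)
TAny = TypeVar("TAny")

print(normalize_pytype(TNum))  # <class 'int'>     (falls back to the bound)
print(normalize_pytype(TAny))  # <class 'object'>  (a raw TypeVar is treated as object)
print(normalize_pytype(Any))   # <class 'object'>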
Example #21
def replace_typevars(typevars: TypeVarMapping, hint: T_type) -> T_type:
    """
    Replaces type vars in a type hint with other types.
    """
    if typing_inspect.is_typevar(hint):
        return typing.cast(T_type, typevars.get(hint, hint))

    # Special-case the bare Callable, which raises an error when getting its args
    if hint == typing.Callable:  # type: ignore
        return hint
    if typing_inspect.get_origin(hint) == collections.abc.Callable:
        arg_types, return_type = typing_inspect.get_args(hint)
        return typing.Callable[
            [replace_typevars(typevars, a) for a in arg_types],
            replace_typevars(typevars, return_type), ]

    args = typing_inspect.get_args(hint)
    if not args:
        return hint
    replaced_args = tuple(replace_typevars(typevars, arg) for arg in args)
    return get_origin(hint)[replaced_args]
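For comparison, typing's own aliases can perform a simpler form of this substitution by re-subscripting a parameterized alias (a minimal illustration, not the project's replace_typevars):
from typing import Callable, Dict, List, TypeVar

K = TypeVar("K")
V = TypeVar("V")

print(List[V][int])                # typing.List[int]
print(Dict[K, V][str, int])        # typing.Dict[str, int]
print(Callable[[K], V][int, str])  # typing.Callable[[int], str]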
Example #22
def get_pretty_type_str(object_type) -> str:
    """
    Utility method to check if a type is a subclass of typing.{List,Dict,Set,Tuple}. In that case it returns a
    user-friendly string with the inner item types, such as Dict[str, int].

    :param object_type:
    :return: type.__name__ if type is not a subclass of typing.{List,Dict,Set,Tuple}, otherwise
    type.__name__[list of inner_types.__name__]
    """

    try:
        # DO NOT resolve forward references otherwise this can lead to infinite recursion
        contents_item_type, contents_key_type = _extract_collection_base_type(
            object_type, resolve_fwd_refs=False)
        if isinstance(contents_item_type, tuple):
            return object_type.__name__ + '[' \
                   + ', '.join([get_pretty_type_str(item_type) for item_type in contents_item_type]) + ']'
        else:
            if contents_key_type is not None:
                return object_type.__name__ + '[' + get_pretty_type_str(contents_key_type) + ', ' \
                       + get_pretty_type_str(contents_item_type) + ']'
            elif contents_item_type is not None:
                return object_type.__name__ + '[' + get_pretty_type_str(
                    contents_item_type) + ']'
    except Exception as e:
        pass

    if is_union_type(object_type):
        return 'Union[' + ', '.join([
            get_pretty_type_str(item_type)
            for item_type in get_args(object_type, evaluate=True)
        ]) + ']'
    elif is_typevar(object_type):
        # TypeVars usually do not display their namespace, so str() is compact, and it shows the covariance/contravariance symbol
        return str(object_type)
    else:
        try:
            return object_type.__name__
        except:
            return str(object_type)
Example #23
def _get_generic_types(
    instance: _tp.Any,
    count: _tp.Union[int, _tp.Set[int]],
    *,
    module_from: _tp.Any,
    hint: str = ""
) -> _tp.Tuple[_tp.Type[_tp.Any], ...]:
    types = _typing_inspect.get_args(_typing_inspect.get_generic_type(instance))

    if not types:
        types = _typing_inspect.get_args(_typing_inspect.get_generic_bases(instance)[0])

    globalns = _sys.modules[module_from.__module__].__dict__

    _eval_type = _tp._eval_type  # type: ignore
    types = tuple(_eval_type(i, globalns, None) for i in types)

    if isinstance(count, int):
        count = {count}

    if count != {-1} and len(types) not in count or any(_typing_inspect.is_typevar(i) for i in types):
        raise TypeError(f"{instance.__class__.__name__} generic was not properly parameterized{hint}: {types}")

    return types
Example #24
def is_vague_type(t: typing.Type) -> bool:
    """
    Returns True if the type is a TypeVar, object, or typing.Any.
    """
    return typing_inspect.is_typevar(t) or t == object or t == typing.Any
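A quick usage sketch (the TypeVar is hypothetical):
import typing

T = typing.TypeVar("T")

print(is_vague_type(T))           # True  (TypeVar)
print(is_vague_type(object))      # True
print(is_vague_type(typing.Any))  # True
print(is_vague_type(int))         # False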
Example #25
def decode(obj: Any, hint: Type, hint_args: Any = ()) -> Any:
    ftype = hint
    origin = get_origin(ftype)
    bases = get_generic_bases(ftype)
    targs = get_args(ftype)

    if hint_args and any(is_typevar(t) for t in targs):
        targs = hint_args

    if ftype in PRIMITIVES:
        return obj

    if is_optional_type(ftype):
        if obj is None:
            return None

        real_args = [t for t in targs if t is not type(None)]
        if len(real_args) > 1:
            raise NotImplementedError(f"can't decode union types ({real_args})")
        ftype = real_args[0]
        origin = get_origin(ftype)
    elif is_union_type(ftype):
        raise NotImplementedError(f"can't decode union types ({targs})")

    if is_primitive(ftype):
        return ftype(obj)

    if is_datatype(ftype) or (origin and is_dataclass(origin)):
        if not isinstance(obj, dict):
            raise TypeError(f"invalid data {obj!r} for {ftype}")

        kwargs: Dict[str, Any] = {}

        namespace = sys.modules[ftype.__module__].__dict__
        if origin and is_dataclass(origin):
            type_hints = get_type_hints(origin, namespace)
        else:
            type_hints = get_type_hints(ftype, namespace)
        for fname, ft in type_hints.items():
            key = camelcase(fname.strip("_"))

            if key in obj:
                kwargs[fname] = decode(obj[key], ft, targs)

        return ftype(**kwargs)

    if is_generic_type(ftype):
        while origin is None and bases:
            if len(bases) > 1:  # pragma: nocover
                raise NotImplementedError(f"can't decode multiple bases {ftype}")
            ftype = bases[0]
            origin = get_origin(ftype)
            bases = get_generic_bases(ftype)
            targs = get_args(ftype)

        if origin in (dict, Dict, Mapping):
            if not is_primitive(targs[0]):
                raise NotImplementedError(f"can't decode object keys {ftype}")
            ftype = targs[1]

            if ftype in PRIMITIVES:
                return dict(obj)
            else:
                return {k: decode(v, ftype) for k, v in obj.items()}

        if origin in (tuple, Tuple):
            return tuple(decode(v, ftype) for v, ftype in zip(obj, targs))

        if origin in (set, Set):
            ftype = targs[0]

            if ftype in PRIMITIVES:
                return set(obj)
            else:
                return set(decode(v, ftype) for v in obj)

        if origin in (list, List):
            ftype = targs[0]

            if ftype in PRIMITIVES:
                return list(obj)
            else:
                return [decode(v, ftype) for v in obj]

    raise NotImplementedError(f"failed to decode {obj} as type {hint}")
Example #26
def match_expression(
        wildcards: typing.List[Expression], template: object,
        expr: object) -> typing.Tuple[TypeVarMapping, WildcardMapping]:
    """
    Returns a mapping of wildcards to the objects at that level; raises NoMatch if it does not match.

    A wildcard can match either an expression or a value. If it matches two nodes, they must be equal.
    """

    if template in wildcards:
        # If we are matching against a placeholder and the expression is not resolved to that placeholder, don't match.
        if (isinstance(template, PlaceholderExpression)
                and isinstance(expr, Expression)
                and not typing_inspect.is_typevar(
                    typing_inspect.get_args(
                        typing_inspect.get_generic_type(template))[0])):
            raise NoMatch

        # Match type of wildcard with type of expression
        try:
            return (
                match_values(template, expr),
                UnhashableMapping(Item(typing.cast(Expression, template),
                                       expr)),
            )
        except TypeError:
            raise NoMatch

    if isinstance(expr, Expression):
        if not isinstance(template, Expression):
            raise NoMatch
        # Any typevars in the template that are unbound should be matched with their
        # versions in the expr

        try:
            fn_type_mapping: TypeVarMapping = match_functions(
                template.function, expr.function)
        except TypeError:
            raise NoMatch

        if set(expr.kwargs.keys()) != set(template.kwargs.keys()):
            raise TypeError("Wrong kwargs in match")

        template_args: typing.Iterable[object]
        expr_args: typing.Iterable[object]

        # Process args in the template that can represent any number of args.
        # These are the "IteratedPlaceholder"s
        # Allow one iterated placeholder in the template args
        # For example fn(a, b, [...], *c, d, e, [...])
        # Here `c` should take as many args as it can between the ends,
        # Each of those should be matched against the inner
        iterated_args = [
            arg for arg in template.args
            if isinstance(arg, IteratedPlaceholder)
        ]
        if iterated_args:
            # The template args, minus the iterated placeholder, give the minimum number of values.
            # If there are fewer values than this, raise an error.
            if len(expr.args) < len(template.args) - 1:
                raise TypeError("Wrong number of args in match")
            template_args_ = list(template.args)
            # Only support one iterated arg for now
            # TODO: Support more than one, would require branching
            template_iterated, = iterated_args
            template_index_iterated = list(
                template.args).index(template_iterated)

            # Swap template iterated with inner wildcard
            template_args_[template_index_iterated], = template_iterated.args
            template_args = template_args_

            expr_args = collapse_tuple(
                expr.args,
                template_index_iterated,
                # The number we should preserve on the right is the number of
                # template args after the index
                len(template.args) - template_index_iterated - 1,
            )

        else:
            if len(template.args) != len(expr.args):
                raise TypeError("Wrong number of args in match")
            template_args = template.args
            expr_args = expr.args

        type_mappings, expr_mappings = list(
            zip(
                *(match_expression(wildcards, arg_template, arg_value)
                  for arg_template, arg_value in zip(template_args, expr_args)
                  ),
                *(match_expression(wildcards, template.kwargs[key],
                                   expr.kwargs[key])
                  for key in template.kwargs.keys()),
            )) or ((), ())
        try:
            merged_typevars: TypeVarMapping = merge_typevars(
                fn_type_mapping, *type_mappings)
        except TypeError:
            raise NoMatch
        try:
            return (
                merged_typevars,
                safe_merge(*expr_mappings, dict_constructor=UnhashableMapping),
            )
        except ValueError:
            raise NoMatch
    if template != expr:
        raise NoMatch
    return match_values(template, expr), UnhashableMapping()
Example #27
def match_types(hint: typing.Type, t: typing.Type) -> TypeVarMapping:
    """
    Matches a type hint with a type, returning a mapping of any type vars to their values.
    """
    logger.debug("match_types hint=%s type=%s", hint, t)
    if hint == object:
        hint = typing.Any  # type: ignore
    if t == object:
        t = typing.Any  # type: ignore
    if hint == t:
        return {}

    # If it is an instance of OfType[Type[T]], then we should consider it as T
    if isinstance(t, OfType):
        (of_type, ) = typing_inspect.get_args(get_type(t))
        assert issubclass(of_type, typing.Type)
        (t, ) = typing_inspect.get_args(of_type)
        return match_types(hint, t)

    # If the type is an OfType[T] then we should really just consider it as T
    if issubclass(t, OfType) and not issubclass(hint, OfType):
        (t, ) = typing_inspect.get_args(t)
        return match_types(hint, t)
    if issubclass(hint, OfType) and not issubclass(t, OfType):
        (hint, ) = typing_inspect.get_args(hint)
        return match_types(hint, t)

    # Matching an expanded type is like matching just whatever it represents
    if issubclass(t, ExpandedType):
        (t, ) = typing_inspect.get_args(t)

    if typing_inspect.is_typevar(hint):
        return {hint: t}

    # This happens with a match rule on conversion, e.g. when the value is a TypeVar
    if typing_inspect.is_typevar(t):
        return {}

    # if both are generic sequences, verify they are the same and have the same contents
    if (typing_inspect.is_generic_type(hint)
            and typing_inspect.is_generic_type(t)
            and typing_inspect.get_origin(hint) == collections.abc.Sequence
            and typing_inspect.get_origin(t) == collections.abc.Sequence):
        t_inner = typing_inspect.get_args(t)[0]

        # If t's inner arg is just the default one for Sequence, it hasn't been initialized, so assume
        # it was created from an empty tuple and just return a match
        if t_inner == typing_inspect.get_args(typing.Sequence)[0]:
            return {}
        return match_types(typing_inspect.get_args(hint)[0], t_inner)

    if typing_inspect.is_union_type(hint):
        # If this is a union, iterate through and use the first that is a subclass
        for inner_type in typing_inspect.get_args(hint):
            if issubclass(t, inner_type):
                hint = inner_type
                break
        else:
            raise TypeError(f"Cannot match concrete type {t} with hint {hint}")

    logger.debug("checking if type subclass hint hint=%s type=%s", hint, t)
    if not issubclass(t, hint):
        logger.debug("not subclass")
        raise TypeError(f"Cannot match concrete type {t} with hint {hint}")
    return merge_typevars(*(match_types(inner_hint, inner_t)
                            for inner_hint, inner_t in zip(
                                get_inner_types(hint), get_inner_types(t))))
Example #28
    def get(cls, type_or_hint, *, is_argument: bool = False) -> "TypeChecker":
        # This ensures the validity of the type passed (see typing documentation for info)
        type_or_hint = is_valid_type(type_or_hint, "Invalid type.",
                                     is_argument)

        if type_or_hint is Any:
            return AnyTypeChecker()

        if is_type(type_or_hint):
            return TypeTypeChecker.make(type_or_hint, is_argument)

        if is_literal_type(type_or_hint):
            return LiteralTypeChecker.make(type_or_hint, is_argument)

        if is_generic_type(type_or_hint):
            origin = get_origin(type_or_hint)
            if issubclass(origin, MappingCol):
                return MappingTypeChecker.make(type_or_hint, is_argument)

            if issubclass(origin, Collection):
                return CollectionTypeChecker.make(type_or_hint, is_argument)

            # CONSIDER: how to cater for exhaustible generators?
            if issubclass(origin, Iterable):
                raise NotImplementedError(
                    "No type-checker is setup for iterables that exhaust.")

            return GenericTypeChecker.make(type_or_hint, is_argument)

        if is_tuple_type(type_or_hint):
            return TupleTypeChecker.make(type_or_hint, is_argument)

        if is_callable_type(type_or_hint):
            return CallableTypeChecker.make(type_or_hint, is_argument)

        if isclass(type_or_hint):
            if is_typed_dict(type_or_hint):
                return TypedDictChecker.make(type_or_hint, is_argument)
            return ConcreteTypeChecker.make(type_or_hint, is_argument)

        if is_union_type(type_or_hint):
            return UnionTypeChecker.make(type_or_hint, is_argument)

        if is_typevar(type_or_hint):
            bound_type = get_bound(type_or_hint)
            if bound_type:
                return cls.get(bound_type)
            constraints = get_constraints(type_or_hint)
            if constraints:
                union_type_checkers = tuple(
                    cls.get(type_) for type_ in constraints)
                return UnionTypeChecker(Union.__getitem__(constraints),
                                        union_type_checkers)
            else:
                return AnyTypeChecker()

        if is_new_type(type_or_hint):
            super_type = getattr(type_or_hint, "__supertype__", None)
            if super_type is None:
                raise TypeError(
                    f"No supertype for NewType: {type_or_hint}. This is not allowed."
                )
            return cls.get(super_type)

        if is_forward_ref(type_or_hint):
            return ForwardTypeChecker.make(type_or_hint,
                                           is_argument=is_argument)

        if is_classvar(type_or_hint):
            var_type = get_args(type_or_hint, evaluate=True)[0]
            return cls.get(var_type)

        raise NotImplementedError(
            f"No {TypeChecker.__qualname__} is available for type or hint: '{type_or_hint}'"
        )
Example #29
def get_parsing_fn(t: Type[T]) -> Callable[[Any], T]:
    """Gets a parsing function for the given type or type annotation.

    Args:
        t (Type[T]): A type or type annotation.

    Returns:
        Callable[[Any], T]: A function that will parse a value of the given type
            from the command-line when available, or a no-op function that
            will return the raw value, when a parsing fn cannot be found or
            constructed.
    """
    if t in _parsing_fns:
        logger.debug(f"The type {t} has a dedicated parsing function.")
        return _parsing_fns[t]

    elif t is Any:
        logger.debug(f"parsing an Any type: {t}")
        return no_op

    # TODO: Do we want to support parsing a Dict from command-line?
    # elif is_dict(t):
    #     logger.debug(f"parsing a Dict field: {t}")
    #     args = get_type_arguments(t)
    #     if len(args) != 2:
    #         args = (Any, Any)
    #     return parse_dict(*args)

    # TODO: This would require some sort of 'postprocessing' step to convert a
    # list to a Set or something like that.
    # elif is_set(t):
    #     logger.debug(f"parsing a Set field: {t}")
    #     args = get_type_arguments(t)
    #     if len(args) != 1:
    #         args = (Any,)
    #     return parse_set(args[0])

    elif is_tuple(t):
        logger.debug(f"parsing a Tuple field: {t}")
        args = get_type_arguments(t)
        if is_homogeneous_tuple_type(t):
            if not args:
                args = (str, ...)
            parsing_fn = get_parsing_fn(args[0])
        else:
            parsing_fn = parse_tuple(args)
            parsing_fn.__name__ = str(t)
        return parsing_fn

    elif is_list(t):
        logger.debug(f"parsing a List field: {t}")
        args = get_type_arguments(t)
        assert len(args) == 1
        return parse_list(args[0])

    elif is_union(t):
        logger.debug(f"parsing a Union field: {t}")
        args = get_type_arguments(t)
        return parse_union(*args)

    elif is_enum(t):
        logger.debug(f"Parsing an Enum field of type {t}")
        return parse_enum(t)
    # import typing_inspect as tpi
    # from .serializable import get_dataclass_type_from_forward_ref, Serializable

    if tpi.is_forward_ref(t):
        forward_arg = tpi.get_forward_arg(t)
        for t, fn in _parsing_fns.items():
            if getattr(t, "__name__", str(t)) == forward_arg:
                return fn

    if tpi.is_typevar(t):
        bound = tpi.get_bound(t)
        logger.debug(f"parsing a typevar: {t}, bound type is {bound}.")
        if bound is not None:
            return get_parsing_fn(bound)

    logger.debug(f"Couldn't find a parsing function for type {t}, will try "
                 f"to use the type directly.")
    return t
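The forward-reference and TypeVar lookups near the end of this function rely on typing_inspect; a small standalone illustration (the names are made up):
import typing
import typing_inspect as tpi

ref = typing.ForwardRef("MyConfig")
print(tpi.is_forward_ref(ref))   # True
print(tpi.get_forward_arg(ref))  # 'MyConfig'

C = typing.TypeVar("C", bound=dict)
print(tpi.is_typevar(C))         # True
print(tpi.get_bound(C))          # <class 'dict'>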
Example #30
def is_value_of_type(  # noqa: C901 "too complex"
    # pyre-fixme[2]: Parameter annotation cannot be `Any`.
    value: Any,
    # pyre-fixme[2]: Parameter annotation cannot be `Any`.
    expected_type: Any,
    invariant_check: bool = False,
) -> bool:
    """
    This method attempts to verify a given value is of a given type. If the type is
    not supported, it returns True but throws an exception in tests.

    It is similar to typeguard / enforce pypi modules, but neither of those have
    permissive options for types they do not support.

    Supported types for now:
    - List/Set/Iterable
    - Dict/Mapping
    - base types (str, int, etc)
    - Literal
    - Unions
    - Tuples
    - Concrete Classes
    - ClassVar

    Not supported:
    - Callables, which will likely not be used in XHP anyways
    - Generics, Type Vars (treated as Any)
    - Generators
    - Forward Refs -- use `typing.get_type_hints` to resolve these
    - Type[...]
    """
    if is_classvar(expected_type):
        # `ClassVar` (no subscript) is implicitly `ClassVar[Any]`
        if hasattr(expected_type, "__type__"):  # py36
            expected_type = expected_type.__type__ or Any
        else:  # py37+
            classvar_args = get_args(expected_type)
            expected_type = (classvar_args[0] or Any) if classvar_args else Any

    if is_typevar(expected_type):
        # treat this the same as Any
        # TODO: evaluate bounds
        return True

    expected_origin_type = get_origin(expected_type) or expected_type

    if expected_origin_type == Any:
        return True

    elif is_union_type(expected_type):
        return any(
            is_value_of_type(value, subtype) for subtype in expected_type.__args__
        )

    elif isinstance(expected_origin_type, type(Literal)):
        if hasattr(expected_type, "__values__"):  # py36
            literal_values = expected_type.__values__
        else:  # py37+
            literal_values = get_args(expected_type, evaluate=True)
        return any(value == literal for literal in literal_values)

    elif isinstance(expected_origin_type, ForwardRef):
        # not much we can do here for now, lets just return :(
        return True

    # Handle `Tuple[A, B, C]`.
    # We don't want to include Tuple subclasses, like NamedTuple, because they're
    # unlikely to behave similarly.
    elif expected_origin_type in [Tuple, tuple]:  # py36 uses Tuple, py37+ uses tuple
        if not isinstance(value, tuple):
            return False

        type_args = get_args(expected_type, evaluate=True)
        if len(type_args) == 0:
            # `Tuple` (no subscript) is implicitly `Tuple[Any, ...]`
            return True

        if type_args is None:
            return True

        if len(value) != len(type_args):
            return False
        # TODO: Handle `Tuple[T, ...]` like `Iterable[T]`
        for subvalue, subtype in zip(value, type_args):
            if not is_value_of_type(subvalue, subtype):
                return False
        return True

    elif issubclass(expected_origin_type, Mapping):
        # We're expecting *some* kind of Mapping, but we also want to make sure it's
        # the correct Mapping subtype. That means we want {a: b, c: d} to match Mapping,
        # MutableMapping, and Dict, but we don't want MappingProxyType({a: b, c: d}) to
        # match MutableMapping or Dict.
        if not issubclass(type(value), expected_origin_type):
            return False

        type_args = get_args(expected_type, evaluate=True)
        if len(type_args) == 0:
            # `Mapping` (no subscript) is implicitly `Mapping[Any, Any]`.
            return True

        invariant_check = issubclass(expected_origin_type, MutableMapping)

        for subkey, subvalue in value.items():
            if not is_value_of_type(
                subkey,
                type_args[0],
                # key type is always invariant
                invariant_check=True,
            ):
                return False
            if not is_value_of_type(
                subvalue, type_args[1], invariant_check=invariant_check
            ):
                return False
        return True

    # While this does technically work fine for str and bytes (they are iterables), it's
    # better to use the default isinstance behavior for them.
    #
    # Similarly, tuple subclasses tend to have pretty different behavior, and we should
    # fall back to the default check.
    elif issubclass(expected_origin_type, Iterable) and not issubclass(
        expected_origin_type,
        (str, bytes, tuple),
    ):
        # We know this thing is *some* kind of Iterable, but we want to
        # allow subclasses. That means we want [1,2,3] to match both
        # List[int] and Iterable[int], but we do NOT want that
        # to match Set[int].
        if not issubclass(type(value), expected_origin_type):
            return False

        type_args = get_args(expected_type, evaluate=True)
        if len(type_args) == 0:
            # `Iterable` (no subscript) is implicitly `Iterable[Any]`.
            return True

        # We invariant check if its a mutable sequence
        invariant_check = issubclass(expected_origin_type, MutableSequence)
        return all(
            is_value_of_type(subvalue, type_args[0], invariant_check=invariant_check)
            for subvalue in value
        )

    try:
        if not invariant_check:
            if expected_type is float:
                return isinstance(value, (int, float))
            else:
                return isinstance(value, expected_type)
        return type(value) is expected_type
    except Exception as e:
        raise NotImplementedError(
            f"the value {value!r} was compared to type {expected_type!r} "
            + f"but support for that has not been implemented yet! Exception: {e!r}"
        )