def json_to(type: Type[_V]) -> Callable[[object], _V]:  # pylint: disable=redefined-builtin, too-many-return-statements
    """Return a decoder callable converting a plain JSON value into ``type``.

    Types may supply their own decoder via a ``json_to`` attribute, which
    takes precedence over the built-in dispatch below.
    """
    try:
        return type.json_to  # type: ignore
    except AttributeError:
        pass
    # Optional must be unwrapped before any 'issubclass' check.
    if is_optional_type(type):
        return _json_to_optional(get_args(type)[0])  # type: ignore
    # Parameterized containers also precede 'issubclass'.
    if is_generic_type(type):
        if get_origin(type) == list:
            return _json_to_list(get_args(type)[0])  # type: ignore
        if get_origin(type) == dict:
            return _json_to_dict(get_args(type)[1])  # type: ignore
    # bool is a subclass of int, so it has to be tested first.
    if issubclass(type, bool):
        return _json_to_bool  # type: ignore
    if issubclass(type, int):
        return _json_to_int  # type: ignore
    if issubclass(type, str):
        return _json_to_str  # type: ignore
    if issubclass(type, list):
        raise TypeError('no element type specified for list')
    if issubclass(type, dict):
        raise TypeError('no value type specified for dict')
    raise TypeError('not a JSON value type')
def is_leq_informative_parameterized_right(left, right):
    """Decide whether ``left`` is at most as informative as the
    parameterized type ``right``.
    """
    generic_right = get_origin(right)
    if not is_parameterized(left):
        # A bare parametrical generic can never match a fully
        # parameterized right-hand side.
        if is_parametrical(left):
            return False
        # Plain type: compare against right's origin only.
        return is_leq_informative(left, generic_right)
    if not is_leq_informative(get_origin(left), generic_right):
        return False
    args_left = get_args(left)
    args_right = get_args(right)
    if len(args_left) != len(args_right):
        return False
    # All type arguments must be pairwise at-most-as-informative.
    return all(
        is_leq_informative(a_left, a_right)
        for a_left, a_right in zip(args_left, args_right)
    )
def get_inner_types(t: typing.Type) -> typing.Iterable[typing.Type]:
    """Return the inner (argument) types of ``t``.

    Behaves like ``typing_inspect.get_args`` except for callables, where
    the return type is yielded first, followed by the argument types.
    """
    if t == typing.Callable:  # type: ignore
        # A bare Callable carries no inner types.
        return []
    origin = typing_inspect.get_origin(t)
    if origin == collections.abc.Callable:
        arg_types, return_type = typing_inspect.get_args(t)
        return [return_type, *arg_types]
    return typing_inspect.get_args(t)
def decode_generic_dict(decoder, typ, json_value):
    """Decode a JSON object into a ``Dict[str, V]``.

    Returns ``Unsupported`` when ``typ`` is not a parameterized dict;
    raises ``JsonError`` when the declared key type is not ``str``.
    """
    is_typed_dict = inspect.is_generic_type(typ) and inspect.get_origin(typ) == dict
    if not is_typed_dict:
        return Unsupported
    check_type(dict, json_value)
    key_type, value_type = inspect.get_args(typ)
    if key_type != str:
        # JSON object keys are always strings; any other key type is a
        # modelling error on the caller's side.
        raise JsonError(
            f'Dict key type {key_type} is not supported for JSON deserialization - key should be str'
        )
    decoded = {}
    for key, value in json_value.items():
        decoded[key] = decoder.decode(value_type, value)
    return decoded
def _load(cls: Type[T], res: Response) -> T:
    """Deserialize the JSON body of ``res`` into ``cls``.

    ``Any`` returns the raw payload; ``List[X]`` loads a list of ``X``.
    """
    if cls == Any:
        # No schema to apply; hand back the raw JSON payload.
        return res.json()
    many = typing_inspect.get_origin(cls) == list
    if many:
        # Unwrap List[X] -> X and load with many=True.
        cls, = typing_inspect.get_args(cls)
    schema = get_schema(cls, many=many)
    return cast(T, schema.load(res.json()))
def is_consistent(type1, type2):
    """Return True when ``type1`` and ``type2`` are consistent.

    ``Unknown`` is consistent with everything; two parameterized generics
    are consistent when their origins and all type arguments are pairwise
    consistent; otherwise identity is required.
    """
    # NOTE(review): this guard only raises when type1 is NOT a type while
    # type2 IS one — it looks like missing parentheses around the
    # conjunction.  However, a naive fix (`not (A and B)`) would also
    # reject parameterized generics, which are not `type` instances under
    # new typing — confirm intent before changing.
    if not isinstance(type1, type) and isinstance(type2, type):
        raise ValueError('Both parameters need to be types')
    if type1 is Unknown or type2 is Unknown:
        # Unknown acts as a wildcard.
        return True
    elif is_parameterized(type1) and is_parameterized(type2):
        generic1 = get_origin(type1)
        generic2 = get_origin(type2)
        if not is_consistent(generic1, generic2):
            return False
        type_parameters1 = get_args(type1)
        type_parameters2 = get_args(type2)
        # Same arity, and every pair of arguments consistent.
        return len(type_parameters1) == len(type_parameters2) and all(
            is_consistent(t1, t2)
            for t1, t2 in zip(type_parameters1, type_parameters2)
        )
    else:
        return type1 is type2
def _generate_field_schema(field_name: str, field: Field, schemas: Dict[str, Schema]) -> Tuple[bool, Schema]:
    """Build the OpenAPI Schema for a single field.

    Returns ``(is_optional, field_schema)``.  Nested schemas are
    registered in ``schemas`` and referenced; generic lists of schemas
    become arrays of references; everything else falls back to a
    primitive schema.
    """
    is_optional, annotation = extract_optional_annotation(field.annotation)
    if is_schema(annotation):
        # Nested schema: register it and reference it via allOf.
        field_schema_name = _generate_schema(annotation, schemas)
        field_schema = Schema(all_of=[_make_schema_ref(field_schema_name)])
    elif is_generic_type(annotation):
        origin = get_origin(annotation)
        if origin in _LIST_TYPES:
            arguments = _get_args(annotation)
            if arguments and is_schema(arguments[0]):
                # List of nested schemas -> array of $refs.
                item_schema_name = _generate_schema(arguments[0], schemas)
                field_schema = Schema("array", items=_make_schema_ref(item_schema_name))
            else:
                field_schema = _generate_primitive_schema(annotation)
        elif origin in _DICT_TYPES:
            # TODO: Add support for additionalFields.
            field_schema = _generate_primitive_schema(dict)
        else:  # pragma: no cover
            raise ValueError(
                f"Unsupported type {origin} for field {field.name!r}.")
    else:
        field_schema = _generate_primitive_schema(annotation)
    if field_schema is not None:
        field_schema.description = field.description
    # Differing request/response names imply the field only appears on
    # one side; otherwise honour the explicit *_only flags.
    if field.request_name != field.response_name:
        if field_name == field.request_name:
            field_schema.write_only = True
        else:
            field_schema.read_only = True
    elif field.response_only:
        field_schema.read_only = True
    elif field.request_only:
        field_schema.write_only = True
    # Copy over validator options that map directly to Schema fields.
    for option, value in field.validator_options.items():
        if option in Schema._FIELDS:
            setattr(field_schema, option, value)
    return is_optional, field_schema
def _check_annotation(f_type, f_fullname, f_default):
    """Validate a field's type annotation and infer its default factory.

    Container annotations (tuple/list/set/frozenset/dict) must not carry
    an explicit default; the matching container type is returned as the
    default instead.  Returns the (possibly replaced) default.
    """
    if typing_inspect.is_tuple_type(f_type):
        if f_default is not None:
            raise RuntimeError(f'invalid type annotation on {f_fullname}: '
                               f'default is defined for tuple type')
        f_default = tuple
    elif typing_inspect.is_union_type(f_type):
        # Each union member must independently pass validation.
        for t in typing_inspect.get_args(f_type, evaluate=True):
            _check_annotation(t, f_fullname, f_default)
    elif typing_inspect.is_generic_type(f_type):
        if f_default is not None:
            raise RuntimeError(f'invalid type annotation on {f_fullname}: '
                               f'default is defined for container type '
                               f'{f_type!r}')
        ot = typing_inspect.get_origin(f_type)
        if ot is None:
            raise RuntimeError(
                f'cannot find origin of a generic type {f_type}')
        # Map the generic origin to its concrete container factory.
        if ot in (list, List, collections.abc.Sequence):
            f_default = list
        elif ot in (set, Set):
            f_default = set
        elif ot in (frozenset, FrozenSet):
            f_default = frozenset
        elif ot in (dict, Dict):
            f_default = dict
        else:
            raise RuntimeError(f'invalid type annotation on {f_fullname}: '
                               f'{f_type!r} is not supported')
    elif f_type is not None:
        if f_type is Any:
            # Any is treated as the most general concrete type.
            f_type = object
        if not isinstance(f_type, type):
            raise RuntimeError(f'invalid type annotation on {f_fullname}: '
                               f'{f_type!r} is not a type')
        if typeutils.is_container_type(f_type):
            if f_default is not None:
                raise RuntimeError(f'invalid type annotation on {f_fullname}: '
                                   f'default is defined for container type '
                                   f'{f_type!r}')
            f_default = f_type
    return f_default
def _init_parametric_user(cls) -> None: """Initialize an indirect descendant of ParametricType.""" # For ParametricType grandchildren we have to deal with possible # TypeVar remapping and generally check for type sanity. ob = getattr(cls, '__orig_bases__', ()) for b in ob: if (isinstance(b, type) and issubclass(b, ParametricType) and b is not ParametricType): raise TypeError( f'{cls.__name__}: missing one or more type arguments for' f' base {b.__name__!r}') if not typing_inspect.is_generic_type(b): continue org = typing_inspect.get_origin(b) if not isinstance(org, type): continue if not issubclass(org, ParametricType): continue base_params = getattr(org, '__parameters__', ()) args = typing_inspect.get_args(b) expected = len(base_params) if len(args) != expected: raise TypeError( f'{b.__name__} expects {expected} type arguments' f' got {len(args)}') base_map = dict(cls._type_param_map) subclass_map = {} for i, arg in enumerate(args): if not typing_inspect.is_typevar(arg): raise TypeError(f'{b.__name__} expects all arguments to be' f' TypeVars') base_typevar = base_params[i] attr = base_map.get(base_typevar) if attr is not None: subclass_map[arg] = attr if len(subclass_map) != len(base_map): raise TypeError( f'{cls.__name__}: missing one or more type arguments for' f' base {org.__name__!r}') cls._type_param_map = subclass_map
def get_argument_to_typevar(cls: Type, generic_base_class: Type, typevar: TypeVar): """ Gets the argument given to a type variable parameterising a generic base class in a particular sub-class of that base. :param cls: The sub-class specifying the argument. :param generic_base_class: The generic base-class specifying the type variable. :param typevar: The type variable to get the argument for. :return: The argument to the type variable. """ # Check the arguments typevar_index: int = check_args(cls, generic_base_class, typevar) # Get the decendency path from derived to base class bases = [cls] while bases[-1] is not generic_base_class: # Try and find a generic base for base in typing_inspect.get_generic_bases(bases[-1]): origin = typing_inspect.get_origin(base) if issubclass(origin, generic_base_class): bases.append(base) bases.append(origin) break # Search the dependency path for the type variable's final argument arg = None while len(bases) > 1: # Get the arguments to the generic base class args = typing_inspect.get_args(bases[-2]) # If no arguments are given, the signature stays the same if len(args) == 0: bases = bases[:-2] + bases[-1:] continue # Get the argument to this typevar arg = args[typevar_index] # If it's another type variable, keep looking for the argument to this # type variable if typing_inspect.is_typevar(arg): parameters = typing_inspect.get_parameters(bases[-2]) typevar_index = parameters.index(arg) bases = bases[:-2] continue # Otherwise return the argument to the type variable return arg return arg
def __init__(
        self,
        dependency: Union[T, str],
        *,
        namespace: str = None,
        group: str = None,
        exclude_groups: Sequence[str] = None,
        lazy: bool = False,
):
    """Declare an autowired dependency.

    ``Optional[T]`` marks the dependency optional; ``List[T]`` (or a raw
    one-element sequence) requests all matching instances.  Any other
    Union form is rejected.
    """
    optional = False
    multiple = False
    if typing_inspect.is_optional_type(dependency):
        # Unwrap Optional[T] -> T and remember it was optional.
        dependency = typing_inspect.get_args(dependency, evaluate=True)[0]
        optional = True
    elif typing_inspect.is_union_type(dependency):
        raise TypeError(
            f"Autowired Union can only be used to indicate"
            f" optional autowiring in the forms 'Union[T, None]' or"
            f" 'Optional[T]'")
    if is_sequence(typing_inspect.get_origin(dependency) or dependency):
        subscripted_types = typing_inspect.get_args(dependency, evaluate=True)
        # An unsubscripted Sequence carries typing's default args only.
        if subscripted_types == typing_inspect.get_args(Sequence):
            raise TypeError(f"Type not defined for Autowired list")
        subscripted_type = subscripted_types[0]
        if typing_inspect.is_optional_type(subscripted_type):
            raise TypeError(
                f"List of Optional is invalid for autowiring. Use"
                f" 'Autowired(Optional[List[...]])' instead.")
        elif typing_inspect.is_union_type(subscripted_type):
            raise TypeError(
                f"Only one type should be defined for Autowired list")
        dependency = subscripted_type
        multiple = True
    elif is_raw_sequence(dependency):
        # e.g. Autowired([SomeType]) — a literal one-element sequence.
        if len(dependency) != 1:
            raise TypeError(
                f"Only one type should be defined for Autowired"
                f" {dependency.__class__.__qualname__}")
        dependency = dependency[0]
        multiple = True
    self.optional = optional
    self.multiple = multiple
    self.dependency = sanitize_if_forward_ref(dependency)
    self.namespace = namespace
    self.group = group
    self.exclude_groups = exclude_groups
    self.lazy = lazy
def issubclass(cls, classinfo):
    """issubclass replacement aware of dataclasses and typing generics.

    Falls back to the original builtin for plain classes.
    """
    if classinfo is dataclass:
        # "Is cls a dataclass?" — cls must itself be a class.
        return original_isinstance(cls, type) and is_dataclass(cls)
    if cls is dataclass:
        # Treat the dataclass marker like plain object.
        return issubclass(object, classinfo)
    if original_isinstance(cls, GenericMeta):
        # Generic subclass check: compare against the origin's bases.
        origin = get_origin(cls)
        bases = get_generic_bases(origin) or (origin, )
        return classinfo in bases
    classinfo_origin = get_origin(classinfo)
    if classinfo_origin is None and original_isinstance(
            classinfo, GenericMeta):
        classinfo_origin = classinfo
    # Delegate to a registered handler for this generic origin, if any.
    if classinfo_origin in issubclass_generic_funcs:
        return issubclass_generic_funcs[classinfo_origin](cls, classinfo)
    if not original_isinstance(cls, type):
        return False
    return original_issubclass(cls, classinfo)
def get_openapi_type(self, type_annotation: Type) -> Schema:
    """Convert a Python type annotation into an OpenAPI ``Schema``.

    Dispatch order: unions, pydantic models, enums, mappings, sequences,
    then primitive lookup (defaulting to a plain object schema).
    """
    type_origin = typing_inspect.get_origin(type_annotation)
    if typing_inspect.is_union_type(type_annotation):
        return self._handle_union_type(type_annotation)
    if inspect.isclass(type_annotation):
        if issubclass(type_annotation, BaseModel):
            return self._handle_pydantic_model(type_annotation)
        if issubclass(type_annotation, Enum):
            return self._handle_enums(type_annotation)
    if inspect.isclass(type_origin):
        if issubclass(type_origin, typing.Mapping):
            return self._handle_dictionary(type_annotation)
        if issubclass(type_origin, typing.Sequence):
            return self._handle_list(type_annotation)
    # Fallback to primitive types
    return self.python_to_openapi_types.get(type_annotation, Schema(type="object"))
def _parse_val(val: str, ftype: type) -> FieldValue:
    """Convert the string ``val`` to a value of the field type ``ftype``.

    Raises TypeError for types that cannot be handled.
    """
    if type(ftype) == type:
        # Concrete classes; bool must come before int since bool is an
        # int subclass.
        if issubclass(ftype, str):
            return val
        if issubclass(ftype, bool):
            return _parse_bool(val)
        if issubclass(ftype, int):
            return int(val, 10)
        if issubclass(ftype, float):
            return float(val)
        if issubclass(ftype, pl.Path):
            return pl.Path(val)
        raise TypeError(ftype)
    origin = typing_inspect.get_origin(ftype)
    if origin in (list, typ.List):
        return _parse_list_val(val, ftype)
    if origin in (set, typ.Set):
        return set(_parse_list_val(val, ftype))
    if callable(ftype):
        # Last resort: let the type act as its own parser.
        return ftype(val)
    raise TypeError(ftype)
def is_parameterized(type_):
    """Return True when ``type_`` is a generic carrying type arguments."""
    predicates = (is_generic_type, is_callable_type, is_tuple_type, is_union_type)
    if not any(p(type_) for p in predicates):
        return False
    if NEW_TYPING:
        # "Special" aliases, bare unions and protocols do not count.
        if getattr(type_, '_special', False):
            return False
        if is_union_type(type_) and not hasattr(type_, '__args__'):
            return False
        if getattr(type_, '_is_protocol', False):
            return False
        return True
    # Old typing: a parameterized alias differs from its own origin.
    return get_origin(type_) is not type_
def is_leq_informative(left, right):
    """Partial order on types: is ``left`` at most as informative as ``right``?

    ``Unknown`` is the bottom element and ``Any`` the top; unions and
    parameterized generics are handled structurally, everything else by
    ``type_order`` or plain ``issubclass``.
    """
    if not (is_type(left) and is_type(right)):
        raise ValueError('Both parameters need to be types')
    if get_origin(left) is Generic or get_origin(right) is Generic:
        raise ValueError("typing Generic not supported")
    if left is right:
        return True
    if left is Unknown:
        return True
    if right is Unknown:
        return False
    if right is Any:
        return True
    if left is Any:
        return False
    if is_union_type(right):
        return is_leq_informative_union(left, right)
    if is_union_type(left):
        return False
    if is_parameterized(right):
        return is_leq_informative_parameterized_right(left, right)
    if is_parametrical(right):
        # Compare left's origin (if parameterized) against the bare generic.
        if is_parameterized(left):
            left = get_origin(left)
        return issubclass(left, right)
    if is_parametrical(left) or is_parameterized(left):
        return False
    if left in type_order and right in type_order[left]:
        return True
    return issubclass(left, right)
def _build_return_value_wrapper(self, url_method_properties: MethodProperties) -> Optional[Dict[str, MediaType]]:
    """Build the OpenAPI response content for a handler method.

    Returns None when the handler declares no return annotation;
    otherwise a JSON media-type schema, with special handling for
    ``ReturnValue[...]`` envelopes.
    """
    return_type = inspect.signature(url_method_properties.function).return_annotation
    if return_type is None or return_type == inspect.Signature.empty:
        return None
    return_properties: Optional[Dict[str, Schema]] = None
    # Precedence note: `A or B and C` parses as `A or (B and C)`.
    if return_type == ReturnValue or is_generic_type(return_type) and get_origin(return_type) == ReturnValue:
        # Dealing with the special case of ReturnValue[...]
        links_type = self.type_converter.get_openapi_type(Dict[str, str])
        links_type.title = "Links"
        links_type.nullable = True
        warnings_type = self.type_converter.get_openapi_type(List[str])
        warnings_type.title = "Warnings"
        return_properties = {
            "links": links_type,
            "metadata": Schema(
                title="Metadata",
                nullable=True,
                type="object",
                properties={
                    "warnings": warnings_type,
                },
            ),
        }
        type_args = get_args(return_type, evaluate=True)
        if not type_args or len(type_args) != 1:
            raise RuntimeError(
                "ReturnValue definition should take one type Argument, e.g. ReturnValue[None]. "
                f"Got this instead: {type_args}"
            )
        if not url_method_properties.envelope:
            raise RuntimeError("Methods returning a ReturnValue object should always have an envelope")
        # ReturnValue[None] means no payload beyond links/metadata.
        if type_args[0] != NoneType:
            return_properties[url_method_properties.envelope_key] = self.type_converter.get_openapi_type(type_args[0])
    else:
        openapi_return_type = self.type_converter.get_openapi_type(return_type)
        if url_method_properties.envelope:
            return_properties = {url_method_properties.envelope_key: openapi_return_type}
    return {"application/json": MediaType(schema=Schema(type="object", properties=return_properties))}
def _generate_primitive_schema(annotation: Any) -> Optional[Schema]:
    """Build a Schema for a primitive or list annotation.

    Unknown annotations fall back to a "string" schema.
    """
    try:
        arguments = _PRIMITIVE_ANNOTATION_MAP[annotation]
    except KeyError:
        origin = get_origin(annotation)
        if origin in _LIST_TYPES:
            args = get_args(annotation)
            if not args or is_typevar(args[0]):
                # Untyped list items accept any value.
                return Schema("array", items=_ANY_VALUE)
            return Schema("array", items=_generate_primitive_schema(args[0]))
        # TODO: Add support for additionalFields.
        return Schema("string")
    return Schema(*arguments)
def _get_type_name(t: Type) -> str:
    """Render a human-readable name for ``t``, including generic args."""
    if hasattr(t, "__name__"):
        return t.__name__
    origin = get_origin(t)
    if origin:
        name = MappingException._get_type_name(origin)
    elif is_forward_ref(t):
        name = get_forward_arg(t)
    else:
        # typing aliases such as List/Dict expose _name instead.
        name = t._name
    args = [MappingException._get_type_name(a) for a in get_args(t)]
    if args:
        return f"{name}[{', '.join(args)}]"
    return name
def _get_type_from_hint(hint):
    """Resolve a typing hint into a ``_Type`` descriptor.

    Sequence-like hints (List/Iterable/Sequence) yield their element
    type with a ``list`` container.  ``Optional[T]`` — spelled either
    ``Union[T, None]`` or ``Union[None, T]`` — unwraps to ``T``.
    Anything else is returned as-is with no container.
    """
    if ti.get_origin(hint) in [
            # Py<3.7.
            typing.List, typing.Iterable, typing.Sequence,
            # Py>=3.7
            list, collections_abc.Iterable, collections_abc.Sequence]:
        [type_] = ti.get_args(hint)
        return _Type(type_, list)
    elif ti.is_union_type(hint):
        # For Union[type, NoneType] (in either argument order), just use
        # type.  The original code only recognised NoneType in the second
        # position, so Union[None, T] fell through unstripped.
        args = ti.get_args(hint)
        if len(args) == 2 and type(None) in args:
            type_ = args[0] if args[1] is type(None) else args[1]
            return _Type(type_, None)
    return _Type(hint, None)
def isinstance(o, t):
    """isinstance replacement aware of dataclasses and typed Dict generics."""
    if t is dataclass:
        # "Is o a dataclass instance-factory?" — o must be a class.
        return original_isinstance(o, type) and is_dataclass(o)
    if original_isinstance(t, GenericMeta):
        if t is Dict:
            return original_isinstance(o, dict)
        if get_origin(t) in (dict, Dict):
            if not original_isinstance(o, dict):
                return False
            key_type, value_type = get_args(t)
            # Every entry must match both the key and the value type.
            return all(
                isinstance(key, key_type) and isinstance(value, value_type)
                for key, value in o.items()
            )
    return original_isinstance(o, t)
def test_origin(self):
    """get_origin(): plain classes give None, aliases give their origin."""
    T = TypeVar('T')
    self.assertEqual(get_origin(int), None)
    if WITH_CLASSVAR:
        # ClassVar is not a generic alias; it has no origin.
        self.assertEqual(get_origin(ClassVar[int]), None)
    self.assertEqual(get_origin(Generic), Generic)
    self.assertEqual(get_origin(Generic[T]), Generic)
    # Cannot use assertEqual on Py3.5.2.
    self.assertIs(get_origin(Literal[42]), Literal)
    if PY39:
        # PEP 585 builtin generics.
        self.assertEqual(get_origin(list[int]), list)
    if GENERIC_TUPLE_PARAMETRIZABLE:
        tp = List[Tuple[T, T]][int]
        # New typing normalizes origins to the builtin class.
        self.assertEqual(get_origin(tp), list if NEW_TYPING else List)
def is_list_type(tp) -> bool:
    """
    Test if the type is a generic list type, including subclasses excluding
    non-generic classes.
    Examples::

        is_list_type(int) == False
        is_list_type(list) == False
        is_list_type(List) == True
        is_list_type(List[str, int]) == True

        class MyClass(List[str]):
            ...

        is_list_type(MyClass) == True
    """
    if not is_generic_type(tp):
        return False
    origin = get_origin(tp)
    # Unparameterized generics have no origin; fall back to tp itself.
    return issubclass(origin if origin is not None else tp, List)
def build_schema(cls: typing.Type, *, is_nested: bool = False, many=None) -> Schema:
    """Build a schema for ``cls``; a ``List[X]`` annotation implies many."""
    # TODO: is_nested
    if many is None:
        origin = typing_inspect.get_origin(cls)
        if origin == list:
            # Unwrap List[X] -> X and load as a collection.
            cls, = typing_inspect.get_args(cls)
            many = True
        elif origin is not None:
            # Any other generic container is unsupported.
            raise TypeError(cls)
    return schema(cls, many=bool(many))
def _init_parametric_base(cls) -> None: """Initialize a direct subclass of ParametricType""" # Direct subclasses of ParametricType must declare # ClassVar attributes corresponding to the Generic type vars. # For example: # class P(ParametricType, Generic[T, V]): # t: ClassVar[Type[T]] # v: ClassVar[Type[V]] params = getattr(cls, '__parameters__', None) if not params: raise TypeError(f'{cls} must be declared as Generic') mod = sys.modules[cls.__module__] annos = get_type_hints(cls, mod.__dict__) param_map = {} for attr, t in annos.items(): if not typing_inspect.is_classvar(t): continue args = typing_inspect.get_args(t) # ClassVar constructor should have the check, but be extra safe. assert len(args) == 1 arg = args[0] if typing_inspect.get_origin(arg) != type: continue arg_args = typing_inspect.get_args(arg) # Likewise, rely on Type checking its stuff in the constructor assert len(arg_args) == 1 if not typing_inspect.is_typevar(arg_args[0]): continue if arg_args[0] in params: param_map[arg_args[0]] = attr for param in params: if param not in param_map: raise TypeError(f'{cls.__name__}: missing ClassVar for' f' generic parameter {param}') cls._type_param_map = param_map
def dataclass_field_types(cls, require_bound=False):
    """Yield ``(field, resolved_type)`` pairs for a dataclass.

    For parameterized generic dataclasses the origin's type hints are
    resolved through the supplied type arguments.
    """
    if not hasattr(cls, "__parameters__"):
        # Plain (non-generic) dataclass: hints apply directly.
        hints = get_type_hints(cls)
        return ((fld, hints[fld.name]) for fld in fields(cls))
    if require_bound and cls.__parameters__:
        raise TypeError("Cannot find types of unbound generic {}".format(cls))
    origin = get_origin(cls)
    # Map each of the origin's TypeVars to its concrete argument.
    substitution = dict(zip(origin.__parameters__, get_args(cls)))
    hints = get_type_hints(origin)
    return (
        (fld, bind(substitution, hints[fld.name]))
        for fld in fields(origin)
    )
def replace_type_variable(type_, type_hint, type_var=None):
    """Substitute ``type_var`` with ``type_`` throughout ``type_hint``.

    Parameterized generics are rebuilt with substituted arguments;
    iterables are mapped element-wise; anything else is returned as-is.
    """
    if isinstance(type_hint, TypeVar) and type_hint == type_var:
        return type_
    if is_parameterized(type_hint):
        # Recurse into the argument tuple, then rebuild the alias.
        substituted = tuple(
            replace_type_variable(type_, get_args(type_hint), type_var=type_var)
        )
        return replace_type_variable_fix_python36_37(
            type_hint, get_origin(type_hint), substituted
        )
    if isinstance(type_hint, Iterable):
        return [
            replace_type_variable(type_, arg, type_var=type_var)
            for arg in type_hint
        ]
    return type_hint
def kind_to_py(kind):
    """Map a schema kind to ``(name, python-type expression, convert?)``.

    ``None``/``Any`` kinds need no conversion; basic and mapped types use
    ``type_mapping``; everything else becomes a ``(dict, Name)`` pair.
    """
    if kind is None or kind is typing.Any:
        return 'None', '', False
    if typing_inspect.is_generic_type(kind):
        name = typing_inspect.get_origin(kind).__name__
    else:
        name = kind.__name__
    if kind in basic_types or type(kind) in basic_types:
        return name, type_mapping.get(name) or name, True
    if name in type_mapping:
        return name, type_mapping[name], True
    # TypeVar-ish names carry a leading '~'; strip it for display.
    suffix = name.lstrip("~")
    return suffix, "(dict, {})".format(suffix), True
def get_json_lines(annotation: ResourceAnnotation, field: str, route: str, request: bool = False) -> List:
    """Generate documentation lines for the given annotation.

    This only documents schemas of type "object", or type "list" where
    each "item" is an object.  Other types are ignored (but a warning is
    logged).

    :param doctor.resource.ResourceAnnotation annotation:
        Annotation object for the associated handler method.
    :param str field: Sphinx field type to use (e.g. '<json').
    :param str route: The route the annotation is attached to.
    :param bool request: Whether the resource annotation is for the
        request or not.
    :returns: list of strings, one for each line.
    """
    url_params = URL_PARAMS_RE.findall(route)
    if not request:
        return_type = annotation.logic._doctor_signature.return_annotation
        # Check if our return annotation is a Response that supplied a
        # type we can use to document.  If so, use that type for api docs.
        # e.g. def logic() -> Response[MyType]
        if get_origin(return_type) == Response:
            if return_type.__args__ is not None:
                return_type = return_type.__args__[0]
        if issubclass(return_type, Array):
            if issubclass(return_type.items, Object):
                properties = return_type.items.properties
                # Arrays of objects get the 'arr' field suffix.
                field += 'arr'
            else:
                # Array of non-objects: nothing to document.
                return []
        elif issubclass(return_type, Object):
            properties = return_type.properties
        else:
            # Scalar return types are not documented here.
            return []
    else:
        # If we defined a req_obj_type for the logic, use that type's
        # properties instead of the function signature.
        if annotation.logic._doctor_req_obj_type:
            properties = annotation.logic._doctor_req_obj_type.properties
        else:
            parameters = annotation.annotated_parameters
            properties = {k: p.annotation for k, p in parameters.items()}
    return get_json_object_lines(annotation, properties, field, url_params,
                                 request)
def _lookup_converter(self, annotation: Type[Any]) -> 'Callable[[Any, Context, str], Any]':
    """
    Looks up a converter for the specified annotation.

    Parameterized generics are matched by their origin; unannotated
    parameters and unknown annotations pass the input through unchanged.
    """
    origin = typing_inspect.get_origin(annotation)
    if origin is not None:
        # Match e.g. List[int] by its origin class.
        annotation = origin
    if annotation in self._converters:
        return self._converters[annotation]
    if annotation is inspect.Parameter.empty:
        # No annotation: identity conversion.
        return lambda ann, ctx, i: i
    if callable(annotation):
        # str, int, etc. can act as their own converters.
        return annotation
    return lambda ann, ctx, i: i